From 25fc82c14a723cfe11dabca8f14a93f67d65e458 Mon Sep 17 00:00:00 2001 From: Bonchellon Date: Wed, 28 Jan 2026 00:55:06 +0300 Subject: [PATCH] 123 --- node_modules/.bin/color-support | 16 + node_modules/.bin/color-support.cmd | 17 + node_modules/.bin/color-support.ps1 | 28 + node_modules/.bin/mkdirp | 16 + node_modules/.bin/mkdirp.cmd | 17 + node_modules/.bin/mkdirp.ps1 | 28 + node_modules/.bin/node-pre-gyp | 16 + node_modules/.bin/node-pre-gyp.cmd | 17 + node_modules/.bin/node-pre-gyp.ps1 | 28 + node_modules/.bin/nopt | 16 + node_modules/.bin/nopt.cmd | 17 + node_modules/.bin/nopt.ps1 | 28 + node_modules/.bin/rimraf | 16 + node_modules/.bin/rimraf.cmd | 17 + node_modules/.bin/rimraf.ps1 | 28 + node_modules/.bin/semver | 16 + node_modules/.bin/semver.cmd | 17 + node_modules/.bin/semver.ps1 | 28 + node_modules/.package-lock.json | 774 ++ .../node-pre-gyp/.github/workflows/codeql.yml | 74 + .../@mapbox/node-pre-gyp/CHANGELOG.md | 510 ++ node_modules/@mapbox/node-pre-gyp/LICENSE | 27 + node_modules/@mapbox/node-pre-gyp/README.md | 742 ++ .../@mapbox/node-pre-gyp/bin/node-pre-gyp | 4 + .../@mapbox/node-pre-gyp/bin/node-pre-gyp.cmd | 2 + .../@mapbox/node-pre-gyp/contributing.md | 10 + .../@mapbox/node-pre-gyp/lib/build.js | 51 + .../@mapbox/node-pre-gyp/lib/clean.js | 31 + .../@mapbox/node-pre-gyp/lib/configure.js | 52 + node_modules/@mapbox/node-pre-gyp/lib/info.js | 38 + .../@mapbox/node-pre-gyp/lib/install.js | 235 + node_modules/@mapbox/node-pre-gyp/lib/main.js | 125 + .../@mapbox/node-pre-gyp/lib/node-pre-gyp.js | 309 + .../@mapbox/node-pre-gyp/lib/package.js | 73 + .../@mapbox/node-pre-gyp/lib/pre-binding.js | 34 + .../@mapbox/node-pre-gyp/lib/publish.js | 81 + .../@mapbox/node-pre-gyp/lib/rebuild.js | 20 + .../@mapbox/node-pre-gyp/lib/reinstall.js | 19 + .../@mapbox/node-pre-gyp/lib/reveal.js | 32 + .../@mapbox/node-pre-gyp/lib/testbinary.js | 79 + .../@mapbox/node-pre-gyp/lib/testpackage.js | 53 + .../@mapbox/node-pre-gyp/lib/unpublish.js | 41 + 
.../node-pre-gyp/lib/util/abi_crosswalk.json | 2602 +++++++ .../@mapbox/node-pre-gyp/lib/util/compile.js | 93 + .../node-pre-gyp/lib/util/handle_gyp_opts.js | 102 + .../@mapbox/node-pre-gyp/lib/util/napi.js | 205 + .../lib/util/nw-pre-gyp/index.html | 26 + .../lib/util/nw-pre-gyp/package.json | 9 + .../@mapbox/node-pre-gyp/lib/util/s3_setup.js | 163 + .../node-pre-gyp/lib/util/versioning.js | 335 + .../@mapbox/node-pre-gyp/package.json | 62 + node_modules/@redis/bloom/README.md | 14 + .../@redis/bloom/dist/commands/bloom/ADD.d.ts | 3 + .../@redis/bloom/dist/commands/bloom/ADD.js | 10 + .../bloom/dist/commands/bloom/CARD.d.ts | 4 + .../@redis/bloom/dist/commands/bloom/CARD.js | 9 + .../bloom/dist/commands/bloom/EXISTS.d.ts | 4 + .../bloom/dist/commands/bloom/EXISTS.js | 11 + .../bloom/dist/commands/bloom/INFO.d.ts | 23 + .../@redis/bloom/dist/commands/bloom/INFO.js | 19 + .../bloom/dist/commands/bloom/INSERT.d.ts | 11 + .../bloom/dist/commands/bloom/INSERT.js | 28 + .../bloom/dist/commands/bloom/LOADCHUNK.d.ts | 4 + .../bloom/dist/commands/bloom/LOADCHUNK.js | 8 + .../bloom/dist/commands/bloom/MADD.d.ts | 3 + .../@redis/bloom/dist/commands/bloom/MADD.js | 10 + .../bloom/dist/commands/bloom/MEXISTS.d.ts | 4 + .../bloom/dist/commands/bloom/MEXISTS.js | 11 + .../bloom/dist/commands/bloom/RESERVE.d.ts | 8 + .../bloom/dist/commands/bloom/RESERVE.js | 15 + .../bloom/dist/commands/bloom/SCANDUMP.d.ts | 13 + .../bloom/dist/commands/bloom/SCANDUMP.js | 16 + .../bloom/dist/commands/bloom/index.d.ts | 33 + .../@redis/bloom/dist/commands/bloom/index.js | 34 + .../commands/count-min-sketch/INCRBY.d.ts | 8 + .../dist/commands/count-min-sketch/INCRBY.js | 20 + .../dist/commands/count-min-sketch/INFO.d.ts | 17 + .../dist/commands/count-min-sketch/INFO.js | 17 + .../commands/count-min-sketch/INITBYDIM.d.ts | 3 + .../commands/count-min-sketch/INITBYDIM.js | 8 + .../commands/count-min-sketch/INITBYPROB.d.ts | 3 + .../commands/count-min-sketch/INITBYPROB.js | 8 + 
.../dist/commands/count-min-sketch/MERGE.d.ts | 9 + .../dist/commands/count-min-sketch/MERGE.js | 28 + .../dist/commands/count-min-sketch/QUERY.d.ts | 5 + .../dist/commands/count-min-sketch/QUERY.js | 10 + .../dist/commands/count-min-sketch/index.d.ts | 21 + .../dist/commands/count-min-sketch/index.js | 22 + .../bloom/dist/commands/cuckoo/ADD.d.ts | 3 + .../@redis/bloom/dist/commands/cuckoo/ADD.js | 10 + .../bloom/dist/commands/cuckoo/ADDNX.d.ts | 3 + .../bloom/dist/commands/cuckoo/ADDNX.js | 10 + .../bloom/dist/commands/cuckoo/COUNT.d.ts | 3 + .../bloom/dist/commands/cuckoo/COUNT.js | 8 + .../bloom/dist/commands/cuckoo/DEL.d.ts | 3 + .../@redis/bloom/dist/commands/cuckoo/DEL.js | 10 + .../bloom/dist/commands/cuckoo/EXISTS.d.ts | 4 + .../bloom/dist/commands/cuckoo/EXISTS.js | 11 + .../bloom/dist/commands/cuckoo/INFO.d.ts | 32 + .../@redis/bloom/dist/commands/cuckoo/INFO.js | 22 + .../bloom/dist/commands/cuckoo/INSERT.d.ts | 5 + .../bloom/dist/commands/cuckoo/INSERT.js | 11 + .../bloom/dist/commands/cuckoo/INSERTNX.d.ts | 5 + .../bloom/dist/commands/cuckoo/INSERTNX.js | 11 + .../bloom/dist/commands/cuckoo/LOADCHUNK.d.ts | 4 + .../bloom/dist/commands/cuckoo/LOADCHUNK.js | 8 + .../bloom/dist/commands/cuckoo/RESERVE.d.ts | 9 + .../bloom/dist/commands/cuckoo/RESERVE.js | 18 + .../bloom/dist/commands/cuckoo/SCANDUMP.d.ts | 12 + .../bloom/dist/commands/cuckoo/SCANDUMP.js | 15 + .../bloom/dist/commands/cuckoo/index.d.ts | 42 + .../bloom/dist/commands/cuckoo/index.js | 51 + .../@redis/bloom/dist/commands/index.d.ts | 111 + .../@redis/bloom/dist/commands/index.js | 14 + .../bloom/dist/commands/t-digest/ADD.d.ts | 4 + .../bloom/dist/commands/t-digest/ADD.js | 12 + .../bloom/dist/commands/t-digest/BYRANK.d.ts | 5 + .../bloom/dist/commands/t-digest/BYRANK.js | 15 + .../dist/commands/t-digest/BYREVRANK.d.ts | 5 + .../bloom/dist/commands/t-digest/BYREVRANK.js | 15 + .../bloom/dist/commands/t-digest/CDF.d.ts | 5 + .../bloom/dist/commands/t-digest/CDF.js | 15 + 
.../bloom/dist/commands/t-digest/CREATE.d.ts | 5 + .../bloom/dist/commands/t-digest/CREATE.js | 9 + .../bloom/dist/commands/t-digest/INFO.d.ts | 31 + .../bloom/dist/commands/t-digest/INFO.js | 24 + .../bloom/dist/commands/t-digest/MAX.d.ts | 5 + .../bloom/dist/commands/t-digest/MAX.js | 14 + .../bloom/dist/commands/t-digest/MERGE.d.ts | 9 + .../bloom/dist/commands/t-digest/MERGE.js | 15 + .../bloom/dist/commands/t-digest/MIN.d.ts | 5 + .../bloom/dist/commands/t-digest/MIN.js | 14 + .../dist/commands/t-digest/QUANTILE.d.ts | 5 + .../bloom/dist/commands/t-digest/QUANTILE.js | 18 + .../bloom/dist/commands/t-digest/RANK.d.ts | 5 + .../bloom/dist/commands/t-digest/RANK.js | 13 + .../bloom/dist/commands/t-digest/RESET.d.ts | 4 + .../bloom/dist/commands/t-digest/RESET.js | 8 + .../bloom/dist/commands/t-digest/REVRANK.d.ts | 5 + .../bloom/dist/commands/t-digest/REVRANK.js | 13 + .../dist/commands/t-digest/TRIMMED_MEAN.d.ts | 5 + .../dist/commands/t-digest/TRIMMED_MEAN.js | 16 + .../bloom/dist/commands/t-digest/index.d.ts | 52 + .../bloom/dist/commands/t-digest/index.js | 71 + .../@redis/bloom/dist/commands/top-k/ADD.d.ts | 4 + .../@redis/bloom/dist/commands/top-k/ADD.js | 9 + .../bloom/dist/commands/top-k/COUNT.d.ts | 5 + .../@redis/bloom/dist/commands/top-k/COUNT.js | 10 + .../bloom/dist/commands/top-k/INCRBY.d.ts | 8 + .../bloom/dist/commands/top-k/INCRBY.js | 20 + .../bloom/dist/commands/top-k/INFO.d.ts | 20 + .../@redis/bloom/dist/commands/top-k/INFO.js | 18 + .../bloom/dist/commands/top-k/LIST.d.ts | 4 + .../@redis/bloom/dist/commands/top-k/LIST.js | 9 + .../dist/commands/top-k/LIST_WITHCOUNT.d.ts | 10 + .../dist/commands/top-k/LIST_WITHCOUNT.js | 20 + .../bloom/dist/commands/top-k/QUERY.d.ts | 5 + .../@redis/bloom/dist/commands/top-k/QUERY.js | 10 + .../bloom/dist/commands/top-k/RESERVE.d.ts | 10 + .../bloom/dist/commands/top-k/RESERVE.js | 13 + .../bloom/dist/commands/top-k/index.d.ts | 27 + .../@redis/bloom/dist/commands/top-k/index.js | 28 + 
node_modules/@redis/bloom/dist/index.d.ts | 1 + node_modules/@redis/bloom/dist/index.js | 5 + node_modules/@redis/bloom/package.json | 29 + node_modules/@redis/client/README.md | 3 + node_modules/@redis/client/dist/index.d.ts | 13 + node_modules/@redis/client/dist/index.js | 29 + .../lib/client/RESP2/composers/buffer.d.ts | 8 + .../dist/lib/client/RESP2/composers/buffer.js | 23 + .../lib/client/RESP2/composers/interface.d.ts | 6 + .../lib/client/RESP2/composers/interface.js | 2 + .../lib/client/RESP2/composers/string.d.ts | 9 + .../dist/lib/client/RESP2/composers/string.js | 31 + .../client/dist/lib/client/RESP2/decoder.d.ts | 35 + .../client/dist/lib/client/RESP2/decoder.js | 250 + .../client/dist/lib/client/RESP2/encoder.d.ts | 2 + .../client/dist/lib/client/RESP2/encoder.js | 23 + .../dist/lib/client/commands-queue.d.ts | 42 + .../client/dist/lib/client/commands-queue.js | 197 + .../client/dist/lib/client/commands.d.ts | 816 +++ .../@redis/client/dist/lib/client/commands.js | 375 + .../@redis/client/dist/lib/client/index.d.ts | 148 + .../@redis/client/dist/lib/client/index.js | 563 ++ .../client/dist/lib/client/multi-command.d.ts | 39 + .../client/dist/lib/client/multi-command.js | 130 + .../client/dist/lib/client/pub-sub.d.ts | 50 + .../@redis/client/dist/lib/client/pub-sub.js | 306 + .../@redis/client/dist/lib/client/socket.d.ts | 52 + .../@redis/client/dist/lib/client/socket.js | 233 + .../dist/lib/cluster/cluster-slots.d.ts | 60 + .../client/dist/lib/cluster/cluster-slots.js | 434 ++ .../client/dist/lib/cluster/commands.d.ts | 669 ++ .../client/dist/lib/cluster/commands.js | 670 ++ .../@redis/client/dist/lib/cluster/index.d.ts | 93 + .../@redis/client/dist/lib/cluster/index.js | 254 + .../dist/lib/cluster/multi-command.d.ts | 36 + .../client/dist/lib/cluster/multi-command.js | 80 + .../client/dist/lib/command-options.d.ts | 7 + .../@redis/client/dist/lib/command-options.js | 13 + .../@redis/client/dist/lib/commander.d.ts | 30 + 
.../@redis/client/dist/lib/commander.js | 103 + .../client/dist/lib/commands/ACL_CAT.d.ts | 3 + .../client/dist/lib/commands/ACL_CAT.js | 11 + .../client/dist/lib/commands/ACL_DELUSER.d.ts | 3 + .../client/dist/lib/commands/ACL_DELUSER.js | 8 + .../client/dist/lib/commands/ACL_DRYRUN.d.ts | 4 + .../client/dist/lib/commands/ACL_DRYRUN.js | 13 + .../client/dist/lib/commands/ACL_GENPASS.d.ts | 3 + .../client/dist/lib/commands/ACL_GENPASS.js | 11 + .../client/dist/lib/commands/ACL_GETUSER.d.ts | 26 + .../client/dist/lib/commands/ACL_GETUSER.js | 18 + .../client/dist/lib/commands/ACL_LIST.d.ts | 3 + .../client/dist/lib/commands/ACL_LIST.js | 7 + .../client/dist/lib/commands/ACL_LOAD.d.ts | 3 + .../client/dist/lib/commands/ACL_LOAD.js | 7 + .../client/dist/lib/commands/ACL_LOG.d.ts | 29 + .../client/dist/lib/commands/ACL_LOG.js | 23 + .../dist/lib/commands/ACL_LOG_RESET.d.ts | 3 + .../client/dist/lib/commands/ACL_LOG_RESET.js | 7 + .../client/dist/lib/commands/ACL_SAVE.d.ts | 3 + .../client/dist/lib/commands/ACL_SAVE.js | 7 + .../client/dist/lib/commands/ACL_SETUSER.d.ts | 3 + .../client/dist/lib/commands/ACL_SETUSER.js | 8 + .../client/dist/lib/commands/ACL_USERS.d.ts | 3 + .../client/dist/lib/commands/ACL_USERS.js | 7 + .../client/dist/lib/commands/ACL_WHOAMI.d.ts | 3 + .../client/dist/lib/commands/ACL_WHOAMI.js | 7 + .../client/dist/lib/commands/APPEND.d.ts | 4 + .../@redis/client/dist/lib/commands/APPEND.js | 8 + .../client/dist/lib/commands/ASKING.d.ts | 3 + .../@redis/client/dist/lib/commands/ASKING.js | 7 + .../@redis/client/dist/lib/commands/AUTH.d.ts | 7 + .../@redis/client/dist/lib/commands/AUTH.js | 10 + .../dist/lib/commands/BGREWRITEAOF.d.ts | 3 + .../client/dist/lib/commands/BGREWRITEAOF.js | 7 + .../client/dist/lib/commands/BGSAVE.d.ts | 7 + .../@redis/client/dist/lib/commands/BGSAVE.js | 11 + .../client/dist/lib/commands/BITCOUNT.d.ts | 11 + .../client/dist/lib/commands/BITCOUNT.js | 16 + .../client/dist/lib/commands/BITFIELD.d.ts | 26 + 
.../client/dist/lib/commands/BITFIELD.js | 25 + .../client/dist/lib/commands/BITFIELD_RO.d.ts | 7 + .../client/dist/lib/commands/BITFIELD_RO.js | 13 + .../client/dist/lib/commands/BITOP.d.ts | 6 + .../@redis/client/dist/lib/commands/BITOP.js | 9 + .../client/dist/lib/commands/BITPOS.d.ts | 6 + .../@redis/client/dist/lib/commands/BITPOS.js | 19 + .../client/dist/lib/commands/BLMOVE.d.ts | 5 + .../@redis/client/dist/lib/commands/BLMOVE.js | 15 + .../client/dist/lib/commands/BLMPOP.d.ts | 5 + .../@redis/client/dist/lib/commands/BLMPOP.js | 11 + .../client/dist/lib/commands/BLPOP.d.ts | 10 + .../@redis/client/dist/lib/commands/BLPOP.js | 20 + .../client/dist/lib/commands/BRPOP.d.ts | 4 + .../@redis/client/dist/lib/commands/BRPOP.js | 13 + .../client/dist/lib/commands/BRPOPLPUSH.d.ts | 4 + .../client/dist/lib/commands/BRPOPLPUSH.js | 8 + .../client/dist/lib/commands/BZMPOP.d.ts | 5 + .../@redis/client/dist/lib/commands/BZMPOP.js | 11 + .../client/dist/lib/commands/BZPOPMAX.d.ts | 10 + .../client/dist/lib/commands/BZPOPMAX.js | 21 + .../client/dist/lib/commands/BZPOPMIN.d.ts | 4 + .../client/dist/lib/commands/BZPOPMIN.js | 13 + .../dist/lib/commands/CLIENT_CACHING.d.ts | 4 + .../dist/lib/commands/CLIENT_CACHING.js | 11 + .../dist/lib/commands/CLIENT_GETNAME.d.ts | 3 + .../dist/lib/commands/CLIENT_GETNAME.js | 7 + .../dist/lib/commands/CLIENT_GETREDIR.d.ts | 3 + .../dist/lib/commands/CLIENT_GETREDIR.js | 7 + .../client/dist/lib/commands/CLIENT_ID.d.ts | 3 + .../client/dist/lib/commands/CLIENT_ID.js | 8 + .../client/dist/lib/commands/CLIENT_INFO.d.ts | 33 + .../client/dist/lib/commands/CLIENT_INFO.js | 57 + .../client/dist/lib/commands/CLIENT_KILL.d.ts | 38 + .../client/dist/lib/commands/CLIENT_KILL.js | 58 + .../client/dist/lib/commands/CLIENT_LIST.d.ts | 15 + .../client/dist/lib/commands/CLIENT_LIST.js | 28 + .../dist/lib/commands/CLIENT_NO-EVICT.d.ts | 4 + .../dist/lib/commands/CLIENT_NO-EVICT.js | 11 + .../dist/lib/commands/CLIENT_NO-TOUCH.d.ts | 4 + 
.../dist/lib/commands/CLIENT_NO-TOUCH.js | 11 + .../dist/lib/commands/CLIENT_PAUSE.d.ts | 4 + .../client/dist/lib/commands/CLIENT_PAUSE.js | 15 + .../dist/lib/commands/CLIENT_SETNAME.d.ts | 3 + .../dist/lib/commands/CLIENT_SETNAME.js | 7 + .../dist/lib/commands/CLIENT_TRACKING.d.ts | 20 + .../dist/lib/commands/CLIENT_TRACKING.js | 48 + .../lib/commands/CLIENT_TRACKINGINFO.d.ts | 17 + .../dist/lib/commands/CLIENT_TRACKINGINFO.js | 15 + .../dist/lib/commands/CLIENT_UNPAUSE.d.ts | 4 + .../dist/lib/commands/CLIENT_UNPAUSE.js | 7 + .../dist/lib/commands/CLUSTER_ADDSLOTS.d.ts | 3 + .../dist/lib/commands/CLUSTER_ADDSLOTS.js | 8 + .../lib/commands/CLUSTER_ADDSLOTSRANGE.d.ts | 4 + .../lib/commands/CLUSTER_ADDSLOTSRANGE.js | 8 + .../dist/lib/commands/CLUSTER_BUMPEPOCH.d.ts | 2 + .../dist/lib/commands/CLUSTER_BUMPEPOCH.js | 7 + .../CLUSTER_COUNT-FAILURE-REPORTS.d.ts | 2 + .../commands/CLUSTER_COUNT-FAILURE-REPORTS.js | 7 + .../lib/commands/CLUSTER_COUNTKEYSINSLOT.d.ts | 2 + .../lib/commands/CLUSTER_COUNTKEYSINSLOT.js | 7 + .../dist/lib/commands/CLUSTER_DELSLOTS.d.ts | 3 + .../dist/lib/commands/CLUSTER_DELSLOTS.js | 8 + .../lib/commands/CLUSTER_DELSLOTSRANGE.d.ts | 4 + .../lib/commands/CLUSTER_DELSLOTSRANGE.js | 8 + .../dist/lib/commands/CLUSTER_FAILOVER.d.ts | 6 + .../dist/lib/commands/CLUSTER_FAILOVER.js | 16 + .../dist/lib/commands/CLUSTER_FLUSHSLOTS.d.ts | 2 + .../dist/lib/commands/CLUSTER_FLUSHSLOTS.js | 7 + .../dist/lib/commands/CLUSTER_FORGET.d.ts | 2 + .../dist/lib/commands/CLUSTER_FORGET.js | 7 + .../lib/commands/CLUSTER_GETKEYSINSLOT.d.ts | 2 + .../lib/commands/CLUSTER_GETKEYSINSLOT.js | 7 + .../dist/lib/commands/CLUSTER_INFO.d.ts | 21 + .../client/dist/lib/commands/CLUSTER_INFO.js | 32 + .../dist/lib/commands/CLUSTER_KEYSLOT.d.ts | 2 + .../dist/lib/commands/CLUSTER_KEYSLOT.js | 7 + .../dist/lib/commands/CLUSTER_LINKS.d.ts | 25 + .../client/dist/lib/commands/CLUSTER_LINKS.js | 18 + .../dist/lib/commands/CLUSTER_MEET.d.ts | 2 + 
.../client/dist/lib/commands/CLUSTER_MEET.js | 7 + .../dist/lib/commands/CLUSTER_MYID.d.ts | 2 + .../client/dist/lib/commands/CLUSTER_MYID.js | 7 + .../dist/lib/commands/CLUSTER_MYSHARDID.d.ts | 4 + .../dist/lib/commands/CLUSTER_MYSHARDID.js | 8 + .../dist/lib/commands/CLUSTER_NODES.d.ts | 28 + .../client/dist/lib/commands/CLUSTER_NODES.js | 74 + .../dist/lib/commands/CLUSTER_REPLICAS.d.ts | 2 + .../dist/lib/commands/CLUSTER_REPLICAS.js | 9 + .../dist/lib/commands/CLUSTER_REPLICATE.d.ts | 2 + .../dist/lib/commands/CLUSTER_REPLICATE.js | 7 + .../dist/lib/commands/CLUSTER_RESET.d.ts | 2 + .../client/dist/lib/commands/CLUSTER_RESET.js | 11 + .../dist/lib/commands/CLUSTER_SAVECONFIG.d.ts | 2 + .../dist/lib/commands/CLUSTER_SAVECONFIG.js | 7 + .../commands/CLUSTER_SET-CONFIG-EPOCH.d.ts | 2 + .../lib/commands/CLUSTER_SET-CONFIG-EPOCH.js | 7 + .../dist/lib/commands/CLUSTER_SETSLOT.d.ts | 8 + .../dist/lib/commands/CLUSTER_SETSLOT.js | 18 + .../dist/lib/commands/CLUSTER_SLOTS.d.ts | 22 + .../client/dist/lib/commands/CLUSTER_SLOTS.js | 26 + .../client/dist/lib/commands/COMMAND.d.ts | 5 + .../client/dist/lib/commands/COMMAND.js | 13 + .../dist/lib/commands/COMMAND_COUNT.d.ts | 4 + .../client/dist/lib/commands/COMMAND_COUNT.js | 8 + .../dist/lib/commands/COMMAND_GETKEYS.d.ts | 4 + .../dist/lib/commands/COMMAND_GETKEYS.js | 8 + .../lib/commands/COMMAND_GETKEYSANDFLAGS.d.ts | 13 + .../lib/commands/COMMAND_GETKEYSANDFLAGS.js | 15 + .../dist/lib/commands/COMMAND_INFO.d.ts | 5 + .../client/dist/lib/commands/COMMAND_INFO.js | 13 + .../dist/lib/commands/COMMAND_LIST.d.ts | 14 + .../client/dist/lib/commands/COMMAND_LIST.js | 18 + .../client/dist/lib/commands/CONFIG_GET.d.ts | 2 + .../client/dist/lib/commands/CONFIG_GET.js | 9 + .../dist/lib/commands/CONFIG_RESETSTAT.d.ts | 2 + .../dist/lib/commands/CONFIG_RESETSTAT.js | 7 + .../dist/lib/commands/CONFIG_REWRITE.d.ts | 2 + .../dist/lib/commands/CONFIG_REWRITE.js | 7 + .../client/dist/lib/commands/CONFIG_SET.d.ts | 6 + 
.../client/dist/lib/commands/CONFIG_SET.js | 16 + .../@redis/client/dist/lib/commands/COPY.d.ts | 8 + .../@redis/client/dist/lib/commands/COPY.js | 17 + .../client/dist/lib/commands/DBSIZE.d.ts | 3 + .../@redis/client/dist/lib/commands/DBSIZE.js | 8 + .../@redis/client/dist/lib/commands/DECR.d.ts | 4 + .../@redis/client/dist/lib/commands/DECR.js | 8 + .../client/dist/lib/commands/DECRBY.d.ts | 4 + .../@redis/client/dist/lib/commands/DECRBY.js | 8 + .../@redis/client/dist/lib/commands/DEL.d.ts | 4 + .../@redis/client/dist/lib/commands/DEL.js | 9 + .../client/dist/lib/commands/DISCARD.d.ts | 3 + .../client/dist/lib/commands/DISCARD.js | 7 + .../@redis/client/dist/lib/commands/DUMP.d.ts | 4 + .../@redis/client/dist/lib/commands/DUMP.js | 8 + .../@redis/client/dist/lib/commands/ECHO.d.ts | 4 + .../@redis/client/dist/lib/commands/ECHO.js | 8 + .../@redis/client/dist/lib/commands/EVAL.d.ts | 3 + .../@redis/client/dist/lib/commands/EVAL.js | 9 + .../client/dist/lib/commands/EVALSHA.d.ts | 3 + .../client/dist/lib/commands/EVALSHA.js | 9 + .../client/dist/lib/commands/EVALSHA_RO.d.ts | 4 + .../client/dist/lib/commands/EVALSHA_RO.js | 10 + .../client/dist/lib/commands/EVAL_RO.d.ts | 4 + .../client/dist/lib/commands/EVAL_RO.js | 10 + .../client/dist/lib/commands/EXISTS.d.ts | 5 + .../@redis/client/dist/lib/commands/EXISTS.js | 10 + .../client/dist/lib/commands/EXPIRE.d.ts | 4 + .../@redis/client/dist/lib/commands/EXPIRE.js | 14 + .../client/dist/lib/commands/EXPIREAT.d.ts | 4 + .../client/dist/lib/commands/EXPIREAT.js | 19 + .../client/dist/lib/commands/EXPIRETIME.d.ts | 4 + .../client/dist/lib/commands/EXPIRETIME.js | 8 + .../client/dist/lib/commands/FAILOVER.d.ts | 12 + .../client/dist/lib/commands/FAILOVER.js | 20 + .../client/dist/lib/commands/FCALL.d.ts | 3 + .../@redis/client/dist/lib/commands/FCALL.js | 9 + .../client/dist/lib/commands/FCALL_RO.d.ts | 4 + .../client/dist/lib/commands/FCALL_RO.js | 10 + .../client/dist/lib/commands/FLUSHALL.d.ts | 6 + 
.../client/dist/lib/commands/FLUSHALL.js | 16 + .../client/dist/lib/commands/FLUSHDB.d.ts | 3 + .../client/dist/lib/commands/FLUSHDB.js | 11 + .../dist/lib/commands/FUNCTION_DELETE.d.ts | 3 + .../dist/lib/commands/FUNCTION_DELETE.js | 7 + .../dist/lib/commands/FUNCTION_DUMP.d.ts | 3 + .../client/dist/lib/commands/FUNCTION_DUMP.js | 7 + .../dist/lib/commands/FUNCTION_FLUSH.d.ts | 3 + .../dist/lib/commands/FUNCTION_FLUSH.js | 11 + .../dist/lib/commands/FUNCTION_KILL.d.ts | 3 + .../client/dist/lib/commands/FUNCTION_KILL.js | 7 + .../dist/lib/commands/FUNCTION_LIST.d.ts | 4 + .../client/dist/lib/commands/FUNCTION_LIST.js | 16 + .../lib/commands/FUNCTION_LIST_WITHCODE.d.ts | 13 + .../lib/commands/FUNCTION_LIST_WITHCODE.js | 18 + .../dist/lib/commands/FUNCTION_LOAD.d.ts | 7 + .../client/dist/lib/commands/FUNCTION_LOAD.js | 12 + .../dist/lib/commands/FUNCTION_RESTORE.d.ts | 3 + .../dist/lib/commands/FUNCTION_RESTORE.js | 11 + .../dist/lib/commands/FUNCTION_STATS.d.ts | 28 + .../dist/lib/commands/FUNCTION_STATS.js | 25 + .../client/dist/lib/commands/GEOADD.d.ts | 20 + .../@redis/client/dist/lib/commands/GEOADD.js | 21 + .../client/dist/lib/commands/GEODIST.d.ts | 6 + .../client/dist/lib/commands/GEODIST.js | 17 + .../client/dist/lib/commands/GEOHASH.d.ts | 5 + .../client/dist/lib/commands/GEOHASH.js | 10 + .../client/dist/lib/commands/GEOPOS.d.ts | 11 + .../@redis/client/dist/lib/commands/GEOPOS.js | 17 + .../client/dist/lib/commands/GEORADIUS.d.ts | 6 + .../client/dist/lib/commands/GEORADIUS.js | 10 + .../dist/lib/commands/GEORADIUSBYMEMBER.d.ts | 6 + .../dist/lib/commands/GEORADIUSBYMEMBER.js | 10 + .../lib/commands/GEORADIUSBYMEMBERSTORE.d.ts | 5 + .../lib/commands/GEORADIUSBYMEMBERSTORE.js | 11 + .../lib/commands/GEORADIUSBYMEMBER_RO.d.ts | 6 + .../dist/lib/commands/GEORADIUSBYMEMBER_RO.js | 10 + .../commands/GEORADIUSBYMEMBER_RO_WITH.d.ts | 5 + .../lib/commands/GEORADIUSBYMEMBER_RO_WITH.js | 16 + .../lib/commands/GEORADIUSBYMEMBER_WITH.d.ts | 5 + 
.../lib/commands/GEORADIUSBYMEMBER_WITH.js | 16 + .../dist/lib/commands/GEORADIUSSTORE.d.ts | 5 + .../dist/lib/commands/GEORADIUSSTORE.js | 11 + .../dist/lib/commands/GEORADIUS_RO.d.ts | 6 + .../client/dist/lib/commands/GEORADIUS_RO.js | 10 + .../dist/lib/commands/GEORADIUS_RO_WITH.d.ts | 5 + .../dist/lib/commands/GEORADIUS_RO_WITH.js | 16 + .../dist/lib/commands/GEORADIUS_WITH.d.ts | 5 + .../dist/lib/commands/GEORADIUS_WITH.js | 16 + .../client/dist/lib/commands/GEOSEARCH.d.ts | 6 + .../client/dist/lib/commands/GEOSEARCH.js | 10 + .../dist/lib/commands/GEOSEARCHSTORE.d.ts | 8 + .../dist/lib/commands/GEOSEARCHSTORE.js | 22 + .../dist/lib/commands/GEOSEARCH_WITH.d.ts | 5 + .../dist/lib/commands/GEOSEARCH_WITH.js | 16 + .../@redis/client/dist/lib/commands/GET.d.ts | 5 + .../@redis/client/dist/lib/commands/GET.js | 9 + .../client/dist/lib/commands/GETBIT.d.ts | 6 + .../@redis/client/dist/lib/commands/GETBIT.js | 9 + .../client/dist/lib/commands/GETDEL.d.ts | 4 + .../@redis/client/dist/lib/commands/GETDEL.js | 8 + .../client/dist/lib/commands/GETEX.d.ts | 16 + .../@redis/client/dist/lib/commands/GETEX.js | 25 + .../client/dist/lib/commands/GETRANGE.d.ts | 5 + .../client/dist/lib/commands/GETRANGE.js | 9 + .../client/dist/lib/commands/GETSET.d.ts | 4 + .../@redis/client/dist/lib/commands/GETSET.js | 8 + .../@redis/client/dist/lib/commands/HDEL.d.ts | 4 + .../@redis/client/dist/lib/commands/HDEL.js | 9 + .../client/dist/lib/commands/HELLO.d.ts | 35 + .../@redis/client/dist/lib/commands/HELLO.js | 29 + .../client/dist/lib/commands/HEXISTS.d.ts | 4 + .../client/dist/lib/commands/HEXISTS.js | 10 + .../client/dist/lib/commands/HEXPIRE.d.ts | 23 + .../client/dist/lib/commands/HEXPIRE.js | 32 + .../client/dist/lib/commands/HEXPIREAT.d.ts | 5 + .../client/dist/lib/commands/HEXPIREAT.js | 18 + .../client/dist/lib/commands/HEXPIRETIME.d.ts | 13 + .../client/dist/lib/commands/HEXPIRETIME.js | 18 + .../@redis/client/dist/lib/commands/HGET.d.ts | 5 + 
.../@redis/client/dist/lib/commands/HGET.js | 9 + .../client/dist/lib/commands/HGETALL.d.ts | 6 + .../client/dist/lib/commands/HGETALL.js | 12 + .../client/dist/lib/commands/HINCRBY.d.ts | 4 + .../client/dist/lib/commands/HINCRBY.js | 8 + .../dist/lib/commands/HINCRBYFLOAT.d.ts | 4 + .../client/dist/lib/commands/HINCRBYFLOAT.js | 8 + .../client/dist/lib/commands/HKEYS.d.ts | 4 + .../@redis/client/dist/lib/commands/HKEYS.js | 8 + .../@redis/client/dist/lib/commands/HLEN.d.ts | 4 + .../@redis/client/dist/lib/commands/HLEN.js | 8 + .../client/dist/lib/commands/HMGET.d.ts | 5 + .../@redis/client/dist/lib/commands/HMGET.js | 10 + .../client/dist/lib/commands/HPERSIST.d.ts | 4 + .../client/dist/lib/commands/HPERSIST.js | 9 + .../client/dist/lib/commands/HPEXPIRE.d.ts | 5 + .../client/dist/lib/commands/HPEXPIRE.js | 14 + .../client/dist/lib/commands/HPEXPIREAT.d.ts | 6 + .../client/dist/lib/commands/HPEXPIREAT.js | 15 + .../dist/lib/commands/HPEXPIRETIME.d.ts | 5 + .../client/dist/lib/commands/HPEXPIRETIME.js | 10 + .../client/dist/lib/commands/HPTTL.d.ts | 5 + .../@redis/client/dist/lib/commands/HPTTL.js | 10 + .../client/dist/lib/commands/HRANDFIELD.d.ts | 5 + .../client/dist/lib/commands/HRANDFIELD.js | 9 + .../dist/lib/commands/HRANDFIELD_COUNT.d.ts | 4 + .../dist/lib/commands/HRANDFIELD_COUNT.js | 14 + .../commands/HRANDFIELD_COUNT_WITHVALUES.d.ts | 4 + .../commands/HRANDFIELD_COUNT_WITHVALUES.js | 16 + .../client/dist/lib/commands/HSCAN.d.ts | 16 + .../@redis/client/dist/lib/commands/HSCAN.js | 27 + .../dist/lib/commands/HSCAN_NOVALUES.d.ts | 10 + .../dist/lib/commands/HSCAN_NOVALUES.js | 20 + .../@redis/client/dist/lib/commands/HSET.d.ts | 12 + .../@redis/client/dist/lib/commands/HSET.js | 45 + .../client/dist/lib/commands/HSETNX.d.ts | 4 + .../@redis/client/dist/lib/commands/HSETNX.js | 10 + .../client/dist/lib/commands/HSTRLEN.d.ts | 4 + .../client/dist/lib/commands/HSTRLEN.js | 8 + .../@redis/client/dist/lib/commands/HTTL.d.ts | 5 + 
.../@redis/client/dist/lib/commands/HTTL.js | 10 + .../client/dist/lib/commands/HVALS.d.ts | 4 + .../@redis/client/dist/lib/commands/HVALS.js | 8 + .../@redis/client/dist/lib/commands/INCR.d.ts | 4 + .../@redis/client/dist/lib/commands/INCR.js | 8 + .../client/dist/lib/commands/INCRBY.d.ts | 4 + .../@redis/client/dist/lib/commands/INCRBY.js | 8 + .../client/dist/lib/commands/INCRBYFLOAT.d.ts | 4 + .../client/dist/lib/commands/INCRBYFLOAT.js | 8 + .../@redis/client/dist/lib/commands/INFO.d.ts | 3 + .../@redis/client/dist/lib/commands/INFO.js | 12 + .../@redis/client/dist/lib/commands/KEYS.d.ts | 3 + .../@redis/client/dist/lib/commands/KEYS.js | 7 + .../client/dist/lib/commands/LASTSAVE.d.ts | 3 + .../client/dist/lib/commands/LASTSAVE.js | 12 + .../dist/lib/commands/LATENCY_DOCTOR.d.ts | 2 + .../dist/lib/commands/LATENCY_DOCTOR.js | 7 + .../dist/lib/commands/LATENCY_GRAPH.d.ts | 4 + .../client/dist/lib/commands/LATENCY_GRAPH.js | 7 + .../dist/lib/commands/LATENCY_HISTORY.d.ts | 6 + .../dist/lib/commands/LATENCY_HISTORY.js | 7 + .../dist/lib/commands/LATENCY_LATEST.d.ts | 8 + .../dist/lib/commands/LATENCY_LATEST.js | 7 + .../@redis/client/dist/lib/commands/LCS.d.ts | 6 + .../@redis/client/dist/lib/commands/LCS.js | 13 + .../client/dist/lib/commands/LCS_IDX.d.ts | 21 + .../client/dist/lib/commands/LCS_IDX.js | 24 + .../lib/commands/LCS_IDX_WITHMATCHLEN.d.ts | 23 + .../dist/lib/commands/LCS_IDX_WITHMATCHLEN.js | 25 + .../client/dist/lib/commands/LCS_LEN.d.ts | 4 + .../client/dist/lib/commands/LCS_LEN.js | 13 + .../client/dist/lib/commands/LINDEX.d.ts | 5 + .../@redis/client/dist/lib/commands/LINDEX.js | 9 + .../client/dist/lib/commands/LINSERT.d.ts | 6 + .../client/dist/lib/commands/LINSERT.js | 14 + .../@redis/client/dist/lib/commands/LLEN.d.ts | 5 + .../@redis/client/dist/lib/commands/LLEN.js | 9 + .../client/dist/lib/commands/LMOVE.d.ts | 5 + .../@redis/client/dist/lib/commands/LMOVE.js | 14 + .../client/dist/lib/commands/LMPOP.d.ts | 8 + 
.../@redis/client/dist/lib/commands/LMPOP.js | 9 + .../client/dist/lib/commands/LOLWUT.d.ts | 4 + .../@redis/client/dist/lib/commands/LOLWUT.js | 12 + .../@redis/client/dist/lib/commands/LPOP.d.ts | 4 + .../@redis/client/dist/lib/commands/LPOP.js | 8 + .../client/dist/lib/commands/LPOP_COUNT.d.ts | 4 + .../client/dist/lib/commands/LPOP_COUNT.js | 8 + .../@redis/client/dist/lib/commands/LPOS.d.ts | 9 + .../@redis/client/dist/lib/commands/LPOS.js | 16 + .../client/dist/lib/commands/LPOS_COUNT.d.ts | 5 + .../client/dist/lib/commands/LPOS_COUNT.js | 18 + .../client/dist/lib/commands/LPUSH.d.ts | 4 + .../@redis/client/dist/lib/commands/LPUSH.js | 9 + .../client/dist/lib/commands/LPUSHX.d.ts | 4 + .../@redis/client/dist/lib/commands/LPUSHX.js | 9 + .../client/dist/lib/commands/LRANGE.d.ts | 5 + .../@redis/client/dist/lib/commands/LRANGE.js | 14 + .../@redis/client/dist/lib/commands/LREM.d.ts | 4 + .../@redis/client/dist/lib/commands/LREM.js | 13 + .../@redis/client/dist/lib/commands/LSET.d.ts | 4 + .../@redis/client/dist/lib/commands/LSET.js | 13 + .../client/dist/lib/commands/LTRIM.d.ts | 4 + .../@redis/client/dist/lib/commands/LTRIM.js | 13 + .../dist/lib/commands/MEMORY_DOCTOR.d.ts | 2 + .../client/dist/lib/commands/MEMORY_DOCTOR.js | 7 + .../lib/commands/MEMORY_MALLOC-STATS.d.ts | 2 + .../dist/lib/commands/MEMORY_MALLOC-STATS.js | 7 + .../dist/lib/commands/MEMORY_PURGE.d.ts | 2 + .../client/dist/lib/commands/MEMORY_PURGE.js | 7 + .../dist/lib/commands/MEMORY_STATS.d.ts | 36 + .../client/dist/lib/commands/MEMORY_STATS.js | 56 + .../dist/lib/commands/MEMORY_USAGE.d.ts | 8 + .../client/dist/lib/commands/MEMORY_USAGE.js | 13 + .../@redis/client/dist/lib/commands/MGET.d.ts | 5 + .../@redis/client/dist/lib/commands/MGET.js | 9 + .../client/dist/lib/commands/MIGRATE.d.ts | 10 + .../client/dist/lib/commands/MIGRATE.js | 32 + .../client/dist/lib/commands/MODULE_LIST.d.ts | 2 + .../client/dist/lib/commands/MODULE_LIST.js | 7 + .../client/dist/lib/commands/MODULE_LOAD.d.ts | 2 
+ .../client/dist/lib/commands/MODULE_LOAD.js | 11 + .../dist/lib/commands/MODULE_UNLOAD.d.ts | 2 + .../client/dist/lib/commands/MODULE_UNLOAD.js | 7 + .../@redis/client/dist/lib/commands/MOVE.d.ts | 3 + .../@redis/client/dist/lib/commands/MOVE.js | 10 + .../@redis/client/dist/lib/commands/MSET.d.ts | 5 + .../@redis/client/dist/lib/commands/MSET.js | 17 + .../client/dist/lib/commands/MSETNX.d.ts | 5 + .../@redis/client/dist/lib/commands/MSETNX.js | 19 + .../dist/lib/commands/OBJECT_ENCODING.d.ts | 5 + .../dist/lib/commands/OBJECT_ENCODING.js | 9 + .../client/dist/lib/commands/OBJECT_FREQ.d.ts | 5 + .../client/dist/lib/commands/OBJECT_FREQ.js | 9 + .../dist/lib/commands/OBJECT_IDLETIME.d.ts | 5 + .../dist/lib/commands/OBJECT_IDLETIME.js | 9 + .../dist/lib/commands/OBJECT_REFCOUNT.d.ts | 5 + .../dist/lib/commands/OBJECT_REFCOUNT.js | 9 + .../client/dist/lib/commands/PERSIST.d.ts | 4 + .../client/dist/lib/commands/PERSIST.js | 10 + .../client/dist/lib/commands/PEXPIRE.d.ts | 4 + .../client/dist/lib/commands/PEXPIRE.js | 14 + .../client/dist/lib/commands/PEXPIREAT.d.ts | 4 + .../client/dist/lib/commands/PEXPIREAT.js | 19 + .../client/dist/lib/commands/PEXPIRETIME.d.ts | 4 + .../client/dist/lib/commands/PEXPIRETIME.js | 8 + .../client/dist/lib/commands/PFADD.d.ts | 4 + .../@redis/client/dist/lib/commands/PFADD.js | 11 + .../client/dist/lib/commands/PFCOUNT.d.ts | 4 + .../client/dist/lib/commands/PFCOUNT.js | 9 + .../client/dist/lib/commands/PFMERGE.d.ts | 4 + .../client/dist/lib/commands/PFMERGE.js | 9 + .../@redis/client/dist/lib/commands/PING.d.ts | 3 + .../@redis/client/dist/lib/commands/PING.js | 11 + .../client/dist/lib/commands/PSETEX.d.ts | 4 + .../@redis/client/dist/lib/commands/PSETEX.js | 13 + .../@redis/client/dist/lib/commands/PTTL.d.ts | 5 + .../@redis/client/dist/lib/commands/PTTL.js | 9 + .../client/dist/lib/commands/PUBLISH.d.ts | 4 + .../client/dist/lib/commands/PUBLISH.js | 8 + .../dist/lib/commands/PUBSUB_CHANNELS.d.ts | 3 + 
.../dist/lib/commands/PUBSUB_CHANNELS.js | 12 + .../dist/lib/commands/PUBSUB_NUMPAT.d.ts | 3 + .../client/dist/lib/commands/PUBSUB_NUMPAT.js | 8 + .../dist/lib/commands/PUBSUB_NUMSUB.d.ts | 4 + .../client/dist/lib/commands/PUBSUB_NUMSUB.js | 20 + .../lib/commands/PUBSUB_SHARDCHANNELS.d.ts | 4 + .../dist/lib/commands/PUBSUB_SHARDCHANNELS.js | 11 + .../dist/lib/commands/PUBSUB_SHARDNUMSUB.d.ts | 4 + .../dist/lib/commands/PUBSUB_SHARDNUMSUB.js | 20 + .../client/dist/lib/commands/RANDOMKEY.d.ts | 4 + .../client/dist/lib/commands/RANDOMKEY.js | 8 + .../client/dist/lib/commands/READONLY.d.ts | 2 + .../client/dist/lib/commands/READONLY.js | 7 + .../client/dist/lib/commands/READWRITE.d.ts | 2 + .../client/dist/lib/commands/READWRITE.js | 7 + .../client/dist/lib/commands/RENAME.d.ts | 4 + .../@redis/client/dist/lib/commands/RENAME.js | 8 + .../client/dist/lib/commands/RENAMENX.d.ts | 4 + .../client/dist/lib/commands/RENAMENX.js | 10 + .../client/dist/lib/commands/REPLICAOF.d.ts | 2 + .../client/dist/lib/commands/REPLICAOF.js | 7 + .../dist/lib/commands/RESTORE-ASKING.d.ts | 2 + .../dist/lib/commands/RESTORE-ASKING.js | 7 + .../client/dist/lib/commands/RESTORE.d.ts | 11 + .../client/dist/lib/commands/RESTORE.js | 21 + .../@redis/client/dist/lib/commands/ROLE.d.ts | 32 + .../@redis/client/dist/lib/commands/ROLE.js | 38 + .../@redis/client/dist/lib/commands/RPOP.d.ts | 4 + .../@redis/client/dist/lib/commands/RPOP.js | 8 + .../client/dist/lib/commands/RPOPLPUSH.d.ts | 4 + .../client/dist/lib/commands/RPOPLPUSH.js | 8 + .../client/dist/lib/commands/RPOP_COUNT.d.ts | 4 + .../client/dist/lib/commands/RPOP_COUNT.js | 8 + .../client/dist/lib/commands/RPUSH.d.ts | 4 + .../@redis/client/dist/lib/commands/RPUSH.js | 9 + .../client/dist/lib/commands/RPUSHX.d.ts | 4 + .../@redis/client/dist/lib/commands/RPUSHX.js | 9 + .../@redis/client/dist/lib/commands/SADD.d.ts | 4 + .../@redis/client/dist/lib/commands/SADD.js | 9 + .../@redis/client/dist/lib/commands/SAVE.d.ts | 3 + 
.../@redis/client/dist/lib/commands/SAVE.js | 7 + .../@redis/client/dist/lib/commands/SCAN.d.ts | 14 + .../@redis/client/dist/lib/commands/SCAN.js | 20 + .../client/dist/lib/commands/SCARD.d.ts | 3 + .../@redis/client/dist/lib/commands/SCARD.js | 8 + .../dist/lib/commands/SCRIPT_DEBUG.d.ts | 2 + .../client/dist/lib/commands/SCRIPT_DEBUG.js | 7 + .../dist/lib/commands/SCRIPT_EXISTS.d.ts | 3 + .../client/dist/lib/commands/SCRIPT_EXISTS.js | 10 + .../dist/lib/commands/SCRIPT_FLUSH.d.ts | 2 + .../client/dist/lib/commands/SCRIPT_FLUSH.js | 11 + .../client/dist/lib/commands/SCRIPT_KILL.d.ts | 2 + .../client/dist/lib/commands/SCRIPT_KILL.js | 7 + .../client/dist/lib/commands/SCRIPT_LOAD.d.ts | 2 + .../client/dist/lib/commands/SCRIPT_LOAD.js | 7 + .../client/dist/lib/commands/SDIFF.d.ts | 5 + .../@redis/client/dist/lib/commands/SDIFF.js | 10 + .../client/dist/lib/commands/SDIFFSTORE.d.ts | 4 + .../client/dist/lib/commands/SDIFFSTORE.js | 9 + .../@redis/client/dist/lib/commands/SET.d.ts | 23 + .../@redis/client/dist/lib/commands/SET.js | 37 + .../client/dist/lib/commands/SETBIT.d.ts | 5 + .../@redis/client/dist/lib/commands/SETBIT.js | 8 + .../client/dist/lib/commands/SETEX.d.ts | 4 + .../@redis/client/dist/lib/commands/SETEX.js | 13 + .../client/dist/lib/commands/SETNX.d.ts | 4 + .../@redis/client/dist/lib/commands/SETNX.js | 10 + .../client/dist/lib/commands/SETRANGE.d.ts | 4 + .../client/dist/lib/commands/SETRANGE.js | 8 + .../client/dist/lib/commands/SHUTDOWN.d.ts | 2 + .../client/dist/lib/commands/SHUTDOWN.js | 11 + .../client/dist/lib/commands/SINTER.d.ts | 5 + .../@redis/client/dist/lib/commands/SINTER.js | 10 + .../client/dist/lib/commands/SINTERCARD.d.ts | 5 + .../client/dist/lib/commands/SINTERCARD.js | 14 + .../client/dist/lib/commands/SINTERSTORE.d.ts | 4 + .../client/dist/lib/commands/SINTERSTORE.js | 9 + .../client/dist/lib/commands/SISMEMBER.d.ts | 4 + .../client/dist/lib/commands/SISMEMBER.js | 10 + .../client/dist/lib/commands/SMEMBERS.d.ts | 4 + 
.../client/dist/lib/commands/SMEMBERS.js | 8 + .../client/dist/lib/commands/SMISMEMBER.d.ts | 4 + .../client/dist/lib/commands/SMISMEMBER.js | 10 + .../client/dist/lib/commands/SMOVE.d.ts | 4 + .../@redis/client/dist/lib/commands/SMOVE.js | 10 + .../@redis/client/dist/lib/commands/SORT.d.ts | 5 + .../@redis/client/dist/lib/commands/SORT.js | 9 + .../client/dist/lib/commands/SORT_RO.d.ts | 6 + .../client/dist/lib/commands/SORT_RO.js | 10 + .../client/dist/lib/commands/SORT_STORE.d.ts | 5 + .../client/dist/lib/commands/SORT_STORE.js | 11 + .../@redis/client/dist/lib/commands/SPOP.d.ts | 4 + .../@redis/client/dist/lib/commands/SPOP.js | 12 + .../client/dist/lib/commands/SPUBLISH.d.ts | 5 + .../client/dist/lib/commands/SPUBLISH.js | 9 + .../client/dist/lib/commands/SRANDMEMBER.d.ts | 4 + .../client/dist/lib/commands/SRANDMEMBER.js | 8 + .../dist/lib/commands/SRANDMEMBER_COUNT.d.ts | 4 + .../dist/lib/commands/SRANDMEMBER_COUNT.js | 13 + .../@redis/client/dist/lib/commands/SREM.d.ts | 4 + .../@redis/client/dist/lib/commands/SREM.js | 9 + .../client/dist/lib/commands/SSCAN.d.ts | 12 + .../@redis/client/dist/lib/commands/SSCAN.js | 20 + .../client/dist/lib/commands/STRLEN.d.ts | 5 + .../@redis/client/dist/lib/commands/STRLEN.js | 9 + .../client/dist/lib/commands/SUNION.d.ts | 5 + .../@redis/client/dist/lib/commands/SUNION.js | 10 + .../client/dist/lib/commands/SUNIONSTORE.d.ts | 4 + .../client/dist/lib/commands/SUNIONSTORE.js | 9 + .../client/dist/lib/commands/SWAPDB.d.ts | 2 + .../@redis/client/dist/lib/commands/SWAPDB.js | 7 + .../@redis/client/dist/lib/commands/TIME.d.ts | 6 + .../@redis/client/dist/lib/commands/TIME.js | 13 + .../client/dist/lib/commands/TOUCH.d.ts | 4 + .../@redis/client/dist/lib/commands/TOUCH.js | 9 + .../@redis/client/dist/lib/commands/TTL.d.ts | 5 + .../@redis/client/dist/lib/commands/TTL.js | 9 + .../@redis/client/dist/lib/commands/TYPE.d.ts | 5 + .../@redis/client/dist/lib/commands/TYPE.js | 9 + .../client/dist/lib/commands/UNLINK.d.ts | 4 + 
.../@redis/client/dist/lib/commands/UNLINK.js | 9 + .../client/dist/lib/commands/UNWATCH.d.ts | 2 + .../client/dist/lib/commands/UNWATCH.js | 7 + .../@redis/client/dist/lib/commands/WAIT.d.ts | 3 + .../@redis/client/dist/lib/commands/WAIT.js | 8 + .../client/dist/lib/commands/WATCH.d.ts | 4 + .../@redis/client/dist/lib/commands/WATCH.js | 9 + .../@redis/client/dist/lib/commands/XACK.d.ts | 4 + .../@redis/client/dist/lib/commands/XACK.js | 9 + .../@redis/client/dist/lib/commands/XADD.d.ts | 14 + .../@redis/client/dist/lib/commands/XADD.js | 28 + .../client/dist/lib/commands/XAUTOCLAIM.d.ts | 14 + .../client/dist/lib/commands/XAUTOCLAIM.js | 20 + .../dist/lib/commands/XAUTOCLAIM_JUSTID.d.ts | 10 + .../dist/lib/commands/XAUTOCLAIM_JUSTID.js | 20 + .../client/dist/lib/commands/XCLAIM.d.ts | 10 + .../@redis/client/dist/lib/commands/XCLAIM.js | 24 + .../dist/lib/commands/XCLAIM_JUSTID.d.ts | 5 + .../client/dist/lib/commands/XCLAIM_JUSTID.js | 13 + .../@redis/client/dist/lib/commands/XDEL.d.ts | 4 + .../@redis/client/dist/lib/commands/XDEL.js | 9 + .../dist/lib/commands/XGROUP_CREATE.d.ts | 8 + .../client/dist/lib/commands/XGROUP_CREATE.js | 12 + .../lib/commands/XGROUP_CREATECONSUMER.d.ts | 4 + .../lib/commands/XGROUP_CREATECONSUMER.js | 10 + .../dist/lib/commands/XGROUP_DELCONSUMER.d.ts | 4 + .../dist/lib/commands/XGROUP_DELCONSUMER.js | 8 + .../dist/lib/commands/XGROUP_DESTROY.d.ts | 4 + .../dist/lib/commands/XGROUP_DESTROY.js | 10 + .../dist/lib/commands/XGROUP_SETID.d.ts | 4 + .../client/dist/lib/commands/XGROUP_SETID.js | 8 + .../dist/lib/commands/XINFO_CONSUMERS.d.ts | 12 + .../dist/lib/commands/XINFO_CONSUMERS.js | 18 + .../dist/lib/commands/XINFO_GROUPS.d.ts | 12 + .../client/dist/lib/commands/XINFO_GROUPS.js | 18 + .../dist/lib/commands/XINFO_STREAM.d.ts | 16 + .../client/dist/lib/commands/XINFO_STREAM.js | 46 + .../@redis/client/dist/lib/commands/XLEN.d.ts | 5 + .../@redis/client/dist/lib/commands/XLEN.js | 9 + .../client/dist/lib/commands/XPENDING.d.ts | 24 + 
.../client/dist/lib/commands/XPENDING.js | 21 + .../dist/lib/commands/XPENDING_RANGE.d.ts | 22 + .../dist/lib/commands/XPENDING_RANGE.js | 26 + .../client/dist/lib/commands/XRANGE.d.ts | 8 + .../@redis/client/dist/lib/commands/XRANGE.js | 15 + .../client/dist/lib/commands/XREAD.d.ts | 13 + .../@redis/client/dist/lib/commands/XREAD.js | 28 + .../client/dist/lib/commands/XREADGROUP.d.ts | 14 + .../client/dist/lib/commands/XREADGROUP.js | 31 + .../client/dist/lib/commands/XREVRANGE.d.ts | 8 + .../client/dist/lib/commands/XREVRANGE.js | 15 + .../client/dist/lib/commands/XSETID.d.ts | 9 + .../@redis/client/dist/lib/commands/XSETID.js | 15 + .../client/dist/lib/commands/XTRIM.d.ts | 9 + .../@redis/client/dist/lib/commands/XTRIM.js | 16 + .../@redis/client/dist/lib/commands/ZADD.d.ts | 24 + .../@redis/client/dist/lib/commands/ZADD.js | 35 + .../client/dist/lib/commands/ZCARD.d.ts | 5 + .../@redis/client/dist/lib/commands/ZCARD.js | 9 + .../client/dist/lib/commands/ZCOUNT.d.ts | 5 + .../@redis/client/dist/lib/commands/ZCOUNT.js | 15 + .../client/dist/lib/commands/ZDIFF.d.ts | 5 + .../@redis/client/dist/lib/commands/ZDIFF.js | 10 + .../client/dist/lib/commands/ZDIFFSTORE.d.ts | 4 + .../client/dist/lib/commands/ZDIFFSTORE.js | 9 + .../dist/lib/commands/ZDIFF_WITHSCORES.d.ts | 5 + .../dist/lib/commands/ZDIFF_WITHSCORES.js | 16 + .../client/dist/lib/commands/ZINCRBY.d.ts | 4 + .../client/dist/lib/commands/ZINCRBY.js | 16 + .../client/dist/lib/commands/ZINTER.d.ts | 10 + .../@redis/client/dist/lib/commands/ZINTER.js | 17 + .../client/dist/lib/commands/ZINTERCARD.d.ts | 5 + .../client/dist/lib/commands/ZINTERCARD.js | 14 + .../client/dist/lib/commands/ZINTERSTORE.d.ts | 9 + .../client/dist/lib/commands/ZINTERSTORE.js | 16 + .../dist/lib/commands/ZINTER_WITHSCORES.d.ts | 5 + .../dist/lib/commands/ZINTER_WITHSCORES.js | 16 + .../client/dist/lib/commands/ZLEXCOUNT.d.ts | 5 + .../client/dist/lib/commands/ZLEXCOUNT.js | 14 + .../client/dist/lib/commands/ZMPOP.d.ts | 14 + 
.../@redis/client/dist/lib/commands/ZMPOP.js | 16 + .../client/dist/lib/commands/ZMSCORE.d.ts | 5 + .../client/dist/lib/commands/ZMSCORE.js | 12 + .../client/dist/lib/commands/ZPOPMAX.d.ts | 4 + .../client/dist/lib/commands/ZPOPMAX.js | 13 + .../dist/lib/commands/ZPOPMAX_COUNT.d.ts | 4 + .../client/dist/lib/commands/ZPOPMAX_COUNT.js | 15 + .../client/dist/lib/commands/ZPOPMIN.d.ts | 4 + .../client/dist/lib/commands/ZPOPMIN.js | 13 + .../dist/lib/commands/ZPOPMIN_COUNT.d.ts | 4 + .../client/dist/lib/commands/ZPOPMIN_COUNT.js | 15 + .../client/dist/lib/commands/ZRANDMEMBER.d.ts | 5 + .../client/dist/lib/commands/ZRANDMEMBER.js | 9 + .../dist/lib/commands/ZRANDMEMBER_COUNT.d.ts | 4 + .../dist/lib/commands/ZRANDMEMBER_COUNT.js | 14 + .../ZRANDMEMBER_COUNT_WITHSCORES.d.ts | 5 + .../commands/ZRANDMEMBER_COUNT_WITHSCORES.js | 16 + .../client/dist/lib/commands/ZRANGE.d.ts | 14 + .../@redis/client/dist/lib/commands/ZRANGE.js | 30 + .../client/dist/lib/commands/ZRANGEBYLEX.d.ts | 11 + .../client/dist/lib/commands/ZRANGEBYLEX.js | 19 + .../dist/lib/commands/ZRANGEBYSCORE.d.ts | 11 + .../client/dist/lib/commands/ZRANGEBYSCORE.js | 19 + .../commands/ZRANGEBYSCORE_WITHSCORES.d.ts | 5 + .../lib/commands/ZRANGEBYSCORE_WITHSCORES.js | 16 + .../client/dist/lib/commands/ZRANGESTORE.d.ts | 14 + .../client/dist/lib/commands/ZRANGESTORE.js | 40 + .../dist/lib/commands/ZRANGE_WITHSCORES.d.ts | 5 + .../dist/lib/commands/ZRANGE_WITHSCORES.js | 16 + .../client/dist/lib/commands/ZRANK.d.ts | 5 + .../@redis/client/dist/lib/commands/ZRANK.js | 9 + .../@redis/client/dist/lib/commands/ZREM.d.ts | 4 + .../@redis/client/dist/lib/commands/ZREM.js | 9 + .../dist/lib/commands/ZREMRANGEBYLEX.d.ts | 4 + .../dist/lib/commands/ZREMRANGEBYLEX.js | 14 + .../dist/lib/commands/ZREMRANGEBYRANK.d.ts | 4 + .../dist/lib/commands/ZREMRANGEBYRANK.js | 8 + .../dist/lib/commands/ZREMRANGEBYSCORE.d.ts | 4 + .../dist/lib/commands/ZREMRANGEBYSCORE.js | 14 + .../client/dist/lib/commands/ZREVRANK.d.ts | 5 + 
.../client/dist/lib/commands/ZREVRANK.js | 9 + .../client/dist/lib/commands/ZSCAN.d.ts | 12 + .../@redis/client/dist/lib/commands/ZSCAN.js | 27 + .../client/dist/lib/commands/ZSCORE.d.ts | 5 + .../@redis/client/dist/lib/commands/ZSCORE.js | 11 + .../client/dist/lib/commands/ZUNION.d.ts | 10 + .../@redis/client/dist/lib/commands/ZUNION.js | 17 + .../client/dist/lib/commands/ZUNIONSTORE.d.ts | 9 + .../client/dist/lib/commands/ZUNIONSTORE.js | 16 + .../dist/lib/commands/ZUNION_WITHSCORES.d.ts | 5 + .../dist/lib/commands/ZUNION_WITHSCORES.js | 16 + .../lib/commands/generic-transformers.d.ts | 221 + .../dist/lib/commands/generic-transformers.js | 413 ++ .../client/dist/lib/commands/index.d.ts | 49 + .../@redis/client/dist/lib/commands/index.js | 2 + .../@redis/client/dist/lib/errors.d.ts | 39 + node_modules/@redis/client/dist/lib/errors.js | 103 + .../@redis/client/dist/lib/lua-script.d.ts | 10 + .../@redis/client/dist/lib/lua-script.js | 15 + .../@redis/client/dist/lib/multi-command.d.ts | 16 + .../@redis/client/dist/lib/multi-command.js | 78 + .../@redis/client/dist/lib/utils.d.ts | 1 + node_modules/@redis/client/dist/lib/utils.js | 7 + node_modules/@redis/client/dist/package.json | 52 + node_modules/@redis/client/package.json | 52 + node_modules/@redis/graph/README.md | 34 + .../graph/dist/commands/CONFIG_GET.d.ts | 8 + .../@redis/graph/dist/commands/CONFIG_GET.js | 8 + .../graph/dist/commands/CONFIG_SET.d.ts | 2 + .../@redis/graph/dist/commands/CONFIG_SET.js | 12 + .../@redis/graph/dist/commands/DELETE.d.ts | 3 + .../@redis/graph/dist/commands/DELETE.js | 8 + .../@redis/graph/dist/commands/EXPLAIN.d.ts | 4 + .../@redis/graph/dist/commands/EXPLAIN.js | 9 + .../@redis/graph/dist/commands/LIST.d.ts | 3 + .../@redis/graph/dist/commands/LIST.js | 8 + .../@redis/graph/dist/commands/PROFILE.d.ts | 4 + .../@redis/graph/dist/commands/PROFILE.js | 9 + .../@redis/graph/dist/commands/QUERY.d.ts | 25 + .../@redis/graph/dist/commands/QUERY.js | 21 + 
.../@redis/graph/dist/commands/RO_QUERY.d.ts | 6 + .../@redis/graph/dist/commands/RO_QUERY.js | 13 + .../@redis/graph/dist/commands/SLOWLOG.d.ts | 17 + .../@redis/graph/dist/commands/SLOWLOG.js | 18 + .../@redis/graph/dist/commands/index.d.ts | 41 + .../@redis/graph/dist/commands/index.js | 88 + node_modules/@redis/graph/dist/graph.d.ts | 19 + node_modules/@redis/graph/dist/graph.js | 173 + node_modules/@redis/graph/dist/index.d.ts | 2 + node_modules/@redis/graph/dist/index.js | 7 + node_modules/@redis/graph/package.json | 41 + node_modules/@redis/json/README.md | 80 + .../@redis/json/dist/commands/ARRAPPEND.d.ts | 4 + .../@redis/json/dist/commands/ARRAPPEND.js | 13 + .../@redis/json/dist/commands/ARRINDEX.d.ts | 5 + .../@redis/json/dist/commands/ARRINDEX.js | 17 + .../@redis/json/dist/commands/ARRINSERT.d.ts | 4 + .../@redis/json/dist/commands/ARRINSERT.js | 13 + .../@redis/json/dist/commands/ARRLEN.d.ts | 4 + .../@redis/json/dist/commands/ARRLEN.js | 13 + .../@redis/json/dist/commands/ARRPOP.d.ts | 4 + .../@redis/json/dist/commands/ARRPOP.js | 25 + .../@redis/json/dist/commands/ARRTRIM.d.ts | 3 + .../@redis/json/dist/commands/ARRTRIM.js | 8 + .../json/dist/commands/DEBUG_MEMORY.d.ts | 3 + .../@redis/json/dist/commands/DEBUG_MEMORY.js | 12 + .../@redis/json/dist/commands/DEL.d.ts | 3 + node_modules/@redis/json/dist/commands/DEL.js | 12 + .../@redis/json/dist/commands/FORGET.d.ts | 3 + .../@redis/json/dist/commands/FORGET.js | 12 + .../@redis/json/dist/commands/GET.d.ts | 12 + node_modules/@redis/json/dist/commands/GET.js | 28 + .../@redis/json/dist/commands/MERGE.d.ts | 4 + .../@redis/json/dist/commands/MERGE.js | 9 + .../@redis/json/dist/commands/MGET.d.ts | 5 + .../@redis/json/dist/commands/MGET.js | 18 + .../@redis/json/dist/commands/MSET.d.ts | 11 + .../@redis/json/dist/commands/MSET.js | 18 + .../@redis/json/dist/commands/NUMINCRBY.d.ts | 3 + .../@redis/json/dist/commands/NUMINCRBY.js | 10 + .../@redis/json/dist/commands/NUMMULTBY.d.ts | 3 + 
.../@redis/json/dist/commands/NUMMULTBY.js | 10 + .../@redis/json/dist/commands/OBJKEYS.d.ts | 3 + .../@redis/json/dist/commands/OBJKEYS.js | 12 + .../@redis/json/dist/commands/OBJLEN.d.ts | 3 + .../@redis/json/dist/commands/OBJLEN.js | 12 + .../@redis/json/dist/commands/RESP.d.ts | 5 + .../@redis/json/dist/commands/RESP.js | 12 + .../@redis/json/dist/commands/SET.d.ts | 11 + node_modules/@redis/json/dist/commands/SET.js | 16 + .../@redis/json/dist/commands/STRAPPEND.d.ts | 6 + .../@redis/json/dist/commands/STRAPPEND.js | 16 + .../@redis/json/dist/commands/STRLEN.d.ts | 4 + .../@redis/json/dist/commands/STRLEN.js | 13 + .../@redis/json/dist/commands/TYPE.d.ts | 3 + .../@redis/json/dist/commands/TYPE.js | 12 + .../@redis/json/dist/commands/index.d.ts | 80 + .../@redis/json/dist/commands/index.js | 89 + node_modules/@redis/json/dist/index.d.ts | 1 + node_modules/@redis/json/dist/index.js | 5 + node_modules/@redis/json/package.json | 41 + node_modules/@redis/search/README.md | 119 + .../search/dist/commands/AGGREGATE.d.ts | 118 + .../@redis/search/dist/commands/AGGREGATE.js | 170 + .../dist/commands/AGGREGATE_WITHCURSOR.d.ts | 14 + .../dist/commands/AGGREGATE_WITHCURSOR.js | 23 + .../@redis/search/dist/commands/ALIASADD.d.ts | 2 + .../@redis/search/dist/commands/ALIASADD.js | 7 + .../@redis/search/dist/commands/ALIASDEL.d.ts | 2 + .../@redis/search/dist/commands/ALIASDEL.js | 7 + .../search/dist/commands/ALIASUPDATE.d.ts | 2 + .../search/dist/commands/ALIASUPDATE.js | 7 + .../@redis/search/dist/commands/ALTER.d.ts | 3 + .../@redis/search/dist/commands/ALTER.js | 10 + .../search/dist/commands/CONFIG_GET.d.ts | 6 + .../@redis/search/dist/commands/CONFIG_GET.js | 15 + .../search/dist/commands/CONFIG_SET.d.ts | 2 + .../@redis/search/dist/commands/CONFIG_SET.js | 7 + .../@redis/search/dist/commands/CREATE.d.ts | 21 + .../@redis/search/dist/commands/CREATE.js | 56 + .../search/dist/commands/CURSOR_DEL.d.ts | 4 + .../@redis/search/dist/commands/CURSOR_DEL.js | 13 + 
.../search/dist/commands/CURSOR_READ.d.ts | 8 + .../search/dist/commands/CURSOR_READ.js | 20 + .../@redis/search/dist/commands/DICTADD.d.ts | 3 + .../@redis/search/dist/commands/DICTADD.js | 8 + .../@redis/search/dist/commands/DICTDEL.d.ts | 3 + .../@redis/search/dist/commands/DICTDEL.js | 8 + .../@redis/search/dist/commands/DICTDUMP.d.ts | 2 + .../@redis/search/dist/commands/DICTDUMP.js | 7 + .../search/dist/commands/DROPINDEX.d.ts | 6 + .../@redis/search/dist/commands/DROPINDEX.js | 11 + .../@redis/search/dist/commands/EXPLAIN.d.ts | 9 + .../@redis/search/dist/commands/EXPLAIN.js | 14 + .../search/dist/commands/EXPLAINCLI.d.ts | 3 + .../@redis/search/dist/commands/EXPLAINCLI.js | 8 + .../@redis/search/dist/commands/INFO.d.ts | 120 + .../@redis/search/dist/commands/INFO.js | 51 + .../dist/commands/PROFILE_AGGREGATE.d.ts | 7 + .../search/dist/commands/PROFILE_AGGREGATE.js | 23 + .../search/dist/commands/PROFILE_SEARCH.d.ts | 8 + .../search/dist/commands/PROFILE_SEARCH.js | 22 + .../@redis/search/dist/commands/SEARCH.d.ts | 41 + .../@redis/search/dist/commands/SEARCH.js | 43 + .../dist/commands/SEARCH_NOCONTENT.d.ts | 10 + .../search/dist/commands/SEARCH_NOCONTENT.js | 18 + .../search/dist/commands/SPELLCHECK.d.ts | 24 + .../@redis/search/dist/commands/SPELLCHECK.js | 37 + .../@redis/search/dist/commands/SUGADD.d.ts | 7 + .../@redis/search/dist/commands/SUGADD.js | 14 + .../@redis/search/dist/commands/SUGDEL.d.ts | 2 + .../@redis/search/dist/commands/SUGDEL.js | 9 + .../@redis/search/dist/commands/SUGGET.d.ts | 7 + .../@redis/search/dist/commands/SUGGET.js | 15 + .../dist/commands/SUGGET_WITHPAYLOADS.d.ts | 8 + .../dist/commands/SUGGET_WITHPAYLOADS.js | 26 + .../dist/commands/SUGGET_WITHSCORES.d.ts | 8 + .../search/dist/commands/SUGGET_WITHSCORES.js | 26 + .../SUGGET_WITHSCORES_WITHPAYLOADS.d.ts | 7 + .../SUGGET_WITHSCORES_WITHPAYLOADS.js | 28 + .../@redis/search/dist/commands/SUGLEN.d.ts | 3 + .../@redis/search/dist/commands/SUGLEN.js | 8 + 
.../@redis/search/dist/commands/SYNDUMP.d.ts | 2 + .../@redis/search/dist/commands/SYNDUMP.js | 7 + .../search/dist/commands/SYNUPDATE.d.ts | 7 + .../@redis/search/dist/commands/SYNUPDATE.js | 12 + .../@redis/search/dist/commands/TAGVALS.d.ts | 2 + .../@redis/search/dist/commands/TAGVALS.js | 7 + .../@redis/search/dist/commands/_LIST.d.ts | 2 + .../@redis/search/dist/commands/_LIST.js | 7 + .../@redis/search/dist/commands/index.d.ts | 267 + .../@redis/search/dist/commands/index.js | 458 ++ node_modules/@redis/search/dist/index.d.ts | 4 + node_modules/@redis/search/dist/index.js | 13 + node_modules/@redis/search/package.json | 41 + node_modules/@redis/time-series/README.md | 151 + .../@redis/time-series/dist/commands/ADD.d.ts | 17 + .../@redis/time-series/dist/commands/ADD.js | 23 + .../time-series/dist/commands/ALTER.d.ts | 13 + .../@redis/time-series/dist/commands/ALTER.js | 15 + .../time-series/dist/commands/CREATE.d.ts | 14 + .../time-series/dist/commands/CREATE.js | 16 + .../time-series/dist/commands/CREATERULE.d.ts | 4 + .../time-series/dist/commands/CREATERULE.js | 19 + .../time-series/dist/commands/DECRBY.d.ts | 5 + .../time-series/dist/commands/DECRBY.js | 9 + .../@redis/time-series/dist/commands/DEL.d.ts | 5 + .../@redis/time-series/dist/commands/DEL.js | 14 + .../time-series/dist/commands/DELETERULE.d.ts | 3 + .../time-series/dist/commands/DELETERULE.js | 12 + .../@redis/time-series/dist/commands/GET.d.ts | 10 + .../@redis/time-series/dist/commands/GET.js | 16 + .../time-series/dist/commands/INCRBY.d.ts | 5 + .../time-series/dist/commands/INCRBY.js | 9 + .../time-series/dist/commands/INFO.d.ts | 52 + .../@redis/time-series/dist/commands/INFO.js | 33 + .../time-series/dist/commands/INFO_DEBUG.d.ts | 32 + .../time-series/dist/commands/INFO_DEBUG.js | 26 + .../time-series/dist/commands/MADD.d.ts | 10 + .../@redis/time-series/dist/commands/MADD.js | 13 + .../time-series/dist/commands/MGET.d.ts | 17 + .../@redis/time-series/dist/commands/MGET.js | 17 + 
.../dist/commands/MGET_WITHLABELS.d.ts | 13 + .../dist/commands/MGET_WITHLABELS.js | 19 + .../time-series/dist/commands/MRANGE.d.ts | 5 + .../time-series/dist/commands/MRANGE.js | 11 + .../dist/commands/MRANGE_WITHLABELS.d.ts | 5 + .../dist/commands/MRANGE_WITHLABELS.js | 11 + .../time-series/dist/commands/MREVRANGE.d.ts | 5 + .../time-series/dist/commands/MREVRANGE.js | 11 + .../dist/commands/MREVRANGE_WITHLABELS.d.ts | 5 + .../dist/commands/MREVRANGE_WITHLABELS.js | 11 + .../time-series/dist/commands/QUERYINDEX.d.ts | 5 + .../time-series/dist/commands/QUERYINDEX.js | 9 + .../time-series/dist/commands/RANGE.d.ts | 6 + .../@redis/time-series/dist/commands/RANGE.js | 14 + .../time-series/dist/commands/REVRANGE.d.ts | 6 + .../time-series/dist/commands/REVRANGE.js | 14 + .../time-series/dist/commands/index.d.ts | 193 + .../@redis/time-series/dist/commands/index.js | 313 + .../@redis/time-series/dist/index.d.ts | 2 + node_modules/@redis/time-series/dist/index.js | 11 + node_modules/@redis/time-series/package.json | 41 + node_modules/abbrev/LICENSE | 46 + node_modules/abbrev/README.md | 23 + node_modules/abbrev/abbrev.js | 61 + node_modules/abbrev/package.json | 21 + node_modules/agent-base/README.md | 145 + node_modules/agent-base/dist/src/index.d.ts | 78 + node_modules/agent-base/dist/src/index.js | 203 + node_modules/agent-base/dist/src/index.js.map | 1 + .../agent-base/dist/src/promisify.d.ts | 4 + node_modules/agent-base/dist/src/promisify.js | 18 + .../agent-base/dist/src/promisify.js.map | 1 + .../agent-base/node_modules/debug/LICENSE | 20 + .../agent-base/node_modules/debug/README.md | 481 ++ .../node_modules/debug/package.json | 64 + .../node_modules/debug/src/browser.js | 272 + .../node_modules/debug/src/common.js | 292 + .../node_modules/debug/src/index.js | 10 + .../agent-base/node_modules/debug/src/node.js | 263 + .../agent-base/node_modules/ms/index.js | 162 + .../agent-base/node_modules/ms/license.md | 21 + .../agent-base/node_modules/ms/package.json | 38 
+ .../agent-base/node_modules/ms/readme.md | 59 + node_modules/agent-base/package.json | 64 + node_modules/agent-base/src/index.ts | 345 + node_modules/agent-base/src/promisify.ts | 33 + node_modules/ansi-regex/index.d.ts | 37 + node_modules/ansi-regex/index.js | 10 + node_modules/ansi-regex/license | 9 + node_modules/ansi-regex/package.json | 55 + node_modules/ansi-regex/readme.md | 78 + node_modules/aproba/LICENSE | 14 + node_modules/aproba/README.md | 94 + node_modules/aproba/index.js | 105 + node_modules/aproba/package.json | 35 + node_modules/are-we-there-yet/LICENSE.md | 18 + node_modules/are-we-there-yet/README.md | 208 + node_modules/are-we-there-yet/lib/index.js | 4 + .../are-we-there-yet/lib/tracker-base.js | 11 + .../are-we-there-yet/lib/tracker-group.js | 116 + .../are-we-there-yet/lib/tracker-stream.js | 36 + node_modules/are-we-there-yet/lib/tracker.js | 32 + node_modules/are-we-there-yet/package.json | 53 + .../balanced-match/.github/FUNDING.yml | 2 + node_modules/balanced-match/LICENSE.md | 21 + node_modules/balanced-match/README.md | 97 + node_modules/balanced-match/index.js | 62 + node_modules/balanced-match/package.json | 48 + node_modules/bcrypt/.editorconfig | 19 + node_modules/bcrypt/.github/workflows/ci.yaml | 59 + node_modules/bcrypt/.travis.yml | 62 + node_modules/bcrypt/CHANGELOG.md | 178 + node_modules/bcrypt/ISSUE_TEMPLATE.md | 18 + node_modules/bcrypt/LICENSE | 19 + node_modules/bcrypt/Makefile | 19 + node_modules/bcrypt/README.md | 388 + node_modules/bcrypt/SECURITY.md | 15 + node_modules/bcrypt/appveyor.yml | 39 + node_modules/bcrypt/bcrypt.js | 236 + node_modules/bcrypt/binding.gyp | 61 + node_modules/bcrypt/examples/async_compare.js | 28 + .../bcrypt/examples/forever_gen_salt.js | 8 + .../lib/binding/napi-v3/bcrypt_lib.node | Bin 0 -> 190464 bytes node_modules/bcrypt/package.json | 67 + node_modules/bcrypt/promises.js | 42 + node_modules/bcrypt/src/bcrypt.cc | 315 + node_modules/bcrypt/src/bcrypt_node.cc | 288 + 
node_modules/bcrypt/src/blowfish.cc | 679 ++ node_modules/bcrypt/src/node_blf.h | 132 + node_modules/bcrypt/test-docker.sh | 15 + node_modules/bcrypt/test/async.test.js | 209 + .../bcrypt/test/implementation.test.js | 48 + node_modules/bcrypt/test/promise.test.js | 168 + node_modules/bcrypt/test/repetitions.test.js | 46 + node_modules/bcrypt/test/sync.test.js | 125 + node_modules/brace-expansion/LICENSE | 21 + node_modules/brace-expansion/README.md | 129 + node_modules/brace-expansion/index.js | 201 + node_modules/brace-expansion/package.json | 50 + node_modules/chownr/LICENSE | 15 + node_modules/chownr/README.md | 3 + node_modules/chownr/chownr.js | 167 + node_modules/chownr/package.json | 32 + node_modules/cluster-key-slot/.eslintrc | 16 + node_modules/cluster-key-slot/LICENSE | 13 + node_modules/cluster-key-slot/README.md | 61 + node_modules/cluster-key-slot/index.d.ts | 10 + node_modules/cluster-key-slot/lib/index.js | 166 + node_modules/cluster-key-slot/package.json | 56 + node_modules/color-support/LICENSE | 15 + node_modules/color-support/README.md | 129 + node_modules/color-support/bin.js | 3 + node_modules/color-support/browser.js | 14 + node_modules/color-support/index.js | 134 + node_modules/color-support/package.json | 36 + node_modules/concat-map/.travis.yml | 4 + node_modules/concat-map/LICENSE | 18 + node_modules/concat-map/README.markdown | 62 + node_modules/concat-map/example/map.js | 6 + node_modules/concat-map/index.js | 13 + node_modules/concat-map/package.json | 43 + node_modules/concat-map/test/map.js | 39 + node_modules/connect-redis/.eslintrc | 26 + .../connect-redis/.github/release-drafter.yml | 30 + .../connect-redis/.github/workflows/build.yml | 23 + .../.github/workflows/release.yml | 13 + .../connect-redis/.github/workflows/stale.yml | 17 + node_modules/connect-redis/.prettierrc | 5 + .../connect-redis/dist/cjs/index.d.ts | 48 + node_modules/connect-redis/dist/cjs/index.js | 179 + .../connect-redis/dist/cjs/index_test.d.ts | 1 + 
.../connect-redis/dist/cjs/index_test.js | 124 + .../connect-redis/dist/esm/index.d.ts | 48 + node_modules/connect-redis/dist/esm/index.js | 177 + .../connect-redis/dist/esm/index_test.d.ts | 1 + .../connect-redis/dist/esm/index_test.js | 96 + .../connect-redis/dist/esm/package.json | 1 + node_modules/connect-redis/index.ts | 208 + node_modules/connect-redis/index_test.ts | 131 + node_modules/connect-redis/license | 21 + node_modules/connect-redis/package.json | 64 + node_modules/connect-redis/readme.md | 139 + node_modules/connect-redis/tsconfig.esm.json | 7 + node_modules/connect-redis/tsconfig.json | 18 + node_modules/console-control-strings/LICENSE | 13 + .../console-control-strings/README.md | 145 + .../console-control-strings/README.md~ | 140 + node_modules/console-control-strings/index.js | 125 + .../console-control-strings/package.json | 27 + node_modules/delegates/.npmignore | 1 + node_modules/delegates/History.md | 22 + node_modules/delegates/License | 20 + node_modules/delegates/Makefile | 8 + node_modules/delegates/Readme.md | 94 + node_modules/delegates/index.js | 121 + node_modules/delegates/package.json | 13 + node_modules/delegates/test/index.js | 94 + node_modules/detect-libc/LICENSE | 201 + node_modules/detect-libc/README.md | 163 + node_modules/detect-libc/index.d.ts | 14 + node_modules/detect-libc/lib/detect-libc.js | 313 + node_modules/detect-libc/lib/elf.js | 39 + node_modules/detect-libc/lib/filesystem.js | 51 + node_modules/detect-libc/lib/process.js | 24 + node_modules/detect-libc/package.json | 44 + node_modules/emoji-regex/LICENSE-MIT.txt | 20 + node_modules/emoji-regex/README.md | 73 + node_modules/emoji-regex/es2015/index.js | 6 + node_modules/emoji-regex/es2015/text.js | 6 + node_modules/emoji-regex/index.d.ts | 23 + node_modules/emoji-regex/index.js | 6 + node_modules/emoji-regex/package.json | 50 + node_modules/emoji-regex/text.js | 6 + node_modules/express-session/LICENSE | 24 + node_modules/express-session/README.md | 1061 +++ 
node_modules/express-session/index.js | 700 ++ node_modules/express-session/package.json | 50 + .../express-session/session/cookie.js | 152 + .../express-session/session/memory.js | 187 + .../express-session/session/session.js | 143 + node_modules/express-session/session/store.js | 102 + node_modules/fs-minipass/LICENSE | 15 + node_modules/fs-minipass/README.md | 70 + node_modules/fs-minipass/index.js | 422 ++ .../fs-minipass/node_modules/minipass/LICENSE | 15 + .../node_modules/minipass/README.md | 728 ++ .../node_modules/minipass/index.d.ts | 155 + .../node_modules/minipass/index.js | 649 ++ .../node_modules/minipass/package.json | 56 + node_modules/fs-minipass/package.json | 39 + node_modules/fs.realpath/LICENSE | 43 + node_modules/fs.realpath/README.md | 33 + node_modules/fs.realpath/index.js | 66 + node_modules/fs.realpath/old.js | 303 + node_modules/fs.realpath/package.json | 26 + node_modules/gauge/CHANGELOG.md | 163 + node_modules/gauge/LICENSE | 13 + node_modules/gauge/README.md | 402 ++ node_modules/gauge/base-theme.js | 14 + node_modules/gauge/error.js | 24 + node_modules/gauge/has-color.js | 4 + node_modules/gauge/index.js | 233 + node_modules/gauge/package.json | 66 + node_modules/gauge/plumbing.js | 48 + node_modules/gauge/process.js | 3 + node_modules/gauge/progress-bar.js | 35 + node_modules/gauge/render-template.js | 178 + node_modules/gauge/set-immediate.js | 7 + node_modules/gauge/set-interval.js | 3 + node_modules/gauge/spin.js | 5 + node_modules/gauge/template-item.js | 72 + node_modules/gauge/theme-set.js | 114 + node_modules/gauge/themes.js | 56 + node_modules/gauge/wide-truncate.js | 25 + node_modules/generic-pool/README.md | 407 ++ node_modules/generic-pool/index.d.ts | 51 + node_modules/generic-pool/index.js | 13 + .../generic-pool/lib/DefaultEvictor.js | 23 + node_modules/generic-pool/lib/Deferred.js | 49 + node_modules/generic-pool/lib/Deque.js | 106 + .../generic-pool/lib/DequeIterator.js | 20 + .../generic-pool/lib/DoublyLinkedList.js 
| 94 + .../lib/DoublyLinkedListIterator.js | 100 + node_modules/generic-pool/lib/Pool.js | 744 ++ node_modules/generic-pool/lib/PoolDefaults.js | 34 + node_modules/generic-pool/lib/PoolOptions.js | 109 + .../generic-pool/lib/PooledResource.js | 49 + .../lib/PooledResourceStateEnum.js | 11 + .../generic-pool/lib/PriorityQueue.js | 69 + node_modules/generic-pool/lib/Queue.js | 35 + node_modules/generic-pool/lib/ResourceLoan.js | 29 + .../generic-pool/lib/ResourceRequest.js | 76 + node_modules/generic-pool/lib/errors.js | 27 + .../generic-pool/lib/factoryValidator.js | 16 + node_modules/generic-pool/lib/utils.js | 13 + node_modules/generic-pool/package.json | 297 + node_modules/glob/LICENSE | 21 + node_modules/glob/README.md | 378 + node_modules/glob/common.js | 238 + node_modules/glob/glob.js | 790 +++ node_modules/glob/package.json | 55 + node_modules/glob/sync.js | 486 ++ node_modules/has-unicode/LICENSE | 14 + node_modules/has-unicode/README.md | 43 + node_modules/has-unicode/index.js | 16 + node_modules/has-unicode/package.json | 30 + node_modules/https-proxy-agent/README.md | 137 + .../https-proxy-agent/dist/agent.d.ts | 30 + node_modules/https-proxy-agent/dist/agent.js | 177 + .../https-proxy-agent/dist/agent.js.map | 1 + .../https-proxy-agent/dist/index.d.ts | 23 + node_modules/https-proxy-agent/dist/index.js | 14 + .../https-proxy-agent/dist/index.js.map | 1 + .../dist/parse-proxy-response.d.ts | 7 + .../dist/parse-proxy-response.js | 66 + .../dist/parse-proxy-response.js.map | 1 + .../node_modules/debug/LICENSE | 20 + .../node_modules/debug/README.md | 481 ++ .../node_modules/debug/package.json | 64 + .../node_modules/debug/src/browser.js | 272 + .../node_modules/debug/src/common.js | 292 + .../node_modules/debug/src/index.js | 10 + .../node_modules/debug/src/node.js | 263 + .../node_modules/ms/index.js | 162 + .../node_modules/ms/license.md | 21 + .../node_modules/ms/package.json | 38 + .../node_modules/ms/readme.md | 59 + 
node_modules/https-proxy-agent/package.json | 56 + node_modules/inflight/LICENSE | 15 + node_modules/inflight/README.md | 37 + node_modules/inflight/inflight.js | 54 + node_modules/inflight/package.json | 29 + .../is-fullwidth-code-point/index.d.ts | 17 + node_modules/is-fullwidth-code-point/index.js | 50 + node_modules/is-fullwidth-code-point/license | 9 + .../is-fullwidth-code-point/package.json | 42 + .../is-fullwidth-code-point/readme.md | 39 + node_modules/make-dir/index.d.ts | 66 + node_modules/make-dir/index.js | 156 + node_modules/make-dir/license | 9 + .../make-dir/node_modules/.bin/semver | 16 + .../make-dir/node_modules/.bin/semver.cmd | 17 + .../make-dir/node_modules/.bin/semver.ps1 | 28 + .../make-dir/node_modules/semver/LICENSE | 15 + .../make-dir/node_modules/semver/README.md | 443 ++ .../node_modules/semver/bin/semver.js | 174 + .../make-dir/node_modules/semver/package.json | 38 + .../make-dir/node_modules/semver/range.bnf | 16 + .../make-dir/node_modules/semver/semver.js | 1643 +++++ node_modules/make-dir/package.json | 59 + node_modules/make-dir/readme.md | 125 + node_modules/minimatch/LICENSE | 15 + node_modules/minimatch/README.md | 230 + node_modules/minimatch/minimatch.js | 947 +++ node_modules/minimatch/package.json | 33 + node_modules/minipass/LICENSE | 15 + node_modules/minipass/README.md | 769 ++ node_modules/minipass/index.d.ts | 152 + node_modules/minipass/index.js | 702 ++ node_modules/minipass/index.mjs | 702 ++ node_modules/minipass/package.json | 76 + node_modules/minizlib/LICENSE | 26 + node_modules/minizlib/README.md | 60 + node_modules/minizlib/constants.js | 115 + node_modules/minizlib/index.js | 348 + .../minizlib/node_modules/minipass/LICENSE | 15 + .../minizlib/node_modules/minipass/README.md | 728 ++ .../minizlib/node_modules/minipass/index.d.ts | 155 + .../minizlib/node_modules/minipass/index.js | 649 ++ .../node_modules/minipass/package.json | 56 + node_modules/minizlib/package.json | 42 + node_modules/mkdirp/CHANGELOG.md | 
15 + node_modules/mkdirp/LICENSE | 21 + node_modules/mkdirp/bin/cmd.js | 68 + node_modules/mkdirp/index.js | 31 + node_modules/mkdirp/lib/find-made.js | 29 + node_modules/mkdirp/lib/mkdirp-manual.js | 64 + node_modules/mkdirp/lib/mkdirp-native.js | 39 + node_modules/mkdirp/lib/opts-arg.js | 23 + node_modules/mkdirp/lib/path-arg.js | 29 + node_modules/mkdirp/lib/use-native.js | 10 + node_modules/mkdirp/package.json | 44 + node_modules/mkdirp/readme.markdown | 266 + node_modules/node-addon-api/LICENSE.md | 13 + node_modules/node-addon-api/README.md | 317 + node_modules/node-addon-api/common.gypi | 21 + node_modules/node-addon-api/except.gypi | 25 + node_modules/node-addon-api/index.js | 11 + .../node-addon-api/napi-inl.deprecated.h | 186 + node_modules/node-addon-api/napi-inl.h | 6303 +++++++++++++++++ node_modules/node-addon-api/napi.h | 3114 ++++++++ node_modules/node-addon-api/node_api.gyp | 9 + node_modules/node-addon-api/noexcept.gypi | 26 + node_modules/node-addon-api/nothing.c | 0 .../node-addon-api/package-support.json | 21 + node_modules/node-addon-api/package.json | 456 ++ node_modules/node-addon-api/tools/README.md | 73 + .../node-addon-api/tools/check-napi.js | 99 + .../node-addon-api/tools/clang-format.js | 71 + .../node-addon-api/tools/conversion.js | 301 + .../node-addon-api/tools/eslint-format.js | 79 + node_modules/node-fetch/LICENSE.md | 22 + node_modules/node-fetch/README.md | 634 ++ node_modules/node-fetch/browser.js | 25 + node_modules/node-fetch/lib/index.es.js | 1777 +++++ node_modules/node-fetch/lib/index.js | 1787 +++++ node_modules/node-fetch/lib/index.mjs | 1775 +++++ node_modules/node-fetch/package.json | 89 + node_modules/nopt/CHANGELOG.md | 58 + node_modules/nopt/LICENSE | 15 + node_modules/nopt/README.md | 213 + node_modules/nopt/bin/nopt.js | 54 + node_modules/nopt/lib/nopt.js | 441 ++ node_modules/nopt/package.json | 34 + node_modules/npmlog/LICENSE | 15 + node_modules/npmlog/README.md | 216 + node_modules/npmlog/log.js | 403 ++ 
node_modules/npmlog/package.json | 33 + node_modules/on-headers/HISTORY.md | 26 + node_modules/on-headers/LICENSE | 22 + node_modules/on-headers/README.md | 81 + node_modules/on-headers/index.js | 180 + node_modules/on-headers/package.json | 44 + node_modules/once/LICENSE | 15 + node_modules/once/README.md | 79 + node_modules/once/once.js | 42 + node_modules/once/package.json | 33 + node_modules/path-is-absolute/index.js | 20 + node_modules/path-is-absolute/license | 21 + node_modules/path-is-absolute/package.json | 43 + node_modules/path-is-absolute/readme.md | 59 + node_modules/random-bytes/HISTORY.md | 4 + node_modules/random-bytes/LICENSE | 21 + node_modules/random-bytes/README.md | 77 + node_modules/random-bytes/index.js | 101 + node_modules/random-bytes/package.json | 36 + node_modules/readable-stream/CONTRIBUTING.md | 38 + node_modules/readable-stream/GOVERNANCE.md | 136 + node_modules/readable-stream/LICENSE | 47 + node_modules/readable-stream/README.md | 106 + .../readable-stream/errors-browser.js | 127 + node_modules/readable-stream/errors.js | 116 + .../readable-stream/experimentalWarning.js | 17 + .../readable-stream/lib/_stream_duplex.js | 126 + .../lib/_stream_passthrough.js | 37 + .../readable-stream/lib/_stream_readable.js | 1027 +++ .../readable-stream/lib/_stream_transform.js | 190 + .../readable-stream/lib/_stream_writable.js | 641 ++ .../lib/internal/streams/async_iterator.js | 180 + .../lib/internal/streams/buffer_list.js | 183 + .../lib/internal/streams/destroy.js | 96 + .../lib/internal/streams/end-of-stream.js | 86 + .../lib/internal/streams/from-browser.js | 3 + .../lib/internal/streams/from.js | 52 + .../lib/internal/streams/pipeline.js | 86 + .../lib/internal/streams/state.js | 22 + .../lib/internal/streams/stream-browser.js | 1 + .../lib/internal/streams/stream.js | 1 + node_modules/readable-stream/package.json | 68 + .../readable-stream/readable-browser.js | 9 + node_modules/readable-stream/readable.js | 16 + node_modules/redis/LICENSE 
| 21 + node_modules/redis/README.md | 384 + node_modules/redis/dist/index.d.ts | 302 + node_modules/redis/dist/index.js | 56 + node_modules/redis/package.json | 50 + node_modules/rimraf/CHANGELOG.md | 65 + node_modules/rimraf/LICENSE | 15 + node_modules/rimraf/README.md | 101 + node_modules/rimraf/bin.js | 68 + node_modules/rimraf/package.json | 32 + node_modules/rimraf/rimraf.js | 360 + node_modules/semver/LICENSE | 15 + node_modules/semver/README.md | 664 ++ node_modules/semver/bin/semver.js | 191 + node_modules/semver/classes/comparator.js | 143 + node_modules/semver/classes/index.js | 7 + node_modules/semver/classes/range.js | 557 ++ node_modules/semver/classes/semver.js | 333 + node_modules/semver/functions/clean.js | 8 + node_modules/semver/functions/cmp.js | 54 + node_modules/semver/functions/coerce.js | 62 + .../semver/functions/compare-build.js | 9 + .../semver/functions/compare-loose.js | 5 + node_modules/semver/functions/compare.js | 7 + node_modules/semver/functions/diff.js | 60 + node_modules/semver/functions/eq.js | 5 + node_modules/semver/functions/gt.js | 5 + node_modules/semver/functions/gte.js | 5 + node_modules/semver/functions/inc.js | 21 + node_modules/semver/functions/lt.js | 5 + node_modules/semver/functions/lte.js | 5 + node_modules/semver/functions/major.js | 5 + node_modules/semver/functions/minor.js | 5 + node_modules/semver/functions/neq.js | 5 + node_modules/semver/functions/parse.js | 18 + node_modules/semver/functions/patch.js | 5 + node_modules/semver/functions/prerelease.js | 8 + node_modules/semver/functions/rcompare.js | 5 + node_modules/semver/functions/rsort.js | 5 + node_modules/semver/functions/satisfies.js | 12 + node_modules/semver/functions/sort.js | 5 + node_modules/semver/functions/valid.js | 8 + node_modules/semver/index.js | 91 + node_modules/semver/internal/constants.js | 37 + node_modules/semver/internal/debug.js | 11 + node_modules/semver/internal/identifiers.js | 29 + node_modules/semver/internal/lrucache.js | 42 + 
node_modules/semver/internal/parse-options.js | 17 + node_modules/semver/internal/re.js | 223 + node_modules/semver/package.json | 78 + node_modules/semver/preload.js | 4 + node_modules/semver/range.bnf | 16 + node_modules/semver/ranges/gtr.js | 6 + node_modules/semver/ranges/intersects.js | 9 + node_modules/semver/ranges/ltr.js | 6 + node_modules/semver/ranges/max-satisfying.js | 27 + node_modules/semver/ranges/min-satisfying.js | 26 + node_modules/semver/ranges/min-version.js | 63 + node_modules/semver/ranges/outside.js | 82 + node_modules/semver/ranges/simplify.js | 49 + node_modules/semver/ranges/subset.js | 249 + node_modules/semver/ranges/to-comparators.js | 10 + node_modules/semver/ranges/valid.js | 13 + node_modules/set-blocking/CHANGELOG.md | 26 + node_modules/set-blocking/LICENSE.txt | 14 + node_modules/set-blocking/README.md | 31 + node_modules/set-blocking/index.js | 7 + node_modules/set-blocking/package.json | 42 + node_modules/signal-exit/LICENSE.txt | 16 + node_modules/signal-exit/README.md | 39 + node_modules/signal-exit/index.js | 202 + node_modules/signal-exit/package.json | 38 + node_modules/signal-exit/signals.js | 53 + node_modules/string-width/index.d.ts | 29 + node_modules/string-width/index.js | 47 + node_modules/string-width/license | 9 + node_modules/string-width/package.json | 56 + node_modules/string-width/readme.md | 50 + node_modules/string_decoder/LICENSE | 48 + node_modules/string_decoder/README.md | 47 + .../string_decoder/lib/string_decoder.js | 296 + node_modules/string_decoder/package.json | 34 + node_modules/strip-ansi/index.d.ts | 17 + node_modules/strip-ansi/index.js | 4 + node_modules/strip-ansi/license | 9 + node_modules/strip-ansi/package.json | 54 + node_modules/strip-ansi/readme.md | 46 + node_modules/tar/LICENSE | 15 + node_modules/tar/README.md | 1080 +++ node_modules/tar/index.js | 18 + node_modules/tar/lib/create.js | 111 + node_modules/tar/lib/extract.js | 113 + node_modules/tar/lib/get-write-flag.js | 20 + 
node_modules/tar/lib/header.js | 304 + node_modules/tar/lib/high-level-opt.js | 29 + node_modules/tar/lib/large-numbers.js | 104 + node_modules/tar/lib/list.js | 139 + node_modules/tar/lib/mkdir.js | 229 + node_modules/tar/lib/mode-fix.js | 27 + node_modules/tar/lib/normalize-unicode.js | 12 + .../tar/lib/normalize-windows-path.js | 8 + node_modules/tar/lib/pack.js | 432 ++ node_modules/tar/lib/parse.js | 552 ++ node_modules/tar/lib/path-reservations.js | 156 + node_modules/tar/lib/pax.js | 150 + node_modules/tar/lib/read-entry.js | 107 + node_modules/tar/lib/replace.js | 246 + node_modules/tar/lib/strip-absolute-path.js | 24 + .../tar/lib/strip-trailing-slashes.js | 13 + node_modules/tar/lib/types.js | 44 + node_modules/tar/lib/unpack.js | 923 +++ node_modules/tar/lib/update.js | 40 + node_modules/tar/lib/warn-mixin.js | 24 + node_modules/tar/lib/winchars.js | 23 + node_modules/tar/lib/write-entry.js | 546 ++ node_modules/tar/package.json | 70 + node_modules/tr46/.npmignore | 4 + node_modules/tr46/index.js | 193 + node_modules/tr46/lib/.gitkeep | 0 node_modules/tr46/lib/mappingTable.json | 1 + node_modules/tr46/package.json | 31 + node_modules/uid-safe/HISTORY.md | 61 + node_modules/uid-safe/LICENSE | 22 + node_modules/uid-safe/README.md | 77 + node_modules/uid-safe/index.js | 107 + node_modules/uid-safe/package.json | 46 + node_modules/util-deprecate/History.md | 16 + node_modules/util-deprecate/LICENSE | 24 + node_modules/util-deprecate/README.md | 53 + node_modules/util-deprecate/browser.js | 67 + node_modules/util-deprecate/node.js | 6 + node_modules/util-deprecate/package.json | 27 + node_modules/webidl-conversions/LICENSE.md | 12 + node_modules/webidl-conversions/README.md | 53 + node_modules/webidl-conversions/lib/index.js | 189 + node_modules/webidl-conversions/package.json | 23 + node_modules/whatwg-url/LICENSE.txt | 21 + node_modules/whatwg-url/README.md | 67 + node_modules/whatwg-url/lib/URL-impl.js | 200 + node_modules/whatwg-url/lib/URL.js | 196 + 
node_modules/whatwg-url/lib/public-api.js | 11 + .../whatwg-url/lib/url-state-machine.js | 1297 ++++ node_modules/whatwg-url/lib/utils.js | 20 + node_modules/whatwg-url/package.json | 32 + node_modules/wide-align/LICENSE | 14 + node_modules/wide-align/README.md | 47 + node_modules/wide-align/align.js | 65 + node_modules/wide-align/package.json | 33 + node_modules/wrappy/LICENSE | 15 + node_modules/wrappy/README.md | 36 + node_modules/wrappy/package.json | 29 + node_modules/wrappy/wrappy.js | 33 + node_modules/yallist/LICENSE | 15 + node_modules/yallist/README.md | 204 + node_modules/yallist/iterator.js | 8 + node_modules/yallist/package.json | 29 + node_modules/yallist/yallist.js | 426 ++ package-lock.json | 778 ++ 1642 files changed, 101122 insertions(+) create mode 100644 node_modules/.bin/color-support create mode 100644 node_modules/.bin/color-support.cmd create mode 100644 node_modules/.bin/color-support.ps1 create mode 100644 node_modules/.bin/mkdirp create mode 100644 node_modules/.bin/mkdirp.cmd create mode 100644 node_modules/.bin/mkdirp.ps1 create mode 100644 node_modules/.bin/node-pre-gyp create mode 100644 node_modules/.bin/node-pre-gyp.cmd create mode 100644 node_modules/.bin/node-pre-gyp.ps1 create mode 100644 node_modules/.bin/nopt create mode 100644 node_modules/.bin/nopt.cmd create mode 100644 node_modules/.bin/nopt.ps1 create mode 100644 node_modules/.bin/rimraf create mode 100644 node_modules/.bin/rimraf.cmd create mode 100644 node_modules/.bin/rimraf.ps1 create mode 100644 node_modules/.bin/semver create mode 100644 node_modules/.bin/semver.cmd create mode 100644 node_modules/.bin/semver.ps1 create mode 100644 node_modules/@mapbox/node-pre-gyp/.github/workflows/codeql.yml create mode 100644 node_modules/@mapbox/node-pre-gyp/CHANGELOG.md create mode 100644 node_modules/@mapbox/node-pre-gyp/LICENSE create mode 100644 node_modules/@mapbox/node-pre-gyp/README.md create mode 100644 node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp create mode 100644 
node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp.cmd create mode 100644 node_modules/@mapbox/node-pre-gyp/contributing.md create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/build.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/clean.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/configure.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/info.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/install.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/main.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/node-pre-gyp.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/package.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/pre-binding.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/publish.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/rebuild.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/reinstall.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/reveal.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/testbinary.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/testpackage.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/unpublish.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/util/abi_crosswalk.json create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/util/compile.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/util/handle_gyp_opts.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/util/napi.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/util/nw-pre-gyp/index.html create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/util/nw-pre-gyp/package.json create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/util/s3_setup.js create mode 100644 node_modules/@mapbox/node-pre-gyp/lib/util/versioning.js create mode 100644 node_modules/@mapbox/node-pre-gyp/package.json create mode 100644 node_modules/@redis/bloom/README.md create mode 100644 
node_modules/@redis/bloom/dist/commands/bloom/ADD.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/ADD.js create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/CARD.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/CARD.js create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/EXISTS.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/EXISTS.js create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/INFO.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/INFO.js create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/INSERT.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/INSERT.js create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/LOADCHUNK.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/LOADCHUNK.js create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/MADD.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/MADD.js create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/MEXISTS.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/MEXISTS.js create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/RESERVE.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/RESERVE.js create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/SCANDUMP.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/SCANDUMP.js create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/index.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/bloom/index.js create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/INCRBY.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/INCRBY.js create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/INFO.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/INFO.js create 
mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYDIM.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYDIM.js create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYPROB.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYPROB.js create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/MERGE.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/MERGE.js create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/QUERY.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/QUERY.js create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/index.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/count-min-sketch/index.js create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/ADD.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/ADD.js create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/ADDNX.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/ADDNX.js create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/COUNT.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/COUNT.js create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/DEL.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/DEL.js create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/EXISTS.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/EXISTS.js create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/INFO.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/INFO.js create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/INSERT.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/INSERT.js create mode 100644 
node_modules/@redis/bloom/dist/commands/cuckoo/INSERTNX.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/INSERTNX.js create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/LOADCHUNK.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/LOADCHUNK.js create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/RESERVE.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/RESERVE.js create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/SCANDUMP.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/SCANDUMP.js create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/index.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/cuckoo/index.js create mode 100644 node_modules/@redis/bloom/dist/commands/index.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/index.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/ADD.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/ADD.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/BYRANK.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/BYRANK.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/BYREVRANK.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/BYREVRANK.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/CDF.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/CDF.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/CREATE.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/CREATE.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/INFO.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/INFO.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/MAX.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/MAX.js create 
mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/MERGE.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/MERGE.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/MIN.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/MIN.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/QUANTILE.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/QUANTILE.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/RANK.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/RANK.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/RESET.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/RESET.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/REVRANK.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/REVRANK.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/TRIMMED_MEAN.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/TRIMMED_MEAN.js create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/index.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/t-digest/index.js create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/ADD.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/ADD.js create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/COUNT.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/COUNT.js create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/INCRBY.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/INCRBY.js create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/INFO.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/INFO.js create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/LIST.d.ts create mode 100644 
node_modules/@redis/bloom/dist/commands/top-k/LIST.js create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/LIST_WITHCOUNT.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/LIST_WITHCOUNT.js create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/QUERY.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/QUERY.js create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/RESERVE.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/RESERVE.js create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/index.d.ts create mode 100644 node_modules/@redis/bloom/dist/commands/top-k/index.js create mode 100644 node_modules/@redis/bloom/dist/index.d.ts create mode 100644 node_modules/@redis/bloom/dist/index.js create mode 100644 node_modules/@redis/bloom/package.json create mode 100644 node_modules/@redis/client/README.md create mode 100644 node_modules/@redis/client/dist/index.d.ts create mode 100644 node_modules/@redis/client/dist/index.js create mode 100644 node_modules/@redis/client/dist/lib/client/RESP2/composers/buffer.d.ts create mode 100644 node_modules/@redis/client/dist/lib/client/RESP2/composers/buffer.js create mode 100644 node_modules/@redis/client/dist/lib/client/RESP2/composers/interface.d.ts create mode 100644 node_modules/@redis/client/dist/lib/client/RESP2/composers/interface.js create mode 100644 node_modules/@redis/client/dist/lib/client/RESP2/composers/string.d.ts create mode 100644 node_modules/@redis/client/dist/lib/client/RESP2/composers/string.js create mode 100644 node_modules/@redis/client/dist/lib/client/RESP2/decoder.d.ts create mode 100644 node_modules/@redis/client/dist/lib/client/RESP2/decoder.js create mode 100644 node_modules/@redis/client/dist/lib/client/RESP2/encoder.d.ts create mode 100644 node_modules/@redis/client/dist/lib/client/RESP2/encoder.js create mode 100644 node_modules/@redis/client/dist/lib/client/commands-queue.d.ts create mode 100644 
node_modules/@redis/client/dist/lib/client/commands-queue.js create mode 100644 node_modules/@redis/client/dist/lib/client/commands.d.ts create mode 100644 node_modules/@redis/client/dist/lib/client/commands.js create mode 100644 node_modules/@redis/client/dist/lib/client/index.d.ts create mode 100644 node_modules/@redis/client/dist/lib/client/index.js create mode 100644 node_modules/@redis/client/dist/lib/client/multi-command.d.ts create mode 100644 node_modules/@redis/client/dist/lib/client/multi-command.js create mode 100644 node_modules/@redis/client/dist/lib/client/pub-sub.d.ts create mode 100644 node_modules/@redis/client/dist/lib/client/pub-sub.js create mode 100644 node_modules/@redis/client/dist/lib/client/socket.d.ts create mode 100644 node_modules/@redis/client/dist/lib/client/socket.js create mode 100644 node_modules/@redis/client/dist/lib/cluster/cluster-slots.d.ts create mode 100644 node_modules/@redis/client/dist/lib/cluster/cluster-slots.js create mode 100644 node_modules/@redis/client/dist/lib/cluster/commands.d.ts create mode 100644 node_modules/@redis/client/dist/lib/cluster/commands.js create mode 100644 node_modules/@redis/client/dist/lib/cluster/index.d.ts create mode 100644 node_modules/@redis/client/dist/lib/cluster/index.js create mode 100644 node_modules/@redis/client/dist/lib/cluster/multi-command.d.ts create mode 100644 node_modules/@redis/client/dist/lib/cluster/multi-command.js create mode 100644 node_modules/@redis/client/dist/lib/command-options.d.ts create mode 100644 node_modules/@redis/client/dist/lib/command-options.js create mode 100644 node_modules/@redis/client/dist/lib/commander.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commander.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_CAT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_CAT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_DELUSER.d.ts create mode 100644 
node_modules/@redis/client/dist/lib/commands/ACL_DELUSER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_DRYRUN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_DRYRUN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_GENPASS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_GENPASS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_GETUSER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_GETUSER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_LIST.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_LIST.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_LOAD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_LOAD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_LOG.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_LOG.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_LOG_RESET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_LOG_RESET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_SAVE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_SAVE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_SETUSER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_SETUSER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_USERS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_USERS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_WHOAMI.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ACL_WHOAMI.js create mode 100644 node_modules/@redis/client/dist/lib/commands/APPEND.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/APPEND.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/ASKING.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ASKING.js create mode 100644 node_modules/@redis/client/dist/lib/commands/AUTH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/AUTH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BGREWRITEAOF.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BGREWRITEAOF.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BGSAVE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BGSAVE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BITCOUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BITCOUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BITFIELD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BITFIELD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BITFIELD_RO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BITFIELD_RO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BITOP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BITOP.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BITPOS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BITPOS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BLMOVE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BLMOVE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BLMPOP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BLMPOP.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BLPOP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BLPOP.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BRPOP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BRPOP.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/BRPOPLPUSH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BRPOPLPUSH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BZMPOP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BZMPOP.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BZPOPMAX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BZPOPMAX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/BZPOPMIN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/BZPOPMIN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_CACHING.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_CACHING.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_GETNAME.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_GETNAME.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_GETREDIR.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_GETREDIR.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_ID.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_ID.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_INFO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_INFO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_KILL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_KILL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_LIST.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_LIST.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_NO-EVICT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_NO-EVICT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_NO-TOUCH.d.ts create mode 
100644 node_modules/@redis/client/dist/lib/commands/CLIENT_NO-TOUCH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_PAUSE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_PAUSE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_SETNAME.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_SETNAME.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKING.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKING.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKINGINFO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKINGINFO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_UNPAUSE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLIENT_UNPAUSE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTSRANGE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTSRANGE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_BUMPEPOCH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_BUMPEPOCH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNTKEYSINSLOT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNTKEYSINSLOT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTS.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTSRANGE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTSRANGE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_FAILOVER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_FAILOVER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_FLUSHSLOTS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_FLUSHSLOTS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_FORGET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_FORGET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_GETKEYSINSLOT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_GETKEYSINSLOT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_INFO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_INFO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_KEYSLOT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_KEYSLOT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_LINKS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_LINKS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_MEET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_MEET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_MYID.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_MYID.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_MYSHARDID.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_MYSHARDID.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_NODES.d.ts create mode 100644 
node_modules/@redis/client/dist/lib/commands/CLUSTER_NODES.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICAS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICAS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICATE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICATE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_RESET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_RESET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_SAVECONFIG.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_SAVECONFIG.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_SET-CONFIG-EPOCH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_SET-CONFIG-EPOCH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_SETSLOT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_SETSLOT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_SLOTS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CLUSTER_SLOTS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/COMMAND.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/COMMAND.js create mode 100644 node_modules/@redis/client/dist/lib/commands/COMMAND_COUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/COMMAND_COUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYSANDFLAGS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYSANDFLAGS.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/COMMAND_INFO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/COMMAND_INFO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/COMMAND_LIST.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/COMMAND_LIST.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CONFIG_GET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CONFIG_GET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CONFIG_RESETSTAT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CONFIG_RESETSTAT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CONFIG_REWRITE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CONFIG_REWRITE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/CONFIG_SET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/CONFIG_SET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/COPY.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/COPY.js create mode 100644 node_modules/@redis/client/dist/lib/commands/DBSIZE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/DBSIZE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/DECR.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/DECR.js create mode 100644 node_modules/@redis/client/dist/lib/commands/DECRBY.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/DECRBY.js create mode 100644 node_modules/@redis/client/dist/lib/commands/DEL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/DEL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/DISCARD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/DISCARD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/DUMP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/DUMP.js create 
mode 100644 node_modules/@redis/client/dist/lib/commands/ECHO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ECHO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/EVAL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/EVAL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/EVALSHA.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/EVALSHA.js create mode 100644 node_modules/@redis/client/dist/lib/commands/EVALSHA_RO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/EVALSHA_RO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/EVAL_RO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/EVAL_RO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/EXISTS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/EXISTS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/EXPIRE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/EXPIRE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/EXPIREAT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/EXPIREAT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/EXPIRETIME.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/EXPIRETIME.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FAILOVER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FAILOVER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FCALL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FCALL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FCALL_RO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FCALL_RO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FLUSHALL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FLUSHALL.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/FLUSHDB.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FLUSHDB.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_DELETE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_DELETE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_DUMP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_DUMP.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_FLUSH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_FLUSH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_KILL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_KILL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST_WITHCODE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST_WITHCODE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_LOAD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_LOAD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_RESTORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_RESTORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_STATS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/FUNCTION_STATS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEOADD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEOADD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEODIST.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEODIST.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/GEOHASH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEOHASH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEOPOS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEOPOS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBERSTORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBERSTORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO_WITH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO_WITH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_WITH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_WITH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUSSTORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUSSTORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO_WITH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO_WITH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEORADIUS_WITH.d.ts create mode 100644 
node_modules/@redis/client/dist/lib/commands/GEORADIUS_WITH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEOSEARCH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEOSEARCH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEOSEARCHSTORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEOSEARCHSTORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GEOSEARCH_WITH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GEOSEARCH_WITH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GETBIT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GETBIT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GETDEL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GETDEL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GETEX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GETEX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GETRANGE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GETRANGE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/GETSET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/GETSET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HDEL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HDEL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HELLO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HELLO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HEXISTS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HEXISTS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HEXPIRE.d.ts create mode 100644 
node_modules/@redis/client/dist/lib/commands/HEXPIRE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HEXPIREAT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HEXPIREAT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HEXPIRETIME.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HEXPIRETIME.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HGET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HGET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HGETALL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HGETALL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HINCRBY.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HINCRBY.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HINCRBYFLOAT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HINCRBYFLOAT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HKEYS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HKEYS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HLEN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HLEN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HMGET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HMGET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HPERSIST.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HPERSIST.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HPEXPIRE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HPEXPIRE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HPEXPIREAT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HPEXPIREAT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HPEXPIRETIME.d.ts create mode 100644 
node_modules/@redis/client/dist/lib/commands/HPEXPIRETIME.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HPTTL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HPTTL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HRANDFIELD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HRANDFIELD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT_WITHVALUES.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT_WITHVALUES.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HSCAN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HSCAN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HSCAN_NOVALUES.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HSCAN_NOVALUES.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HSET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HSET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HSETNX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HSETNX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HSTRLEN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HSTRLEN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HTTL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HTTL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/HVALS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/HVALS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/INCR.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/INCR.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/INCRBY.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/INCRBY.js create mode 100644 node_modules/@redis/client/dist/lib/commands/INCRBYFLOAT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/INCRBYFLOAT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/INFO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/INFO.js create mode 100644 node_modules/@redis/client/dist/lib/commands/KEYS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/KEYS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LASTSAVE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LASTSAVE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LATENCY_DOCTOR.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LATENCY_DOCTOR.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LATENCY_GRAPH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LATENCY_GRAPH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LATENCY_HISTORY.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LATENCY_HISTORY.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LATENCY_LATEST.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LATENCY_LATEST.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LCS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LCS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LCS_IDX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LCS_IDX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LCS_IDX_WITHMATCHLEN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LCS_IDX_WITHMATCHLEN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LCS_LEN.d.ts create mode 100644 
node_modules/@redis/client/dist/lib/commands/LCS_LEN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LINDEX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LINDEX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LINSERT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LINSERT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LLEN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LLEN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LMOVE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LMOVE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LMPOP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LMPOP.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LOLWUT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LOLWUT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LPOP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LPOP.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LPOP_COUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LPOP_COUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LPOS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LPOS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LPOS_COUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LPOS_COUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LPUSH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LPUSH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LPUSHX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LPUSHX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LRANGE.d.ts create mode 100644 
node_modules/@redis/client/dist/lib/commands/LRANGE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LREM.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LREM.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LSET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LSET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/LTRIM.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/LTRIM.js create mode 100644 node_modules/@redis/client/dist/lib/commands/MEMORY_DOCTOR.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MEMORY_DOCTOR.js create mode 100644 node_modules/@redis/client/dist/lib/commands/MEMORY_MALLOC-STATS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MEMORY_MALLOC-STATS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/MEMORY_PURGE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MEMORY_PURGE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/MEMORY_STATS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MEMORY_STATS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/MEMORY_USAGE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MEMORY_USAGE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/MGET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MGET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/MIGRATE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MIGRATE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/MODULE_LIST.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MODULE_LIST.js create mode 100644 node_modules/@redis/client/dist/lib/commands/MODULE_LOAD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MODULE_LOAD.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/MODULE_UNLOAD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MODULE_UNLOAD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/MOVE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MOVE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/MSET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MSET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/MSETNX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/MSETNX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/OBJECT_ENCODING.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/OBJECT_ENCODING.js create mode 100644 node_modules/@redis/client/dist/lib/commands/OBJECT_FREQ.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/OBJECT_FREQ.js create mode 100644 node_modules/@redis/client/dist/lib/commands/OBJECT_IDLETIME.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/OBJECT_IDLETIME.js create mode 100644 node_modules/@redis/client/dist/lib/commands/OBJECT_REFCOUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/OBJECT_REFCOUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PERSIST.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PERSIST.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PEXPIRE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PEXPIRE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PEXPIREAT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PEXPIREAT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PEXPIRETIME.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PEXPIRETIME.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PFADD.d.ts create mode 100644 
node_modules/@redis/client/dist/lib/commands/PFADD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PFCOUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PFCOUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PFMERGE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PFMERGE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PING.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PING.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PSETEX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PSETEX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PTTL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PTTL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PUBLISH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PUBLISH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PUBSUB_CHANNELS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PUBSUB_CHANNELS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMPAT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMPAT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMSUB.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMSUB.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDCHANNELS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDCHANNELS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDNUMSUB.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDNUMSUB.js create mode 100644 node_modules/@redis/client/dist/lib/commands/RANDOMKEY.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/RANDOMKEY.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/READONLY.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/READONLY.js create mode 100644 node_modules/@redis/client/dist/lib/commands/READWRITE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/READWRITE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/RENAME.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/RENAME.js create mode 100644 node_modules/@redis/client/dist/lib/commands/RENAMENX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/RENAMENX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/REPLICAOF.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/REPLICAOF.js create mode 100644 node_modules/@redis/client/dist/lib/commands/RESTORE-ASKING.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/RESTORE-ASKING.js create mode 100644 node_modules/@redis/client/dist/lib/commands/RESTORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/RESTORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ROLE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ROLE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/RPOP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/RPOP.js create mode 100644 node_modules/@redis/client/dist/lib/commands/RPOPLPUSH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/RPOPLPUSH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/RPOP_COUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/RPOP_COUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/RPUSH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/RPUSH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/RPUSHX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/RPUSHX.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/SADD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SADD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SAVE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SAVE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SCAN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SCAN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SCARD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SCARD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SCRIPT_DEBUG.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SCRIPT_DEBUG.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SCRIPT_EXISTS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SCRIPT_EXISTS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SCRIPT_FLUSH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SCRIPT_FLUSH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SCRIPT_KILL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SCRIPT_KILL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SCRIPT_LOAD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SCRIPT_LOAD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SDIFF.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SDIFF.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SDIFFSTORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SDIFFSTORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SET.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SET.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SETBIT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SETBIT.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/SETEX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SETEX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SETNX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SETNX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SETRANGE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SETRANGE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SHUTDOWN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SHUTDOWN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SINTER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SINTER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SINTERCARD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SINTERCARD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SINTERSTORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SINTERSTORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SISMEMBER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SISMEMBER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SMEMBERS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SMEMBERS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SMISMEMBER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SMISMEMBER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SMOVE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SMOVE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SORT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SORT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SORT_RO.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SORT_RO.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/SORT_STORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SORT_STORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SPOP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SPOP.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SPUBLISH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SPUBLISH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SRANDMEMBER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SRANDMEMBER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SRANDMEMBER_COUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SRANDMEMBER_COUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SREM.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SREM.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SSCAN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SSCAN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/STRLEN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/STRLEN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SUNION.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SUNION.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SUNIONSTORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SUNIONSTORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/SWAPDB.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/SWAPDB.js create mode 100644 node_modules/@redis/client/dist/lib/commands/TIME.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/TIME.js create mode 100644 node_modules/@redis/client/dist/lib/commands/TOUCH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/TOUCH.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/TTL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/TTL.js create mode 100644 node_modules/@redis/client/dist/lib/commands/TYPE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/TYPE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/UNLINK.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/UNLINK.js create mode 100644 node_modules/@redis/client/dist/lib/commands/UNWATCH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/UNWATCH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/WAIT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/WAIT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/WATCH.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/WATCH.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XACK.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XACK.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XADD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XADD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM_JUSTID.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM_JUSTID.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XCLAIM.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XCLAIM.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XCLAIM_JUSTID.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XCLAIM_JUSTID.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XDEL.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XDEL.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/XGROUP_CREATE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XGROUP_CREATE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XGROUP_CREATECONSUMER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XGROUP_CREATECONSUMER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XGROUP_DELCONSUMER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XGROUP_DELCONSUMER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XGROUP_DESTROY.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XGROUP_DESTROY.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XGROUP_SETID.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XGROUP_SETID.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XINFO_CONSUMERS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XINFO_CONSUMERS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XINFO_GROUPS.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XINFO_GROUPS.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XINFO_STREAM.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XINFO_STREAM.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XLEN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XLEN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XPENDING.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XPENDING.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XPENDING_RANGE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XPENDING_RANGE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XRANGE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XRANGE.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/XREAD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XREAD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XREADGROUP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XREADGROUP.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XREVRANGE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XREVRANGE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XSETID.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XSETID.js create mode 100644 node_modules/@redis/client/dist/lib/commands/XTRIM.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/XTRIM.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZADD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZADD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZCARD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZCARD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZCOUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZCOUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZDIFF.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZDIFF.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZDIFFSTORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZDIFFSTORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZDIFF_WITHSCORES.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZDIFF_WITHSCORES.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZINCRBY.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZINCRBY.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZINTER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZINTER.js create mode 100644 
node_modules/@redis/client/dist/lib/commands/ZINTERCARD.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZINTERCARD.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZINTERSTORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZINTERSTORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZINTER_WITHSCORES.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZINTER_WITHSCORES.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZLEXCOUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZLEXCOUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZMPOP.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZMPOP.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZMSCORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZMSCORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZPOPMAX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZPOPMAX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZPOPMAX_COUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZPOPMAX_COUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZPOPMIN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZPOPMIN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZPOPMIN_COUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZPOPMIN_COUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.d.ts create 
mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANGE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANGE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANGEBYLEX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANGEBYLEX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE_WITHSCORES.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE_WITHSCORES.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANGESTORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANGESTORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANGE_WITHSCORES.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANGE_WITHSCORES.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANK.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZRANK.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZREM.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZREM.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYLEX.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYLEX.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYRANK.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYRANK.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYSCORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYSCORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZREVRANK.d.ts create mode 100644 
node_modules/@redis/client/dist/lib/commands/ZREVRANK.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZSCAN.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZSCAN.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZSCORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZSCORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZUNION.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZUNION.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZUNIONSTORE.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZUNIONSTORE.js create mode 100644 node_modules/@redis/client/dist/lib/commands/ZUNION_WITHSCORES.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/ZUNION_WITHSCORES.js create mode 100644 node_modules/@redis/client/dist/lib/commands/generic-transformers.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/generic-transformers.js create mode 100644 node_modules/@redis/client/dist/lib/commands/index.d.ts create mode 100644 node_modules/@redis/client/dist/lib/commands/index.js create mode 100644 node_modules/@redis/client/dist/lib/errors.d.ts create mode 100644 node_modules/@redis/client/dist/lib/errors.js create mode 100644 node_modules/@redis/client/dist/lib/lua-script.d.ts create mode 100644 node_modules/@redis/client/dist/lib/lua-script.js create mode 100644 node_modules/@redis/client/dist/lib/multi-command.d.ts create mode 100644 node_modules/@redis/client/dist/lib/multi-command.js create mode 100644 node_modules/@redis/client/dist/lib/utils.d.ts create mode 100644 node_modules/@redis/client/dist/lib/utils.js create mode 100644 node_modules/@redis/client/dist/package.json create mode 100644 node_modules/@redis/client/package.json create mode 100644 node_modules/@redis/graph/README.md create mode 100644 node_modules/@redis/graph/dist/commands/CONFIG_GET.d.ts create mode 100644 
node_modules/@redis/graph/dist/commands/CONFIG_GET.js create mode 100644 node_modules/@redis/graph/dist/commands/CONFIG_SET.d.ts create mode 100644 node_modules/@redis/graph/dist/commands/CONFIG_SET.js create mode 100644 node_modules/@redis/graph/dist/commands/DELETE.d.ts create mode 100644 node_modules/@redis/graph/dist/commands/DELETE.js create mode 100644 node_modules/@redis/graph/dist/commands/EXPLAIN.d.ts create mode 100644 node_modules/@redis/graph/dist/commands/EXPLAIN.js create mode 100644 node_modules/@redis/graph/dist/commands/LIST.d.ts create mode 100644 node_modules/@redis/graph/dist/commands/LIST.js create mode 100644 node_modules/@redis/graph/dist/commands/PROFILE.d.ts create mode 100644 node_modules/@redis/graph/dist/commands/PROFILE.js create mode 100644 node_modules/@redis/graph/dist/commands/QUERY.d.ts create mode 100644 node_modules/@redis/graph/dist/commands/QUERY.js create mode 100644 node_modules/@redis/graph/dist/commands/RO_QUERY.d.ts create mode 100644 node_modules/@redis/graph/dist/commands/RO_QUERY.js create mode 100644 node_modules/@redis/graph/dist/commands/SLOWLOG.d.ts create mode 100644 node_modules/@redis/graph/dist/commands/SLOWLOG.js create mode 100644 node_modules/@redis/graph/dist/commands/index.d.ts create mode 100644 node_modules/@redis/graph/dist/commands/index.js create mode 100644 node_modules/@redis/graph/dist/graph.d.ts create mode 100644 node_modules/@redis/graph/dist/graph.js create mode 100644 node_modules/@redis/graph/dist/index.d.ts create mode 100644 node_modules/@redis/graph/dist/index.js create mode 100644 node_modules/@redis/graph/package.json create mode 100644 node_modules/@redis/json/README.md create mode 100644 node_modules/@redis/json/dist/commands/ARRAPPEND.d.ts create mode 100644 node_modules/@redis/json/dist/commands/ARRAPPEND.js create mode 100644 node_modules/@redis/json/dist/commands/ARRINDEX.d.ts create mode 100644 node_modules/@redis/json/dist/commands/ARRINDEX.js create mode 100644 
node_modules/@redis/json/dist/commands/ARRINSERT.d.ts create mode 100644 node_modules/@redis/json/dist/commands/ARRINSERT.js create mode 100644 node_modules/@redis/json/dist/commands/ARRLEN.d.ts create mode 100644 node_modules/@redis/json/dist/commands/ARRLEN.js create mode 100644 node_modules/@redis/json/dist/commands/ARRPOP.d.ts create mode 100644 node_modules/@redis/json/dist/commands/ARRPOP.js create mode 100644 node_modules/@redis/json/dist/commands/ARRTRIM.d.ts create mode 100644 node_modules/@redis/json/dist/commands/ARRTRIM.js create mode 100644 node_modules/@redis/json/dist/commands/DEBUG_MEMORY.d.ts create mode 100644 node_modules/@redis/json/dist/commands/DEBUG_MEMORY.js create mode 100644 node_modules/@redis/json/dist/commands/DEL.d.ts create mode 100644 node_modules/@redis/json/dist/commands/DEL.js create mode 100644 node_modules/@redis/json/dist/commands/FORGET.d.ts create mode 100644 node_modules/@redis/json/dist/commands/FORGET.js create mode 100644 node_modules/@redis/json/dist/commands/GET.d.ts create mode 100644 node_modules/@redis/json/dist/commands/GET.js create mode 100644 node_modules/@redis/json/dist/commands/MERGE.d.ts create mode 100644 node_modules/@redis/json/dist/commands/MERGE.js create mode 100644 node_modules/@redis/json/dist/commands/MGET.d.ts create mode 100644 node_modules/@redis/json/dist/commands/MGET.js create mode 100644 node_modules/@redis/json/dist/commands/MSET.d.ts create mode 100644 node_modules/@redis/json/dist/commands/MSET.js create mode 100644 node_modules/@redis/json/dist/commands/NUMINCRBY.d.ts create mode 100644 node_modules/@redis/json/dist/commands/NUMINCRBY.js create mode 100644 node_modules/@redis/json/dist/commands/NUMMULTBY.d.ts create mode 100644 node_modules/@redis/json/dist/commands/NUMMULTBY.js create mode 100644 node_modules/@redis/json/dist/commands/OBJKEYS.d.ts create mode 100644 node_modules/@redis/json/dist/commands/OBJKEYS.js create mode 100644 node_modules/@redis/json/dist/commands/OBJLEN.d.ts 
create mode 100644 node_modules/@redis/json/dist/commands/OBJLEN.js create mode 100644 node_modules/@redis/json/dist/commands/RESP.d.ts create mode 100644 node_modules/@redis/json/dist/commands/RESP.js create mode 100644 node_modules/@redis/json/dist/commands/SET.d.ts create mode 100644 node_modules/@redis/json/dist/commands/SET.js create mode 100644 node_modules/@redis/json/dist/commands/STRAPPEND.d.ts create mode 100644 node_modules/@redis/json/dist/commands/STRAPPEND.js create mode 100644 node_modules/@redis/json/dist/commands/STRLEN.d.ts create mode 100644 node_modules/@redis/json/dist/commands/STRLEN.js create mode 100644 node_modules/@redis/json/dist/commands/TYPE.d.ts create mode 100644 node_modules/@redis/json/dist/commands/TYPE.js create mode 100644 node_modules/@redis/json/dist/commands/index.d.ts create mode 100644 node_modules/@redis/json/dist/commands/index.js create mode 100644 node_modules/@redis/json/dist/index.d.ts create mode 100644 node_modules/@redis/json/dist/index.js create mode 100644 node_modules/@redis/json/package.json create mode 100644 node_modules/@redis/search/README.md create mode 100644 node_modules/@redis/search/dist/commands/AGGREGATE.d.ts create mode 100644 node_modules/@redis/search/dist/commands/AGGREGATE.js create mode 100644 node_modules/@redis/search/dist/commands/AGGREGATE_WITHCURSOR.d.ts create mode 100644 node_modules/@redis/search/dist/commands/AGGREGATE_WITHCURSOR.js create mode 100644 node_modules/@redis/search/dist/commands/ALIASADD.d.ts create mode 100644 node_modules/@redis/search/dist/commands/ALIASADD.js create mode 100644 node_modules/@redis/search/dist/commands/ALIASDEL.d.ts create mode 100644 node_modules/@redis/search/dist/commands/ALIASDEL.js create mode 100644 node_modules/@redis/search/dist/commands/ALIASUPDATE.d.ts create mode 100644 node_modules/@redis/search/dist/commands/ALIASUPDATE.js create mode 100644 node_modules/@redis/search/dist/commands/ALTER.d.ts create mode 100644 
node_modules/@redis/search/dist/commands/ALTER.js create mode 100644 node_modules/@redis/search/dist/commands/CONFIG_GET.d.ts create mode 100644 node_modules/@redis/search/dist/commands/CONFIG_GET.js create mode 100644 node_modules/@redis/search/dist/commands/CONFIG_SET.d.ts create mode 100644 node_modules/@redis/search/dist/commands/CONFIG_SET.js create mode 100644 node_modules/@redis/search/dist/commands/CREATE.d.ts create mode 100644 node_modules/@redis/search/dist/commands/CREATE.js create mode 100644 node_modules/@redis/search/dist/commands/CURSOR_DEL.d.ts create mode 100644 node_modules/@redis/search/dist/commands/CURSOR_DEL.js create mode 100644 node_modules/@redis/search/dist/commands/CURSOR_READ.d.ts create mode 100644 node_modules/@redis/search/dist/commands/CURSOR_READ.js create mode 100644 node_modules/@redis/search/dist/commands/DICTADD.d.ts create mode 100644 node_modules/@redis/search/dist/commands/DICTADD.js create mode 100644 node_modules/@redis/search/dist/commands/DICTDEL.d.ts create mode 100644 node_modules/@redis/search/dist/commands/DICTDEL.js create mode 100644 node_modules/@redis/search/dist/commands/DICTDUMP.d.ts create mode 100644 node_modules/@redis/search/dist/commands/DICTDUMP.js create mode 100644 node_modules/@redis/search/dist/commands/DROPINDEX.d.ts create mode 100644 node_modules/@redis/search/dist/commands/DROPINDEX.js create mode 100644 node_modules/@redis/search/dist/commands/EXPLAIN.d.ts create mode 100644 node_modules/@redis/search/dist/commands/EXPLAIN.js create mode 100644 node_modules/@redis/search/dist/commands/EXPLAINCLI.d.ts create mode 100644 node_modules/@redis/search/dist/commands/EXPLAINCLI.js create mode 100644 node_modules/@redis/search/dist/commands/INFO.d.ts create mode 100644 node_modules/@redis/search/dist/commands/INFO.js create mode 100644 node_modules/@redis/search/dist/commands/PROFILE_AGGREGATE.d.ts create mode 100644 node_modules/@redis/search/dist/commands/PROFILE_AGGREGATE.js create mode 100644 
node_modules/@redis/search/dist/commands/PROFILE_SEARCH.d.ts create mode 100644 node_modules/@redis/search/dist/commands/PROFILE_SEARCH.js create mode 100644 node_modules/@redis/search/dist/commands/SEARCH.d.ts create mode 100644 node_modules/@redis/search/dist/commands/SEARCH.js create mode 100644 node_modules/@redis/search/dist/commands/SEARCH_NOCONTENT.d.ts create mode 100644 node_modules/@redis/search/dist/commands/SEARCH_NOCONTENT.js create mode 100644 node_modules/@redis/search/dist/commands/SPELLCHECK.d.ts create mode 100644 node_modules/@redis/search/dist/commands/SPELLCHECK.js create mode 100644 node_modules/@redis/search/dist/commands/SUGADD.d.ts create mode 100644 node_modules/@redis/search/dist/commands/SUGADD.js create mode 100644 node_modules/@redis/search/dist/commands/SUGDEL.d.ts create mode 100644 node_modules/@redis/search/dist/commands/SUGDEL.js create mode 100644 node_modules/@redis/search/dist/commands/SUGGET.d.ts create mode 100644 node_modules/@redis/search/dist/commands/SUGGET.js create mode 100644 node_modules/@redis/search/dist/commands/SUGGET_WITHPAYLOADS.d.ts create mode 100644 node_modules/@redis/search/dist/commands/SUGGET_WITHPAYLOADS.js create mode 100644 node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES.d.ts create mode 100644 node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES.js create mode 100644 node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES_WITHPAYLOADS.d.ts create mode 100644 node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES_WITHPAYLOADS.js create mode 100644 node_modules/@redis/search/dist/commands/SUGLEN.d.ts create mode 100644 node_modules/@redis/search/dist/commands/SUGLEN.js create mode 100644 node_modules/@redis/search/dist/commands/SYNDUMP.d.ts create mode 100644 node_modules/@redis/search/dist/commands/SYNDUMP.js create mode 100644 node_modules/@redis/search/dist/commands/SYNUPDATE.d.ts create mode 100644 node_modules/@redis/search/dist/commands/SYNUPDATE.js create mode 100644 
node_modules/@redis/search/dist/commands/TAGVALS.d.ts create mode 100644 node_modules/@redis/search/dist/commands/TAGVALS.js create mode 100644 node_modules/@redis/search/dist/commands/_LIST.d.ts create mode 100644 node_modules/@redis/search/dist/commands/_LIST.js create mode 100644 node_modules/@redis/search/dist/commands/index.d.ts create mode 100644 node_modules/@redis/search/dist/commands/index.js create mode 100644 node_modules/@redis/search/dist/index.d.ts create mode 100644 node_modules/@redis/search/dist/index.js create mode 100644 node_modules/@redis/search/package.json create mode 100644 node_modules/@redis/time-series/README.md create mode 100644 node_modules/@redis/time-series/dist/commands/ADD.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/ADD.js create mode 100644 node_modules/@redis/time-series/dist/commands/ALTER.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/ALTER.js create mode 100644 node_modules/@redis/time-series/dist/commands/CREATE.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/CREATE.js create mode 100644 node_modules/@redis/time-series/dist/commands/CREATERULE.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/CREATERULE.js create mode 100644 node_modules/@redis/time-series/dist/commands/DECRBY.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/DECRBY.js create mode 100644 node_modules/@redis/time-series/dist/commands/DEL.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/DEL.js create mode 100644 node_modules/@redis/time-series/dist/commands/DELETERULE.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/DELETERULE.js create mode 100644 node_modules/@redis/time-series/dist/commands/GET.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/GET.js create mode 100644 node_modules/@redis/time-series/dist/commands/INCRBY.d.ts create mode 100644 
node_modules/@redis/time-series/dist/commands/INCRBY.js create mode 100644 node_modules/@redis/time-series/dist/commands/INFO.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/INFO.js create mode 100644 node_modules/@redis/time-series/dist/commands/INFO_DEBUG.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/INFO_DEBUG.js create mode 100644 node_modules/@redis/time-series/dist/commands/MADD.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/MADD.js create mode 100644 node_modules/@redis/time-series/dist/commands/MGET.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/MGET.js create mode 100644 node_modules/@redis/time-series/dist/commands/MGET_WITHLABELS.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/MGET_WITHLABELS.js create mode 100644 node_modules/@redis/time-series/dist/commands/MRANGE.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/MRANGE.js create mode 100644 node_modules/@redis/time-series/dist/commands/MRANGE_WITHLABELS.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/MRANGE_WITHLABELS.js create mode 100644 node_modules/@redis/time-series/dist/commands/MREVRANGE.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/MREVRANGE.js create mode 100644 node_modules/@redis/time-series/dist/commands/MREVRANGE_WITHLABELS.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/MREVRANGE_WITHLABELS.js create mode 100644 node_modules/@redis/time-series/dist/commands/QUERYINDEX.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/QUERYINDEX.js create mode 100644 node_modules/@redis/time-series/dist/commands/RANGE.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/RANGE.js create mode 100644 node_modules/@redis/time-series/dist/commands/REVRANGE.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/REVRANGE.js create mode 100644 
node_modules/@redis/time-series/dist/commands/index.d.ts create mode 100644 node_modules/@redis/time-series/dist/commands/index.js create mode 100644 node_modules/@redis/time-series/dist/index.d.ts create mode 100644 node_modules/@redis/time-series/dist/index.js create mode 100644 node_modules/@redis/time-series/package.json create mode 100644 node_modules/abbrev/LICENSE create mode 100644 node_modules/abbrev/README.md create mode 100644 node_modules/abbrev/abbrev.js create mode 100644 node_modules/abbrev/package.json create mode 100644 node_modules/agent-base/README.md create mode 100644 node_modules/agent-base/dist/src/index.d.ts create mode 100644 node_modules/agent-base/dist/src/index.js create mode 100644 node_modules/agent-base/dist/src/index.js.map create mode 100644 node_modules/agent-base/dist/src/promisify.d.ts create mode 100644 node_modules/agent-base/dist/src/promisify.js create mode 100644 node_modules/agent-base/dist/src/promisify.js.map create mode 100644 node_modules/agent-base/node_modules/debug/LICENSE create mode 100644 node_modules/agent-base/node_modules/debug/README.md create mode 100644 node_modules/agent-base/node_modules/debug/package.json create mode 100644 node_modules/agent-base/node_modules/debug/src/browser.js create mode 100644 node_modules/agent-base/node_modules/debug/src/common.js create mode 100644 node_modules/agent-base/node_modules/debug/src/index.js create mode 100644 node_modules/agent-base/node_modules/debug/src/node.js create mode 100644 node_modules/agent-base/node_modules/ms/index.js create mode 100644 node_modules/agent-base/node_modules/ms/license.md create mode 100644 node_modules/agent-base/node_modules/ms/package.json create mode 100644 node_modules/agent-base/node_modules/ms/readme.md create mode 100644 node_modules/agent-base/package.json create mode 100644 node_modules/agent-base/src/index.ts create mode 100644 node_modules/agent-base/src/promisify.ts create mode 100644 node_modules/ansi-regex/index.d.ts create 
mode 100644 node_modules/ansi-regex/index.js create mode 100644 node_modules/ansi-regex/license create mode 100644 node_modules/ansi-regex/package.json create mode 100644 node_modules/ansi-regex/readme.md create mode 100644 node_modules/aproba/LICENSE create mode 100644 node_modules/aproba/README.md create mode 100644 node_modules/aproba/index.js create mode 100644 node_modules/aproba/package.json create mode 100644 node_modules/are-we-there-yet/LICENSE.md create mode 100644 node_modules/are-we-there-yet/README.md create mode 100644 node_modules/are-we-there-yet/lib/index.js create mode 100644 node_modules/are-we-there-yet/lib/tracker-base.js create mode 100644 node_modules/are-we-there-yet/lib/tracker-group.js create mode 100644 node_modules/are-we-there-yet/lib/tracker-stream.js create mode 100644 node_modules/are-we-there-yet/lib/tracker.js create mode 100644 node_modules/are-we-there-yet/package.json create mode 100644 node_modules/balanced-match/.github/FUNDING.yml create mode 100644 node_modules/balanced-match/LICENSE.md create mode 100644 node_modules/balanced-match/README.md create mode 100644 node_modules/balanced-match/index.js create mode 100644 node_modules/balanced-match/package.json create mode 100644 node_modules/bcrypt/.editorconfig create mode 100644 node_modules/bcrypt/.github/workflows/ci.yaml create mode 100644 node_modules/bcrypt/.travis.yml create mode 100644 node_modules/bcrypt/CHANGELOG.md create mode 100644 node_modules/bcrypt/ISSUE_TEMPLATE.md create mode 100644 node_modules/bcrypt/LICENSE create mode 100644 node_modules/bcrypt/Makefile create mode 100644 node_modules/bcrypt/README.md create mode 100644 node_modules/bcrypt/SECURITY.md create mode 100644 node_modules/bcrypt/appveyor.yml create mode 100644 node_modules/bcrypt/bcrypt.js create mode 100644 node_modules/bcrypt/binding.gyp create mode 100644 node_modules/bcrypt/examples/async_compare.js create mode 100644 node_modules/bcrypt/examples/forever_gen_salt.js create mode 100644 
node_modules/bcrypt/lib/binding/napi-v3/bcrypt_lib.node create mode 100644 node_modules/bcrypt/package.json create mode 100644 node_modules/bcrypt/promises.js create mode 100644 node_modules/bcrypt/src/bcrypt.cc create mode 100644 node_modules/bcrypt/src/bcrypt_node.cc create mode 100644 node_modules/bcrypt/src/blowfish.cc create mode 100644 node_modules/bcrypt/src/node_blf.h create mode 100644 node_modules/bcrypt/test-docker.sh create mode 100644 node_modules/bcrypt/test/async.test.js create mode 100644 node_modules/bcrypt/test/implementation.test.js create mode 100644 node_modules/bcrypt/test/promise.test.js create mode 100644 node_modules/bcrypt/test/repetitions.test.js create mode 100644 node_modules/bcrypt/test/sync.test.js create mode 100644 node_modules/brace-expansion/LICENSE create mode 100644 node_modules/brace-expansion/README.md create mode 100644 node_modules/brace-expansion/index.js create mode 100644 node_modules/brace-expansion/package.json create mode 100644 node_modules/chownr/LICENSE create mode 100644 node_modules/chownr/README.md create mode 100644 node_modules/chownr/chownr.js create mode 100644 node_modules/chownr/package.json create mode 100644 node_modules/cluster-key-slot/.eslintrc create mode 100644 node_modules/cluster-key-slot/LICENSE create mode 100644 node_modules/cluster-key-slot/README.md create mode 100644 node_modules/cluster-key-slot/index.d.ts create mode 100644 node_modules/cluster-key-slot/lib/index.js create mode 100644 node_modules/cluster-key-slot/package.json create mode 100644 node_modules/color-support/LICENSE create mode 100644 node_modules/color-support/README.md create mode 100644 node_modules/color-support/bin.js create mode 100644 node_modules/color-support/browser.js create mode 100644 node_modules/color-support/index.js create mode 100644 node_modules/color-support/package.json create mode 100644 node_modules/concat-map/.travis.yml create mode 100644 node_modules/concat-map/LICENSE create mode 100644 
node_modules/concat-map/README.markdown create mode 100644 node_modules/concat-map/example/map.js create mode 100644 node_modules/concat-map/index.js create mode 100644 node_modules/concat-map/package.json create mode 100644 node_modules/concat-map/test/map.js create mode 100644 node_modules/connect-redis/.eslintrc create mode 100644 node_modules/connect-redis/.github/release-drafter.yml create mode 100644 node_modules/connect-redis/.github/workflows/build.yml create mode 100644 node_modules/connect-redis/.github/workflows/release.yml create mode 100644 node_modules/connect-redis/.github/workflows/stale.yml create mode 100644 node_modules/connect-redis/.prettierrc create mode 100644 node_modules/connect-redis/dist/cjs/index.d.ts create mode 100644 node_modules/connect-redis/dist/cjs/index.js create mode 100644 node_modules/connect-redis/dist/cjs/index_test.d.ts create mode 100644 node_modules/connect-redis/dist/cjs/index_test.js create mode 100644 node_modules/connect-redis/dist/esm/index.d.ts create mode 100644 node_modules/connect-redis/dist/esm/index.js create mode 100644 node_modules/connect-redis/dist/esm/index_test.d.ts create mode 100644 node_modules/connect-redis/dist/esm/index_test.js create mode 100644 node_modules/connect-redis/dist/esm/package.json create mode 100644 node_modules/connect-redis/index.ts create mode 100644 node_modules/connect-redis/index_test.ts create mode 100644 node_modules/connect-redis/license create mode 100644 node_modules/connect-redis/package.json create mode 100644 node_modules/connect-redis/readme.md create mode 100644 node_modules/connect-redis/tsconfig.esm.json create mode 100644 node_modules/connect-redis/tsconfig.json create mode 100644 node_modules/console-control-strings/LICENSE create mode 100644 node_modules/console-control-strings/README.md create mode 100644 node_modules/console-control-strings/README.md~ create mode 100644 node_modules/console-control-strings/index.js create mode 100644 
node_modules/console-control-strings/package.json create mode 100644 node_modules/delegates/.npmignore create mode 100644 node_modules/delegates/History.md create mode 100644 node_modules/delegates/License create mode 100644 node_modules/delegates/Makefile create mode 100644 node_modules/delegates/Readme.md create mode 100644 node_modules/delegates/index.js create mode 100644 node_modules/delegates/package.json create mode 100644 node_modules/delegates/test/index.js create mode 100644 node_modules/detect-libc/LICENSE create mode 100644 node_modules/detect-libc/README.md create mode 100644 node_modules/detect-libc/index.d.ts create mode 100644 node_modules/detect-libc/lib/detect-libc.js create mode 100644 node_modules/detect-libc/lib/elf.js create mode 100644 node_modules/detect-libc/lib/filesystem.js create mode 100644 node_modules/detect-libc/lib/process.js create mode 100644 node_modules/detect-libc/package.json create mode 100644 node_modules/emoji-regex/LICENSE-MIT.txt create mode 100644 node_modules/emoji-regex/README.md create mode 100644 node_modules/emoji-regex/es2015/index.js create mode 100644 node_modules/emoji-regex/es2015/text.js create mode 100644 node_modules/emoji-regex/index.d.ts create mode 100644 node_modules/emoji-regex/index.js create mode 100644 node_modules/emoji-regex/package.json create mode 100644 node_modules/emoji-regex/text.js create mode 100644 node_modules/express-session/LICENSE create mode 100644 node_modules/express-session/README.md create mode 100644 node_modules/express-session/index.js create mode 100644 node_modules/express-session/package.json create mode 100644 node_modules/express-session/session/cookie.js create mode 100644 node_modules/express-session/session/memory.js create mode 100644 node_modules/express-session/session/session.js create mode 100644 node_modules/express-session/session/store.js create mode 100644 node_modules/fs-minipass/LICENSE create mode 100644 node_modules/fs-minipass/README.md create mode 100644 
node_modules/fs-minipass/index.js create mode 100644 node_modules/fs-minipass/node_modules/minipass/LICENSE create mode 100644 node_modules/fs-minipass/node_modules/minipass/README.md create mode 100644 node_modules/fs-minipass/node_modules/minipass/index.d.ts create mode 100644 node_modules/fs-minipass/node_modules/minipass/index.js create mode 100644 node_modules/fs-minipass/node_modules/minipass/package.json create mode 100644 node_modules/fs-minipass/package.json create mode 100644 node_modules/fs.realpath/LICENSE create mode 100644 node_modules/fs.realpath/README.md create mode 100644 node_modules/fs.realpath/index.js create mode 100644 node_modules/fs.realpath/old.js create mode 100644 node_modules/fs.realpath/package.json create mode 100644 node_modules/gauge/CHANGELOG.md create mode 100644 node_modules/gauge/LICENSE create mode 100644 node_modules/gauge/README.md create mode 100644 node_modules/gauge/base-theme.js create mode 100644 node_modules/gauge/error.js create mode 100644 node_modules/gauge/has-color.js create mode 100644 node_modules/gauge/index.js create mode 100644 node_modules/gauge/package.json create mode 100644 node_modules/gauge/plumbing.js create mode 100644 node_modules/gauge/process.js create mode 100644 node_modules/gauge/progress-bar.js create mode 100644 node_modules/gauge/render-template.js create mode 100644 node_modules/gauge/set-immediate.js create mode 100644 node_modules/gauge/set-interval.js create mode 100644 node_modules/gauge/spin.js create mode 100644 node_modules/gauge/template-item.js create mode 100644 node_modules/gauge/theme-set.js create mode 100644 node_modules/gauge/themes.js create mode 100644 node_modules/gauge/wide-truncate.js create mode 100644 node_modules/generic-pool/README.md create mode 100644 node_modules/generic-pool/index.d.ts create mode 100644 node_modules/generic-pool/index.js create mode 100644 node_modules/generic-pool/lib/DefaultEvictor.js create mode 100644 node_modules/generic-pool/lib/Deferred.js 
create mode 100644 node_modules/generic-pool/lib/Deque.js create mode 100644 node_modules/generic-pool/lib/DequeIterator.js create mode 100644 node_modules/generic-pool/lib/DoublyLinkedList.js create mode 100644 node_modules/generic-pool/lib/DoublyLinkedListIterator.js create mode 100644 node_modules/generic-pool/lib/Pool.js create mode 100644 node_modules/generic-pool/lib/PoolDefaults.js create mode 100644 node_modules/generic-pool/lib/PoolOptions.js create mode 100644 node_modules/generic-pool/lib/PooledResource.js create mode 100644 node_modules/generic-pool/lib/PooledResourceStateEnum.js create mode 100644 node_modules/generic-pool/lib/PriorityQueue.js create mode 100644 node_modules/generic-pool/lib/Queue.js create mode 100644 node_modules/generic-pool/lib/ResourceLoan.js create mode 100644 node_modules/generic-pool/lib/ResourceRequest.js create mode 100644 node_modules/generic-pool/lib/errors.js create mode 100644 node_modules/generic-pool/lib/factoryValidator.js create mode 100644 node_modules/generic-pool/lib/utils.js create mode 100644 node_modules/generic-pool/package.json create mode 100644 node_modules/glob/LICENSE create mode 100644 node_modules/glob/README.md create mode 100644 node_modules/glob/common.js create mode 100644 node_modules/glob/glob.js create mode 100644 node_modules/glob/package.json create mode 100644 node_modules/glob/sync.js create mode 100644 node_modules/has-unicode/LICENSE create mode 100644 node_modules/has-unicode/README.md create mode 100644 node_modules/has-unicode/index.js create mode 100644 node_modules/has-unicode/package.json create mode 100644 node_modules/https-proxy-agent/README.md create mode 100644 node_modules/https-proxy-agent/dist/agent.d.ts create mode 100644 node_modules/https-proxy-agent/dist/agent.js create mode 100644 node_modules/https-proxy-agent/dist/agent.js.map create mode 100644 node_modules/https-proxy-agent/dist/index.d.ts create mode 100644 node_modules/https-proxy-agent/dist/index.js create mode 
100644 node_modules/https-proxy-agent/dist/index.js.map create mode 100644 node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts create mode 100644 node_modules/https-proxy-agent/dist/parse-proxy-response.js create mode 100644 node_modules/https-proxy-agent/dist/parse-proxy-response.js.map create mode 100644 node_modules/https-proxy-agent/node_modules/debug/LICENSE create mode 100644 node_modules/https-proxy-agent/node_modules/debug/README.md create mode 100644 node_modules/https-proxy-agent/node_modules/debug/package.json create mode 100644 node_modules/https-proxy-agent/node_modules/debug/src/browser.js create mode 100644 node_modules/https-proxy-agent/node_modules/debug/src/common.js create mode 100644 node_modules/https-proxy-agent/node_modules/debug/src/index.js create mode 100644 node_modules/https-proxy-agent/node_modules/debug/src/node.js create mode 100644 node_modules/https-proxy-agent/node_modules/ms/index.js create mode 100644 node_modules/https-proxy-agent/node_modules/ms/license.md create mode 100644 node_modules/https-proxy-agent/node_modules/ms/package.json create mode 100644 node_modules/https-proxy-agent/node_modules/ms/readme.md create mode 100644 node_modules/https-proxy-agent/package.json create mode 100644 node_modules/inflight/LICENSE create mode 100644 node_modules/inflight/README.md create mode 100644 node_modules/inflight/inflight.js create mode 100644 node_modules/inflight/package.json create mode 100644 node_modules/is-fullwidth-code-point/index.d.ts create mode 100644 node_modules/is-fullwidth-code-point/index.js create mode 100644 node_modules/is-fullwidth-code-point/license create mode 100644 node_modules/is-fullwidth-code-point/package.json create mode 100644 node_modules/is-fullwidth-code-point/readme.md create mode 100644 node_modules/make-dir/index.d.ts create mode 100644 node_modules/make-dir/index.js create mode 100644 node_modules/make-dir/license create mode 100644 node_modules/make-dir/node_modules/.bin/semver create 
mode 100644 node_modules/make-dir/node_modules/.bin/semver.cmd create mode 100644 node_modules/make-dir/node_modules/.bin/semver.ps1 create mode 100644 node_modules/make-dir/node_modules/semver/LICENSE create mode 100644 node_modules/make-dir/node_modules/semver/README.md create mode 100644 node_modules/make-dir/node_modules/semver/bin/semver.js create mode 100644 node_modules/make-dir/node_modules/semver/package.json create mode 100644 node_modules/make-dir/node_modules/semver/range.bnf create mode 100644 node_modules/make-dir/node_modules/semver/semver.js create mode 100644 node_modules/make-dir/package.json create mode 100644 node_modules/make-dir/readme.md create mode 100644 node_modules/minimatch/LICENSE create mode 100644 node_modules/minimatch/README.md create mode 100644 node_modules/minimatch/minimatch.js create mode 100644 node_modules/minimatch/package.json create mode 100644 node_modules/minipass/LICENSE create mode 100644 node_modules/minipass/README.md create mode 100644 node_modules/minipass/index.d.ts create mode 100644 node_modules/minipass/index.js create mode 100644 node_modules/minipass/index.mjs create mode 100644 node_modules/minipass/package.json create mode 100644 node_modules/minizlib/LICENSE create mode 100644 node_modules/minizlib/README.md create mode 100644 node_modules/minizlib/constants.js create mode 100644 node_modules/minizlib/index.js create mode 100644 node_modules/minizlib/node_modules/minipass/LICENSE create mode 100644 node_modules/minizlib/node_modules/minipass/README.md create mode 100644 node_modules/minizlib/node_modules/minipass/index.d.ts create mode 100644 node_modules/minizlib/node_modules/minipass/index.js create mode 100644 node_modules/minizlib/node_modules/minipass/package.json create mode 100644 node_modules/minizlib/package.json create mode 100644 node_modules/mkdirp/CHANGELOG.md create mode 100644 node_modules/mkdirp/LICENSE create mode 100644 node_modules/mkdirp/bin/cmd.js create mode 100644 
node_modules/mkdirp/index.js create mode 100644 node_modules/mkdirp/lib/find-made.js create mode 100644 node_modules/mkdirp/lib/mkdirp-manual.js create mode 100644 node_modules/mkdirp/lib/mkdirp-native.js create mode 100644 node_modules/mkdirp/lib/opts-arg.js create mode 100644 node_modules/mkdirp/lib/path-arg.js create mode 100644 node_modules/mkdirp/lib/use-native.js create mode 100644 node_modules/mkdirp/package.json create mode 100644 node_modules/mkdirp/readme.markdown create mode 100644 node_modules/node-addon-api/LICENSE.md create mode 100644 node_modules/node-addon-api/README.md create mode 100644 node_modules/node-addon-api/common.gypi create mode 100644 node_modules/node-addon-api/except.gypi create mode 100644 node_modules/node-addon-api/index.js create mode 100644 node_modules/node-addon-api/napi-inl.deprecated.h create mode 100644 node_modules/node-addon-api/napi-inl.h create mode 100644 node_modules/node-addon-api/napi.h create mode 100644 node_modules/node-addon-api/node_api.gyp create mode 100644 node_modules/node-addon-api/noexcept.gypi create mode 100644 node_modules/node-addon-api/nothing.c create mode 100644 node_modules/node-addon-api/package-support.json create mode 100644 node_modules/node-addon-api/package.json create mode 100644 node_modules/node-addon-api/tools/README.md create mode 100644 node_modules/node-addon-api/tools/check-napi.js create mode 100644 node_modules/node-addon-api/tools/clang-format.js create mode 100644 node_modules/node-addon-api/tools/conversion.js create mode 100644 node_modules/node-addon-api/tools/eslint-format.js create mode 100644 node_modules/node-fetch/LICENSE.md create mode 100644 node_modules/node-fetch/README.md create mode 100644 node_modules/node-fetch/browser.js create mode 100644 node_modules/node-fetch/lib/index.es.js create mode 100644 node_modules/node-fetch/lib/index.js create mode 100644 node_modules/node-fetch/lib/index.mjs create mode 100644 node_modules/node-fetch/package.json create mode 100644 
node_modules/nopt/CHANGELOG.md create mode 100644 node_modules/nopt/LICENSE create mode 100644 node_modules/nopt/README.md create mode 100644 node_modules/nopt/bin/nopt.js create mode 100644 node_modules/nopt/lib/nopt.js create mode 100644 node_modules/nopt/package.json create mode 100644 node_modules/npmlog/LICENSE create mode 100644 node_modules/npmlog/README.md create mode 100644 node_modules/npmlog/log.js create mode 100644 node_modules/npmlog/package.json create mode 100644 node_modules/on-headers/HISTORY.md create mode 100644 node_modules/on-headers/LICENSE create mode 100644 node_modules/on-headers/README.md create mode 100644 node_modules/on-headers/index.js create mode 100644 node_modules/on-headers/package.json create mode 100644 node_modules/once/LICENSE create mode 100644 node_modules/once/README.md create mode 100644 node_modules/once/once.js create mode 100644 node_modules/once/package.json create mode 100644 node_modules/path-is-absolute/index.js create mode 100644 node_modules/path-is-absolute/license create mode 100644 node_modules/path-is-absolute/package.json create mode 100644 node_modules/path-is-absolute/readme.md create mode 100644 node_modules/random-bytes/HISTORY.md create mode 100644 node_modules/random-bytes/LICENSE create mode 100644 node_modules/random-bytes/README.md create mode 100644 node_modules/random-bytes/index.js create mode 100644 node_modules/random-bytes/package.json create mode 100644 node_modules/readable-stream/CONTRIBUTING.md create mode 100644 node_modules/readable-stream/GOVERNANCE.md create mode 100644 node_modules/readable-stream/LICENSE create mode 100644 node_modules/readable-stream/README.md create mode 100644 node_modules/readable-stream/errors-browser.js create mode 100644 node_modules/readable-stream/errors.js create mode 100644 node_modules/readable-stream/experimentalWarning.js create mode 100644 node_modules/readable-stream/lib/_stream_duplex.js create mode 100644 
node_modules/readable-stream/lib/_stream_passthrough.js create mode 100644 node_modules/readable-stream/lib/_stream_readable.js create mode 100644 node_modules/readable-stream/lib/_stream_transform.js create mode 100644 node_modules/readable-stream/lib/_stream_writable.js create mode 100644 node_modules/readable-stream/lib/internal/streams/async_iterator.js create mode 100644 node_modules/readable-stream/lib/internal/streams/buffer_list.js create mode 100644 node_modules/readable-stream/lib/internal/streams/destroy.js create mode 100644 node_modules/readable-stream/lib/internal/streams/end-of-stream.js create mode 100644 node_modules/readable-stream/lib/internal/streams/from-browser.js create mode 100644 node_modules/readable-stream/lib/internal/streams/from.js create mode 100644 node_modules/readable-stream/lib/internal/streams/pipeline.js create mode 100644 node_modules/readable-stream/lib/internal/streams/state.js create mode 100644 node_modules/readable-stream/lib/internal/streams/stream-browser.js create mode 100644 node_modules/readable-stream/lib/internal/streams/stream.js create mode 100644 node_modules/readable-stream/package.json create mode 100644 node_modules/readable-stream/readable-browser.js create mode 100644 node_modules/readable-stream/readable.js create mode 100644 node_modules/redis/LICENSE create mode 100644 node_modules/redis/README.md create mode 100644 node_modules/redis/dist/index.d.ts create mode 100644 node_modules/redis/dist/index.js create mode 100644 node_modules/redis/package.json create mode 100644 node_modules/rimraf/CHANGELOG.md create mode 100644 node_modules/rimraf/LICENSE create mode 100644 node_modules/rimraf/README.md create mode 100644 node_modules/rimraf/bin.js create mode 100644 node_modules/rimraf/package.json create mode 100644 node_modules/rimraf/rimraf.js create mode 100644 node_modules/semver/LICENSE create mode 100644 node_modules/semver/README.md create mode 100644 node_modules/semver/bin/semver.js create mode 100644 
node_modules/semver/classes/comparator.js create mode 100644 node_modules/semver/classes/index.js create mode 100644 node_modules/semver/classes/range.js create mode 100644 node_modules/semver/classes/semver.js create mode 100644 node_modules/semver/functions/clean.js create mode 100644 node_modules/semver/functions/cmp.js create mode 100644 node_modules/semver/functions/coerce.js create mode 100644 node_modules/semver/functions/compare-build.js create mode 100644 node_modules/semver/functions/compare-loose.js create mode 100644 node_modules/semver/functions/compare.js create mode 100644 node_modules/semver/functions/diff.js create mode 100644 node_modules/semver/functions/eq.js create mode 100644 node_modules/semver/functions/gt.js create mode 100644 node_modules/semver/functions/gte.js create mode 100644 node_modules/semver/functions/inc.js create mode 100644 node_modules/semver/functions/lt.js create mode 100644 node_modules/semver/functions/lte.js create mode 100644 node_modules/semver/functions/major.js create mode 100644 node_modules/semver/functions/minor.js create mode 100644 node_modules/semver/functions/neq.js create mode 100644 node_modules/semver/functions/parse.js create mode 100644 node_modules/semver/functions/patch.js create mode 100644 node_modules/semver/functions/prerelease.js create mode 100644 node_modules/semver/functions/rcompare.js create mode 100644 node_modules/semver/functions/rsort.js create mode 100644 node_modules/semver/functions/satisfies.js create mode 100644 node_modules/semver/functions/sort.js create mode 100644 node_modules/semver/functions/valid.js create mode 100644 node_modules/semver/index.js create mode 100644 node_modules/semver/internal/constants.js create mode 100644 node_modules/semver/internal/debug.js create mode 100644 node_modules/semver/internal/identifiers.js create mode 100644 node_modules/semver/internal/lrucache.js create mode 100644 node_modules/semver/internal/parse-options.js create mode 100644 
node_modules/semver/internal/re.js create mode 100644 node_modules/semver/package.json create mode 100644 node_modules/semver/preload.js create mode 100644 node_modules/semver/range.bnf create mode 100644 node_modules/semver/ranges/gtr.js create mode 100644 node_modules/semver/ranges/intersects.js create mode 100644 node_modules/semver/ranges/ltr.js create mode 100644 node_modules/semver/ranges/max-satisfying.js create mode 100644 node_modules/semver/ranges/min-satisfying.js create mode 100644 node_modules/semver/ranges/min-version.js create mode 100644 node_modules/semver/ranges/outside.js create mode 100644 node_modules/semver/ranges/simplify.js create mode 100644 node_modules/semver/ranges/subset.js create mode 100644 node_modules/semver/ranges/to-comparators.js create mode 100644 node_modules/semver/ranges/valid.js create mode 100644 node_modules/set-blocking/CHANGELOG.md create mode 100644 node_modules/set-blocking/LICENSE.txt create mode 100644 node_modules/set-blocking/README.md create mode 100644 node_modules/set-blocking/index.js create mode 100644 node_modules/set-blocking/package.json create mode 100644 node_modules/signal-exit/LICENSE.txt create mode 100644 node_modules/signal-exit/README.md create mode 100644 node_modules/signal-exit/index.js create mode 100644 node_modules/signal-exit/package.json create mode 100644 node_modules/signal-exit/signals.js create mode 100644 node_modules/string-width/index.d.ts create mode 100644 node_modules/string-width/index.js create mode 100644 node_modules/string-width/license create mode 100644 node_modules/string-width/package.json create mode 100644 node_modules/string-width/readme.md create mode 100644 node_modules/string_decoder/LICENSE create mode 100644 node_modules/string_decoder/README.md create mode 100644 node_modules/string_decoder/lib/string_decoder.js create mode 100644 node_modules/string_decoder/package.json create mode 100644 node_modules/strip-ansi/index.d.ts create mode 100644 
node_modules/strip-ansi/index.js create mode 100644 node_modules/strip-ansi/license create mode 100644 node_modules/strip-ansi/package.json create mode 100644 node_modules/strip-ansi/readme.md create mode 100644 node_modules/tar/LICENSE create mode 100644 node_modules/tar/README.md create mode 100644 node_modules/tar/index.js create mode 100644 node_modules/tar/lib/create.js create mode 100644 node_modules/tar/lib/extract.js create mode 100644 node_modules/tar/lib/get-write-flag.js create mode 100644 node_modules/tar/lib/header.js create mode 100644 node_modules/tar/lib/high-level-opt.js create mode 100644 node_modules/tar/lib/large-numbers.js create mode 100644 node_modules/tar/lib/list.js create mode 100644 node_modules/tar/lib/mkdir.js create mode 100644 node_modules/tar/lib/mode-fix.js create mode 100644 node_modules/tar/lib/normalize-unicode.js create mode 100644 node_modules/tar/lib/normalize-windows-path.js create mode 100644 node_modules/tar/lib/pack.js create mode 100644 node_modules/tar/lib/parse.js create mode 100644 node_modules/tar/lib/path-reservations.js create mode 100644 node_modules/tar/lib/pax.js create mode 100644 node_modules/tar/lib/read-entry.js create mode 100644 node_modules/tar/lib/replace.js create mode 100644 node_modules/tar/lib/strip-absolute-path.js create mode 100644 node_modules/tar/lib/strip-trailing-slashes.js create mode 100644 node_modules/tar/lib/types.js create mode 100644 node_modules/tar/lib/unpack.js create mode 100644 node_modules/tar/lib/update.js create mode 100644 node_modules/tar/lib/warn-mixin.js create mode 100644 node_modules/tar/lib/winchars.js create mode 100644 node_modules/tar/lib/write-entry.js create mode 100644 node_modules/tar/package.json create mode 100644 node_modules/tr46/.npmignore create mode 100644 node_modules/tr46/index.js create mode 100644 node_modules/tr46/lib/.gitkeep create mode 100644 node_modules/tr46/lib/mappingTable.json create mode 100644 node_modules/tr46/package.json create mode 100644 
node_modules/uid-safe/HISTORY.md create mode 100644 node_modules/uid-safe/LICENSE create mode 100644 node_modules/uid-safe/README.md create mode 100644 node_modules/uid-safe/index.js create mode 100644 node_modules/uid-safe/package.json create mode 100644 node_modules/util-deprecate/History.md create mode 100644 node_modules/util-deprecate/LICENSE create mode 100644 node_modules/util-deprecate/README.md create mode 100644 node_modules/util-deprecate/browser.js create mode 100644 node_modules/util-deprecate/node.js create mode 100644 node_modules/util-deprecate/package.json create mode 100644 node_modules/webidl-conversions/LICENSE.md create mode 100644 node_modules/webidl-conversions/README.md create mode 100644 node_modules/webidl-conversions/lib/index.js create mode 100644 node_modules/webidl-conversions/package.json create mode 100644 node_modules/whatwg-url/LICENSE.txt create mode 100644 node_modules/whatwg-url/README.md create mode 100644 node_modules/whatwg-url/lib/URL-impl.js create mode 100644 node_modules/whatwg-url/lib/URL.js create mode 100644 node_modules/whatwg-url/lib/public-api.js create mode 100644 node_modules/whatwg-url/lib/url-state-machine.js create mode 100644 node_modules/whatwg-url/lib/utils.js create mode 100644 node_modules/whatwg-url/package.json create mode 100644 node_modules/wide-align/LICENSE create mode 100644 node_modules/wide-align/README.md create mode 100644 node_modules/wide-align/align.js create mode 100644 node_modules/wide-align/package.json create mode 100644 node_modules/wrappy/LICENSE create mode 100644 node_modules/wrappy/README.md create mode 100644 node_modules/wrappy/package.json create mode 100644 node_modules/wrappy/wrappy.js create mode 100644 node_modules/yallist/LICENSE create mode 100644 node_modules/yallist/README.md create mode 100644 node_modules/yallist/iterator.js create mode 100644 node_modules/yallist/package.json create mode 100644 node_modules/yallist/yallist.js diff --git 
a/node_modules/.bin/color-support b/node_modules/.bin/color-support new file mode 100644 index 0000000..f77f9d5 --- /dev/null +++ b/node_modules/.bin/color-support @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../color-support/bin.js" "$@" +else + exec node "$basedir/../color-support/bin.js" "$@" +fi diff --git a/node_modules/.bin/color-support.cmd b/node_modules/.bin/color-support.cmd new file mode 100644 index 0000000..005f9a5 --- /dev/null +++ b/node_modules/.bin/color-support.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\color-support\bin.js" %* diff --git a/node_modules/.bin/color-support.ps1 b/node_modules/.bin/color-support.ps1 new file mode 100644 index 0000000..f5c9fe4 --- /dev/null +++ b/node_modules/.bin/color-support.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../color-support/bin.js" $args + } else { + & "$basedir/node$exe" "$basedir/../color-support/bin.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../color-support/bin.js" $args + } else { + & 
"node$exe" "$basedir/../color-support/bin.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.bin/mkdirp b/node_modules/.bin/mkdirp new file mode 100644 index 0000000..1ab9c81 --- /dev/null +++ b/node_modules/.bin/mkdirp @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../mkdirp/bin/cmd.js" "$@" +else + exec node "$basedir/../mkdirp/bin/cmd.js" "$@" +fi diff --git a/node_modules/.bin/mkdirp.cmd b/node_modules/.bin/mkdirp.cmd new file mode 100644 index 0000000..a865dd9 --- /dev/null +++ b/node_modules/.bin/mkdirp.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\mkdirp\bin\cmd.js" %* diff --git a/node_modules/.bin/mkdirp.ps1 b/node_modules/.bin/mkdirp.ps1 new file mode 100644 index 0000000..911e854 --- /dev/null +++ b/node_modules/.bin/mkdirp.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args + } else { + & "$basedir/node$exe" "$basedir/../mkdirp/bin/cmd.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args + 
} else { + & "node$exe" "$basedir/../mkdirp/bin/cmd.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.bin/node-pre-gyp b/node_modules/.bin/node-pre-gyp new file mode 100644 index 0000000..d1619e4 --- /dev/null +++ b/node_modules/.bin/node-pre-gyp @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../@mapbox/node-pre-gyp/bin/node-pre-gyp" "$@" +else + exec node "$basedir/../@mapbox/node-pre-gyp/bin/node-pre-gyp" "$@" +fi diff --git a/node_modules/.bin/node-pre-gyp.cmd b/node_modules/.bin/node-pre-gyp.cmd new file mode 100644 index 0000000..a2fc508 --- /dev/null +++ b/node_modules/.bin/node-pre-gyp.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\@mapbox\node-pre-gyp\bin\node-pre-gyp" %* diff --git a/node_modules/.bin/node-pre-gyp.ps1 b/node_modules/.bin/node-pre-gyp.ps1 new file mode 100644 index 0000000..ed297ff --- /dev/null +++ b/node_modules/.bin/node-pre-gyp.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../@mapbox/node-pre-gyp/bin/node-pre-gyp" $args + } else { + & "$basedir/node$exe" "$basedir/../@mapbox/node-pre-gyp/bin/node-pre-gyp" $args 
+ } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../@mapbox/node-pre-gyp/bin/node-pre-gyp" $args + } else { + & "node$exe" "$basedir/../@mapbox/node-pre-gyp/bin/node-pre-gyp" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.bin/nopt b/node_modules/.bin/nopt new file mode 100644 index 0000000..0808130 --- /dev/null +++ b/node_modules/.bin/nopt @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../nopt/bin/nopt.js" "$@" +else + exec node "$basedir/../nopt/bin/nopt.js" "$@" +fi diff --git a/node_modules/.bin/nopt.cmd b/node_modules/.bin/nopt.cmd new file mode 100644 index 0000000..a7f38b3 --- /dev/null +++ b/node_modules/.bin/nopt.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\nopt\bin\nopt.js" %* diff --git a/node_modules/.bin/nopt.ps1 b/node_modules/.bin/nopt.ps1 new file mode 100644 index 0000000..9d6ba56 --- /dev/null +++ b/node_modules/.bin/nopt.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../nopt/bin/nopt.js" $args + } else { + & "$basedir/node$exe" 
"$basedir/../nopt/bin/nopt.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../nopt/bin/nopt.js" $args + } else { + & "node$exe" "$basedir/../nopt/bin/nopt.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.bin/rimraf b/node_modules/.bin/rimraf new file mode 100644 index 0000000..6d6240a --- /dev/null +++ b/node_modules/.bin/rimraf @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../rimraf/bin.js" "$@" +else + exec node "$basedir/../rimraf/bin.js" "$@" +fi diff --git a/node_modules/.bin/rimraf.cmd b/node_modules/.bin/rimraf.cmd new file mode 100644 index 0000000..13f45ec --- /dev/null +++ b/node_modules/.bin/rimraf.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\rimraf\bin.js" %* diff --git a/node_modules/.bin/rimraf.ps1 b/node_modules/.bin/rimraf.ps1 new file mode 100644 index 0000000..1716791 --- /dev/null +++ b/node_modules/.bin/rimraf.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../rimraf/bin.js" $args + } else { + & "$basedir/node$exe" 
"$basedir/../rimraf/bin.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../rimraf/bin.js" $args + } else { + & "node$exe" "$basedir/../rimraf/bin.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver new file mode 100644 index 0000000..97c5327 --- /dev/null +++ b/node_modules/.bin/semver @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@" +else + exec node "$basedir/../semver/bin/semver.js" "$@" +fi diff --git a/node_modules/.bin/semver.cmd b/node_modules/.bin/semver.cmd new file mode 100644 index 0000000..9913fa9 --- /dev/null +++ b/node_modules/.bin/semver.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver.js" %* diff --git a/node_modules/.bin/semver.ps1 b/node_modules/.bin/semver.ps1 new file mode 100644 index 0000000..314717a --- /dev/null +++ b/node_modules/.bin/semver.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args + } else { + & 
"$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args + } + $ret=$LASTEXITCODE +} else { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../semver/bin/semver.js" $args + } else { + & "node$exe" "$basedir/../semver/bin/semver.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/.package-lock.json b/node_modules/.package-lock.json index 5331cb6..d8b34c7 100644 --- a/node_modules/.package-lock.json +++ b/node_modules/.package-lock.json @@ -4,6 +4,85 @@ "lockfileVersion": 3, "requires": true, "packages": { + "node_modules/@mapbox/node-pre-gyp": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", + "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", + "license": "BSD-3-Clause", + "dependencies": { + "detect-libc": "^2.0.0", + "https-proxy-agent": "^5.0.0", + "make-dir": "^3.1.0", + "node-fetch": "^2.6.7", + "nopt": "^5.0.0", + "npmlog": "^5.0.1", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.11" + }, + "bin": { + "node-pre-gyp": "bin/node-pre-gyp" + } + }, + "node_modules/@redis/bloom": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@redis/bloom/-/bloom-1.2.0.tgz", + "integrity": "sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/client": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.6.1.tgz", + "integrity": "sha512-/KCsg3xSlR+nCK8/8ZYSknYxvXHwubJrU82F3Lm1Fp6789VQ0/3RJKfsmRXjqfaTA++23CvC3hqmqe/2GEt6Kw==", + "license": "MIT", + "dependencies": { + "cluster-key-slot": "1.1.2", + "generic-pool": "3.9.0", + "yallist": "4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@redis/graph": { + "version": "1.1.1", + "resolved": 
"https://registry.npmjs.org/@redis/graph/-/graph-1.1.1.tgz", + "integrity": "sha512-FEMTcTHZozZciLRl6GiiIB4zGm5z5F3F6a6FZCyrfxdKOhFlGkiAqlexWMBzCi4DcRoyiOsuLfW+cjlGWyExOw==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/json": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@redis/json/-/json-1.0.7.tgz", + "integrity": "sha512-6UyXfjVaTBTJtKNG4/9Z8PSpKE6XgSyEb8iwaqDcy+uKrd/DGYHTWkUdnQDyzm727V7p21WUMhsqz5oy65kPcQ==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/search": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.2.0.tgz", + "integrity": "sha512-tYoDBbtqOVigEDMAcTGsRlMycIIjwMCgD8eR2t0NANeQmgK/lvxNAvYyb6bZDD4frHRhIHkJu2TBRvB0ERkOmw==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/time-series": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@redis/time-series/-/time-series-1.1.0.tgz", + "integrity": "sha512-c1Q99M5ljsIuc4YdaCwfUEXsofakb9c8+Zse2qxTadu8TalLXuAESzLvFAvNVbkmSlvlzIQOLpBCmWI9wTOt+g==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, "node_modules/@socket.io/component-emitter": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/@socket.io/component-emitter/-/component-emitter-3.1.2.tgz", @@ -28,6 +107,12 @@ "undici-types": "~7.16.0" } }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "license": "ISC" + }, "node_modules/accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -41,12 +126,82 @@ "node": ">= 0.6" } }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": 
"https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "license": "MIT", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/agent-base/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/agent-base/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/aproba": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.1.0.tgz", + "integrity": "sha512-tLIEcj5GuR2RSTnxNKdkK0dJ/GrC7P38sUkiDmDuHfsHmbagTFAxDVIBltoklXEVIQ/f14IL8IMJ5pn9Hez1Ew==", + "license": "ISC" + }, + "node_modules/are-we-there-yet": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", + "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": 
">=10" + } + }, "node_modules/array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", "license": "MIT" }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, "node_modules/base64id": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/base64id/-/base64id-2.0.0.tgz", @@ -56,6 +211,20 @@ "node": "^4.5.0 || >= 5.9" } }, + "node_modules/bcrypt": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz", + "integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==", + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "@mapbox/node-pre-gyp": "^1.0.11", + "node-addon-api": "^5.0.0" + }, + "engines": { + "node": ">= 10.0.0" + } + }, "node_modules/body-parser": { "version": "1.20.4", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz", @@ -80,6 +249,16 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -118,6 +297,57 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/chownr": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/cluster-key-slot": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", + "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==", + "license": "Apache-2.0", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "license": "ISC", + "bin": { + "color-support": "bin.js" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" + }, + "node_modules/connect-redis": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/connect-redis/-/connect-redis-7.1.1.tgz", + "integrity": "sha512-M+z7alnCJiuzKa8/1qAYdGUXHYfDnLolOGAUjOioB07pP39qxjG+X9ibsud7qUBc4jMV5Mcy3ugGv8eFcgamJQ==", + "license": "MIT", + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "express-session": ">=1" + } + }, + "node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", + "license": "ISC" + }, "node_modules/content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", @@ -180,6 
+410,12 @@ "ms": "2.0.0" } }, + "node_modules/delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", + "license": "MIT" + }, "node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -199,6 +435,15 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -219,6 +464,12 @@ "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", "license": "MIT" }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "license": "MIT" + }, "node_modules/encodeurl": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", @@ -371,6 +622,29 @@ "url": "https://opencollective.com/express" } }, + "node_modules/express-session": { + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/express-session/-/express-session-1.19.0.tgz", + "integrity": "sha512-0csaMkGq+vaiZTmSMMGkfdCOabYv192VbytFypcvI0MANrp+4i/7yEkJ0sbAEhycQjntaKGzYfjfXQyVb7BHMA==", + "license": "MIT", + "dependencies": { + "cookie": "~0.7.2", + "cookie-signature": "~1.0.7", + "debug": "~2.6.9", + "depd": "~2.0.0", + "on-headers": "~1.1.0", + "parseurl": "~1.3.3", + "safe-buffer": "~5.2.1", + 
"uid-safe": "~2.1.5" + }, + "engines": { + "node": ">= 0.8.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, "node_modules/finalhandler": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.2.tgz", @@ -407,6 +681,36 @@ "node": ">= 0.6" } }, + "node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "license": "ISC" + }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", @@ -416,6 +720,36 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/gauge": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", + "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.2", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.1", + 
"object-assign": "^4.1.1", + "signal-exit": "^3.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/generic-pool": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-3.9.0.tgz", + "integrity": "sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==", + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, "node_modules/get-intrinsic": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", @@ -453,6 +787,27 @@ "node": ">= 0.4" } }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/gopd": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", @@ -477,6 +832,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", + "license": "ISC" + }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ -509,6 +870,42 @@ "url": "https://opencollective.com/express" } }, + "node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": 
"https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "license": "MIT", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/https-proxy-agent/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/https-proxy-agent/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, "node_modules/iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", @@ -521,6 +918,17 @@ "node": ">=0.10.0" } }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -536,6 +944,39 @@ "node": ">= 0.10" } }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "license": "MIT", + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -605,6 +1046,64 @@ "node": ">= 0.6" } }, + "node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + 
"node": "*" + } + }, + "node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "license": "ISC", + "engines": { + "node": ">=8" + } + }, + "node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "license": "MIT", + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "license": "MIT", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -620,6 +1119,60 @@ "node": ">= 0.6" } }, + "node_modules/node-addon-api": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz", + "integrity": "sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==", + "license": "MIT" + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": 
"sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/nopt": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", + "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", + "license": "ISC", + "dependencies": { + "abbrev": "1" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/npmlog": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", + "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "dependencies": { + "are-we-there-yet": "^2.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^3.0.0", + "set-blocking": "^2.0.0" + } + }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", @@ -653,6 +1206,24 @@ "node": ">= 0.8" } }, + "node_modules/on-headers": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz", + "integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, "node_modules/parseurl": { "version": "1.3.3", 
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", @@ -662,6 +1233,15 @@ "node": ">= 0.8" } }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/path-to-regexp": { "version": "0.1.12", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", @@ -696,6 +1276,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/random-bytes": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", + "integrity": "sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", @@ -720,6 +1309,53 @@ "node": ">= 0.8" } }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/redis": { + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/redis/-/redis-4.7.1.tgz", + "integrity": "sha512-S1bJDnqLftzHXHP8JsT5II/CtHWQrASX5K96REjWjlmWKrviSOLWmM7QnRLstAWsu1VBBV1ffV6DzCvxNP0UJQ==", + "license": "MIT", + "workspaces": [ + "./packages/*" + ], + "dependencies": { + "@redis/bloom": "1.2.0", + "@redis/client": "1.6.1", + "@redis/graph": "1.1.1", + "@redis/json": "1.0.7", + 
"@redis/search": "1.2.0", + "@redis/time-series": "1.1.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -746,6 +1382,18 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "license": "MIT" }, + "node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/send": { "version": "0.19.2", "resolved": "https://registry.npmjs.org/send/-/send-0.19.2.tgz", @@ -791,6 +1439,12 @@ "node": ">= 0.8.0" } }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "license": "ISC" + }, "node_modules/setprototypeof": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", @@ -869,6 +1523,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": 
"sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "license": "ISC" + }, "node_modules/socket.io": { "version": "4.8.3", "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.3.tgz", @@ -988,6 +1648,59 @@ "node": ">= 0.8" } }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "deprecated": "Old versions of tar are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. 
Support for old versions may be purchased (at exhorbitant rates) by contacting i@izs.me", + "license": "ISC", + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/toidentifier": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", @@ -997,6 +1710,12 @@ "node": ">=0.6" } }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" + }, "node_modules/type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -1010,6 +1729,18 @@ "node": ">= 0.6" } }, + "node_modules/uid-safe": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz", + "integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==", + "license": "MIT", + "dependencies": { + "random-bytes": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/undici-types": { "version": "7.16.0", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", @@ -1025,6 +1756,12 @@ "node": ">= 0.8" } }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -1043,6 +1780,37 @@ "node": ">= 0.8" } }, + "node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": 
"https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "license": "ISC", + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, "node_modules/ws": { "version": "8.18.3", "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", @@ -1063,6 +1831,12 @@ "optional": true } } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "license": "ISC" } } } diff --git a/node_modules/@mapbox/node-pre-gyp/.github/workflows/codeql.yml b/node_modules/@mapbox/node-pre-gyp/.github/workflows/codeql.yml new file mode 100644 index 0000000..70eaa56 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/.github/workflows/codeql.yml @@ -0,0 +1,74 @@ +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. 
+# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +# +# ******** NOTE ******** +# We have attempted to detect the languages in your repository. Please check +# the `language` matrix defined below to confirm you have the correct set of +# supported CodeQL languages. +# +name: "CodeQL" + +on: + push: + branches: [ "master" ] + pull_request: + # The branches below must be a subset of the branches above + branches: [ "master" ] + schedule: + - cron: '24 5 * * 4' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'javascript' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + + # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹ️ Command-line programs to run using the OS shell. 
+ # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun + + # If the Autobuild fails above, remove it and uncomment the following three lines. + # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. + + # - run: | + # echo "Run, Build Application using script" + # ./location_of_script_within_repo/buildscript.sh + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 + with: + category: "/language:${{matrix.language}}" diff --git a/node_modules/@mapbox/node-pre-gyp/CHANGELOG.md b/node_modules/@mapbox/node-pre-gyp/CHANGELOG.md new file mode 100644 index 0000000..990e929 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/CHANGELOG.md @@ -0,0 +1,510 @@ +# node-pre-gyp changelog + +## 1.0.11 +- Fixes dependabot alert [CVE-2021-44906](https://nvd.nist.gov/vuln/detail/CVE-2021-44906) + +## 1.0.10 +- Upgraded minimist to 1.2.6 to address dependabot alert [CVE-2021-44906](https://nvd.nist.gov/vuln/detail/CVE-2021-44906) + +## 1.0.9 +- Upgraded node-fetch to 2.6.7 to address [CVE-2022-0235](https://www.cve.org/CVERecord?id=CVE-2022-0235) +- Upgraded detect-libc to 2.0.0 to use non-blocking NodeJS(>=12) Report API + +## 1.0.8 +- Downgraded npmlog to maintain node v10 and v8 support (https://github.com/mapbox/node-pre-gyp/pull/624) + +## 1.0.7 +- Upgraded nyc and npmlog to address https://github.com/advisories/GHSA-93q8-gq69-wqmw + +## 1.0.6 +- Added node v17 to the internal node releases listing +- Upgraded various dependencies declared in package.json to latest major versions (node-fetch from 2.6.1 to 2.6.5, npmlog from 4.1.2 to 5.01, semver from 7.3.4 to 7.3.5, and tar from 6.1.0 to 6.1.11) +- Fixed bug in `staging_host` parameter (https://github.com/mapbox/node-pre-gyp/pull/590) + + +## 1.0.5 +- Fix circular reference warning with node >= v14 + +## 1.0.4 +- Added node v16 to the internal node releases listing + +## 1.0.3 +- Improved 
support configuring s3 uploads (solves https://github.com/mapbox/node-pre-gyp/issues/571) + - New options added in https://github.com/mapbox/node-pre-gyp/pull/576: 'bucket', 'region', and `s3ForcePathStyle` + +## 1.0.2 +- Fixed regression in proxy support (https://github.com/mapbox/node-pre-gyp/issues/572) + +## 1.0.1 +- Switched from mkdirp@1.0.4 to make-dir@3.1.0 to avoid this bug: https://github.com/isaacs/node-mkdirp/issues/31 + +## 1.0.0 +- Module is now name-spaced at `@mapbox/node-pre-gyp` and the original `node-pre-gyp` is deprecated. +- New: support for staging and production s3 targets (see README.md) +- BREAKING: no longer supporting `node_pre_gyp_accessKeyId` & `node_pre_gyp_secretAccessKey`, use `AWS_ACCESS_KEY_ID` & `AWS_SECRET_ACCESS_KEY` instead to authenticate against s3 for `info`, `publish`, and `unpublish` commands. +- Dropped node v6 support, added node v14 support +- Switched tests to use mapbox-owned bucket for testing +- Added coverage tracking and linting with eslint +- Added back support for symlinks inside the tarball +- Upgraded all test apps to N-API/node-addon-api +- New: support for staging and production s3 targets (see README.md) +- Added `node_pre_gyp_s3_host` env var which has priority over the `--s3_host` option or default. 
+- Replaced needle with node-fetch +- Added proxy support for node-fetch +- Upgraded to mkdirp@1.x + +## 0.17.0 +- Got travis + appveyor green again +- Added support for more node versions + +## 0.16.0 + +- Added Node 15 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/520) + +## 0.15.0 + +- Bump dependency on `mkdirp` from `^0.5.1` to `^0.5.3` (https://github.com/mapbox/node-pre-gyp/pull/492) +- Bump dependency on `needle` from `^2.2.1` to `^2.5.0` (https://github.com/mapbox/node-pre-gyp/pull/502) +- Added Node 14 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/501) + +## 0.14.0 + +- Defer modules requires in napi.js (https://github.com/mapbox/node-pre-gyp/pull/434) +- Bump dependency on `tar` from `^4` to `^4.4.2` (https://github.com/mapbox/node-pre-gyp/pull/454) +- Support extracting compiled binary from local offline mirror (https://github.com/mapbox/node-pre-gyp/pull/459) +- Added Node 13 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/483) + +## 0.13.0 + +- Added Node 12 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/449) + +## 0.12.0 + +- Fixed double-build problem with node v10 (https://github.com/mapbox/node-pre-gyp/pull/428) +- Added node 11 support in the local database (https://github.com/mapbox/node-pre-gyp/pull/422) + +## 0.11.0 + +- Fixed double-install problem with node v10 +- Significant N-API improvements (https://github.com/mapbox/node-pre-gyp/pull/405) + +## 0.10.3 + +- Now will use `request` over `needle` if request is installed. By default `needle` is used for `https`. 
This should unbreak proxy support that regressed in v0.9.0 + +## 0.10.2 + +- Fixed rc/deep-extent security vulnerability +- Fixed broken reinstall script do to incorrectly named get_best_napi_version + +## 0.10.1 + +- Fix needle error event (@medns) + +## 0.10.0 + +- Allow for a single-level module path when packing @allenluce (https://github.com/mapbox/node-pre-gyp/pull/371) +- Log warnings instead of errors when falling back @xzyfer (https://github.com/mapbox/node-pre-gyp/pull/366) +- Add Node.js v10 support to tests (https://github.com/mapbox/node-pre-gyp/pull/372) +- Remove retire.js from CI (https://github.com/mapbox/node-pre-gyp/pull/372) +- Remove support for Node.js v4 due to [EOL on April 30th, 2018](https://github.com/nodejs/Release/blob/7dd52354049cae99eed0e9fe01345b0722a86fde/schedule.json#L14) +- Update appveyor tests to install default NPM version instead of NPM v2.x for all Windows builds (https://github.com/mapbox/node-pre-gyp/pull/375) + +## 0.9.1 + +- Fixed regression (in v0.9.0) with support for http redirects @allenluce (https://github.com/mapbox/node-pre-gyp/pull/361) + +## 0.9.0 + +- Switched from using `request` to `needle` to reduce size of module deps (https://github.com/mapbox/node-pre-gyp/pull/350) + +## 0.8.0 + +- N-API support (@inspiredware) + +## 0.7.1 + +- Upgraded to tar v4.x + +## 0.7.0 + + - Updated request and hawk (#347) + - Dropped node v0.10.x support + +## 0.6.40 + + - Improved error reporting if an install fails + +## 0.6.39 + + - Support for node v9 + - Support for versioning on `{libc}` to allow binaries to work on non-glic linux systems like alpine linux + + +## 0.6.38 + + - Maintaining compatibility (for v0.6.x series) with node v0.10.x + +## 0.6.37 + + - Solved one part of #276: now now deduce the node ABI from the major version for node >= 2 even when not stored in the abi_crosswalk.json + - Fixed docs to avoid mentioning the deprecated and dangerous `prepublish` in package.json (#291) + - Add new node versions to 
crosswalk + - Ported tests to use tape instead of mocha + - Got appveyor tests passing by downgrading npm and node-gyp + +## 0.6.36 + + - Removed the running of `testbinary` during install. Because this was regressed for so long, it is too dangerous to re-enable by default. Developers needing validation can call `node-pre-gyp testbinary` directory. + - Fixed regression in v0.6.35 for electron installs (now skipping binary validation which is not yet supported for electron) + +## 0.6.35 + + - No longer recommending `npm ls` in `prepublish` (#291) + - Fixed testbinary command (#283) @szdavid92 + +## 0.6.34 + + - Added new node versions to crosswalk, including v8 + - Upgraded deps to latest versions, started using `^` instead of `~` for all deps. + +## 0.6.33 + + - Improved support for yarn + +## 0.6.32 + + - Honor npm configuration for CA bundles (@heikkipora) + - Add node-pre-gyp and npm versions to user agent (@addaleax) + - Updated various deps + - Add known node version for v7.x + +## 0.6.31 + + - Updated various deps + +## 0.6.30 + + - Update to npmlog@4.x and semver@5.3.x + - Add known node version for v6.5.0 + +## 0.6.29 + + - Add known node versions for v0.10.45, v0.12.14, v4.4.4, v5.11.1, and v6.1.0 + +## 0.6.28 + + - Now more verbose when remote binaries are not available. This is needed since npm is increasingly more quiet by default + and users need to know why builds are falling back to source compiles that might then error out. 
+ +## 0.6.27 + + - Add known node version for node v6 + - Stopped bundling dependencies + - Documented method for module authors to avoid bundling node-pre-gyp + - See https://github.com/mapbox/node-pre-gyp/tree/master#configuring for details + +## 0.6.26 + + - Skip validation for nw runtime (https://github.com/mapbox/node-pre-gyp/pull/181) via @fleg + +## 0.6.25 + + - Improved support for auto-detection of electron runtime in `node-pre-gyp.find()` + - Pull request from @enlight - https://github.com/mapbox/node-pre-gyp/pull/187 + - Add known node version for 4.4.1 and 5.9.1 + +## 0.6.24 + + - Add known node version for 5.8.0, 5.9.0, and 4.4.0. + +## 0.6.23 + + - Add known node version for 0.10.43, 0.12.11, 4.3.2, and 5.7.1. + +## 0.6.22 + + - Add known node version for 4.3.1, and 5.7.0. + +## 0.6.21 + + - Add known node version for 0.10.42, 0.12.10, 4.3.0, and 5.6.0. + +## 0.6.20 + + - Add known node version for 4.2.5, 4.2.6, 5.4.0, 5.4.1,and 5.5.0. + +## 0.6.19 + + - Add known node version for 4.2.4 + +## 0.6.18 + + - Add new known node versions for 0.10.x, 0.12.x, 4.x, and 5.x + +## 0.6.17 + + - Re-tagged to fix packaging problem of `Error: Cannot find module 'isarray'` + +## 0.6.16 + + - Added known version in crosswalk for 5.1.0. + +## 0.6.15 + + - Upgraded tar-pack (https://github.com/mapbox/node-pre-gyp/issues/182) + - Support custom binary hosting mirror (https://github.com/mapbox/node-pre-gyp/pull/170) + - Added known version in crosswalk for 4.2.2. + +## 0.6.14 + + - Added node 5.x version + +## 0.6.13 + + - Added more known node 4.x versions + +## 0.6.12 + + - Added support for [Electron](http://electron.atom.io/). Just pass the `--runtime=electron` flag when building/installing. 
Thanks @zcbenz + +## 0.6.11 + + - Added known node and io.js versions including more 3.x and 4.x versions + +## 0.6.10 + + - Added known node and io.js versions including 3.x and 4.x versions + - Upgraded `tar` dep + +## 0.6.9 + + - Upgraded `rc` dep + - Updated known io.js version: v2.4.0 + +## 0.6.8 + + - Upgraded `semver` and `rimraf` deps + - Updated known node and io.js versions + +## 0.6.7 + + - Fixed `node_abi` versions for io.js 1.1.x -> 1.8.x (should be 43, but was stored as 42) (refs https://github.com/iojs/build/issues/94) + +## 0.6.6 + + - Updated with known io.js 2.0.0 version + +## 0.6.5 + + - Now respecting `npm_config_node_gyp` (https://github.com/npm/npm/pull/4887) + - Updated to semver@4.3.2 + - Updated known node v0.12.x versions and io.js 1.x versions. + +## 0.6.4 + + - Improved support for `io.js` (@fengmk2) + - Test coverage improvements (@mikemorris) + - Fixed support for `--dist-url` that regressed in 0.6.3 + +## 0.6.3 + + - Added support for passing raw options to node-gyp using `--` separator. Flags passed after + the `--` to `node-pre-gyp configure` will be passed directly to gyp while flags passed + after the `--` will be passed directly to make/visual studio. + - Added `node-pre-gyp configure` command to be able to call `node-gyp configure` directly + - Fix issue with require validation not working on windows 7 (@edgarsilva) + +## 0.6.2 + + - Support for io.js >= v1.0.2 + - Deferred require of `request` and `tar` to help speed up command line usage of `node-pre-gyp`. + +## 0.6.1 + + - Fixed bundled `tar` version + +## 0.6.0 + + - BREAKING: node odd releases like v0.11.x now use `major.minor.patch` for `{node_abi}` instead of `NODE_MODULE_VERSION` (#124) + - Added support for `toolset` option in versioning. By default is an empty string but `--toolset` can be passed to publish or install to select alternative binaries that target a custom toolset like C++11. 
For example to target Visual Studio 2014 modules like node-sqlite3 use `--toolset=v140`. + - Added support for `--no-rollback` option to request that a failed binary test does not remove the binary module leaves it in place. + - Added support for `--update-binary` option to request an existing binary be re-installed and the check for a valid local module be skipped. + - Added support for passing build options from `npm` through `node-pre-gyp` to `node-gyp`: `--nodedir`, `--disturl`, `--python`, and `--msvs_version` + +## 0.5.31 + + - Added support for deducing node_abi for node.js runtime from previous release if the series is even + - Added support for --target=0.10.33 + +## 0.5.30 + + - Repackaged with latest bundled deps + +## 0.5.29 + + - Added support for semver `build`. + - Fixed support for downloading from urls that include `+`. + +## 0.5.28 + + - Now reporting unix style paths only in reveal command + +## 0.5.27 + + - Fixed support for auto-detecting s3 bucket name when it contains `.` - @taavo + - Fixed support for installing when path contains a `'` - @halfdan + - Ported tests to mocha + +## 0.5.26 + + - Fix node-webkit support when `--target` option is not provided + +## 0.5.25 + + - Fix bundling of deps + +## 0.5.24 + + - Updated ABI crosswalk to incldue node v0.10.30 and v0.10.31 + +## 0.5.23 + + - Added `reveal` command. Pass no options to get all versioning data as json. 
Pass a second arg to grab a single versioned property value + - Added support for `--silent` (shortcut for `--loglevel=silent`) + +## 0.5.22 + + - Fixed node-webkit versioning name (NOTE: node-webkit support still experimental) + +## 0.5.21 + + - New package to fix `shasum check failed` error with v0.5.20 + +## 0.5.20 + + - Now versioning node-webkit binaries based on major.minor.patch - assuming no compatible ABI across versions (#90) + +## 0.5.19 + + - Updated to know about more node-webkit releases + +## 0.5.18 + + - Updated to know about more node-webkit releases + +## 0.5.17 + + - Updated to know about node v0.10.29 release + +## 0.5.16 + + - Now supporting all aws-sdk configuration parameters (http://docs.aws.amazon.com/AWSJavaScriptSDK/guide/node-configuring.html) (#86) + +## 0.5.15 + + - Fixed installation of windows packages sub directories on unix systems (#84) + +## 0.5.14 + + - Finished support for cross building using `--target_platform` option (#82) + - Now skipping binary validation on install if target arch/platform do not match the host. + - Removed multi-arch validing for OS X since it required a FAT node.js binary + +## 0.5.13 + + - Fix problem in 0.5.12 whereby the wrong versions of mkdirp and semver where bundled. 
+ +## 0.5.12 + + - Improved support for node-webkit (@Mithgol) + +## 0.5.11 + + - Updated target versions listing + +## 0.5.10 + + - Fixed handling of `-debug` flag passed directory to node-pre-gyp (#72) + - Added optional second arg to `node_pre_gyp.find` to customize the default versioning options used to locate the runtime binary + - Failed install due to `testbinary` check failure no longer leaves behind binary (#70) + +## 0.5.9 + + - Fixed regression in `testbinary` command causing installs to fail on windows with 0.5.7 (#60) + +## 0.5.8 + + - Started bundling deps + +## 0.5.7 + + - Fixed the `testbinary` check, which is used to determine whether to re-download or source compile, to work even in complex dependency situations (#63) + - Exposed the internal `testbinary` command in node-pre-gyp command line tool + - Fixed minor bug so that `fallback_to_build` option is always respected + +## 0.5.6 + + - Added support for versioning on the `name` value in `package.json` (#57). + - Moved to using streams for reading tarball when publishing (#52) + +## 0.5.5 + + - Improved binary validation that also now works with node-webkit (@Mithgol) + - Upgraded test apps to work with node v0.11.x + - Improved test coverage + +## 0.5.4 + + - No longer depends on external install of node-gyp for compiling builds. + +## 0.5.3 + + - Reverted fix for debian/nodejs since it broke windows (#45) + +## 0.5.2 + + - Support for debian systems where the node binary is named `nodejs` (#45) + - Added `bin/node-pre-gyp.cmd` to be able to run command on windows locally (npm creates an .npm automatically when globally installed) + - Updated abi-crosswalk with node v0.10.26 entry. + +## 0.5.1 + + - Various minor bug fixes, several improving windows support for publishing. + +## 0.5.0 + + - Changed property names in `binary` object: now required are `module_name`, `module_path`, and `host`. 
+ - Now `module_path` supports versioning, which allows developers to opt-in to using a versioned install path (#18). + - Added `remote_path` which also supports versioning. + - Changed `remote_uri` to `host`. + +## 0.4.2 + + - Added support for `--target` flag to request cross-compile against a specific node/node-webkit version. + - Added preliminary support for node-webkit + - Fixed support for `--target_arch` option being respected in all cases. + +## 0.4.1 + + - Fixed exception when only stderr is available in binary test (@bendi / #31) + +## 0.4.0 + + - Enforce only `https:` based remote publishing access. + - Added `node-pre-gyp info` command to display listing of published binaries + - Added support for changing the directory node-pre-gyp should build in with the `-C/--directory` option. + - Added support for S3 prefixes. + +## 0.3.1 + + - Added `unpublish` command. + - Fixed module path construction in tests. + - Added ability to disable falling back to build behavior via `npm install --fallback-to-build=false` which overrides setting in a depedencies package.json `install` target. + +## 0.3.0 + + - Support for packaging all files in `module_path` directory - see `app4` for example + - Added `testpackage` command. + - Changed `clean` command to only delete `.node` not entire `build` directory since node-gyp will handle that. + - `.node` modules must be in a folder of there own since tar-pack will remove everything when it unpacks. diff --git a/node_modules/@mapbox/node-pre-gyp/LICENSE b/node_modules/@mapbox/node-pre-gyp/LICENSE new file mode 100644 index 0000000..8f5fce9 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/LICENSE @@ -0,0 +1,27 @@ +Copyright (c), Mapbox + +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + * Neither the name of node-pre-gyp nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR +CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, +EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, +PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR +PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/node_modules/@mapbox/node-pre-gyp/README.md b/node_modules/@mapbox/node-pre-gyp/README.md new file mode 100644 index 0000000..203285a --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/README.md @@ -0,0 +1,742 @@ +# @mapbox/node-pre-gyp + +#### @mapbox/node-pre-gyp makes it easy to publish and install Node.js C++ addons from binaries + +[![Build Status](https://travis-ci.com/mapbox/node-pre-gyp.svg?branch=master)](https://travis-ci.com/mapbox/node-pre-gyp) +[![Build status](https://ci.appveyor.com/api/projects/status/3nxewb425y83c0gv)](https://ci.appveyor.com/project/Mapbox/node-pre-gyp) + +`@mapbox/node-pre-gyp` stands between [npm](https://github.com/npm/npm) and [node-gyp](https://github.com/Tootallnate/node-gyp) and offers a cross-platform method of binary deployment. + +### Special note on previous package + +On Feb 9th, 2021 `@mapbox/node-pre-gyp@1.0.0` was [released](./CHANGELOG.md). Older, unscoped versions that are not part of the `@mapbox` org are deprecated and only `@mapbox/node-pre-gyp` will see updates going forward. To upgrade to the new package do: + +``` +npm uninstall node-pre-gyp --save +npm install @mapbox/node-pre-gyp --save +``` + +### Features + + - A command line tool called `node-pre-gyp` that can install your package's C++ module from a binary. + - A variety of developer targeted commands for packaging, testing, and publishing binaries. + - A JavaScript module that can dynamically require your installed binary: `require('@mapbox/node-pre-gyp').find` + +For a hello world example of a module packaged with `node-pre-gyp` see and [the wiki ](https://github.com/mapbox/node-pre-gyp/wiki/Modules-using-node-pre-gyp) for real world examples. + +## Credits + + - The module is modeled after [node-gyp](https://github.com/Tootallnate/node-gyp) by [@Tootallnate](https://github.com/Tootallnate) + - Motivation for initial development came from [@ErisDS](https://github.com/ErisDS) and the [Ghost Project](https://github.com/TryGhost/Ghost). 
+ - Development is sponsored by [Mapbox](https://www.mapbox.com/) + +## FAQ + +See the [Frequently Ask Questions](https://github.com/mapbox/node-pre-gyp/wiki/FAQ). + +## Depends + + - Node.js >= node v8.x + +## Install + +`node-pre-gyp` is designed to be installed as a local dependency of your Node.js C++ addon and accessed like: + + ./node_modules/.bin/node-pre-gyp --help + +But you can also install it globally: + + npm install @mapbox/node-pre-gyp -g + +## Usage + +### Commands + +View all possible commands: + + node-pre-gyp --help + +- clean - Remove the entire folder containing the compiled .node module +- install - Install pre-built binary for module +- reinstall - Run "clean" and "install" at once +- build - Compile the module by dispatching to node-gyp or nw-gyp +- rebuild - Run "clean" and "build" at once +- package - Pack binary into tarball +- testpackage - Test that the staged package is valid +- publish - Publish pre-built binary +- unpublish - Unpublish pre-built binary +- info - Fetch info on published binaries + +You can also chain commands: + + node-pre-gyp clean build unpublish publish info + +### Options + +Options include: + + - `-C/--directory`: run the command in this directory + - `--build-from-source`: build from source instead of using pre-built binary + - `--update-binary`: reinstall by replacing previously installed local binary with remote binary + - `--runtime=node-webkit`: customize the runtime: `node`, `electron` and `node-webkit` are the valid options + - `--fallback-to-build`: fallback to building from source if pre-built binary is not available + - `--target=0.4.0`: Pass the target node or node-webkit version to compile against + - `--target_arch=ia32`: Pass the target arch and override the host `arch`. Any value that is [supported by Node.js](https://nodejs.org/api/os.html#osarch) is valid. + - `--target_platform=win32`: Pass the target platform and override the host `platform`. 
Valid values are `linux`, `darwin`, `win32`, `sunos`, `freebsd`, `openbsd`, and `aix`. + +Both `--build-from-source` and `--fallback-to-build` can be passed alone or they can provide values. You can pass `--fallback-to-build=false` to override the option as declared in package.json. In addition to being able to pass `--build-from-source` you can also pass `--build-from-source=myapp` where `myapp` is the name of your module. + +For example: `npm install --build-from-source=myapp`. This is useful if: + + - `myapp` is referenced in the package.json of a larger app and therefore `myapp` is being installed as a dependency with `npm install`. + - The larger app also depends on other modules installed with `node-pre-gyp` + - You only want to trigger a source compile for `myapp` and the other modules. + +### Configuring + +This is a guide to configuring your module to use node-pre-gyp. + +#### 1) Add new entries to your `package.json` + + - Add `@mapbox/node-pre-gyp` to `dependencies` + - Add `aws-sdk` as a `devDependency` + - Add a custom `install` script + - Declare a `binary` object + +This looks like: + +```js + "dependencies" : { + "@mapbox/node-pre-gyp": "1.x" + }, + "devDependencies": { + "aws-sdk": "2.x" + } + "scripts": { + "install": "node-pre-gyp install --fallback-to-build" + }, + "binary": { + "module_name": "your_module", + "module_path": "./lib/binding/", + "host": "https://your_module.s3-us-west-1.amazonaws.com" + } +``` + +For a full example see [node-addon-examples's package.json](https://github.com/springmeyer/node-addon-example/blob/master/package.json). + +Let's break this down: + + - Dependencies need to list `node-pre-gyp` + - Your devDependencies should list `aws-sdk` so that you can run `node-pre-gyp publish` locally or a CI system. 
We recommend using `devDependencies` only since `aws-sdk` is large and not needed for `node-pre-gyp install` since it only uses http to fetch binaries + - Your `scripts` section should override the `install` target with `"install": "node-pre-gyp install --fallback-to-build"`. This allows node-pre-gyp to be used instead of the default npm behavior of always source compiling with `node-gyp` directly. + - Your package.json should contain a `binary` section describing key properties you provide to allow node-pre-gyp to package optimally. They are detailed below. + +Note: in the past we recommended putting `@mapbox/node-pre-gyp` in the `bundledDependencies`, but we no longer recommend this. In the past there were npm bugs (with node versions 0.10.x) that could lead to node-pre-gyp not being available at the right time during install (unless we bundled). This should no longer be the case. Also, for a time we recommended using `"preinstall": "npm install @mapbox/node-pre-gyp"` as an alternative method to avoid needing to bundle. But this did not behave predictably across all npm versions - see https://github.com/mapbox/node-pre-gyp/issues/260 for the details. So we do not recommend using `preinstall` to install `@mapbox/node-pre-gyp`. More history on this at https://github.com/strongloop/fsevents/issues/157#issuecomment-265545908. + +##### The `binary` object has three required properties + +###### module_name + +The name of your native node module. This value must: + + - Match the name passed to [the NODE_MODULE macro](http://nodejs.org/api/addons.html#addons_hello_world) + - Must be a valid C variable name (e.g. it cannot contain `-`) + - Should not include the `.node` extension. + +###### module_path + +The location your native module is placed after a build. This should be an empty directory without other Javascript files. This entire directory will be packaged in the binary tarball. 
When installing from a remote package this directory will be overwritten with the contents of the tarball. + +Note: This property supports variables based on [Versioning](#versioning). + +###### host + +A url to the remote location where you've published tarball binaries (must be `https` not `http`). + +It is highly recommended that you use Amazon S3. The reasons are: + + - Various node-pre-gyp commands like `publish` and `info` only work with an S3 host. + - S3 is a very solid hosting platform for distributing large files. + - We provide detail documentation for using [S3 hosting](#s3-hosting) with node-pre-gyp. + +Why then not require S3? Because while some applications using node-pre-gyp need to distribute binaries as large as 20-30 MB, others might have very small binaries and might wish to store them in a GitHub repo. This is not recommended, but if an author really wants to host in a non-S3 location then it should be possible. + +It should also be mentioned that there is an optional and entirely separate npm module called [node-pre-gyp-github](https://github.com/bchr02/node-pre-gyp-github) which is intended to complement node-pre-gyp and be installed along with it. It provides the ability to store and publish your binaries within your repositories GitHub Releases if you would rather not use S3 directly. Installation and usage instructions can be found [here](https://github.com/bchr02/node-pre-gyp-github), but the basic premise is that instead of using the ```node-pre-gyp publish``` command you would use ```node-pre-gyp-github publish```. + +##### The `binary` object other optional S3 properties + +If you are not using a standard s3 path like `bucket_name.s3(.-)region.amazonaws.com`, you might get an error on `publish` because node-pre-gyp extracts the region and bucket from the `host` url. For example, you may have an on-premises s3-compatible storage server, or may have configured a specific dns redirecting to an s3 endpoint. 
In these cases, you can explicitly set the `region` and `bucket` properties to tell node-pre-gyp to use these values instead of guessing from the `host` property. The following values can be used in the `binary` section: + +###### host + +The url to the remote server root location (must be `https` not `http`). + +###### bucket + +The bucket name where your tarball binaries should be located. + +###### region + +Your S3 server region. + +###### s3ForcePathStyle + +Set `s3ForcePathStyle` to true if the endpoint url should not be prefixed with the bucket name. If false (default), the server endpoint would be constructed as `bucket_name.your_server.com`. + +##### The `binary` object has optional properties + +###### remote_path + +It **is recommended** that you customize this property. This is an extra path to use for publishing and finding remote tarballs. The default value for `remote_path` is `""` meaning that if you do not provide it then all packages will be published at the base of the `host`. It is recommended to provide a value like `./{name}/v{version}` to help organize remote packages in the case that you choose to publish multiple node addons to the same `host`. + +Note: This property supports variables based on [Versioning](#versioning). + +###### package_name + +It is **not recommended** to override this property unless you are also overriding the `remote_path`. This is the versioned name of the remote tarball containing the binary `.node` module and any supporting files you've placed inside the `module_path` directory. Unless you specify `package_name` in your `package.json` then it defaults to `{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz` which allows your binary to work across node versions, platforms, and architectures. If you are using `remote_path` that is also versioned by `./{module_name}/v{version}` then you could remove these variables from the `package_name` and just use: `{node_abi}-{platform}-{arch}.tar.gz`. 
Then your remote tarball will be looked up at, for example, `https://example.com/your-module/v0.1.0/node-v11-linux-x64.tar.gz`. + +Avoiding the version of your module in the `package_name` and instead only embedding in a directory name can be useful when you want to make a quick tag of your module that does not change any C++ code. In this case you can just copy binaries to the new version behind the scenes like: + +```sh +aws s3 sync --acl public-read s3://mapbox-node-binary/sqlite3/v3.0.3/ s3://mapbox-node-binary/sqlite3/v3.0.4/ +``` + +Note: This property supports variables based on [Versioning](#versioning). + +#### 2) Add a new target to binding.gyp + +`node-pre-gyp` calls out to `node-gyp` to compile the module and passes variables along like [module_name](#module_name) and [module_path](#module_path). + +A new target must be added to `binding.gyp` that moves the compiled `.node` module from `./build/Release/module_name.node` into the directory specified by `module_path`. + +Add a target like this at the end of your `targets` list: + +```js + { + "target_name": "action_after_build", + "type": "none", + "dependencies": [ "<(module_name)" ], + "copies": [ + { + "files": [ "<(PRODUCT_DIR)/<(module_name).node" ], + "destination": "<(module_path)" + } + ] + } +``` + +For a full example see [node-addon-example's binding.gyp](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/binding.gyp). 
+ +#### 3) Dynamically require your `.node` + +Inside the main js file that requires your addon module you are likely currently doing: + +```js +var binding = require('../build/Release/binding.node'); +``` + +or: + +```js +var bindings = require('./bindings') +``` + +Change those lines to: + +```js +var binary = require('@mapbox/node-pre-gyp'); +var path = require('path'); +var binding_path = binary.find(path.resolve(path.join(__dirname,'./package.json'))); +var binding = require(binding_path); +``` + +For a full example see [node-addon-example's index.js](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/index.js#L1-L4) + +#### 4) Build and package your app + +Now build your module from source: + + npm install --build-from-source + +The `--build-from-source` tells `node-pre-gyp` to not look for a remote package and instead dispatch to node-gyp to build. + +Now `node-pre-gyp` should now also be installed as a local dependency so the command line tool it offers can be found at `./node_modules/.bin/node-pre-gyp`. + +#### 5) Test + +Now `npm test` should work just as it did before. + +#### 6) Publish the tarball + +Then package your app: + + ./node_modules/.bin/node-pre-gyp package + +Once packaged, now you can publish: + + ./node_modules/.bin/node-pre-gyp publish + +Currently the `publish` command pushes your binary to S3. This requires: + + - You have installed `aws-sdk` with `npm install aws-sdk` + - You have created a bucket already. + - The `host` points to an S3 http or https endpoint. + - You have configured node-pre-gyp to read your S3 credentials (see [S3 hosting](#s3-hosting) for details). + +You can also host your binaries elsewhere. To do this requires: + + - You manually publish the binary created by the `package` command to an `https` endpoint + - Ensure that the `host` value points to your custom `https` endpoint. 
+ +#### 7) Automate builds + +Now you need to publish builds for all the platforms and node versions you wish to support. This is best automated. + + - See [Appveyor Automation](#appveyor-automation) for how to auto-publish builds on Windows. + - See [Travis Automation](#travis-automation) for how to auto-publish builds on OS X and Linux. + +#### 8) You're done! + +Now publish your module to the npm registry. Users will now be able to install your module from a binary. + +What will happen is this: + +1. `npm install ` will pull from the npm registry +2. npm will run the `install` script which will call out to `node-pre-gyp` +3. `node-pre-gyp` will fetch the binary `.node` module and unpack in the right place +4. Assuming that all worked, you are done + +If a a binary was not available for a given platform and `--fallback-to-build` was used then `node-gyp rebuild` will be called to try to source compile the module. + +#### 9) One more option + +It may be that you want to work with two s3 buckets, one for staging and one for production; this +arrangement makes it less likely to accidentally overwrite a production binary. It also allows the production +environment to have more restrictive permissions than staging while still enabling publishing when +developing and testing. + +The binary.host property can be set at execution time. In order to do so all of the following conditions +must be true. + +- binary.host is falsey or not present +- binary.staging_host is not empty +- binary.production_host is not empty + +If any of these checks fail then the operation will not perform execution time determination of the s3 target. + +If the command being executed is either "publish" or "unpublish" then the default is set to `binary.staging_host`. In all other cases +the default is `binary.production_host`. + +The command-line options `--s3_host=staging` or `--s3_host=production` override the default. 
If `s3_host` +is present and not `staging` or `production` an exception is thrown. + +This allows installing from staging by specifying `--s3_host=staging`. And it requires specifying +`--s3_option=production` in order to publish to, or unpublish from, production, making accidental errors less likely. + +## Node-API Considerations + +[Node-API](https://nodejs.org/api/n-api.html#n_api_node_api), which was previously known as N-API, is an ABI-stable alternative to previous technologies such as [nan](https://github.com/nodejs/nan) which are tied to a specific Node runtime engine. Node-API is Node runtime engine agnostic and guarantees modules created today will continue to run, without changes, into the future. + +Using `node-pre-gyp` with Node-API projects requires a handful of additional configuration values and imposes some additional requirements. + +The most significant difference is that an Node-API module can be coded to target multiple Node-API versions. Therefore, an Node-API module must declare in its `package.json` file which Node-API versions the module is designed to run against. In addition, since multiple builds may be required for a single module, path and file names must be specified in way that avoids naming conflicts. + +### The `napi_versions` array property + +A Node-API module must declare in its `package.json` file, the Node-API versions the module is intended to support. This is accomplished by including an `napi-versions` array property in the `binary` object. For example: + +```js +"binary": { + "module_name": "your_module", + "module_path": "your_module_path", + "host": "https://your_bucket.s3-us-west-1.amazonaws.com", + "napi_versions": [1,3] + } +``` + +If the `napi_versions` array property is *not* present, `node-pre-gyp` operates as it always has. Including the `napi_versions` array property instructs `node-pre-gyp` that this is a Node-API module build. 
+ +When the `napi_versions` array property is present, `node-pre-gyp` fires off multiple operations, one for each of the Node-API versions in the array. In the example above, two operations are initiated, one for Node-API version 1 and second for Node-API version 3. How this version number is communicated is described next. + +### The `napi_build_version` value + +For each of the Node-API module operations `node-pre-gyp` initiates, it ensures that the `napi_build_version` is set appropriately. + +This value is of importance in two areas: + +1. The C/C++ code which needs to know against which Node-API version it should compile. +2. `node-pre-gyp` itself which must assign appropriate path and file names to avoid collisions. + +### Defining `NAPI_VERSION` for the C/C++ code + +The `napi_build_version` value is communicated to the C/C++ code by adding this code to the `binding.gyp` file: + +``` +"defines": [ + "NAPI_VERSION=<(napi_build_version)", +] +``` + +This ensures that `NAPI_VERSION`, an integer value, is declared appropriately to the C/C++ code for each build. + +> Note that earlier versions of this document recommended defining the symbol `NAPI_BUILD_VERSION`. `NAPI_VERSION` is preferred because it used by the Node-API C/C++ headers to configure the specific Node-API versions being requested. + +### Path and file naming requirements in `package.json` + +Since `node-pre-gyp` fires off multiple operations for each request, it is essential that path and file names be created in such a way as to avoid collisions. This is accomplished by imposing additional path and file naming requirements. + +Specifically, when performing Node-API builds, the `{napi_build_version}` text configuration value *must* be present in the `module_path` property. In addition, the `{napi_build_version}` text configuration value *must* be present in either the `remote_path` or `package_name` property. (No problem if it's in both.) 
+ +Here's an example: + +```js +"binary": { + "module_name": "your_module", + "module_path": "./lib/binding/napi-v{napi_build_version}", + "remote_path": "./{module_name}/v{version}/{configuration}/", + "package_name": "{platform}-{arch}-napi-v{napi_build_version}.tar.gz", + "host": "https://your_bucket.s3-us-west-1.amazonaws.com", + "napi_versions": [1,3] + } +``` + +## Supporting both Node-API and NAN builds + +You may have a legacy native add-on that you wish to continue supporting for those versions of Node that do not support Node-API, as you add Node-API support for later Node versions. This can be accomplished by specifying the `node_napi_label` configuration value in the package.json `binary.package_name` property. + +Placing the configuration value `node_napi_label` in the package.json `binary.package_name` property instructs `node-pre-gyp` to build all viable Node-API binaries supported by the current Node instance. If the current Node instance does not support Node-API, `node-pre-gyp` will request a traditional, non-Node-API build. + +The configuration value `node_napi_label` is set by `node-pre-gyp` to the type of build created, `napi` or `node`, and the version number. For Node-API builds, the string contains the Node-API version nad has values like `napi-v3`. For traditional, non-Node-API builds, the string contains the ABI version with values like `node-v46`. + +Here's how the `binary` configuration above might be changed to support both Node-API and NAN builds: + +```js +"binary": { + "module_name": "your_module", + "module_path": "./lib/binding/{node_napi_label}", + "remote_path": "./{module_name}/v{version}/{configuration}/", + "package_name": "{platform}-{arch}-{node_napi_label}.tar.gz", + "host": "https://your_bucket.s3-us-west-1.amazonaws.com", + "napi_versions": [1,3] + } +``` + +The C/C++ symbol `NAPI_VERSION` can be used to distinguish Node-API and non-Node-API builds. 
The value of `NAPI_VERSION` is set to the integer Node-API version for Node-API builds and is set to `0` for non-Node-API builds. + +For example: + +```C +#if NAPI_VERSION +// Node-API code goes here +#else +// NAN code goes here +#endif +``` + +### Two additional configuration values + +The following two configuration values, which were implemented in previous versions of `node-pre-gyp`, continue to exist, but have been replaced by the `node_napi_label` configuration value described above. + +1. `napi_version` If Node-API is supported by the currently executing Node instance, this value is the Node-API version number supported by Node. If Node-API is not supported, this value is an empty string. + +2. `node_abi_napi` If the value returned for `napi_version` is non empty, this value is `'napi'`. If the value returned for `napi_version` is empty, this value is the value returned for `node_abi`. + +These values are present for use in the `binding.gyp` file and may be used as `{napi_version}` and `{node_abi_napi}` for text substituion in the `binary` properties of the `package.json` file. + +## S3 Hosting + +You can host wherever you choose but S3 is cheap, `node-pre-gyp publish` expects it, and S3 can be integrated well with [Travis.ci](http://travis-ci.org) to automate builds for OS X and Ubuntu, and with [Appveyor](http://appveyor.com) to automate builds for Windows. Here is an approach to do this: + +First, get setup locally and test the workflow: + +#### 1) Create an S3 bucket + +And have your **key** and **secret key** ready for writing to the bucket. + +It is recommended to create a IAM user with a policy that only gives permissions to the specific bucket you plan to publish to. 
This can be done in the [IAM console](https://console.aws.amazon.com/iam/) by: 1) adding a new user, 2) choosing `Attach User Policy`, 3) Using the `Policy Generator`, 4) selecting `Amazon S3` for the service, 5) adding the actions: `DeleteObject`, `GetObject`, `GetObjectAcl`, `ListBucket`, `HeadBucket`, `PutObject`, `PutObjectAcl`, 6) adding an ARN of `arn:aws:s3:::bucket/*` (replacing `bucket` with your bucket name), and finally 7) clicking `Add Statement` and saving the policy. It should generate a policy like: + +```js +{ + "Version": "2012-10-17", + "Statement": [ + { + "Sid": "objects", + "Effect": "Allow", + "Action": [ + "s3:PutObject", + "s3:GetObjectAcl", + "s3:GetObject", + "s3:DeleteObject", + "s3:PutObjectAcl" + ], + "Resource": "arn:aws:s3:::your-bucket-name/*" + }, + { + "Sid": "bucket", + "Effect": "Allow", + "Action": "s3:ListBucket", + "Resource": "arn:aws:s3:::your-bucket-name" + }, + { + "Sid": "buckets", + "Effect": "Allow", + "Action": "s3:HeadBucket", + "Resource": "*" + } + ] +} +``` + +#### 2) Install node-pre-gyp + +Either install it globally: + + npm install node-pre-gyp -g + +Or put the local version on your PATH + + export PATH=`pwd`/node_modules/.bin/:$PATH + +#### 3) Configure AWS credentials + +It is recommended to configure the AWS JS SDK v2 used internally by `node-pre-gyp` by setting these environment variables: + +- AWS_ACCESS_KEY_ID +- AWS_SECRET_ACCESS_KEY + +But also you can also use the `Shared Config File` mentioned [in the AWS JS SDK v2 docs](https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/configuring-the-jssdk.html) + +#### 4) Package and publish your build + +Install the `aws-sdk`: + + npm install aws-sdk + +Then publish: + + node-pre-gyp package publish + +Note: if you hit an error like `Hostname/IP doesn't match certificate's altnames` it may mean that you need to provide the `region` option in your config. 
+ +## Appveyor Automation + +[Appveyor](http://www.appveyor.com/) can build binaries and publish the results per commit and supports: + + - Windows Visual Studio 2013 and related compilers + - Both 64 bit (x64) and 32 bit (x86) build configurations + - Multiple Node.js versions + +For an example of doing this see [node-sqlite3's appveyor.yml](https://github.com/mapbox/node-sqlite3/blob/master/appveyor.yml). + +Below is a guide to getting set up: + +#### 1) Create a free Appveyor account + +Go to https://ci.appveyor.com/signup/free and sign in with your GitHub account. + +#### 2) Create a new project + +Go to https://ci.appveyor.com/projects/new and select the GitHub repo for your module + +#### 3) Add appveyor.yml and push it + +Once you have committed an `appveyor.yml` ([appveyor.yml reference](http://www.appveyor.com/docs/appveyor-yml)) to your GitHub repo and pushed it AppVeyor should automatically start building your project. + +#### 4) Create secure variables + +Encrypt your S3 AWS keys by going to and hitting the `encrypt` button. + +Then paste the result into your `appveyor.yml` + +```yml +environment: + AWS_ACCESS_KEY_ID: + secure: Dn9HKdLNYvDgPdQOzRq/DqZ/MPhjknRHB1o+/lVU8MA= + AWS_SECRET_ACCESS_KEY: + secure: W1rwNoSnOku1r+28gnoufO8UA8iWADmL1LiiwH9IOkIVhDTNGdGPJqAlLjNqwLnL +``` + +NOTE: keys are per account but not per repo (this is difference than Travis where keys are per repo but not related to the account used to encrypt them). + +#### 5) Hook up publishing + +Just put `node-pre-gyp package publish` in your `appveyor.yml` after `npm install`. + +#### 6) Publish when you want + +You might wish to publish binaries only on a specific commit. 
To do this you could borrow from the [Travis CI idea of commit keywords](http://about.travis-ci.org/docs/user/how-to-skip-a-build/) and add special handling for commit messages with `[publish binary]`: + + SET CM=%APPVEYOR_REPO_COMMIT_MESSAGE% + if not "%CM%" == "%CM:[publish binary]=%" node-pre-gyp --msvs_version=2013 publish + +If your commit message contains special characters (e.g. `&`) this method might fail. An alternative is to use PowerShell, which gives you additional possibilities, like ignoring case by using `ToLower()`: + + ps: if($env:APPVEYOR_REPO_COMMIT_MESSAGE.ToLower().Contains('[publish binary]')) { node-pre-gyp --msvs_version=2013 publish } + +Remember this publishing is not the same as `npm publish`. We're just talking about the binary module here and not your entire npm package. + +## Travis Automation + +[Travis](https://travis-ci.org/) can push to S3 after a successful build and supports both: + + - Ubuntu Precise and OS X (64 bit) + - Multiple Node.js versions + +For an example of doing this see [node-add-example's .travis.yml](https://github.com/springmeyer/node-addon-example/blob/2ff60a8ded7f042864ad21db00c3a5a06cf47075/.travis.yml). + +Note: if you need 32 bit binaries, this can be done from a 64 bit Travis machine. See [the node-sqlite3 scripts for an example of doing this](https://github.com/mapbox/node-sqlite3/blob/bae122aa6a2b8a45f6b717fab24e207740e32b5d/scripts/build_against_node.sh#L54-L74). + +Below is a guide to getting set up: + +#### 1) Install the Travis gem + + gem install travis + +#### 2) Create secure variables + +Make sure you run this command from within the directory of your module. 
+ +Use `travis-encrypt` like: + + travis encrypt AWS_ACCESS_KEY_ID=${node_pre_gyp_accessKeyId} + travis encrypt AWS_SECRET_ACCESS_KEY=${node_pre_gyp_secretAccessKey} + +Then put those values in your `.travis.yml` like: + +```yaml +env: + global: + - secure: F+sEL/v56CzHqmCSSES4pEyC9NeQlkoR0Gs/ZuZxX1ytrj8SKtp3MKqBj7zhIclSdXBz4Ev966Da5ctmcTd410p0b240MV6BVOkLUtkjZJyErMBOkeb8n8yVfSoeMx8RiIhBmIvEn+rlQq+bSFis61/JkE9rxsjkGRZi14hHr4M= + - secure: o2nkUQIiABD139XS6L8pxq3XO5gch27hvm/gOdV+dzNKc/s2KomVPWcOyXNxtJGhtecAkABzaW8KHDDi5QL1kNEFx6BxFVMLO8rjFPsMVaBG9Ks6JiDQkkmrGNcnVdxI/6EKTLHTH5WLsz8+J7caDBzvKbEfTux5EamEhxIWgrI= +``` + +More details on Travis encryption at http://about.travis-ci.org/docs/user/encryption-keys/. + +#### 3) Hook up publishing + +Just put `node-pre-gyp package publish` in your `.travis.yml` after `npm install`. + +##### OS X publishing + +If you want binaries for OS X in addition to linux you can enable [multi-os for Travis](http://docs.travis-ci.com/user/multi-os/#Setting-.travis.yml) + +Use a configuration like: + +```yml + +language: cpp + +os: +- linux +- osx + +env: + matrix: + - NODE_VERSION="4" + - NODE_VERSION="6" + +before_install: +- rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm +- source ~/.nvm/nvm.sh +- nvm install $NODE_VERSION +- nvm use $NODE_VERSION +``` + +See [Travis OS X Gotchas](#travis-os-x-gotchas) for why we replace `language: node_js` and `node_js:` sections with `language: cpp` and a custom matrix. + +Also create platform specific sections for any deps that need install. For example if you need libpng: + +```yml +- if [ $(uname -s) == 'Linux' ]; then apt-get install libpng-dev; fi; +- if [ $(uname -s) == 'Darwin' ]; then brew install libpng; fi; +``` + +For detailed multi-OS examples see [node-mapnik](https://github.com/mapnik/node-mapnik/blob/master/.travis.yml) and [node-sqlite3](https://github.com/mapbox/node-sqlite3/blob/master/.travis.yml). 
+ +##### Travis OS X Gotchas + +First, unlike the Travis Linux machines, the OS X machines do not put `node-pre-gyp` on PATH by default. To do so you will need to: + +```sh +export PATH=$(pwd)/node_modules/.bin:${PATH} +``` + +Second, the OS X machines do not support using a matrix for installing different Node.js versions. So you need to bootstrap the installation of Node.js in a cross platform way. + +By doing: + +```yml +env: + matrix: + - NODE_VERSION="4" + - NODE_VERSION="6" + +before_install: + - rm -rf ~/.nvm/ && git clone --depth 1 https://github.com/creationix/nvm.git ~/.nvm + - source ~/.nvm/nvm.sh + - nvm install $NODE_VERSION + - nvm use $NODE_VERSION +``` + +You can easily recreate the previous behavior of this matrix: + +```yml +node_js: + - "4" + - "6" +``` + +#### 4) Publish when you want + +You might wish to publish binaries only on a specific commit. To do this you could borrow from the [Travis CI idea of commit keywords](http://about.travis-ci.org/docs/user/how-to-skip-a-build/) and add special handling for commit messages with `[publish binary]`: + + COMMIT_MESSAGE=$(git log --format=%B --no-merges -n 1 | tr -d '\n') + if [[ ${COMMIT_MESSAGE} =~ "[publish binary]" ]]; then node-pre-gyp publish; fi; + +Then you can trigger new binaries to be built like: + + git commit -a -m "[publish binary]" + +Or, if you don't have any changes to make simply run: + + git commit --allow-empty -m "[publish binary]" + +WARNING: if you are working in a pull request and publishing binaries from there then you will want to avoid double publishing when Travis CI builds both the `push` and `pr`. You only want to run the publish on the `push` commit. See https://github.com/Project-OSRM/node-osrm/blob/8eb837abe2e2e30e595093d16e5354bc5c573575/scripts/is_pr_merge.sh which is called from https://github.com/Project-OSRM/node-osrm/blob/8eb837abe2e2e30e595093d16e5354bc5c573575/scripts/publish.sh for an example of how to do this. 
+ +Remember this publishing is not the same as `npm publish`. We're just talking about the binary module here and not your entire npm package. To automate the publishing of your entire package to npm on Travis see http://about.travis-ci.org/docs/user/deployment/npm/ + +# Versioning + +The `binary` properties of `module_path`, `remote_path`, and `package_name` support variable substitution. The strings are evaluated by `node-pre-gyp` depending on your system and any custom build flags you passed. + + - `node_abi`: The node C++ `ABI` number. This value is available in Javascript as `process.versions.modules` as of [`>= v0.10.4 >= v0.11.7`](https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e) and in C++ as the `NODE_MODULE_VERSION` define much earlier. For versions of Node before this was available we fallback to the V8 major and minor version. + - `platform` matches node's `process.platform` like `linux`, `darwin`, and `win32` unless the user passed the `--target_platform` option to override. + - `arch` matches node's `process.arch` like `x64` or `ia32` unless the user passes the `--target_arch` option to override. + - `libc` matches `require('detect-libc').family` like `glibc` or `musl` unless the user passes the `--target_libc` option to override. + - `configuration` - Either 'Release' or 'Debug' depending on if `--debug` is passed during the build. + - `module_name` - the `binary.module_name` attribute from `package.json`. + - `version` - the semver `version` value for your module from `package.json` (NOTE: ignores the `semver.build` property). + - `major`, `minor`, `patch`, and `prelease` match the individual semver values for your module's `version` + - `build` - the sevmer `build` value. For example it would be `this.that` if your package.json `version` was `v1.0.0+this.that` + - `prerelease` - the semver `prerelease` value. 
For example it would be `alpha.beta` if your package.json `version` was `v1.0.0-alpha.beta` + + +The options are visible in the code at + +# Download binary files from a mirror + +S3 is broken in China for the well known reason. + +Using the `npm` config argument: `--{module_name}_binary_host_mirror` can download binary files through a mirror, `-` in `module_name` will be replaced with `_`. + +e.g.: Install [v8-profiler](https://www.npmjs.com/package/v8-profiler) from `npm`. + +```bash +$ npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/ +``` + +e.g.: Install [canvas-prebuilt](https://www.npmjs.com/package/canvas-prebuilt) from `npm`. + +```bash +$ npm install canvas-prebuilt --canvas_prebuilt_binary_host_mirror=https://npm.taobao.org/mirrors/canvas-prebuilt/ +``` diff --git a/node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp b/node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp new file mode 100644 index 0000000..c38d34d --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp @@ -0,0 +1,4 @@ +#!/usr/bin/env node +'use strict'; + +require('../lib/main'); diff --git a/node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp.cmd b/node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp.cmd new file mode 100644 index 0000000..46e14b5 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/bin/node-pre-gyp.cmd @@ -0,0 +1,2 @@ +@echo off +node "%~dp0\node-pre-gyp" %* diff --git a/node_modules/@mapbox/node-pre-gyp/contributing.md b/node_modules/@mapbox/node-pre-gyp/contributing.md new file mode 100644 index 0000000..4038fa6 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/contributing.md @@ -0,0 +1,10 @@ +# Contributing + + +### Releasing a new version: + +- Ensure tests are passing on travis and appveyor +- Run `node scripts/abi_crosswalk.js` and commit any changes +- Update the changelog +- Tag a new release like: `git tag -a v0.6.34 -m "tagging v0.6.34" && git push --tags` +- Run `npm publish` diff --git 
a/node_modules/@mapbox/node-pre-gyp/lib/build.js b/node_modules/@mapbox/node-pre-gyp/lib/build.js new file mode 100644 index 0000000..e8a1459 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/build.js @@ -0,0 +1,51 @@ +'use strict'; + +module.exports = exports = build; + +exports.usage = 'Attempts to compile the module by dispatching to node-gyp or nw-gyp'; + +const napi = require('./util/napi.js'); +const compile = require('./util/compile.js'); +const handle_gyp_opts = require('./util/handle_gyp_opts.js'); +const configure = require('./configure.js'); + +function do_build(gyp, argv, callback) { + handle_gyp_opts(gyp, argv, (err, result) => { + let final_args = ['build'].concat(result.gyp).concat(result.pre); + if (result.unparsed.length > 0) { + final_args = final_args. + concat(['--']). + concat(result.unparsed); + } + if (!err && result.opts.napi_build_version) { + napi.swap_build_dir_in(result.opts.napi_build_version); + } + compile.run_gyp(final_args, result.opts, (err2) => { + if (result.opts.napi_build_version) { + napi.swap_build_dir_out(result.opts.napi_build_version); + } + return callback(err2); + }); + }); +} + +function build(gyp, argv, callback) { + + // Form up commands to pass to node-gyp: + // We map `node-pre-gyp build` to `node-gyp configure build` so that we do not + // trigger a clean and therefore do not pay the penalty of a full recompile + if (argv.length && (argv.indexOf('rebuild') > -1)) { + argv.shift(); // remove `rebuild` + // here we map `node-pre-gyp rebuild` to `node-gyp rebuild` which internally means + // "clean + configure + build" and triggers a full recompile + compile.run_gyp(['clean'], {}, (err3) => { + if (err3) return callback(err3); + configure(gyp, argv, (err4) => { + if (err4) return callback(err4); + return do_build(gyp, argv, callback); + }); + }); + } else { + return do_build(gyp, argv, callback); + } +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/clean.js b/node_modules/@mapbox/node-pre-gyp/lib/clean.js 
new file mode 100644 index 0000000..e693392 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/clean.js @@ -0,0 +1,31 @@ +'use strict'; + +module.exports = exports = clean; + +exports.usage = 'Removes the entire folder containing the compiled .node module'; + +const rm = require('rimraf'); +const exists = require('fs').exists || require('path').exists; +const versioning = require('./util/versioning.js'); +const napi = require('./util/napi.js'); +const path = require('path'); + +function clean(gyp, argv, callback) { + const package_json = gyp.package_json; + const napi_build_version = napi.get_napi_build_version_from_command_args(argv); + const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + const to_delete = opts.module_path; + if (!to_delete) { + return callback(new Error('module_path is empty, refusing to delete')); + } else if (path.normalize(to_delete) === path.normalize(process.cwd())) { + return callback(new Error('module_path is not set, refusing to delete')); + } else { + exists(to_delete, (found) => { + if (found) { + if (!gyp.opts.silent_clean) console.log('[' + package_json.name + '] Removing "%s"', to_delete); + return rm(to_delete, callback); + } + return callback(); + }); + } +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/configure.js b/node_modules/@mapbox/node-pre-gyp/lib/configure.js new file mode 100644 index 0000000..1337c0c --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/configure.js @@ -0,0 +1,52 @@ +'use strict'; + +module.exports = exports = configure; + +exports.usage = 'Attempts to configure node-gyp or nw-gyp build'; + +const napi = require('./util/napi.js'); +const compile = require('./util/compile.js'); +const handle_gyp_opts = require('./util/handle_gyp_opts.js'); + +function configure(gyp, argv, callback) { + handle_gyp_opts(gyp, argv, (err, result) => { + let final_args = result.gyp.concat(result.pre); + // pull select node-gyp configure options out of the npm environ + const 
known_gyp_args = ['dist-url', 'python', 'nodedir', 'msvs_version']; + known_gyp_args.forEach((key) => { + const val = gyp.opts[key] || gyp.opts[key.replace('-', '_')]; + if (val) { + final_args.push('--' + key + '=' + val); + } + }); + // --ensure=false tell node-gyp to re-install node development headers + // but it is only respected by node-gyp install, so we have to call install + // as a separate step if the user passes it + if (gyp.opts.ensure === false) { + const install_args = final_args.concat(['install', '--ensure=false']); + compile.run_gyp(install_args, result.opts, (err2) => { + if (err2) return callback(err2); + if (result.unparsed.length > 0) { + final_args = final_args. + concat(['--']). + concat(result.unparsed); + } + compile.run_gyp(['configure'].concat(final_args), result.opts, (err3) => { + return callback(err3); + }); + }); + } else { + if (result.unparsed.length > 0) { + final_args = final_args. + concat(['--']). + concat(result.unparsed); + } + compile.run_gyp(['configure'].concat(final_args), result.opts, (err4) => { + if (!err4 && result.opts.napi_build_version) { + napi.swap_build_dir_out(result.opts.napi_build_version); + } + return callback(err4); + }); + } + }); +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/info.js b/node_modules/@mapbox/node-pre-gyp/lib/info.js new file mode 100644 index 0000000..ba22f32 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/info.js @@ -0,0 +1,38 @@ +'use strict'; + +module.exports = exports = info; + +exports.usage = 'Lists all published binaries (requires aws-sdk)'; + +const log = require('npmlog'); +const versioning = require('./util/versioning.js'); +const s3_setup = require('./util/s3_setup.js'); + +function info(gyp, argv, callback) { + const package_json = gyp.package_json; + const opts = versioning.evaluate(package_json, gyp.opts); + const config = {}; + s3_setup.detect(opts, config); + const s3 = s3_setup.get_s3(config); + const s3_opts = { + Bucket: config.bucket, + Prefix: 
config.prefix + }; + s3.listObjects(s3_opts, (err, meta) => { + if (err && err.code === 'NotFound') { + return callback(new Error('[' + package_json.name + '] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + config.prefix)); + } else if (err) { + return callback(err); + } else { + log.verbose(JSON.stringify(meta, null, 1)); + if (meta && meta.Contents) { + meta.Contents.forEach((obj) => { + console.log(obj.Key); + }); + } else { + console.error('[' + package_json.name + '] No objects found at https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + config.prefix); + } + return callback(); + } + }); +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/install.js b/node_modules/@mapbox/node-pre-gyp/lib/install.js new file mode 100644 index 0000000..617dd86 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/install.js @@ -0,0 +1,235 @@ +'use strict'; + +module.exports = exports = install; + +exports.usage = 'Attempts to install pre-built binary for module'; + +const fs = require('fs'); +const path = require('path'); +const log = require('npmlog'); +const existsAsync = fs.exists || path.exists; +const versioning = require('./util/versioning.js'); +const napi = require('./util/napi.js'); +const makeDir = require('make-dir'); +// for fetching binaries +const fetch = require('node-fetch'); +const tar = require('tar'); + +let npgVersion = 'unknown'; +try { + // Read own package.json to get the current node-pre-pyp version. + const ownPackageJSON = fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8'); + npgVersion = JSON.parse(ownPackageJSON).version; +} catch (e) { + // do nothing +} + +function place_binary(uri, targetDir, opts, callback) { + log.http('GET', uri); + + // Try getting version info from the currently running npm. 
+ const envVersionInfo = process.env.npm_config_user_agent || + 'node ' + process.version; + + const sanitized = uri.replace('+', '%2B'); + const requestOpts = { + uri: sanitized, + headers: { + 'User-Agent': 'node-pre-gyp (v' + npgVersion + ', ' + envVersionInfo + ')' + }, + follow_max: 10 + }; + + if (opts.cafile) { + try { + requestOpts.ca = fs.readFileSync(opts.cafile); + } catch (e) { + return callback(e); + } + } else if (opts.ca) { + requestOpts.ca = opts.ca; + } + + const proxyUrl = opts.proxy || + process.env.http_proxy || + process.env.HTTP_PROXY || + process.env.npm_config_proxy; + let agent; + if (proxyUrl) { + const ProxyAgent = require('https-proxy-agent'); + agent = new ProxyAgent(proxyUrl); + log.http('download', 'proxy agent configured using: "%s"', proxyUrl); + } + + fetch(sanitized, { agent }) + .then((res) => { + if (!res.ok) { + throw new Error(`response status ${res.status} ${res.statusText} on ${sanitized}`); + } + const dataStream = res.body; + + return new Promise((resolve, reject) => { + let extractions = 0; + const countExtractions = (entry) => { + extractions += 1; + log.info('install', 'unpacking %s', entry.path); + }; + + dataStream.pipe(extract(targetDir, countExtractions)) + .on('error', (e) => { + reject(e); + }); + dataStream.on('end', () => { + resolve(`extracted file count: ${extractions}`); + }); + dataStream.on('error', (e) => { + reject(e); + }); + }); + }) + .then((text) => { + log.info(text); + callback(); + }) + .catch((e) => { + log.error(`install ${e.message}`); + callback(e); + }); +} + +function extract(to, onentry) { + return tar.extract({ + cwd: to, + strip: 1, + onentry + }); +} + +function extract_from_local(from, targetDir, callback) { + if (!fs.existsSync(from)) { + return callback(new Error('Cannot find file ' + from)); + } + log.info('Found local file to extract from ' + from); + + // extract helpers + let extractCount = 0; + function countExtractions(entry) { + extractCount += 1; + log.info('install', 
'unpacking ' + entry.path); + } + function afterExtract(err) { + if (err) return callback(err); + if (extractCount === 0) { + return callback(new Error('There was a fatal problem while extracting the tarball')); + } + log.info('tarball', 'done parsing tarball'); + callback(); + } + + fs.createReadStream(from).pipe(extract(targetDir, countExtractions)) + .on('close', afterExtract) + .on('error', afterExtract); +} + +function do_build(gyp, argv, callback) { + const args = ['rebuild'].concat(argv); + gyp.todo.push({ name: 'build', args: args }); + process.nextTick(callback); +} + +function print_fallback_error(err, opts, package_json) { + const fallback_message = ' (falling back to source compile with node-gyp)'; + let full_message = ''; + if (err.statusCode !== undefined) { + // If we got a network response it but failed to download + // it means remote binaries are not available, so let's try to help + // the user/developer with the info to debug why + full_message = 'Pre-built binaries not found for ' + package_json.name + '@' + package_json.version; + full_message += ' and ' + opts.runtime + '@' + (opts.target || process.versions.node) + ' (' + opts.node_abi + ' ABI, ' + opts.libc + ')'; + full_message += fallback_message; + log.warn('Tried to download(' + err.statusCode + '): ' + opts.hosted_tarball); + log.warn(full_message); + log.http(err.message); + } else { + // If we do not have a statusCode that means an unexpected error + // happened and prevented an http response, so we output the exact error + full_message = 'Pre-built binaries not installable for ' + package_json.name + '@' + package_json.version; + full_message += ' and ' + opts.runtime + '@' + (opts.target || process.versions.node) + ' (' + opts.node_abi + ' ABI, ' + opts.libc + ')'; + full_message += fallback_message; + log.warn(full_message); + log.warn('Hit error ' + err.message); + } +} + +// +// install +// +function install(gyp, argv, callback) { + const package_json = gyp.package_json; + const 
napi_build_version = napi.get_napi_build_version_from_command_args(argv); + const source_build = gyp.opts['build-from-source'] || gyp.opts.build_from_source; + const update_binary = gyp.opts['update-binary'] || gyp.opts.update_binary; + const should_do_source_build = source_build === package_json.name || (source_build === true || source_build === 'true'); + if (should_do_source_build) { + log.info('build', 'requesting source compile'); + return do_build(gyp, argv, callback); + } else { + const fallback_to_build = gyp.opts['fallback-to-build'] || gyp.opts.fallback_to_build; + let should_do_fallback_build = fallback_to_build === package_json.name || (fallback_to_build === true || fallback_to_build === 'true'); + // but allow override from npm + if (process.env.npm_config_argv) { + const cooked = JSON.parse(process.env.npm_config_argv).cooked; + const match = cooked.indexOf('--fallback-to-build'); + if (match > -1 && cooked.length > match && cooked[match + 1] === 'false') { + should_do_fallback_build = false; + log.info('install', 'Build fallback disabled via npm flag: --fallback-to-build=false'); + } + } + let opts; + try { + opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + } catch (err) { + return callback(err); + } + + opts.ca = gyp.opts.ca; + opts.cafile = gyp.opts.cafile; + + const from = opts.hosted_tarball; + const to = opts.module_path; + const binary_module = path.join(to, opts.module_name + '.node'); + existsAsync(binary_module, (found) => { + if (!update_binary) { + if (found) { + console.log('[' + package_json.name + '] Success: "' + binary_module + '" already installed'); + console.log('Pass --update-binary to reinstall or --build-from-source to recompile'); + return callback(); + } + log.info('check', 'checked for "' + binary_module + '" (not found)'); + } + + makeDir(to).then(() => { + const fileName = from.startsWith('file://') && from.slice('file://'.length); + if (fileName) { + extract_from_local(fileName, to, after_place); + 
} else { + place_binary(from, to, opts, after_place); + } + }).catch((err) => { + after_place(err); + }); + + function after_place(err) { + if (err && should_do_fallback_build) { + print_fallback_error(err, opts, package_json); + return do_build(gyp, argv, callback); + } else if (err) { + return callback(err); + } else { + console.log('[' + package_json.name + '] Success: "' + binary_module + '" is installed via remote'); + return callback(); + } + } + }); + } +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/main.js b/node_modules/@mapbox/node-pre-gyp/lib/main.js new file mode 100644 index 0000000..bae32ac --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/main.js @@ -0,0 +1,125 @@ +'use strict'; + +/** + * Set the title. + */ + +process.title = 'node-pre-gyp'; + +const node_pre_gyp = require('../'); +const log = require('npmlog'); + +/** + * Process and execute the selected commands. + */ + +const prog = new node_pre_gyp.Run({ argv: process.argv }); +let completed = false; + +if (prog.todo.length === 0) { + if (~process.argv.indexOf('-v') || ~process.argv.indexOf('--version')) { + console.log('v%s', prog.version); + process.exit(0); + } else if (~process.argv.indexOf('-h') || ~process.argv.indexOf('--help')) { + console.log('%s', prog.usage()); + process.exit(0); + } + console.log('%s', prog.usage()); + process.exit(1); +} + +// if --no-color is passed +if (prog.opts && Object.hasOwnProperty.call(prog, 'color') && !prog.opts.color) { + log.disableColor(); +} + +log.info('it worked if it ends with', 'ok'); +log.verbose('cli', process.argv); +log.info('using', process.title + '@%s', prog.version); +log.info('using', 'node@%s | %s | %s', process.versions.node, process.platform, process.arch); + + +/** + * Change dir if -C/--directory was passed. 
+ */ + +const dir = prog.opts.directory; +if (dir) { + const fs = require('fs'); + try { + const stat = fs.statSync(dir); + if (stat.isDirectory()) { + log.info('chdir', dir); + process.chdir(dir); + } else { + log.warn('chdir', dir + ' is not a directory'); + } + } catch (e) { + if (e.code === 'ENOENT') { + log.warn('chdir', dir + ' is not a directory'); + } else { + log.warn('chdir', 'error during chdir() "%s"', e.message); + } + } +} + +function run() { + const command = prog.todo.shift(); + if (!command) { + // done! + completed = true; + log.info('ok'); + return; + } + + // set binary.host when appropriate. host determines the s3 target bucket. + const target = prog.setBinaryHostProperty(command.name); + if (target && ['install', 'publish', 'unpublish', 'info'].indexOf(command.name) >= 0) { + log.info('using binary.host: ' + prog.package_json.binary.host); + } + + prog.commands[command.name](command.args, function(err) { + if (err) { + log.error(command.name + ' error'); + log.error('stack', err.stack); + errorMessage(); + log.error('not ok'); + console.log(err.message); + return process.exit(1); + } + const args_array = [].slice.call(arguments, 1); + if (args_array.length) { + console.log.apply(console, args_array); + } + // now run the next command in the queue + process.nextTick(run); + }); +} + +process.on('exit', (code) => { + if (!completed && !code) { + log.error('Completion callback never invoked!'); + errorMessage(); + process.exit(6); + } +}); + +process.on('uncaughtException', (err) => { + log.error('UNCAUGHT EXCEPTION'); + log.error('stack', err.stack); + errorMessage(); + process.exit(7); +}); + +function errorMessage() { + // copied from npm's lib/util/error-handler.js + const os = require('os'); + log.error('System', os.type() + ' ' + os.release()); + log.error('command', process.argv.map(JSON.stringify).join(' ')); + log.error('cwd', process.cwd()); + log.error('node -v', process.version); + log.error(process.title + ' -v', 'v' + 
prog.package.version); +} + +// start running the given commands! +run(); diff --git a/node_modules/@mapbox/node-pre-gyp/lib/node-pre-gyp.js b/node_modules/@mapbox/node-pre-gyp/lib/node-pre-gyp.js new file mode 100644 index 0000000..dc18e74 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/node-pre-gyp.js @@ -0,0 +1,309 @@ +'use strict'; + +/** + * Module exports. + */ + +module.exports = exports; + +/** + * Module dependencies. + */ + +// load mocking control function for accessing s3 via https. the function is a noop always returning +// false if not mocking. +exports.mockS3Http = require('./util/s3_setup').get_mockS3Http(); +exports.mockS3Http('on'); +const mocking = exports.mockS3Http('get'); + + +const fs = require('fs'); +const path = require('path'); +const nopt = require('nopt'); +const log = require('npmlog'); +log.disableProgress(); +const napi = require('./util/napi.js'); + +const EE = require('events').EventEmitter; +const inherits = require('util').inherits; +const cli_commands = [ + 'clean', + 'install', + 'reinstall', + 'build', + 'rebuild', + 'package', + 'testpackage', + 'publish', + 'unpublish', + 'info', + 'testbinary', + 'reveal', + 'configure' +]; +const aliases = {}; + +// differentiate node-pre-gyp's logs from npm's +log.heading = 'node-pre-gyp'; + +if (mocking) { + log.warn(`mocking s3 to ${process.env.node_pre_gyp_mock_s3}`); +} + +// this is a getter to avoid circular reference warnings with node v14. +Object.defineProperty(exports, 'find', { + get: function() { + return require('./pre-binding').find; + }, + enumerable: true +}); + +// in the following, "my_module" is using node-pre-gyp to +// prebuild and install pre-built binaries. "main_module" +// is using "my_module". +// +// "bin/node-pre-gyp" invokes Run() without a path. the +// expectation is that the working directory is the package +// root "my_module". this is true because in all cases npm is +// executing a script in the context of "my_module". 
+// +// "pre-binding.find()" is executed by "my_module" but in the +// context of "main_module". this is because "main_module" is +// executing and requires "my_module" which is then executing +// "pre-binding.find()" via "node-pre-gyp.find()", so the working +// directory is that of "main_module". +// +// that's why "find()" must pass the path to package.json. +// +function Run({ package_json_path = './package.json', argv }) { + this.package_json_path = package_json_path; + this.commands = {}; + + const self = this; + cli_commands.forEach((command) => { + self.commands[command] = function(argvx, callback) { + log.verbose('command', command, argvx); + return require('./' + command)(self, argvx, callback); + }; + }); + + this.parseArgv(argv); + + // this is set to true after the binary.host property was set to + // either staging_host or production_host. + this.binaryHostSet = false; +} +inherits(Run, EE); +exports.Run = Run; +const proto = Run.prototype; + +/** + * Export the contents of the package.json. + */ + +proto.package = require('../package.json'); + +/** + * nopt configuration definitions + */ + +proto.configDefs = { + help: Boolean, // everywhere + arch: String, // 'configure' + debug: Boolean, // 'build' + directory: String, // bin + proxy: String, // 'install' + loglevel: String // everywhere +}; + +/** + * nopt shorthands + */ + +proto.shorthands = { + release: '--no-debug', + C: '--directory', + debug: '--debug', + j: '--jobs', + silent: '--loglevel=silent', + silly: '--loglevel=silly', + verbose: '--loglevel=verbose' +}; + +/** + * expose the command aliases for the bin file to use. + */ + +proto.aliases = aliases; + +/** + * Parses the given argv array and sets the 'opts', 'argv', + * 'command', and 'package_json' properties. 
+ */ + +proto.parseArgv = function parseOpts(argv) { + this.opts = nopt(this.configDefs, this.shorthands, argv); + this.argv = this.opts.argv.remain.slice(); + const commands = this.todo = []; + + // create a copy of the argv array with aliases mapped + argv = this.argv.map((arg) => { + // is this an alias? + if (arg in this.aliases) { + arg = this.aliases[arg]; + } + return arg; + }); + + // process the mapped args into "command" objects ("name" and "args" props) + argv.slice().forEach((arg) => { + if (arg in this.commands) { + const args = argv.splice(0, argv.indexOf(arg)); + argv.shift(); + if (commands.length > 0) { + commands[commands.length - 1].args = args; + } + commands.push({ name: arg, args: [] }); + } + }); + if (commands.length > 0) { + commands[commands.length - 1].args = argv.splice(0); + } + + + // if a directory was specified package.json is assumed to be relative + // to it. + let package_json_path = this.package_json_path; + if (this.opts.directory) { + package_json_path = path.join(this.opts.directory, package_json_path); + } + + this.package_json = JSON.parse(fs.readFileSync(package_json_path)); + + // expand commands entries for multiple napi builds + this.todo = napi.expand_commands(this.package_json, this.opts, commands); + + // support for inheriting config env variables from npm + const npm_config_prefix = 'npm_config_'; + Object.keys(process.env).forEach((name) => { + if (name.indexOf(npm_config_prefix) !== 0) return; + const val = process.env[name]; + if (name === npm_config_prefix + 'loglevel') { + log.level = val; + } else { + // add the user-defined options to the config + name = name.substring(npm_config_prefix.length); + // avoid npm argv clobber already present args + // which avoids problem of 'npm test' calling + // script that runs unique npm install commands + if (name === 'argv') { + if (this.opts.argv && + this.opts.argv.remain && + this.opts.argv.remain.length) { + // do nothing + } else { + this.opts[name] = val; + } + } 
else { + this.opts[name] = val; + } + } + }); + + if (this.opts.loglevel) { + log.level = this.opts.loglevel; + } + log.resume(); +}; + +/** + * allow the binary.host property to be set at execution time. + * + * for this to take effect requires all the following to be true. + * - binary is a property in package.json + * - binary.host is falsey + * - binary.staging_host is not empty + * - binary.production_host is not empty + * + * if any of the previous checks fail then the function returns an empty string + * and makes no changes to package.json's binary property. + * + * + * if command is "publish" then the default is set to "binary.staging_host" + * if command is not "publish" the the default is set to "binary.production_host" + * + * if the command-line option '--s3_host' is set to "staging" or "production" then + * "binary.host" is set to the specified "staging_host" or "production_host". if + * '--s3_host' is any other value an exception is thrown. + * + * if '--s3_host' is not present then "binary.host" is set to the default as above. + * + * this strategy was chosen so that any command other than "publish" or "unpublish" uses "production" + * as the default without requiring any command-line options but that "publish" and "unpublish" require + * '--s3_host production_host' to be specified in order to *really* publish (or unpublish). publishing + * to staging can be done freely without worrying about disturbing any production releases. + */ +proto.setBinaryHostProperty = function(command) { + if (this.binaryHostSet) { + return this.package_json.binary.host; + } + const p = this.package_json; + // don't set anything if host is present. it must be left blank to trigger this. + if (!p || !p.binary || p.binary.host) { + return ''; + } + // and both staging and production must be present. errors will be reported later. 
+ if (!p.binary.staging_host || !p.binary.production_host) { + return ''; + } + let target = 'production_host'; + if (command === 'publish' || command === 'unpublish') { + target = 'staging_host'; + } + // the environment variable has priority over the default or the command line. if + // either the env var or the command line option are invalid throw an error. + const npg_s3_host = process.env.node_pre_gyp_s3_host; + if (npg_s3_host === 'staging' || npg_s3_host === 'production') { + target = `${npg_s3_host}_host`; + } else if (this.opts['s3_host'] === 'staging' || this.opts['s3_host'] === 'production') { + target = `${this.opts['s3_host']}_host`; + } else if (this.opts['s3_host'] || npg_s3_host) { + throw new Error(`invalid s3_host ${this.opts['s3_host'] || npg_s3_host}`); + } + + p.binary.host = p.binary[target]; + this.binaryHostSet = true; + + return p.binary.host; +}; + +/** + * Returns the usage instructions for node-pre-gyp. + */ + +proto.usage = function usage() { + const str = [ + '', + ' Usage: node-pre-gyp [options]', + '', + ' where is one of:', + cli_commands.map((c) => { + return ' - ' + c + ' - ' + require('./' + c).usage; + }).join('\n'), + '', + 'node-pre-gyp@' + this.version + ' ' + path.resolve(__dirname, '..'), + 'node@' + process.versions.node + ].join('\n'); + return str; +}; + +/** + * Version number getter. 
+ */ + +Object.defineProperty(proto, 'version', { + get: function() { + return this.package.version; + }, + enumerable: true +}); diff --git a/node_modules/@mapbox/node-pre-gyp/lib/package.js b/node_modules/@mapbox/node-pre-gyp/lib/package.js new file mode 100644 index 0000000..0734984 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/package.js @@ -0,0 +1,73 @@ +'use strict'; + +module.exports = exports = _package; + +exports.usage = 'Packs binary (and enclosing directory) into locally staged tarball'; + +const fs = require('fs'); +const path = require('path'); +const log = require('npmlog'); +const versioning = require('./util/versioning.js'); +const napi = require('./util/napi.js'); +const existsAsync = fs.exists || path.exists; +const makeDir = require('make-dir'); +const tar = require('tar'); + +function readdirSync(dir) { + let list = []; + const files = fs.readdirSync(dir); + + files.forEach((file) => { + const stats = fs.lstatSync(path.join(dir, file)); + if (stats.isDirectory()) { + list = list.concat(readdirSync(path.join(dir, file))); + } else { + list.push(path.join(dir, file)); + } + }); + return list; +} + +function _package(gyp, argv, callback) { + const package_json = gyp.package_json; + const napi_build_version = napi.get_napi_build_version_from_command_args(argv); + const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + const from = opts.module_path; + const binary_module = path.join(from, opts.module_name + '.node'); + existsAsync(binary_module, (found) => { + if (!found) { + return callback(new Error('Cannot package because ' + binary_module + ' missing: run `node-pre-gyp rebuild` first')); + } + const tarball = opts.staged_tarball; + const filter_func = function(entry) { + const basename = path.basename(entry); + if (basename.length && basename[0] !== '.') { + console.log('packing ' + entry); + return true; + } else { + console.log('skipping ' + entry); + } + return false; + }; + 
makeDir(path.dirname(tarball)).then(() => { + let files = readdirSync(from); + const base = path.basename(from); + files = files.map((file) => { + return path.join(base, path.relative(from, file)); + }); + tar.create({ + portable: false, + gzip: true, + filter: filter_func, + file: tarball, + cwd: path.dirname(from) + }, files, (err2) => { + if (err2) console.error('[' + package_json.name + '] ' + err2.message); + else log.info('package', 'Binary staged at "' + tarball + '"'); + return callback(err2); + }); + }).catch((err) => { + return callback(err); + }); + }); +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/pre-binding.js b/node_modules/@mapbox/node-pre-gyp/lib/pre-binding.js new file mode 100644 index 0000000..e110fe3 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/pre-binding.js @@ -0,0 +1,34 @@ +'use strict'; + +const npg = require('..'); +const versioning = require('../lib/util/versioning.js'); +const napi = require('../lib/util/napi.js'); +const existsSync = require('fs').existsSync || require('path').existsSync; +const path = require('path'); + +module.exports = exports; + +exports.usage = 'Finds the require path for the node-pre-gyp installed module'; + +exports.validate = function(package_json, opts) { + versioning.validate_config(package_json, opts); +}; + +exports.find = function(package_json_path, opts) { + if (!existsSync(package_json_path)) { + throw new Error(package_json_path + 'does not exist'); + } + const prog = new npg.Run({ package_json_path, argv: process.argv }); + prog.setBinaryHostProperty(); + const package_json = prog.package_json; + + versioning.validate_config(package_json, opts); + let napi_build_version; + if (napi.get_napi_build_versions(package_json, opts)) { + napi_build_version = napi.get_best_napi_build_version(package_json, opts); + } + opts = opts || {}; + if (!opts.module_root) opts.module_root = path.dirname(package_json_path); + const meta = versioning.evaluate(package_json, opts, napi_build_version); + 
return meta.module; +}; diff --git a/node_modules/@mapbox/node-pre-gyp/lib/publish.js b/node_modules/@mapbox/node-pre-gyp/lib/publish.js new file mode 100644 index 0000000..8367b15 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/publish.js @@ -0,0 +1,81 @@ +'use strict'; + +module.exports = exports = publish; + +exports.usage = 'Publishes pre-built binary (requires aws-sdk)'; + +const fs = require('fs'); +const path = require('path'); +const log = require('npmlog'); +const versioning = require('./util/versioning.js'); +const napi = require('./util/napi.js'); +const s3_setup = require('./util/s3_setup.js'); +const existsAsync = fs.exists || path.exists; +const url = require('url'); + +function publish(gyp, argv, callback) { + const package_json = gyp.package_json; + const napi_build_version = napi.get_napi_build_version_from_command_args(argv); + const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + const tarball = opts.staged_tarball; + existsAsync(tarball, (found) => { + if (!found) { + return callback(new Error('Cannot publish because ' + tarball + ' missing: run `node-pre-gyp package` first')); + } + + log.info('publish', 'Detecting s3 credentials'); + const config = {}; + s3_setup.detect(opts, config); + const s3 = s3_setup.get_s3(config); + + const key_name = url.resolve(config.prefix, opts.package_name); + const s3_opts = { + Bucket: config.bucket, + Key: key_name + }; + log.info('publish', 'Authenticating with s3'); + log.info('publish', config); + + log.info('publish', 'Checking for existing binary at ' + opts.hosted_path); + s3.headObject(s3_opts, (err, meta) => { + if (meta) log.info('publish', JSON.stringify(meta)); + if (err && err.code === 'NotFound') { + // we are safe to publish because + // the object does not already exist + log.info('publish', 'Preparing to put object'); + const s3_put_opts = { + ACL: 'public-read', + Body: fs.createReadStream(tarball), + Key: key_name, + Bucket: config.bucket + }; + 
log.info('publish', 'Putting object', s3_put_opts.ACL, s3_put_opts.Bucket, s3_put_opts.Key); + try { + s3.putObject(s3_put_opts, (err2, resp) => { + log.info('publish', 'returned from putting object'); + if (err2) { + log.info('publish', 's3 putObject error: "' + err2 + '"'); + return callback(err2); + } + if (resp) log.info('publish', 's3 putObject response: "' + JSON.stringify(resp) + '"'); + log.info('publish', 'successfully put object'); + console.log('[' + package_json.name + '] published to ' + opts.hosted_path); + return callback(); + }); + } catch (err3) { + log.info('publish', 's3 putObject error: "' + err3 + '"'); + return callback(err3); + } + } else if (err) { + log.info('publish', 's3 headObject error: "' + err + '"'); + return callback(err); + } else { + log.error('publish', 'Cannot publish over existing version'); + log.error('publish', "Update the 'version' field in package.json and try again"); + log.error('publish', 'If the previous version was published in error see:'); + log.error('publish', '\t node-pre-gyp unpublish'); + return callback(new Error('Failed publishing to ' + opts.hosted_path)); + } + }); + }); +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/rebuild.js b/node_modules/@mapbox/node-pre-gyp/lib/rebuild.js new file mode 100644 index 0000000..31510fb --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/rebuild.js @@ -0,0 +1,20 @@ +'use strict'; + +module.exports = exports = rebuild; + +exports.usage = 'Runs "clean" and "build" at once'; + +const napi = require('./util/napi.js'); + +function rebuild(gyp, argv, callback) { + const package_json = gyp.package_json; + let commands = [ + { name: 'clean', args: [] }, + { name: 'build', args: ['rebuild'] } + ]; + commands = napi.expand_commands(package_json, gyp.opts, commands); + for (let i = commands.length; i !== 0; i--) { + gyp.todo.unshift(commands[i - 1]); + } + process.nextTick(callback); +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/reinstall.js 
b/node_modules/@mapbox/node-pre-gyp/lib/reinstall.js new file mode 100644 index 0000000..a29b5c9 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/reinstall.js @@ -0,0 +1,19 @@ +'use strict'; + +module.exports = exports = rebuild; + +exports.usage = 'Runs "clean" and "install" at once'; + +const napi = require('./util/napi.js'); + +function rebuild(gyp, argv, callback) { + const package_json = gyp.package_json; + let installArgs = []; + const napi_build_version = napi.get_best_napi_build_version(package_json, gyp.opts); + if (napi_build_version != null) installArgs = [napi.get_command_arg(napi_build_version)]; + gyp.todo.unshift( + { name: 'clean', args: [] }, + { name: 'install', args: installArgs } + ); + process.nextTick(callback); +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/reveal.js b/node_modules/@mapbox/node-pre-gyp/lib/reveal.js new file mode 100644 index 0000000..7255e5f --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/reveal.js @@ -0,0 +1,32 @@ +'use strict'; + +module.exports = exports = reveal; + +exports.usage = 'Reveals data on the versioned binary'; + +const versioning = require('./util/versioning.js'); +const napi = require('./util/napi.js'); + +function unix_paths(key, val) { + return val && val.replace ? 
val.replace(/\\/g, '/') : val; +} + +function reveal(gyp, argv, callback) { + const package_json = gyp.package_json; + const napi_build_version = napi.get_napi_build_version_from_command_args(argv); + const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + let hit = false; + // if a second arg is passed look to see + // if it is a known option + // console.log(JSON.stringify(gyp.opts,null,1)) + const remain = gyp.opts.argv.remain[gyp.opts.argv.remain.length - 1]; + if (remain && Object.hasOwnProperty.call(opts, remain)) { + console.log(opts[remain].replace(/\\/g, '/')); + hit = true; + } + // otherwise return all options as json + if (!hit) { + console.log(JSON.stringify(opts, unix_paths, 2)); + } + return callback(); +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/testbinary.js b/node_modules/@mapbox/node-pre-gyp/lib/testbinary.js new file mode 100644 index 0000000..429cb13 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/testbinary.js @@ -0,0 +1,79 @@ +'use strict'; + +module.exports = exports = testbinary; + +exports.usage = 'Tests that the binary.node can be required'; + +const path = require('path'); +const log = require('npmlog'); +const cp = require('child_process'); +const versioning = require('./util/versioning.js'); +const napi = require('./util/napi.js'); + +function testbinary(gyp, argv, callback) { + const args = []; + const options = {}; + let shell_cmd = process.execPath; + const package_json = gyp.package_json; + const napi_build_version = napi.get_napi_build_version_from_command_args(argv); + const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + // skip validation for runtimes we don't explicitly support (like electron) + if (opts.runtime && + opts.runtime !== 'node-webkit' && + opts.runtime !== 'node') { + return callback(); + } + const nw = (opts.runtime && opts.runtime === 'node-webkit'); + // ensure on windows that / are used for require path + const binary_module = 
opts.module.replace(/\\/g, '/'); + if ((process.arch !== opts.target_arch) || + (process.platform !== opts.target_platform)) { + let msg = 'skipping validation since host platform/arch ('; + msg += process.platform + '/' + process.arch + ')'; + msg += ' does not match target ('; + msg += opts.target_platform + '/' + opts.target_arch + ')'; + log.info('validate', msg); + return callback(); + } + if (nw) { + options.timeout = 5000; + if (process.platform === 'darwin') { + shell_cmd = 'node-webkit'; + } else if (process.platform === 'win32') { + shell_cmd = 'nw.exe'; + } else { + shell_cmd = 'nw'; + } + const modulePath = path.resolve(binary_module); + const appDir = path.join(__dirname, 'util', 'nw-pre-gyp'); + args.push(appDir); + args.push(modulePath); + log.info('validate', "Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'"); + cp.execFile(shell_cmd, args, options, (err, stdout, stderr) => { + // check for normal timeout for node-webkit + if (err) { + if (err.killed === true && err.signal && err.signal.indexOf('SIG') > -1) { + return callback(); + } + const stderrLog = stderr.toString(); + log.info('stderr', stderrLog); + if (/^\s*Xlib:\s*extension\s*"RANDR"\s*missing\s*on\s*display\s*":\d+\.\d+"\.\s*$/.test(stderrLog)) { + log.info('RANDR', 'stderr contains only RANDR error, ignored'); + return callback(); + } + return callback(err); + } + return callback(); + }); + return; + } + args.push('--eval'); + args.push("require('" + binary_module.replace(/'/g, '\'') + "')"); + log.info('validate', "Running test command: '" + shell_cmd + ' ' + args.join(' ') + "'"); + cp.execFile(shell_cmd, args, options, (err, stdout, stderr) => { + if (err) { + return callback(err, { stdout: stdout, stderr: stderr }); + } + return callback(); + }); +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/testpackage.js b/node_modules/@mapbox/node-pre-gyp/lib/testpackage.js new file mode 100644 index 0000000..fab1911 --- /dev/null +++ 
b/node_modules/@mapbox/node-pre-gyp/lib/testpackage.js @@ -0,0 +1,53 @@ +'use strict'; + +module.exports = exports = testpackage; + +exports.usage = 'Tests that the staged package is valid'; + +const fs = require('fs'); +const path = require('path'); +const log = require('npmlog'); +const existsAsync = fs.exists || path.exists; +const versioning = require('./util/versioning.js'); +const napi = require('./util/napi.js'); +const testbinary = require('./testbinary.js'); +const tar = require('tar'); +const makeDir = require('make-dir'); + +function testpackage(gyp, argv, callback) { + const package_json = gyp.package_json; + const napi_build_version = napi.get_napi_build_version_from_command_args(argv); + const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + const tarball = opts.staged_tarball; + existsAsync(tarball, (found) => { + if (!found) { + return callback(new Error('Cannot test package because ' + tarball + ' missing: run `node-pre-gyp package` first')); + } + const to = opts.module_path; + function filter_func(entry) { + log.info('install', 'unpacking [' + entry.path + ']'); + } + + makeDir(to).then(() => { + tar.extract({ + file: tarball, + cwd: to, + strip: 1, + onentry: filter_func + }).then(after_extract, callback); + }).catch((err) => { + return callback(err); + }); + + function after_extract() { + testbinary(gyp, argv, (err) => { + if (err) { + return callback(err); + } else { + console.log('[' + package_json.name + '] Package appears valid'); + return callback(); + } + }); + } + }); +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/unpublish.js b/node_modules/@mapbox/node-pre-gyp/lib/unpublish.js new file mode 100644 index 0000000..12c9f56 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/unpublish.js @@ -0,0 +1,41 @@ +'use strict'; + +module.exports = exports = unpublish; + +exports.usage = 'Unpublishes pre-built binary (requires aws-sdk)'; + +const log = require('npmlog'); +const versioning = 
require('./util/versioning.js'); +const napi = require('./util/napi.js'); +const s3_setup = require('./util/s3_setup.js'); +const url = require('url'); + +function unpublish(gyp, argv, callback) { + const package_json = gyp.package_json; + const napi_build_version = napi.get_napi_build_version_from_command_args(argv); + const opts = versioning.evaluate(package_json, gyp.opts, napi_build_version); + const config = {}; + s3_setup.detect(opts, config); + const s3 = s3_setup.get_s3(config); + const key_name = url.resolve(config.prefix, opts.package_name); + const s3_opts = { + Bucket: config.bucket, + Key: key_name + }; + s3.headObject(s3_opts, (err, meta) => { + if (err && err.code === 'NotFound') { + console.log('[' + package_json.name + '] Not found: https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key); + return callback(); + } else if (err) { + return callback(err); + } else { + log.info('unpublish', JSON.stringify(meta)); + s3.deleteObject(s3_opts, (err2, resp) => { + if (err2) return callback(err2); + log.info(JSON.stringify(resp)); + console.log('[' + package_json.name + '] Success: removed https://' + s3_opts.Bucket + '.s3.amazonaws.com/' + s3_opts.Key); + return callback(); + }); + } + }); +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/util/abi_crosswalk.json b/node_modules/@mapbox/node-pre-gyp/lib/util/abi_crosswalk.json new file mode 100644 index 0000000..7f52972 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/util/abi_crosswalk.json @@ -0,0 +1,2602 @@ +{ + "0.1.14": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.15": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.16": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.17": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.18": { + "node_abi": null, + "v8": "1.3" + }, + "0.1.19": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.20": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.21": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.22": { + "node_abi": null, + "v8": "2.0" + }, + 
"0.1.23": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.24": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.25": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.26": { + "node_abi": null, + "v8": "2.0" + }, + "0.1.27": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.28": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.29": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.30": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.31": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.32": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.33": { + "node_abi": null, + "v8": "2.1" + }, + "0.1.90": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.91": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.92": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.93": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.94": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.95": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.96": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.97": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.98": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.99": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.100": { + "node_abi": null, + "v8": "2.2" + }, + "0.1.101": { + "node_abi": null, + "v8": "2.3" + }, + "0.1.102": { + "node_abi": null, + "v8": "2.3" + }, + "0.1.103": { + "node_abi": null, + "v8": "2.3" + }, + "0.1.104": { + "node_abi": null, + "v8": "2.3" + }, + "0.2.0": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.1": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.2": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.3": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.4": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.5": { + "node_abi": 1, + "v8": "2.3" + }, + "0.2.6": { + "node_abi": 1, + "v8": "2.3" + }, + "0.3.0": { + "node_abi": 1, + "v8": "2.5" + }, + "0.3.1": { + "node_abi": 1, + "v8": "2.5" + }, + "0.3.2": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.3": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.4": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.5": { + "node_abi": 1, 
+ "v8": "3.0" + }, + "0.3.6": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.7": { + "node_abi": 1, + "v8": "3.0" + }, + "0.3.8": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.0": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.1": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.2": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.3": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.4": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.5": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.6": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.7": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.8": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.9": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.10": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.11": { + "node_abi": 1, + "v8": "3.1" + }, + "0.4.12": { + "node_abi": 1, + "v8": "3.1" + }, + "0.5.0": { + "node_abi": 1, + "v8": "3.1" + }, + "0.5.1": { + "node_abi": 1, + "v8": "3.4" + }, + "0.5.2": { + "node_abi": 1, + "v8": "3.4" + }, + "0.5.3": { + "node_abi": 1, + "v8": "3.4" + }, + "0.5.4": { + "node_abi": 1, + "v8": "3.5" + }, + "0.5.5": { + "node_abi": 1, + "v8": "3.5" + }, + "0.5.6": { + "node_abi": 1, + "v8": "3.6" + }, + "0.5.7": { + "node_abi": 1, + "v8": "3.6" + }, + "0.5.8": { + "node_abi": 1, + "v8": "3.6" + }, + "0.5.9": { + "node_abi": 1, + "v8": "3.6" + }, + "0.5.10": { + "node_abi": 1, + "v8": "3.7" + }, + "0.6.0": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.1": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.2": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.3": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.4": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.5": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.6": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.7": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.8": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.9": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.10": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.11": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.12": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.13": { 
+ "node_abi": 1, + "v8": "3.6" + }, + "0.6.14": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.15": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.16": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.17": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.18": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.19": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.20": { + "node_abi": 1, + "v8": "3.6" + }, + "0.6.21": { + "node_abi": 1, + "v8": "3.6" + }, + "0.7.0": { + "node_abi": 1, + "v8": "3.8" + }, + "0.7.1": { + "node_abi": 1, + "v8": "3.8" + }, + "0.7.2": { + "node_abi": 1, + "v8": "3.8" + }, + "0.7.3": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.4": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.5": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.6": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.7": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.8": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.9": { + "node_abi": 1, + "v8": "3.11" + }, + "0.7.10": { + "node_abi": 1, + "v8": "3.9" + }, + "0.7.11": { + "node_abi": 1, + "v8": "3.11" + }, + "0.7.12": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.0": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.1": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.2": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.3": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.4": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.5": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.6": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.7": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.8": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.9": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.10": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.11": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.12": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.13": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.14": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.15": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.16": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.17": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.18": 
{ + "node_abi": 1, + "v8": "3.11" + }, + "0.8.19": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.20": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.21": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.22": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.23": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.24": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.25": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.26": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.27": { + "node_abi": 1, + "v8": "3.11" + }, + "0.8.28": { + "node_abi": 1, + "v8": "3.11" + }, + "0.9.0": { + "node_abi": 1, + "v8": "3.11" + }, + "0.9.1": { + "node_abi": 10, + "v8": "3.11" + }, + "0.9.2": { + "node_abi": 10, + "v8": "3.11" + }, + "0.9.3": { + "node_abi": 10, + "v8": "3.13" + }, + "0.9.4": { + "node_abi": 10, + "v8": "3.13" + }, + "0.9.5": { + "node_abi": 10, + "v8": "3.13" + }, + "0.9.6": { + "node_abi": 10, + "v8": "3.15" + }, + "0.9.7": { + "node_abi": 10, + "v8": "3.15" + }, + "0.9.8": { + "node_abi": 10, + "v8": "3.15" + }, + "0.9.9": { + "node_abi": 11, + "v8": "3.15" + }, + "0.9.10": { + "node_abi": 11, + "v8": "3.15" + }, + "0.9.11": { + "node_abi": 11, + "v8": "3.14" + }, + "0.9.12": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.0": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.1": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.2": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.3": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.4": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.5": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.6": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.7": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.8": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.9": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.10": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.11": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.12": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.13": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.14": { + "node_abi": 11, + "v8": "3.14" + }, 
+ "0.10.15": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.16": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.17": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.18": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.19": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.20": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.21": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.22": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.23": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.24": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.25": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.26": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.27": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.28": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.29": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.30": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.31": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.32": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.33": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.34": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.35": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.36": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.37": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.38": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.39": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.40": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.41": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.42": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.43": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.44": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.45": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.46": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.47": { + "node_abi": 11, + "v8": "3.14" + }, + "0.10.48": { + "node_abi": 11, + "v8": "3.14" + }, + "0.11.0": { + "node_abi": 12, + "v8": "3.17" + }, + "0.11.1": { + "node_abi": 12, + "v8": "3.18" + }, + "0.11.2": { + "node_abi": 12, + "v8": "3.19" + }, + "0.11.3": { + "node_abi": 12, + "v8": 
"3.19" + }, + "0.11.4": { + "node_abi": 12, + "v8": "3.20" + }, + "0.11.5": { + "node_abi": 12, + "v8": "3.20" + }, + "0.11.6": { + "node_abi": 12, + "v8": "3.20" + }, + "0.11.7": { + "node_abi": 12, + "v8": "3.20" + }, + "0.11.8": { + "node_abi": 13, + "v8": "3.21" + }, + "0.11.9": { + "node_abi": 13, + "v8": "3.22" + }, + "0.11.10": { + "node_abi": 13, + "v8": "3.22" + }, + "0.11.11": { + "node_abi": 14, + "v8": "3.22" + }, + "0.11.12": { + "node_abi": 14, + "v8": "3.22" + }, + "0.11.13": { + "node_abi": 14, + "v8": "3.25" + }, + "0.11.14": { + "node_abi": 14, + "v8": "3.26" + }, + "0.11.15": { + "node_abi": 14, + "v8": "3.28" + }, + "0.11.16": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.0": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.1": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.2": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.3": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.4": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.5": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.6": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.7": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.8": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.9": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.10": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.11": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.12": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.13": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.14": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.15": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.16": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.17": { + "node_abi": 14, + "v8": "3.28" + }, + "0.12.18": { + "node_abi": 14, + "v8": "3.28" + }, + "1.0.0": { + "node_abi": 42, + "v8": "3.31" + }, + "1.0.1": { + "node_abi": 42, + "v8": "3.31" + }, + "1.0.2": { + "node_abi": 42, + "v8": "3.31" + }, + "1.0.3": { + "node_abi": 42, + "v8": "4.1" + }, + "1.0.4": { + "node_abi": 42, + "v8": "4.1" + }, + "1.1.0": { + "node_abi": 43, + "v8": "4.1" + }, + 
"1.2.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.3.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.4.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.4.2": { + "node_abi": 43, + "v8": "4.1" + }, + "1.4.3": { + "node_abi": 43, + "v8": "4.1" + }, + "1.5.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.5.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.0": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.2": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.3": { + "node_abi": 43, + "v8": "4.1" + }, + "1.6.4": { + "node_abi": 43, + "v8": "4.1" + }, + "1.7.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.8.1": { + "node_abi": 43, + "v8": "4.1" + }, + "1.8.2": { + "node_abi": 43, + "v8": "4.1" + }, + "1.8.3": { + "node_abi": 43, + "v8": "4.1" + }, + "1.8.4": { + "node_abi": 43, + "v8": "4.1" + }, + "2.0.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.0.1": { + "node_abi": 44, + "v8": "4.2" + }, + "2.0.2": { + "node_abi": 44, + "v8": "4.2" + }, + "2.1.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.2.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.2.1": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.1": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.2": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.3": { + "node_abi": 44, + "v8": "4.2" + }, + "2.3.4": { + "node_abi": 44, + "v8": "4.2" + }, + "2.4.0": { + "node_abi": 44, + "v8": "4.2" + }, + "2.5.0": { + "node_abi": 44, + "v8": "4.2" + }, + "3.0.0": { + "node_abi": 45, + "v8": "4.4" + }, + "3.1.0": { + "node_abi": 45, + "v8": "4.4" + }, + "3.2.0": { + "node_abi": 45, + "v8": "4.4" + }, + "3.3.0": { + "node_abi": 45, + "v8": "4.4" + }, + "3.3.1": { + "node_abi": 45, + "v8": "4.4" + }, + "4.0.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.1.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.1.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.1.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.0": { + "node_abi": 46, + "v8": "4.5" + }, + 
"4.2.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.3": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.4": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.5": { + "node_abi": 46, + "v8": "4.5" + }, + "4.2.6": { + "node_abi": 46, + "v8": "4.5" + }, + "4.3.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.3.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.3.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.3": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.4": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.5": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.6": { + "node_abi": 46, + "v8": "4.5" + }, + "4.4.7": { + "node_abi": 46, + "v8": "4.5" + }, + "4.5.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.6.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.6.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.6.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.7.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.7.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.7.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.7.3": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.1": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.2": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.3": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.4": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.5": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.6": { + "node_abi": 46, + "v8": "4.5" + }, + "4.8.7": { + "node_abi": 46, + "v8": "4.5" + }, + "4.9.0": { + "node_abi": 46, + "v8": "4.5" + }, + "4.9.1": { + "node_abi": 46, + "v8": "4.5" + }, + "5.0.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.1.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.1.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.2.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.3.0": { + "node_abi": 47, + "v8": "4.6" + }, + 
"5.4.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.4.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.5.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.6.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.7.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.7.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.8.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.9.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.9.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.10.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.10.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.11.0": { + "node_abi": 47, + "v8": "4.6" + }, + "5.11.1": { + "node_abi": 47, + "v8": "4.6" + }, + "5.12.0": { + "node_abi": 47, + "v8": "4.6" + }, + "6.0.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.1.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.2.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.2.1": { + "node_abi": 48, + "v8": "5.0" + }, + "6.2.2": { + "node_abi": 48, + "v8": "5.0" + }, + "6.3.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.3.1": { + "node_abi": 48, + "v8": "5.0" + }, + "6.4.0": { + "node_abi": 48, + "v8": "5.0" + }, + "6.5.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.6.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.7.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.8.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.8.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.9.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.9.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.9.2": { + "node_abi": 48, + "v8": "5.1" + }, + "6.9.3": { + "node_abi": 48, + "v8": "5.1" + }, + "6.9.4": { + "node_abi": 48, + "v8": "5.1" + }, + "6.9.5": { + "node_abi": 48, + "v8": "5.1" + }, + "6.10.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.10.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.10.2": { + "node_abi": 48, + "v8": "5.1" + }, + "6.10.3": { + "node_abi": 48, + "v8": "5.1" + }, + "6.11.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.11.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.11.2": { + "node_abi": 48, + "v8": 
"5.1" + }, + "6.11.3": { + "node_abi": 48, + "v8": "5.1" + }, + "6.11.4": { + "node_abi": 48, + "v8": "5.1" + }, + "6.11.5": { + "node_abi": 48, + "v8": "5.1" + }, + "6.12.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.12.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.12.2": { + "node_abi": 48, + "v8": "5.1" + }, + "6.12.3": { + "node_abi": 48, + "v8": "5.1" + }, + "6.13.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.13.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.14.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.14.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.14.2": { + "node_abi": 48, + "v8": "5.1" + }, + "6.14.3": { + "node_abi": 48, + "v8": "5.1" + }, + "6.14.4": { + "node_abi": 48, + "v8": "5.1" + }, + "6.15.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.15.1": { + "node_abi": 48, + "v8": "5.1" + }, + "6.16.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.17.0": { + "node_abi": 48, + "v8": "5.1" + }, + "6.17.1": { + "node_abi": 48, + "v8": "5.1" + }, + "7.0.0": { + "node_abi": 51, + "v8": "5.4" + }, + "7.1.0": { + "node_abi": 51, + "v8": "5.4" + }, + "7.2.0": { + "node_abi": 51, + "v8": "5.4" + }, + "7.2.1": { + "node_abi": 51, + "v8": "5.4" + }, + "7.3.0": { + "node_abi": 51, + "v8": "5.4" + }, + "7.4.0": { + "node_abi": 51, + "v8": "5.4" + }, + "7.5.0": { + "node_abi": 51, + "v8": "5.4" + }, + "7.6.0": { + "node_abi": 51, + "v8": "5.5" + }, + "7.7.0": { + "node_abi": 51, + "v8": "5.5" + }, + "7.7.1": { + "node_abi": 51, + "v8": "5.5" + }, + "7.7.2": { + "node_abi": 51, + "v8": "5.5" + }, + "7.7.3": { + "node_abi": 51, + "v8": "5.5" + }, + "7.7.4": { + "node_abi": 51, + "v8": "5.5" + }, + "7.8.0": { + "node_abi": 51, + "v8": "5.5" + }, + "7.9.0": { + "node_abi": 51, + "v8": "5.5" + }, + "7.10.0": { + "node_abi": 51, + "v8": "5.5" + }, + "7.10.1": { + "node_abi": 51, + "v8": "5.5" + }, + "8.0.0": { + "node_abi": 57, + "v8": "5.8" + }, + "8.1.0": { + "node_abi": 57, + "v8": "5.8" + }, + "8.1.1": { + "node_abi": 57, + "v8": "5.8" + }, + "8.1.2": { + 
"node_abi": 57, + "v8": "5.8" + }, + "8.1.3": { + "node_abi": 57, + "v8": "5.8" + }, + "8.1.4": { + "node_abi": 57, + "v8": "5.8" + }, + "8.2.0": { + "node_abi": 57, + "v8": "5.8" + }, + "8.2.1": { + "node_abi": 57, + "v8": "5.8" + }, + "8.3.0": { + "node_abi": 57, + "v8": "6.0" + }, + "8.4.0": { + "node_abi": 57, + "v8": "6.0" + }, + "8.5.0": { + "node_abi": 57, + "v8": "6.0" + }, + "8.6.0": { + "node_abi": 57, + "v8": "6.0" + }, + "8.7.0": { + "node_abi": 57, + "v8": "6.1" + }, + "8.8.0": { + "node_abi": 57, + "v8": "6.1" + }, + "8.8.1": { + "node_abi": 57, + "v8": "6.1" + }, + "8.9.0": { + "node_abi": 57, + "v8": "6.1" + }, + "8.9.1": { + "node_abi": 57, + "v8": "6.1" + }, + "8.9.2": { + "node_abi": 57, + "v8": "6.1" + }, + "8.9.3": { + "node_abi": 57, + "v8": "6.1" + }, + "8.9.4": { + "node_abi": 57, + "v8": "6.1" + }, + "8.10.0": { + "node_abi": 57, + "v8": "6.2" + }, + "8.11.0": { + "node_abi": 57, + "v8": "6.2" + }, + "8.11.1": { + "node_abi": 57, + "v8": "6.2" + }, + "8.11.2": { + "node_abi": 57, + "v8": "6.2" + }, + "8.11.3": { + "node_abi": 57, + "v8": "6.2" + }, + "8.11.4": { + "node_abi": 57, + "v8": "6.2" + }, + "8.12.0": { + "node_abi": 57, + "v8": "6.2" + }, + "8.13.0": { + "node_abi": 57, + "v8": "6.2" + }, + "8.14.0": { + "node_abi": 57, + "v8": "6.2" + }, + "8.14.1": { + "node_abi": 57, + "v8": "6.2" + }, + "8.15.0": { + "node_abi": 57, + "v8": "6.2" + }, + "8.15.1": { + "node_abi": 57, + "v8": "6.2" + }, + "8.16.0": { + "node_abi": 57, + "v8": "6.2" + }, + "8.16.1": { + "node_abi": 57, + "v8": "6.2" + }, + "8.16.2": { + "node_abi": 57, + "v8": "6.2" + }, + "8.17.0": { + "node_abi": 57, + "v8": "6.2" + }, + "9.0.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.1.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.2.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.2.1": { + "node_abi": 59, + "v8": "6.2" + }, + "9.3.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.4.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.5.0": { + "node_abi": 59, + "v8": "6.2" + 
}, + "9.6.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.6.1": { + "node_abi": 59, + "v8": "6.2" + }, + "9.7.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.7.1": { + "node_abi": 59, + "v8": "6.2" + }, + "9.8.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.9.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.10.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.10.1": { + "node_abi": 59, + "v8": "6.2" + }, + "9.11.0": { + "node_abi": 59, + "v8": "6.2" + }, + "9.11.1": { + "node_abi": 59, + "v8": "6.2" + }, + "9.11.2": { + "node_abi": 59, + "v8": "6.2" + }, + "10.0.0": { + "node_abi": 64, + "v8": "6.6" + }, + "10.1.0": { + "node_abi": 64, + "v8": "6.6" + }, + "10.2.0": { + "node_abi": 64, + "v8": "6.6" + }, + "10.2.1": { + "node_abi": 64, + "v8": "6.6" + }, + "10.3.0": { + "node_abi": 64, + "v8": "6.6" + }, + "10.4.0": { + "node_abi": 64, + "v8": "6.7" + }, + "10.4.1": { + "node_abi": 64, + "v8": "6.7" + }, + "10.5.0": { + "node_abi": 64, + "v8": "6.7" + }, + "10.6.0": { + "node_abi": 64, + "v8": "6.7" + }, + "10.7.0": { + "node_abi": 64, + "v8": "6.7" + }, + "10.8.0": { + "node_abi": 64, + "v8": "6.7" + }, + "10.9.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.10.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.11.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.12.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.13.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.14.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.14.1": { + "node_abi": 64, + "v8": "6.8" + }, + "10.14.2": { + "node_abi": 64, + "v8": "6.8" + }, + "10.15.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.15.1": { + "node_abi": 64, + "v8": "6.8" + }, + "10.15.2": { + "node_abi": 64, + "v8": "6.8" + }, + "10.15.3": { + "node_abi": 64, + "v8": "6.8" + }, + "10.16.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.16.1": { + "node_abi": 64, + "v8": "6.8" + }, + "10.16.2": { + "node_abi": 64, + "v8": "6.8" + }, + "10.16.3": { + "node_abi": 64, + "v8": "6.8" + }, + "10.17.0": { + "node_abi": 64, + "v8": "6.8" + 
}, + "10.18.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.18.1": { + "node_abi": 64, + "v8": "6.8" + }, + "10.19.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.20.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.20.1": { + "node_abi": 64, + "v8": "6.8" + }, + "10.21.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.22.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.22.1": { + "node_abi": 64, + "v8": "6.8" + }, + "10.23.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.23.1": { + "node_abi": 64, + "v8": "6.8" + }, + "10.23.2": { + "node_abi": 64, + "v8": "6.8" + }, + "10.23.3": { + "node_abi": 64, + "v8": "6.8" + }, + "10.24.0": { + "node_abi": 64, + "v8": "6.8" + }, + "10.24.1": { + "node_abi": 64, + "v8": "6.8" + }, + "11.0.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.1.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.2.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.3.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.4.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.5.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.6.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.7.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.8.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.9.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.10.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.10.1": { + "node_abi": 67, + "v8": "7.0" + }, + "11.11.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.12.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.13.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.14.0": { + "node_abi": 67, + "v8": "7.0" + }, + "11.15.0": { + "node_abi": 67, + "v8": "7.0" + }, + "12.0.0": { + "node_abi": 72, + "v8": "7.4" + }, + "12.1.0": { + "node_abi": 72, + "v8": "7.4" + }, + "12.2.0": { + "node_abi": 72, + "v8": "7.4" + }, + "12.3.0": { + "node_abi": 72, + "v8": "7.4" + }, + "12.3.1": { + "node_abi": 72, + "v8": "7.4" + }, + "12.4.0": { + "node_abi": 72, + "v8": "7.4" + }, + "12.5.0": { + "node_abi": 72, + "v8": "7.5" + }, + "12.6.0": { + "node_abi": 72, + 
"v8": "7.5" + }, + "12.7.0": { + "node_abi": 72, + "v8": "7.5" + }, + "12.8.0": { + "node_abi": 72, + "v8": "7.5" + }, + "12.8.1": { + "node_abi": 72, + "v8": "7.5" + }, + "12.9.0": { + "node_abi": 72, + "v8": "7.6" + }, + "12.9.1": { + "node_abi": 72, + "v8": "7.6" + }, + "12.10.0": { + "node_abi": 72, + "v8": "7.6" + }, + "12.11.0": { + "node_abi": 72, + "v8": "7.7" + }, + "12.11.1": { + "node_abi": 72, + "v8": "7.7" + }, + "12.12.0": { + "node_abi": 72, + "v8": "7.7" + }, + "12.13.0": { + "node_abi": 72, + "v8": "7.7" + }, + "12.13.1": { + "node_abi": 72, + "v8": "7.7" + }, + "12.14.0": { + "node_abi": 72, + "v8": "7.7" + }, + "12.14.1": { + "node_abi": 72, + "v8": "7.7" + }, + "12.15.0": { + "node_abi": 72, + "v8": "7.7" + }, + "12.16.0": { + "node_abi": 72, + "v8": "7.8" + }, + "12.16.1": { + "node_abi": 72, + "v8": "7.8" + }, + "12.16.2": { + "node_abi": 72, + "v8": "7.8" + }, + "12.16.3": { + "node_abi": 72, + "v8": "7.8" + }, + "12.17.0": { + "node_abi": 72, + "v8": "7.8" + }, + "12.18.0": { + "node_abi": 72, + "v8": "7.8" + }, + "12.18.1": { + "node_abi": 72, + "v8": "7.8" + }, + "12.18.2": { + "node_abi": 72, + "v8": "7.8" + }, + "12.18.3": { + "node_abi": 72, + "v8": "7.8" + }, + "12.18.4": { + "node_abi": 72, + "v8": "7.8" + }, + "12.19.0": { + "node_abi": 72, + "v8": "7.8" + }, + "12.19.1": { + "node_abi": 72, + "v8": "7.8" + }, + "12.20.0": { + "node_abi": 72, + "v8": "7.8" + }, + "12.20.1": { + "node_abi": 72, + "v8": "7.8" + }, + "12.20.2": { + "node_abi": 72, + "v8": "7.8" + }, + "12.21.0": { + "node_abi": 72, + "v8": "7.8" + }, + "12.22.0": { + "node_abi": 72, + "v8": "7.8" + }, + "12.22.1": { + "node_abi": 72, + "v8": "7.8" + }, + "12.22.2": { + "node_abi": 72, + "v8": "7.8" + }, + "12.22.3": { + "node_abi": 72, + "v8": "7.8" + }, + "12.22.4": { + "node_abi": 72, + "v8": "7.8" + }, + "12.22.5": { + "node_abi": 72, + "v8": "7.8" + }, + "12.22.6": { + "node_abi": 72, + "v8": "7.8" + }, + "12.22.7": { + "node_abi": 72, + "v8": "7.8" + }, + "13.0.0": 
{ + "node_abi": 79, + "v8": "7.8" + }, + "13.0.1": { + "node_abi": 79, + "v8": "7.8" + }, + "13.1.0": { + "node_abi": 79, + "v8": "7.8" + }, + "13.2.0": { + "node_abi": 79, + "v8": "7.9" + }, + "13.3.0": { + "node_abi": 79, + "v8": "7.9" + }, + "13.4.0": { + "node_abi": 79, + "v8": "7.9" + }, + "13.5.0": { + "node_abi": 79, + "v8": "7.9" + }, + "13.6.0": { + "node_abi": 79, + "v8": "7.9" + }, + "13.7.0": { + "node_abi": 79, + "v8": "7.9" + }, + "13.8.0": { + "node_abi": 79, + "v8": "7.9" + }, + "13.9.0": { + "node_abi": 79, + "v8": "7.9" + }, + "13.10.0": { + "node_abi": 79, + "v8": "7.9" + }, + "13.10.1": { + "node_abi": 79, + "v8": "7.9" + }, + "13.11.0": { + "node_abi": 79, + "v8": "7.9" + }, + "13.12.0": { + "node_abi": 79, + "v8": "7.9" + }, + "13.13.0": { + "node_abi": 79, + "v8": "7.9" + }, + "13.14.0": { + "node_abi": 79, + "v8": "7.9" + }, + "14.0.0": { + "node_abi": 83, + "v8": "8.1" + }, + "14.1.0": { + "node_abi": 83, + "v8": "8.1" + }, + "14.2.0": { + "node_abi": 83, + "v8": "8.1" + }, + "14.3.0": { + "node_abi": 83, + "v8": "8.1" + }, + "14.4.0": { + "node_abi": 83, + "v8": "8.1" + }, + "14.5.0": { + "node_abi": 83, + "v8": "8.3" + }, + "14.6.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.7.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.8.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.9.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.10.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.10.1": { + "node_abi": 83, + "v8": "8.4" + }, + "14.11.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.12.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.13.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.13.1": { + "node_abi": 83, + "v8": "8.4" + }, + "14.14.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.15.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.15.1": { + "node_abi": 83, + "v8": "8.4" + }, + "14.15.2": { + "node_abi": 83, + "v8": "8.4" + }, + "14.15.3": { + "node_abi": 83, + "v8": "8.4" + }, + "14.15.4": { + "node_abi": 83, + "v8": "8.4" + }, + 
"14.15.5": { + "node_abi": 83, + "v8": "8.4" + }, + "14.16.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.16.1": { + "node_abi": 83, + "v8": "8.4" + }, + "14.17.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.17.1": { + "node_abi": 83, + "v8": "8.4" + }, + "14.17.2": { + "node_abi": 83, + "v8": "8.4" + }, + "14.17.3": { + "node_abi": 83, + "v8": "8.4" + }, + "14.17.4": { + "node_abi": 83, + "v8": "8.4" + }, + "14.17.5": { + "node_abi": 83, + "v8": "8.4" + }, + "14.17.6": { + "node_abi": 83, + "v8": "8.4" + }, + "14.18.0": { + "node_abi": 83, + "v8": "8.4" + }, + "14.18.1": { + "node_abi": 83, + "v8": "8.4" + }, + "15.0.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.0.1": { + "node_abi": 88, + "v8": "8.6" + }, + "15.1.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.2.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.2.1": { + "node_abi": 88, + "v8": "8.6" + }, + "15.3.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.4.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.5.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.5.1": { + "node_abi": 88, + "v8": "8.6" + }, + "15.6.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.7.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.8.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.9.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.10.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.11.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.12.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.13.0": { + "node_abi": 88, + "v8": "8.6" + }, + "15.14.0": { + "node_abi": 88, + "v8": "8.6" + }, + "16.0.0": { + "node_abi": 93, + "v8": "9.0" + }, + "16.1.0": { + "node_abi": 93, + "v8": "9.0" + }, + "16.2.0": { + "node_abi": 93, + "v8": "9.0" + }, + "16.3.0": { + "node_abi": 93, + "v8": "9.0" + }, + "16.4.0": { + "node_abi": 93, + "v8": "9.1" + }, + "16.4.1": { + "node_abi": 93, + "v8": "9.1" + }, + "16.4.2": { + "node_abi": 93, + "v8": "9.1" + }, + "16.5.0": { + "node_abi": 93, + "v8": "9.1" + }, + "16.6.0": { + "node_abi": 93, + "v8": "9.2" 
+ }, + "16.6.1": { + "node_abi": 93, + "v8": "9.2" + }, + "16.6.2": { + "node_abi": 93, + "v8": "9.2" + }, + "16.7.0": { + "node_abi": 93, + "v8": "9.2" + }, + "16.8.0": { + "node_abi": 93, + "v8": "9.2" + }, + "16.9.0": { + "node_abi": 93, + "v8": "9.3" + }, + "16.9.1": { + "node_abi": 93, + "v8": "9.3" + }, + "16.10.0": { + "node_abi": 93, + "v8": "9.3" + }, + "16.11.0": { + "node_abi": 93, + "v8": "9.4" + }, + "16.11.1": { + "node_abi": 93, + "v8": "9.4" + }, + "16.12.0": { + "node_abi": 93, + "v8": "9.4" + }, + "16.13.0": { + "node_abi": 93, + "v8": "9.4" + }, + "17.0.0": { + "node_abi": 102, + "v8": "9.5" + }, + "17.0.1": { + "node_abi": 102, + "v8": "9.5" + }, + "17.1.0": { + "node_abi": 102, + "v8": "9.5" + } +} \ No newline at end of file diff --git a/node_modules/@mapbox/node-pre-gyp/lib/util/compile.js b/node_modules/@mapbox/node-pre-gyp/lib/util/compile.js new file mode 100644 index 0000000..956e5aa --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/util/compile.js @@ -0,0 +1,93 @@ +'use strict'; + +module.exports = exports; + +const fs = require('fs'); +const path = require('path'); +const win = process.platform === 'win32'; +const existsSync = fs.existsSync || path.existsSync; +const cp = require('child_process'); + +// try to build up the complete path to node-gyp +/* priority: + - node-gyp on ENV:npm_config_node_gyp (https://github.com/npm/npm/pull/4887) + - node-gyp on NODE_PATH + - node-gyp inside npm on NODE_PATH (ignore on iojs) + - node-gyp inside npm beside node exe +*/ +function which_node_gyp() { + let node_gyp_bin; + if (process.env.npm_config_node_gyp) { + try { + node_gyp_bin = process.env.npm_config_node_gyp; + if (existsSync(node_gyp_bin)) { + return node_gyp_bin; + } + } catch (err) { + // do nothing + } + } + try { + const node_gyp_main = require.resolve('node-gyp'); // eslint-disable-line node/no-missing-require + node_gyp_bin = path.join(path.dirname( + path.dirname(node_gyp_main)), + 'bin/node-gyp.js'); + if 
(existsSync(node_gyp_bin)) { + return node_gyp_bin; + } + } catch (err) { + // do nothing + } + if (process.execPath.indexOf('iojs') === -1) { + try { + const npm_main = require.resolve('npm'); // eslint-disable-line node/no-missing-require + node_gyp_bin = path.join(path.dirname( + path.dirname(npm_main)), + 'node_modules/node-gyp/bin/node-gyp.js'); + if (existsSync(node_gyp_bin)) { + return node_gyp_bin; + } + } catch (err) { + // do nothing + } + } + const npm_base = path.join(path.dirname( + path.dirname(process.execPath)), + 'lib/node_modules/npm/'); + node_gyp_bin = path.join(npm_base, 'node_modules/node-gyp/bin/node-gyp.js'); + if (existsSync(node_gyp_bin)) { + return node_gyp_bin; + } +} + +module.exports.run_gyp = function(args, opts, callback) { + let shell_cmd = ''; + const cmd_args = []; + if (opts.runtime && opts.runtime === 'node-webkit') { + shell_cmd = 'nw-gyp'; + if (win) shell_cmd += '.cmd'; + } else { + const node_gyp_path = which_node_gyp(); + if (node_gyp_path) { + shell_cmd = process.execPath; + cmd_args.push(node_gyp_path); + } else { + shell_cmd = 'node-gyp'; + if (win) shell_cmd += '.cmd'; + } + } + const final_args = cmd_args.concat(args); + const cmd = cp.spawn(shell_cmd, final_args, { cwd: undefined, env: process.env, stdio: [0, 1, 2] }); + cmd.on('error', (err) => { + if (err) { + return callback(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + err + ')')); + } + callback(null, opts); + }); + cmd.on('close', (code) => { + if (code && code !== 0) { + return callback(new Error("Failed to execute '" + shell_cmd + ' ' + final_args.join(' ') + "' (" + code + ')')); + } + callback(null, opts); + }); +}; diff --git a/node_modules/@mapbox/node-pre-gyp/lib/util/handle_gyp_opts.js b/node_modules/@mapbox/node-pre-gyp/lib/util/handle_gyp_opts.js new file mode 100644 index 0000000..d702f78 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/util/handle_gyp_opts.js @@ -0,0 +1,102 @@ +'use strict'; + 
+module.exports = exports = handle_gyp_opts; + +const versioning = require('./versioning.js'); +const napi = require('./napi.js'); + +/* + +Here we gather node-pre-gyp generated options (from versioning) and pass them along to node-gyp. + +We massage the args and options slightly to account for differences in what commands mean between +node-pre-gyp and node-gyp (e.g. see the difference between "build" and "rebuild" below) + +Keep in mind: the values inside `argv` and `gyp.opts` below are different depending on whether +node-pre-gyp is called directory, or if it is called in a `run-script` phase of npm. + +We also try to preserve any command line options that might have been passed to npm or node-pre-gyp. +But this is fairly difficult without passing way to much through. For example `gyp.opts` contains all +the process.env and npm pushes a lot of variables into process.env which node-pre-gyp inherits. So we have +to be very selective about what we pass through. + +For example: + +`npm install --build-from-source` will give: + +argv == [ 'rebuild' ] +gyp.opts.argv == { remain: [ 'install' ], + cooked: [ 'install', '--fallback-to-build' ], + original: [ 'install', '--fallback-to-build' ] } + +`./bin/node-pre-gyp build` will give: + +argv == [] +gyp.opts.argv == { remain: [ 'build' ], + cooked: [ 'build' ], + original: [ '-C', 'test/app1', 'build' ] } + +*/ + +// select set of node-pre-gyp versioning info +// to share with node-gyp +const share_with_node_gyp = [ + 'module', + 'module_name', + 'module_path', + 'napi_version', + 'node_abi_napi', + 'napi_build_version', + 'node_napi_label' +]; + +function handle_gyp_opts(gyp, argv, callback) { + + // Collect node-pre-gyp specific variables to pass to node-gyp + const node_pre_gyp_options = []; + // generate custom node-pre-gyp versioning info + const napi_build_version = napi.get_napi_build_version_from_command_args(argv); + const opts = versioning.evaluate(gyp.package_json, gyp.opts, napi_build_version); + 
share_with_node_gyp.forEach((key) => { + const val = opts[key]; + if (val) { + node_pre_gyp_options.push('--' + key + '=' + val); + } else if (key === 'napi_build_version') { + node_pre_gyp_options.push('--' + key + '=0'); + } else { + if (key !== 'napi_version' && key !== 'node_abi_napi') + return callback(new Error('Option ' + key + ' required but not found by node-pre-gyp')); + } + }); + + // Collect options that follow the special -- which disables nopt parsing + const unparsed_options = []; + let double_hyphen_found = false; + gyp.opts.argv.original.forEach((opt) => { + if (double_hyphen_found) { + unparsed_options.push(opt); + } + if (opt === '--') { + double_hyphen_found = true; + } + }); + + // We try respect and pass through remaining command + // line options (like --foo=bar) to node-gyp + const cooked = gyp.opts.argv.cooked; + const node_gyp_options = []; + cooked.forEach((value) => { + if (value.length > 2 && value.slice(0, 2) === '--') { + const key = value.slice(2); + const val = cooked[cooked.indexOf(value) + 1]; + if (val && val.indexOf('--') === -1) { // handle '--foo=bar' or ['--foo','bar'] + node_gyp_options.push('--' + key + '=' + val); + } else { // pass through --foo + node_gyp_options.push(value); + } + } + }); + + const result = { 'opts': opts, 'gyp': node_gyp_options, 'pre': node_pre_gyp_options, 'unparsed': unparsed_options }; + return callback(null, result); +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/util/napi.js b/node_modules/@mapbox/node-pre-gyp/lib/util/napi.js new file mode 100644 index 0000000..5d14ad6 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/util/napi.js @@ -0,0 +1,205 @@ +'use strict'; + +const fs = require('fs'); + +module.exports = exports; + +const versionArray = process.version + .substr(1) + .replace(/-.*$/, '') + .split('.') + .map((item) => { + return +item; + }); + +const napi_multiple_commands = [ + 'build', + 'clean', + 'configure', + 'package', + 'publish', + 'reveal', + 'testbinary', + 
'testpackage', + 'unpublish' +]; + +const napi_build_version_tag = 'napi_build_version='; + +module.exports.get_napi_version = function() { + // returns the non-zero numeric napi version or undefined if napi is not supported. + // correctly supporting target requires an updated cross-walk + let version = process.versions.napi; // can be undefined + if (!version) { // this code should never need to be updated + if (versionArray[0] === 9 && versionArray[1] >= 3) version = 2; // 9.3.0+ + else if (versionArray[0] === 8) version = 1; // 8.0.0+ + } + return version; +}; + +module.exports.get_napi_version_as_string = function(target) { + // returns the napi version as a string or an empty string if napi is not supported. + const version = module.exports.get_napi_version(target); + return version ? '' + version : ''; +}; + +module.exports.validate_package_json = function(package_json, opts) { // throws Error + + const binary = package_json.binary; + const module_path_ok = pathOK(binary.module_path); + const remote_path_ok = pathOK(binary.remote_path); + const package_name_ok = pathOK(binary.package_name); + const napi_build_versions = module.exports.get_napi_build_versions(package_json, opts, true); + const napi_build_versions_raw = module.exports.get_napi_build_versions_raw(package_json); + + if (napi_build_versions) { + napi_build_versions.forEach((napi_build_version)=> { + if (!(parseInt(napi_build_version, 10) === napi_build_version && napi_build_version > 0)) { + throw new Error('All values specified in napi_versions must be positive integers.'); + } + }); + } + + if (napi_build_versions && (!module_path_ok || (!remote_path_ok && !package_name_ok))) { + throw new Error('When napi_versions is specified; module_path and either remote_path or ' + + "package_name must contain the substitution string '{napi_build_version}`."); + } + + if ((module_path_ok || remote_path_ok || package_name_ok) && !napi_build_versions_raw) { + throw new Error("When the substitution string 
'{napi_build_version}` is specified in " + + 'module_path, remote_path, or package_name; napi_versions must also be specified.'); + } + + if (napi_build_versions && !module.exports.get_best_napi_build_version(package_json, opts) && + module.exports.build_napi_only(package_json)) { + throw new Error( + 'The Node-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' + + 'This module supports Node-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. ' + + 'This Node instance cannot run this module.'); + } + + if (napi_build_versions_raw && !napi_build_versions && module.exports.build_napi_only(package_json)) { + throw new Error( + 'The Node-API version of this Node instance is ' + module.exports.get_napi_version(opts ? opts.target : undefined) + '. ' + + 'This module supports Node-API version(s) ' + module.exports.get_napi_build_versions_raw(package_json) + '. ' + + 'This Node instance cannot run this module.'); + } + +}; + +function pathOK(path) { + return path && (path.indexOf('{napi_build_version}') !== -1 || path.indexOf('{node_napi_label}') !== -1); +} + +module.exports.expand_commands = function(package_json, opts, commands) { + const expanded_commands = []; + const napi_build_versions = module.exports.get_napi_build_versions(package_json, opts); + commands.forEach((command)=> { + if (napi_build_versions && command.name === 'install') { + const napi_build_version = module.exports.get_best_napi_build_version(package_json, opts); + const args = napi_build_version ? 
[napi_build_version_tag + napi_build_version] : []; + expanded_commands.push({ name: command.name, args: args }); + } else if (napi_build_versions && napi_multiple_commands.indexOf(command.name) !== -1) { + napi_build_versions.forEach((napi_build_version)=> { + const args = command.args.slice(); + args.push(napi_build_version_tag + napi_build_version); + expanded_commands.push({ name: command.name, args: args }); + }); + } else { + expanded_commands.push(command); + } + }); + return expanded_commands; +}; + +module.exports.get_napi_build_versions = function(package_json, opts, warnings) { // opts may be undefined + const log = require('npmlog'); + let napi_build_versions = []; + const supported_napi_version = module.exports.get_napi_version(opts ? opts.target : undefined); + // remove duplicates, verify each napi version can actaully be built + if (package_json.binary && package_json.binary.napi_versions) { + package_json.binary.napi_versions.forEach((napi_version) => { + const duplicated = napi_build_versions.indexOf(napi_version) !== -1; + if (!duplicated && supported_napi_version && napi_version <= supported_napi_version) { + napi_build_versions.push(napi_version); + } else if (warnings && !duplicated && supported_napi_version) { + log.info('This Node instance does not support builds for Node-API version', napi_version); + } + }); + } + if (opts && opts['build-latest-napi-version-only']) { + let latest_version = 0; + napi_build_versions.forEach((napi_version) => { + if (napi_version > latest_version) latest_version = napi_version; + }); + napi_build_versions = latest_version ? [latest_version] : []; + } + return napi_build_versions.length ? 
napi_build_versions : undefined; +}; + +module.exports.get_napi_build_versions_raw = function(package_json) { + const napi_build_versions = []; + // remove duplicates + if (package_json.binary && package_json.binary.napi_versions) { + package_json.binary.napi_versions.forEach((napi_version) => { + if (napi_build_versions.indexOf(napi_version) === -1) { + napi_build_versions.push(napi_version); + } + }); + } + return napi_build_versions.length ? napi_build_versions : undefined; +}; + +module.exports.get_command_arg = function(napi_build_version) { + return napi_build_version_tag + napi_build_version; +}; + +module.exports.get_napi_build_version_from_command_args = function(command_args) { + for (let i = 0; i < command_args.length; i++) { + const arg = command_args[i]; + if (arg.indexOf(napi_build_version_tag) === 0) { + return parseInt(arg.substr(napi_build_version_tag.length), 10); + } + } + return undefined; +}; + +module.exports.swap_build_dir_out = function(napi_build_version) { + if (napi_build_version) { + const rm = require('rimraf'); + rm.sync(module.exports.get_build_dir(napi_build_version)); + fs.renameSync('build', module.exports.get_build_dir(napi_build_version)); + } +}; + +module.exports.swap_build_dir_in = function(napi_build_version) { + if (napi_build_version) { + const rm = require('rimraf'); + rm.sync('build'); + fs.renameSync(module.exports.get_build_dir(napi_build_version), 'build'); + } +}; + +module.exports.get_build_dir = function(napi_build_version) { + return 'build-tmp-napi-v' + napi_build_version; +}; + +module.exports.get_best_napi_build_version = function(package_json, opts) { + let best_napi_build_version = 0; + const napi_build_versions = module.exports.get_napi_build_versions(package_json, opts); + if (napi_build_versions) { + const our_napi_version = module.exports.get_napi_version(opts ? 
opts.target : undefined); + napi_build_versions.forEach((napi_build_version)=> { + if (napi_build_version > best_napi_build_version && + napi_build_version <= our_napi_version) { + best_napi_build_version = napi_build_version; + } + }); + } + return best_napi_build_version === 0 ? undefined : best_napi_build_version; +}; + +module.exports.build_napi_only = function(package_json) { + return package_json.binary && package_json.binary.package_name && + package_json.binary.package_name.indexOf('{node_napi_label}') === -1; +}; diff --git a/node_modules/@mapbox/node-pre-gyp/lib/util/nw-pre-gyp/index.html b/node_modules/@mapbox/node-pre-gyp/lib/util/nw-pre-gyp/index.html new file mode 100644 index 0000000..244466c --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/util/nw-pre-gyp/index.html @@ -0,0 +1,26 @@ + + + + +Node-webkit-based module test + + + +

Node-webkit-based module test

+ + diff --git a/node_modules/@mapbox/node-pre-gyp/lib/util/nw-pre-gyp/package.json b/node_modules/@mapbox/node-pre-gyp/lib/util/nw-pre-gyp/package.json new file mode 100644 index 0000000..71d03f8 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/util/nw-pre-gyp/package.json @@ -0,0 +1,9 @@ +{ +"main": "index.html", +"name": "nw-pre-gyp-module-test", +"description": "Node-webkit-based module test.", +"version": "0.0.1", +"window": { + "show": false +} +} diff --git a/node_modules/@mapbox/node-pre-gyp/lib/util/s3_setup.js b/node_modules/@mapbox/node-pre-gyp/lib/util/s3_setup.js new file mode 100644 index 0000000..6b1b1a6 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/util/s3_setup.js @@ -0,0 +1,163 @@ +'use strict'; + +module.exports = exports; + +const url = require('url'); +const fs = require('fs'); +const path = require('path'); + +module.exports.detect = function(opts, config) { + const to = opts.hosted_path; + const uri = url.parse(to); + config.prefix = (!uri.pathname || uri.pathname === '/') ? '' : uri.pathname.replace('/', ''); + if (opts.bucket && opts.region) { + config.bucket = opts.bucket; + config.region = opts.region; + config.endpoint = opts.host; + config.s3ForcePathStyle = opts.s3ForcePathStyle; + } else { + const parts = uri.hostname.split('.s3'); + const bucket = parts[0]; + if (!bucket) { + return; + } + if (!config.bucket) { + config.bucket = bucket; + } + if (!config.region) { + const region = parts[1].slice(1).split('.')[0]; + if (region === 'amazonaws') { + config.region = 'us-east-1'; + } else { + config.region = region; + } + } + } +}; + +module.exports.get_s3 = function(config) { + + if (process.env.node_pre_gyp_mock_s3) { + // here we're mocking. node_pre_gyp_mock_s3 is the scratch directory + // for the mock code. + const AWSMock = require('mock-aws-s3'); + const os = require('os'); + + AWSMock.config.basePath = `${os.tmpdir()}/mock`; + + const s3 = AWSMock.S3(); + + // wrapped callback maker. 
fs calls return code of ENOENT but AWS.S3 returns + // NotFound. + const wcb = (fn) => (err, ...args) => { + if (err && err.code === 'ENOENT') { + err.code = 'NotFound'; + } + return fn(err, ...args); + }; + + return { + listObjects(params, callback) { + return s3.listObjects(params, wcb(callback)); + }, + headObject(params, callback) { + return s3.headObject(params, wcb(callback)); + }, + deleteObject(params, callback) { + return s3.deleteObject(params, wcb(callback)); + }, + putObject(params, callback) { + return s3.putObject(params, wcb(callback)); + } + }; + } + + // if not mocking then setup real s3. + const AWS = require('aws-sdk'); + + AWS.config.update(config); + const s3 = new AWS.S3(); + + // need to change if additional options need to be specified. + return { + listObjects(params, callback) { + return s3.listObjects(params, callback); + }, + headObject(params, callback) { + return s3.headObject(params, callback); + }, + deleteObject(params, callback) { + return s3.deleteObject(params, callback); + }, + putObject(params, callback) { + return s3.putObject(params, callback); + } + }; + + + +}; + +// +// function to get the mocking control function. if not mocking it returns a no-op. +// +// if mocking it sets up the mock http interceptors that use the mocked s3 file system +// to fulfill reponses. +module.exports.get_mockS3Http = function() { + let mock_s3 = false; + if (!process.env.node_pre_gyp_mock_s3) { + return () => mock_s3; + } + + const nock = require('nock'); + // the bucket used for testing, as addressed by https. + const host = 'https://mapbox-node-pre-gyp-public-testing-bucket.s3.us-east-1.amazonaws.com'; + const mockDir = process.env.node_pre_gyp_mock_s3 + '/mapbox-node-pre-gyp-public-testing-bucket'; + + // function to setup interceptors. they are "turned off" by setting mock_s3 to false. 
+ const mock_http = () => { + // eslint-disable-next-line no-unused-vars + function get(uri, requestBody) { + const filepath = path.join(mockDir, uri.replace('%2B', '+')); + + try { + fs.accessSync(filepath, fs.constants.R_OK); + } catch (e) { + return [404, 'not found\n']; + } + + // the mock s3 functions just write to disk, so just read from it. + return [200, fs.createReadStream(filepath)]; + } + + // eslint-disable-next-line no-unused-vars + return nock(host) + .persist() + .get(() => mock_s3) // mock any uri for s3 when true + .reply(get); + }; + + // setup interceptors. they check the mock_s3 flag to determine whether to intercept. + mock_http(nock, host, mockDir); + // function to turn matching all requests to s3 on/off. + const mockS3Http = (action) => { + const previous = mock_s3; + if (action === 'off') { + mock_s3 = false; + } else if (action === 'on') { + mock_s3 = true; + } else if (action !== 'get') { + throw new Error(`illegal action for setMockHttp ${action}`); + } + return previous; + }; + + // call mockS3Http with the argument + // - 'on' - turn it on + // - 'off' - turn it off (used by fetch.test.js so it doesn't interfere with redirects) + // - 'get' - return true or false for 'on' or 'off' + return mockS3Http; +}; + + + diff --git a/node_modules/@mapbox/node-pre-gyp/lib/util/versioning.js b/node_modules/@mapbox/node-pre-gyp/lib/util/versioning.js new file mode 100644 index 0000000..825cfa1 --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/lib/util/versioning.js @@ -0,0 +1,335 @@ +'use strict'; + +module.exports = exports; + +const path = require('path'); +const semver = require('semver'); +const url = require('url'); +const detect_libc = require('detect-libc'); +const napi = require('./napi.js'); + +let abi_crosswalk; + +// This is used for unit testing to provide a fake +// ABI crosswalk that emulates one that is not updated +// for the current version +if (process.env.NODE_PRE_GYP_ABI_CROSSWALK) { + abi_crosswalk = 
require(process.env.NODE_PRE_GYP_ABI_CROSSWALK); +} else { + abi_crosswalk = require('./abi_crosswalk.json'); +} + +const major_versions = {}; +Object.keys(abi_crosswalk).forEach((v) => { + const major = v.split('.')[0]; + if (!major_versions[major]) { + major_versions[major] = v; + } +}); + +function get_electron_abi(runtime, target_version) { + if (!runtime) { + throw new Error('get_electron_abi requires valid runtime arg'); + } + if (typeof target_version === 'undefined') { + // erroneous CLI call + throw new Error('Empty target version is not supported if electron is the target.'); + } + // Electron guarantees that patch version update won't break native modules. + const sem_ver = semver.parse(target_version); + return runtime + '-v' + sem_ver.major + '.' + sem_ver.minor; +} +module.exports.get_electron_abi = get_electron_abi; + +function get_node_webkit_abi(runtime, target_version) { + if (!runtime) { + throw new Error('get_node_webkit_abi requires valid runtime arg'); + } + if (typeof target_version === 'undefined') { + // erroneous CLI call + throw new Error('Empty target version is not supported if node-webkit is the target.'); + } + return runtime + '-v' + target_version; +} +module.exports.get_node_webkit_abi = get_node_webkit_abi; + +function get_node_abi(runtime, versions) { + if (!runtime) { + throw new Error('get_node_abi requires valid runtime arg'); + } + if (!versions) { + throw new Error('get_node_abi requires valid process.versions object'); + } + const sem_ver = semver.parse(versions.node); + if (sem_ver.major === 0 && sem_ver.minor % 2) { // odd series + // https://github.com/mapbox/node-pre-gyp/issues/124 + return runtime + '-v' + versions.node; + } else { + // process.versions.modules added in >= v0.10.4 and v0.11.7 + // https://github.com/joyent/node/commit/ccabd4a6fa8a6eb79d29bc3bbe9fe2b6531c2d8e + return versions.modules ? 
runtime + '-v' + (+versions.modules) : + 'v8-' + versions.v8.split('.').slice(0, 2).join('.'); + } +} +module.exports.get_node_abi = get_node_abi; + +function get_runtime_abi(runtime, target_version) { + if (!runtime) { + throw new Error('get_runtime_abi requires valid runtime arg'); + } + if (runtime === 'node-webkit') { + return get_node_webkit_abi(runtime, target_version || process.versions['node-webkit']); + } else if (runtime === 'electron') { + return get_electron_abi(runtime, target_version || process.versions.electron); + } else { + if (runtime !== 'node') { + throw new Error("Unknown Runtime: '" + runtime + "'"); + } + if (!target_version) { + return get_node_abi(runtime, process.versions); + } else { + let cross_obj; + // abi_crosswalk generated with ./scripts/abi_crosswalk.js + if (abi_crosswalk[target_version]) { + cross_obj = abi_crosswalk[target_version]; + } else { + const target_parts = target_version.split('.').map((i) => { return +i; }); + if (target_parts.length !== 3) { // parse failed + throw new Error('Unknown target version: ' + target_version); + } + /* + The below code tries to infer the last known ABI compatible version + that we have recorded in the abi_crosswalk.json when an exact match + is not possible. The reasons for this to exist are complicated: + + - We support passing --target to be able to allow developers to package binaries for versions of node + that are not the same one as they are running. 
This might also be used in combination with the + --target_arch or --target_platform flags to also package binaries for alternative platforms + - When --target is passed we can't therefore determine the ABI (process.versions.modules) from the node + version that is running in memory + - So, therefore node-pre-gyp keeps an "ABI crosswalk" (lib/util/abi_crosswalk.json) to be able to look + this info up for all versions + - But we cannot easily predict what the future ABI will be for released versions + - And node-pre-gyp needs to be a `bundledDependency` in apps that depend on it in order to work correctly + by being fully available at install time. + - So, the speed of node releases and the bundled nature of node-pre-gyp mean that a new node-pre-gyp release + need to happen for every node.js/io.js/node-webkit/nw.js/atom-shell/etc release that might come online if + you want the `--target` flag to keep working for the latest version + - Which is impractical ^^ + - Hence the below code guesses about future ABI to make the need to update node-pre-gyp less demanding. + + In practice then you can have a dependency of your app like `node-sqlite3` that bundles a `node-pre-gyp` that + only knows about node v0.10.33 in the `abi_crosswalk.json` but target node v0.10.34 (which is assumed to be + ABI compatible with v0.10.33). + + TODO: use semver module instead of custom version parsing + */ + const major = target_parts[0]; + let minor = target_parts[1]; + let patch = target_parts[2]; + // io.js: yeah if node.js ever releases 1.x this will break + // but that is unlikely to happen: https://github.com/iojs/io.js/pull/253#issuecomment-69432616 + if (major === 1) { + // look for last release that is the same major version + // e.g. we assume io.js 1.x is ABI compatible with >= 1.0.0 + while (true) { + if (minor > 0) --minor; + if (patch > 0) --patch; + const new_iojs_target = '' + major + '.' + minor + '.' 
+ patch; + if (abi_crosswalk[new_iojs_target]) { + cross_obj = abi_crosswalk[new_iojs_target]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + new_iojs_target + ' as ABI compatible target'); + break; + } + if (minor === 0 && patch === 0) { + break; + } + } + } else if (major >= 2) { + // look for last release that is the same major version + if (major_versions[major]) { + cross_obj = abi_crosswalk[major_versions[major]]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + major_versions[major] + ' as ABI compatible target'); + } + } else if (major === 0) { // node.js + if (target_parts[1] % 2 === 0) { // for stable/even node.js series + // look for the last release that is the same minor release + // e.g. we assume node 0.10.x is ABI compatible with >= 0.10.0 + while (--patch > 0) { + const new_node_target = '' + major + '.' + minor + '.' + patch; + if (abi_crosswalk[new_node_target]) { + cross_obj = abi_crosswalk[new_node_target]; + console.log('Warning: node-pre-gyp could not find exact match for ' + target_version); + console.log('Warning: but node-pre-gyp successfully choose ' + new_node_target + ' as ABI compatible target'); + break; + } + } + } + } + } + if (!cross_obj) { + throw new Error('Unsupported target version: ' + target_version); + } + // emulate process.versions + const versions_obj = { + node: target_version, + v8: cross_obj.v8 + '.0', + // abi_crosswalk uses 1 for node versions lacking process.versions.modules + // process.versions.modules added in >= v0.10.4 and v0.11.7 + modules: cross_obj.node_abi > 1 ? 
cross_obj.node_abi : undefined + }; + return get_node_abi(runtime, versions_obj); + } + } +} +module.exports.get_runtime_abi = get_runtime_abi; + +const required_parameters = [ + 'module_name', + 'module_path', + 'host' +]; + +function validate_config(package_json, opts) { + const msg = package_json.name + ' package.json is not node-pre-gyp ready:\n'; + const missing = []; + if (!package_json.main) { + missing.push('main'); + } + if (!package_json.version) { + missing.push('version'); + } + if (!package_json.name) { + missing.push('name'); + } + if (!package_json.binary) { + missing.push('binary'); + } + const o = package_json.binary; + if (o) { + required_parameters.forEach((p) => { + if (!o[p] || typeof o[p] !== 'string') { + missing.push('binary.' + p); + } + }); + } + + if (missing.length >= 1) { + throw new Error(msg + 'package.json must declare these properties: \n' + missing.join('\n')); + } + if (o) { + // enforce https over http + const protocol = url.parse(o.host).protocol; + if (protocol === 'http:') { + throw new Error("'host' protocol (" + protocol + ") is invalid - only 'https:' is accepted"); + } + } + napi.validate_package_json(package_json, opts); +} + +module.exports.validate_config = validate_config; + +function eval_template(template, opts) { + Object.keys(opts).forEach((key) => { + const pattern = '{' + key + '}'; + while (template.indexOf(pattern) > -1) { + template = template.replace(pattern, opts[key]); + } + }); + return template; +} + +// url.resolve needs single trailing slash +// to behave correctly, otherwise a double slash +// may end up in the url which breaks requests +// and a lacking slash may not lead to proper joining +function fix_slashes(pathname) { + if (pathname.slice(-1) !== '/') { + return pathname + '/'; + } + return pathname; +} + +// remove double slashes +// note: path.normalize will not work because +// it will convert forward to back slashes +function drop_double_slashes(pathname) { + return pathname.replace(/\/\//g, 
'/'); +} + +function get_process_runtime(versions) { + let runtime = 'node'; + if (versions['node-webkit']) { + runtime = 'node-webkit'; + } else if (versions.electron) { + runtime = 'electron'; + } + return runtime; +} + +module.exports.get_process_runtime = get_process_runtime; + +const default_package_name = '{module_name}-v{version}-{node_abi}-{platform}-{arch}.tar.gz'; +const default_remote_path = ''; + +module.exports.evaluate = function(package_json, options, napi_build_version) { + options = options || {}; + validate_config(package_json, options); // options is a suitable substitute for opts in this case + const v = package_json.version; + const module_version = semver.parse(v); + const runtime = options.runtime || get_process_runtime(process.versions); + const opts = { + name: package_json.name, + configuration: options.debug ? 'Debug' : 'Release', + debug: options.debug, + module_name: package_json.binary.module_name, + version: module_version.version, + prerelease: module_version.prerelease.length ? module_version.prerelease.join('.') : '', + build: module_version.build.length ? module_version.build.join('.') : '', + major: module_version.major, + minor: module_version.minor, + patch: module_version.patch, + runtime: runtime, + node_abi: get_runtime_abi(runtime, options.target), + node_abi_napi: napi.get_napi_version(options.target) ? 'napi' : get_runtime_abi(runtime, options.target), + napi_version: napi.get_napi_version(options.target), // non-zero numeric, undefined if unsupported + napi_build_version: napi_build_version || '', + node_napi_label: napi_build_version ? 
'napi-v' + napi_build_version : get_runtime_abi(runtime, options.target), + target: options.target || '', + platform: options.target_platform || process.platform, + target_platform: options.target_platform || process.platform, + arch: options.target_arch || process.arch, + target_arch: options.target_arch || process.arch, + libc: options.target_libc || detect_libc.familySync() || 'unknown', + module_main: package_json.main, + toolset: options.toolset || '', // address https://github.com/mapbox/node-pre-gyp/issues/119 + bucket: package_json.binary.bucket, + region: package_json.binary.region, + s3ForcePathStyle: package_json.binary.s3ForcePathStyle || false + }; + // support host mirror with npm config `--{module_name}_binary_host_mirror` + // e.g.: https://github.com/node-inspector/v8-profiler/blob/master/package.json#L25 + // > npm install v8-profiler --profiler_binary_host_mirror=https://npm.taobao.org/mirrors/node-inspector/ + const validModuleName = opts.module_name.replace('-', '_'); + const host = process.env['npm_config_' + validModuleName + '_binary_host_mirror'] || package_json.binary.host; + opts.host = fix_slashes(eval_template(host, opts)); + opts.module_path = eval_template(package_json.binary.module_path, opts); + // now we resolve the module_path to ensure it is absolute so that binding.gyp variables work predictably + if (options.module_root) { + // resolve relative to known module root: works for pre-binding require + opts.module_path = path.join(options.module_root, opts.module_path); + } else { + // resolve relative to current working directory: works for node-pre-gyp commands + opts.module_path = path.resolve(opts.module_path); + } + opts.module = path.join(opts.module_path, opts.module_name + '.node'); + opts.remote_path = package_json.binary.remote_path ? drop_double_slashes(fix_slashes(eval_template(package_json.binary.remote_path, opts))) : default_remote_path; + const package_name = package_json.binary.package_name ? 
package_json.binary.package_name : default_package_name; + opts.package_name = eval_template(package_name, opts); + opts.staged_tarball = path.join('build/stage', opts.remote_path, opts.package_name); + opts.hosted_path = url.resolve(opts.host, opts.remote_path); + opts.hosted_tarball = url.resolve(opts.hosted_path, opts.package_name); + return opts; +}; diff --git a/node_modules/@mapbox/node-pre-gyp/package.json b/node_modules/@mapbox/node-pre-gyp/package.json new file mode 100644 index 0000000..5e1d6fd --- /dev/null +++ b/node_modules/@mapbox/node-pre-gyp/package.json @@ -0,0 +1,62 @@ +{ + "name": "@mapbox/node-pre-gyp", + "description": "Node.js native addon binary install tool", + "version": "1.0.11", + "keywords": [ + "native", + "addon", + "module", + "c", + "c++", + "bindings", + "binary" + ], + "license": "BSD-3-Clause", + "author": "Dane Springmeyer ", + "repository": { + "type": "git", + "url": "git://github.com/mapbox/node-pre-gyp.git" + }, + "bin": "./bin/node-pre-gyp", + "main": "./lib/node-pre-gyp.js", + "dependencies": { + "detect-libc": "^2.0.0", + "https-proxy-agent": "^5.0.0", + "make-dir": "^3.1.0", + "node-fetch": "^2.6.7", + "nopt": "^5.0.0", + "npmlog": "^5.0.1", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.11" + }, + "devDependencies": { + "@mapbox/cloudfriend": "^5.1.0", + "@mapbox/eslint-config-mapbox": "^3.0.0", + "aws-sdk": "^2.1087.0", + "codecov": "^3.8.3", + "eslint": "^7.32.0", + "eslint-plugin-node": "^11.1.0", + "mock-aws-s3": "^4.0.2", + "nock": "^12.0.3", + "node-addon-api": "^4.3.0", + "nyc": "^15.1.0", + "tape": "^5.5.2", + "tar-fs": "^2.1.1" + }, + "nyc": { + "all": true, + "skip-full": false, + "exclude": [ + "test/**" + ] + }, + "scripts": { + "coverage": "nyc --all --include index.js --include lib/ npm test", + "upload-coverage": "nyc report --reporter json && codecov --clear --flags=unit --file=./coverage/coverage-final.json", + "lint": "eslint bin/node-pre-gyp lib/*js lib/util/*js test/*js scripts/*js", + 
"fix": "npm run lint -- --fix", + "update-crosswalk": "node scripts/abi_crosswalk.js", + "test": "tape test/*test.js" + } +} diff --git a/node_modules/@redis/bloom/README.md b/node_modules/@redis/bloom/README.md new file mode 100644 index 0000000..8eb1445 --- /dev/null +++ b/node_modules/@redis/bloom/README.md @@ -0,0 +1,14 @@ +# @redis/bloom + +This package provides support for the [RedisBloom](https://redisbloom.io) module, which adds additional probabilistic data structures to Redis. It extends the [Node Redis client](https://github.com/redis/node-redis) to include functions for each of the RediBloom commands. + +To use these extra commands, your Redis server must have the RedisBloom module installed. + +RedisBloom provides the following probabilistic data structures: + +* Bloom Filter: for checking set membership with a high degree of certainty. +* Cuckoo Filter: for checking set membership with a high degree of certainty. +* Count-Min Sketch: Determine the frequency of events in a stream. +* Top-K: Maintain a list of k most frequently seen items. + +For complete examples, see `bloom-filter.js`, `cuckoo-filter.js`, `count-min-sketch.js` and `topk.js` in the Node Redis examples folder. 
diff --git a/node_modules/@redis/bloom/dist/commands/bloom/ADD.d.ts b/node_modules/@redis/bloom/dist/commands/bloom/ADD.d.ts new file mode 100644 index 0000000..98f092d --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/ADD.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, item: string): Array; +export { transformBooleanReply as transformReply } from '@redis/client/dist/lib/commands/generic-transformers'; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/ADD.js b/node_modules/@redis/bloom/dist/commands/bloom/ADD.js new file mode 100644 index 0000000..10aff86 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/ADD.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, item) { + return ['BF.ADD', key, item]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/bloom/CARD.d.ts b/node_modules/@redis/bloom/dist/commands/bloom/CARD.d.ts new file mode 100644 index 0000000..7b54a1f --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/CARD.d.ts @@ -0,0 +1,4 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/CARD.js b/node_modules/@redis/bloom/dist/commands/bloom/CARD.js new file mode 100644 index 0000000..c3a20bf --- /dev/null +++ 
b/node_modules/@redis/bloom/dist/commands/bloom/CARD.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['BF.CARD', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/EXISTS.d.ts b/node_modules/@redis/bloom/dist/commands/bloom/EXISTS.d.ts new file mode 100644 index 0000000..44b0f53 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/EXISTS.d.ts @@ -0,0 +1,4 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, item: string): Array; +export { transformBooleanReply as transformReply } from '@redis/client/dist/lib/commands/generic-transformers'; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/EXISTS.js b/node_modules/@redis/bloom/dist/commands/bloom/EXISTS.js new file mode 100644 index 0000000..3fe00d5 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/EXISTS.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, item) { + return ['BF.EXISTS', key, item]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/bloom/INFO.d.ts b/node_modules/@redis/bloom/dist/commands/bloom/INFO.d.ts new file mode 100644 
index 0000000..07ee69a --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/INFO.d.ts @@ -0,0 +1,23 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string): Array; +export type InfoRawReply = [ + _: string, + capacity: number, + _: string, + size: number, + _: string, + numberOfFilters: number, + _: string, + numberOfInsertedItems: number, + _: string, + expansionRate: number +]; +export interface InfoReply { + capacity: number; + size: number; + numberOfFilters: number; + numberOfInsertedItems: number; + expansionRate: number; +} +export declare function transformReply(reply: InfoRawReply): InfoReply; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/INFO.js b/node_modules/@redis/bloom/dist/commands/bloom/INFO.js new file mode 100644 index 0000000..379e585 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/INFO.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['BF.INFO', key]; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + capacity: reply[1], + size: reply[3], + numberOfFilters: reply[5], + numberOfInsertedItems: reply[7], + expansionRate: reply[9] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/INSERT.d.ts b/node_modules/@redis/bloom/dist/commands/bloom/INSERT.d.ts new file mode 100644 index 0000000..c1f8fec --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/INSERT.d.ts @@ -0,0 +1,11 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +interface InsertOptions { 
+ CAPACITY?: number; + ERROR?: number; + EXPANSION?: number; + NOCREATE?: true; + NONSCALING?: true; +} +export declare function transformArguments(key: string, items: RedisCommandArgument | Array, options?: InsertOptions): RedisCommandArguments; +export { transformBooleanArrayReply as transformReply } from '@redis/client/dist/lib/commands/generic-transformers'; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/INSERT.js b/node_modules/@redis/bloom/dist/commands/bloom/INSERT.js new file mode 100644 index 0000000..7f27273 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/INSERT.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, items, options) { + const args = ['BF.INSERT', key]; + if (options?.CAPACITY) { + args.push('CAPACITY', options.CAPACITY.toString()); + } + if (options?.ERROR) { + args.push('ERROR', options.ERROR.toString()); + } + if (options?.EXPANSION) { + args.push('EXPANSION', options.EXPANSION.toString()); + } + if (options?.NOCREATE) { + args.push('NOCREATE'); + } + if (options?.NONSCALING) { + args.push('NONSCALING'); + } + args.push('ITEMS'); + return (0, generic_transformers_1.pushVerdictArguments)(args, items); +} +exports.transformArguments = transformArguments; +var generic_transformers_2 = require("@redis/client/dist/lib/commands/generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_2.transformBooleanArrayReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/bloom/LOADCHUNK.d.ts b/node_modules/@redis/bloom/dist/commands/bloom/LOADCHUNK.d.ts new file mode 100644 index 0000000..786418e --- /dev/null +++ 
b/node_modules/@redis/bloom/dist/commands/bloom/LOADCHUNK.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, iteretor: number, chunk: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/LOADCHUNK.js b/node_modules/@redis/bloom/dist/commands/bloom/LOADCHUNK.js new file mode 100644 index 0000000..0d7bc27 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/LOADCHUNK.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, iteretor, chunk) { + return ['BF.LOADCHUNK', key, iteretor.toString(), chunk]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/MADD.d.ts b/node_modules/@redis/bloom/dist/commands/bloom/MADD.d.ts new file mode 100644 index 0000000..4ec8f35 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/MADD.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, items: Array): Array; +export { transformBooleanArrayReply as transformReply } from '@redis/client/dist/lib/commands/generic-transformers'; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/MADD.js b/node_modules/@redis/bloom/dist/commands/bloom/MADD.js new file mode 100644 index 0000000..1c283f7 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/MADD.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, 
items) { + return ['BF.MADD', key, ...items]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanArrayReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/bloom/MEXISTS.d.ts b/node_modules/@redis/bloom/dist/commands/bloom/MEXISTS.d.ts new file mode 100644 index 0000000..81bbd27 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/MEXISTS.d.ts @@ -0,0 +1,4 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, items: Array): Array; +export { transformBooleanArrayReply as transformReply } from '@redis/client/dist/lib/commands/generic-transformers'; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/MEXISTS.js b/node_modules/@redis/bloom/dist/commands/bloom/MEXISTS.js new file mode 100644 index 0000000..4da70a7 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/MEXISTS.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, items) { + return ['BF.MEXISTS', key, ...items]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanArrayReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/bloom/RESERVE.d.ts b/node_modules/@redis/bloom/dist/commands/bloom/RESERVE.d.ts new file mode 100644 index 0000000..3394681 --- /dev/null +++ 
b/node_modules/@redis/bloom/dist/commands/bloom/RESERVE.d.ts @@ -0,0 +1,8 @@ +export declare const FIRST_KEY_INDEX = 1; +interface ReserveOptions { + EXPANSION?: number; + NONSCALING?: true; +} +export declare function transformArguments(key: string, errorRate: number, capacity: number, options?: ReserveOptions): Array; +export declare function transformReply(): 'OK'; +export {}; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/RESERVE.js b/node_modules/@redis/bloom/dist/commands/bloom/RESERVE.js new file mode 100644 index 0000000..0a5c6c3 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/RESERVE.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, errorRate, capacity, options) { + const args = ['BF.RESERVE', key, errorRate.toString(), capacity.toString()]; + if (options?.EXPANSION) { + args.push('EXPANSION', options.EXPANSION.toString()); + } + if (options?.NONSCALING) { + args.push('NONSCALING'); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/SCANDUMP.d.ts b/node_modules/@redis/bloom/dist/commands/bloom/SCANDUMP.d.ts new file mode 100644 index 0000000..1c4c4a9 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/SCANDUMP.d.ts @@ -0,0 +1,13 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, iterator: number): Array; +type ScanDumpRawReply = [ + iterator: number, + chunk: string +]; +interface ScanDumpReply { + iterator: number; + chunk: string; +} +export declare function transformReply([iterator, chunk]: ScanDumpRawReply): ScanDumpReply; +export {}; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/SCANDUMP.js b/node_modules/@redis/bloom/dist/commands/bloom/SCANDUMP.js 
new file mode 100644 index 0000000..0214538 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/SCANDUMP.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, iterator) { + return ['BF.SCANDUMP', key, iterator.toString()]; +} +exports.transformArguments = transformArguments; +function transformReply([iterator, chunk]) { + return { + iterator, + chunk + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/index.d.ts b/node_modules/@redis/bloom/dist/commands/bloom/index.d.ts new file mode 100644 index 0000000..3d10a74 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/index.d.ts @@ -0,0 +1,33 @@ +import * as ADD from './ADD'; +import * as CARD from './CARD'; +import * as EXISTS from './EXISTS'; +import * as INFO from './INFO'; +import * as INSERT from './INSERT'; +import * as LOADCHUNK from './LOADCHUNK'; +import * as MADD from './MADD'; +import * as MEXISTS from './MEXISTS'; +import * as RESERVE from './RESERVE'; +import * as SCANDUMP from './SCANDUMP'; +declare const _default: { + ADD: typeof ADD; + add: typeof ADD; + CARD: typeof CARD; + card: typeof CARD; + EXISTS: typeof EXISTS; + exists: typeof EXISTS; + INFO: typeof INFO; + info: typeof INFO; + INSERT: typeof INSERT; + insert: typeof INSERT; + LOADCHUNK: typeof LOADCHUNK; + loadChunk: typeof LOADCHUNK; + MADD: typeof MADD; + mAdd: typeof MADD; + MEXISTS: typeof MEXISTS; + mExists: typeof MEXISTS; + RESERVE: typeof RESERVE; + reserve: typeof RESERVE; + SCANDUMP: typeof SCANDUMP; + scanDump: typeof SCANDUMP; +}; +export default _default; diff --git a/node_modules/@redis/bloom/dist/commands/bloom/index.js b/node_modules/@redis/bloom/dist/commands/bloom/index.js new file mode 100644 
index 0000000..4232b4e --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/bloom/index.js @@ -0,0 +1,34 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const ADD = require("./ADD"); +const CARD = require("./CARD"); +const EXISTS = require("./EXISTS"); +const INFO = require("./INFO"); +const INSERT = require("./INSERT"); +const LOADCHUNK = require("./LOADCHUNK"); +const MADD = require("./MADD"); +const MEXISTS = require("./MEXISTS"); +const RESERVE = require("./RESERVE"); +const SCANDUMP = require("./SCANDUMP"); +exports.default = { + ADD, + add: ADD, + CARD, + card: CARD, + EXISTS, + exists: EXISTS, + INFO, + info: INFO, + INSERT, + insert: INSERT, + LOADCHUNK, + loadChunk: LOADCHUNK, + MADD, + mAdd: MADD, + MEXISTS, + mExists: MEXISTS, + RESERVE, + reserve: RESERVE, + SCANDUMP, + scanDump: SCANDUMP +}; diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/INCRBY.d.ts b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INCRBY.d.ts new file mode 100644 index 0000000..60ea8f3 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INCRBY.d.ts @@ -0,0 +1,8 @@ +export declare const FIRST_KEY_INDEX = 1; +interface IncrByItem { + item: string; + incrementBy: number; +} +export declare function transformArguments(key: string, items: IncrByItem | Array): Array; +export declare function transformReply(): Array; +export {}; diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/INCRBY.js b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INCRBY.js new file mode 100644 index 0000000..3e9be04 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INCRBY.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, items) { + const args = ['CMS.INCRBY', key]; + if (Array.isArray(items)) 
{ + for (const item of items) { + pushIncrByItem(args, item); + } + } + else { + pushIncrByItem(args, items); + } + return args; +} +exports.transformArguments = transformArguments; +function pushIncrByItem(args, { item, incrementBy }) { + args.push(item, incrementBy.toString()); +} diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/INFO.d.ts b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INFO.d.ts new file mode 100644 index 0000000..c04cdf2 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INFO.d.ts @@ -0,0 +1,17 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string): Array; +export type InfoRawReply = [ + _: string, + width: number, + _: string, + depth: number, + _: string, + count: number +]; +export interface InfoReply { + width: number; + depth: number; + count: number; +} +export declare function transformReply(reply: InfoRawReply): InfoReply; diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/INFO.js b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INFO.js new file mode 100644 index 0000000..df3ae99 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INFO.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['CMS.INFO', key]; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + width: reply[1], + depth: reply[3], + count: reply[5] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYDIM.d.ts b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYDIM.d.ts new file mode 
100644 index 0000000..9a702d2 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYDIM.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, width: number, depth: number): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYDIM.js b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYDIM.js new file mode 100644 index 0000000..68b9e46 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYDIM.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, width, depth) { + return ['CMS.INITBYDIM', key, width.toString(), depth.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYPROB.d.ts b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYPROB.d.ts new file mode 100644 index 0000000..dff4c8f --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYPROB.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, error: number, probability: number): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYPROB.js b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYPROB.js new file mode 100644 index 0000000..70dc3d4 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/INITBYPROB.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, 
error, probability) { + return ['CMS.INITBYPROB', key, error.toString(), probability.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/MERGE.d.ts b/node_modules/@redis/bloom/dist/commands/count-min-sketch/MERGE.d.ts new file mode 100644 index 0000000..36f9f27 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/MERGE.d.ts @@ -0,0 +1,9 @@ +export declare const FIRST_KEY_INDEX = 1; +interface Sketch { + name: string; + weight: number; +} +type Sketches = Array | Array; +export declare function transformArguments(dest: string, src: Sketches): Array; +export declare function transformReply(): 'OK'; +export {}; diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/MERGE.js b/node_modules/@redis/bloom/dist/commands/count-min-sketch/MERGE.js new file mode 100644 index 0000000..13710b7 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/MERGE.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(dest, src) { + const args = [ + 'CMS.MERGE', + dest, + src.length.toString() + ]; + if (isStringSketches(src)) { + args.push(...src); + } + else { + for (const sketch of src) { + args.push(sketch.name); + } + args.push('WEIGHTS'); + for (const sketch of src) { + args.push(sketch.weight.toString()); + } + } + return args; +} +exports.transformArguments = transformArguments; +function isStringSketches(src) { + return typeof src[0] === 'string'; +} diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/QUERY.d.ts b/node_modules/@redis/bloom/dist/commands/count-min-sketch/QUERY.d.ts new file mode 100644 index 0000000..e419fe6 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/QUERY.d.ts @@ -0,0 +1,5 @@ +import { 
RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, items: string | Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/QUERY.js b/node_modules/@redis/bloom/dist/commands/count-min-sketch/QUERY.js new file mode 100644 index 0000000..395a425 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/QUERY.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, items) { + return (0, generic_transformers_1.pushVerdictArguments)(['CMS.QUERY', key], items); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/index.d.ts b/node_modules/@redis/bloom/dist/commands/count-min-sketch/index.d.ts new file mode 100644 index 0000000..80d0666 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/index.d.ts @@ -0,0 +1,21 @@ +import * as INCRBY from './INCRBY'; +import * as INFO from './INFO'; +import * as INITBYDIM from './INITBYDIM'; +import * as INITBYPROB from './INITBYPROB'; +import * as MERGE from './MERGE'; +import * as QUERY from './QUERY'; +declare const _default: { + INCRBY: typeof INCRBY; + incrBy: typeof INCRBY; + INFO: typeof INFO; + info: typeof INFO; + INITBYDIM: typeof INITBYDIM; + initByDim: typeof INITBYDIM; + INITBYPROB: typeof INITBYPROB; + initByProb: typeof INITBYPROB; + MERGE: typeof MERGE; + merge: typeof MERGE; + QUERY: typeof QUERY; + query: typeof QUERY; +}; +export default _default; 
diff --git a/node_modules/@redis/bloom/dist/commands/count-min-sketch/index.js b/node_modules/@redis/bloom/dist/commands/count-min-sketch/index.js new file mode 100644 index 0000000..7aef3e4 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/count-min-sketch/index.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const INCRBY = require("./INCRBY"); +const INFO = require("./INFO"); +const INITBYDIM = require("./INITBYDIM"); +const INITBYPROB = require("./INITBYPROB"); +const MERGE = require("./MERGE"); +const QUERY = require("./QUERY"); +exports.default = { + INCRBY, + incrBy: INCRBY, + INFO, + info: INFO, + INITBYDIM, + initByDim: INITBYDIM, + INITBYPROB, + initByProb: INITBYPROB, + MERGE, + merge: MERGE, + QUERY, + query: QUERY +}; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/ADD.d.ts b/node_modules/@redis/bloom/dist/commands/cuckoo/ADD.d.ts new file mode 100644 index 0000000..98f092d --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/ADD.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, item: string): Array; +export { transformBooleanReply as transformReply } from '@redis/client/dist/lib/commands/generic-transformers'; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/ADD.js b/node_modules/@redis/bloom/dist/commands/cuckoo/ADD.js new file mode 100644 index 0000000..b61ea67 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/ADD.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, item) { + return ['CF.ADD', key, item]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); 
+Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/ADDNX.d.ts b/node_modules/@redis/bloom/dist/commands/cuckoo/ADDNX.d.ts new file mode 100644 index 0000000..98f092d --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/ADDNX.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, item: string): Array; +export { transformBooleanReply as transformReply } from '@redis/client/dist/lib/commands/generic-transformers'; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/ADDNX.js b/node_modules/@redis/bloom/dist/commands/cuckoo/ADDNX.js new file mode 100644 index 0000000..bc1b99a --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/ADDNX.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, item) { + return ['CF.ADDNX', key, item]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/COUNT.d.ts b/node_modules/@redis/bloom/dist/commands/cuckoo/COUNT.d.ts new file mode 100644 index 0000000..e781bba --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/COUNT.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, item: string): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/COUNT.js 
b/node_modules/@redis/bloom/dist/commands/cuckoo/COUNT.js new file mode 100644 index 0000000..772618e --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/COUNT.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, item) { + return ['CF.COUNT', key, item]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/DEL.d.ts b/node_modules/@redis/bloom/dist/commands/cuckoo/DEL.d.ts new file mode 100644 index 0000000..98f092d --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/DEL.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, item: string): Array; +export { transformBooleanReply as transformReply } from '@redis/client/dist/lib/commands/generic-transformers'; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/DEL.js b/node_modules/@redis/bloom/dist/commands/cuckoo/DEL.js new file mode 100644 index 0000000..edb7e58 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/DEL.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, item) { + return ['CF.DEL', key, item]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/EXISTS.d.ts b/node_modules/@redis/bloom/dist/commands/cuckoo/EXISTS.d.ts new file mode 100644 index 0000000..44b0f53 
--- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/EXISTS.d.ts @@ -0,0 +1,4 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, item: string): Array; +export { transformBooleanReply as transformReply } from '@redis/client/dist/lib/commands/generic-transformers'; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/EXISTS.js b/node_modules/@redis/bloom/dist/commands/cuckoo/EXISTS.js new file mode 100644 index 0000000..eb8d966 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/EXISTS.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, item) { + return ['CF.EXISTS', key, item]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/INFO.d.ts b/node_modules/@redis/bloom/dist/commands/cuckoo/INFO.d.ts new file mode 100644 index 0000000..3a5850d --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/INFO.d.ts @@ -0,0 +1,32 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string): Array; +export type InfoRawReply = [ + _: string, + size: number, + _: string, + numberOfBuckets: number, + _: string, + numberOfFilters: number, + _: string, + numberOfInsertedItems: number, + _: string, + numberOfDeletedItems: number, + _: string, + bucketSize: number, + _: string, + expansionRate: number, + _: string, + 
maxIteration: number +]; +export interface InfoReply { + size: number; + numberOfBuckets: number; + numberOfFilters: number; + numberOfInsertedItems: number; + numberOfDeletedItems: number; + bucketSize: number; + expansionRate: number; + maxIteration: number; +} +export declare function transformReply(reply: InfoRawReply): InfoReply; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/INFO.js b/node_modules/@redis/bloom/dist/commands/cuckoo/INFO.js new file mode 100644 index 0000000..5c9c4be --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/INFO.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['CF.INFO', key]; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + size: reply[1], + numberOfBuckets: reply[3], + numberOfFilters: reply[5], + numberOfInsertedItems: reply[7], + numberOfDeletedItems: reply[9], + bucketSize: reply[11], + expansionRate: reply[13], + maxIteration: reply[15] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/INSERT.d.ts b/node_modules/@redis/bloom/dist/commands/cuckoo/INSERT.d.ts new file mode 100644 index 0000000..f43ac23 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/INSERT.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { InsertOptions } from "."; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, items: string | Array, options?: InsertOptions): RedisCommandArguments; +export { transformBooleanArrayReply as transformReply } from '@redis/client/dist/lib/commands/generic-transformers'; diff --git 
a/node_modules/@redis/bloom/dist/commands/cuckoo/INSERT.js b/node_modules/@redis/bloom/dist/commands/cuckoo/INSERT.js new file mode 100644 index 0000000..ef4fb78 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/INSERT.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, items, options) { + return (0, _1.pushInsertOptions)(['CF.INSERT', key], items, options); +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanArrayReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/INSERTNX.d.ts b/node_modules/@redis/bloom/dist/commands/cuckoo/INSERTNX.d.ts new file mode 100644 index 0000000..f43ac23 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/INSERTNX.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { InsertOptions } from "."; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, items: string | Array, options?: InsertOptions): RedisCommandArguments; +export { transformBooleanArrayReply as transformReply } from '@redis/client/dist/lib/commands/generic-transformers'; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/INSERTNX.js b/node_modules/@redis/bloom/dist/commands/cuckoo/INSERTNX.js new file mode 100644 index 0000000..f80b317 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/INSERTNX.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = 
exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, items, options) { + return (0, _1.pushInsertOptions)(['CF.INSERTNX', key], items, options); +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanArrayReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/LOADCHUNK.d.ts b/node_modules/@redis/bloom/dist/commands/cuckoo/LOADCHUNK.d.ts new file mode 100644 index 0000000..d6302db --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/LOADCHUNK.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, iterator: number, chunk: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/LOADCHUNK.js b/node_modules/@redis/bloom/dist/commands/cuckoo/LOADCHUNK.js new file mode 100644 index 0000000..ddd0ebd --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/LOADCHUNK.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, iterator, chunk) { + return ['CF.LOADCHUNK', key, iterator.toString(), chunk]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/RESERVE.d.ts b/node_modules/@redis/bloom/dist/commands/cuckoo/RESERVE.d.ts new file mode 100644 index 0000000..35d77cd --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/RESERVE.d.ts @@ -0,0 +1,9 
@@ +export declare const FIRST_KEY_INDEX = 1; +interface ReserveOptions { + BUCKETSIZE?: number; + MAXITERATIONS?: number; + EXPANSION?: number; +} +export declare function transformArguments(key: string, capacity: number, options?: ReserveOptions): Array; +export declare function transformReply(): 'OK'; +export {}; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/RESERVE.js b/node_modules/@redis/bloom/dist/commands/cuckoo/RESERVE.js new file mode 100644 index 0000000..6b51866 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/RESERVE.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, capacity, options) { + const args = ['CF.RESERVE', key, capacity.toString()]; + if (options?.BUCKETSIZE) { + args.push('BUCKETSIZE', options.BUCKETSIZE.toString()); + } + if (options?.MAXITERATIONS) { + args.push('MAXITERATIONS', options.MAXITERATIONS.toString()); + } + if (options?.EXPANSION) { + args.push('EXPANSION', options.EXPANSION.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/SCANDUMP.d.ts b/node_modules/@redis/bloom/dist/commands/cuckoo/SCANDUMP.d.ts new file mode 100644 index 0000000..fee6405 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/SCANDUMP.d.ts @@ -0,0 +1,12 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, iterator: number): Array; +type ScanDumpRawReply = [ + iterator: number, + chunk: string | null +]; +interface ScanDumpReply { + iterator: number; + chunk: string | null; +} +export declare function transformReply([iterator, chunk]: ScanDumpRawReply): ScanDumpReply; +export {}; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/SCANDUMP.js 
b/node_modules/@redis/bloom/dist/commands/cuckoo/SCANDUMP.js new file mode 100644 index 0000000..9401c05 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/SCANDUMP.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, iterator) { + return ['CF.SCANDUMP', key, iterator.toString()]; +} +exports.transformArguments = transformArguments; +function transformReply([iterator, chunk]) { + return { + iterator, + chunk + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/index.d.ts b/node_modules/@redis/bloom/dist/commands/cuckoo/index.d.ts new file mode 100644 index 0000000..7c78372 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/index.d.ts @@ -0,0 +1,42 @@ +import * as ADD from './ADD'; +import * as ADDNX from './ADDNX'; +import * as COUNT from './COUNT'; +import * as DEL from './DEL'; +import * as EXISTS from './EXISTS'; +import * as INFO from './INFO'; +import * as INSERT from './INSERT'; +import * as INSERTNX from './INSERTNX'; +import * as LOADCHUNK from './LOADCHUNK'; +import * as RESERVE from './RESERVE'; +import * as SCANDUMP from './SCANDUMP'; +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +declare const _default: { + ADD: typeof ADD; + add: typeof ADD; + ADDNX: typeof ADDNX; + addNX: typeof ADDNX; + COUNT: typeof COUNT; + count: typeof COUNT; + DEL: typeof DEL; + del: typeof DEL; + EXISTS: typeof EXISTS; + exists: typeof EXISTS; + INFO: typeof INFO; + info: typeof INFO; + INSERT: typeof INSERT; + insert: typeof INSERT; + INSERTNX: typeof INSERTNX; + insertNX: typeof INSERTNX; + LOADCHUNK: typeof LOADCHUNK; + loadChunk: typeof LOADCHUNK; + RESERVE: typeof RESERVE; + reserve: typeof RESERVE; + SCANDUMP: typeof SCANDUMP; + scanDump: typeof SCANDUMP; +}; 
+export default _default; +export interface InsertOptions { + CAPACITY?: number; + NOCREATE?: true; +} +export declare function pushInsertOptions(args: RedisCommandArguments, items: string | Array, options?: InsertOptions): RedisCommandArguments; diff --git a/node_modules/@redis/bloom/dist/commands/cuckoo/index.js b/node_modules/@redis/bloom/dist/commands/cuckoo/index.js new file mode 100644 index 0000000..3d428ee --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/cuckoo/index.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.pushInsertOptions = void 0; +const ADD = require("./ADD"); +const ADDNX = require("./ADDNX"); +const COUNT = require("./COUNT"); +const DEL = require("./DEL"); +const EXISTS = require("./EXISTS"); +const INFO = require("./INFO"); +const INSERT = require("./INSERT"); +const INSERTNX = require("./INSERTNX"); +const LOADCHUNK = require("./LOADCHUNK"); +const RESERVE = require("./RESERVE"); +const SCANDUMP = require("./SCANDUMP"); +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +exports.default = { + ADD, + add: ADD, + ADDNX, + addNX: ADDNX, + COUNT, + count: COUNT, + DEL, + del: DEL, + EXISTS, + exists: EXISTS, + INFO, + info: INFO, + INSERT, + insert: INSERT, + INSERTNX, + insertNX: INSERTNX, + LOADCHUNK, + loadChunk: LOADCHUNK, + RESERVE, + reserve: RESERVE, + SCANDUMP, + scanDump: SCANDUMP +}; +function pushInsertOptions(args, items, options) { + if (options?.CAPACITY) { + args.push('CAPACITY'); + args.push(options.CAPACITY.toString()); + } + if (options?.NOCREATE) { + args.push('NOCREATE'); + } + args.push('ITEMS'); + return (0, generic_transformers_1.pushVerdictArguments)(args, items); +} +exports.pushInsertOptions = pushInsertOptions; diff --git a/node_modules/@redis/bloom/dist/commands/index.d.ts b/node_modules/@redis/bloom/dist/commands/index.d.ts new file mode 100644 index 0000000..c9de957 --- /dev/null +++ 
b/node_modules/@redis/bloom/dist/commands/index.d.ts @@ -0,0 +1,111 @@ +declare const _default: { + bf: { + ADD: typeof import("./bloom/ADD"); + add: typeof import("./bloom/ADD"); + CARD: typeof import("./bloom/CARD"); + card: typeof import("./bloom/CARD"); + EXISTS: typeof import("./bloom/EXISTS"); + exists: typeof import("./bloom/EXISTS"); + INFO: typeof import("./bloom/INFO"); + info: typeof import("./bloom/INFO"); + INSERT: typeof import("./bloom/INSERT"); + insert: typeof import("./bloom/INSERT"); + LOADCHUNK: typeof import("./bloom/LOADCHUNK"); + loadChunk: typeof import("./bloom/LOADCHUNK"); + MADD: typeof import("./bloom/MADD"); + mAdd: typeof import("./bloom/MADD"); + MEXISTS: typeof import("./bloom/MEXISTS"); + mExists: typeof import("./bloom/MEXISTS"); + RESERVE: typeof import("./bloom/RESERVE"); + reserve: typeof import("./bloom/RESERVE"); + SCANDUMP: typeof import("./bloom/SCANDUMP"); + scanDump: typeof import("./bloom/SCANDUMP"); + }; + cms: { + INCRBY: typeof import("./count-min-sketch/INCRBY"); + incrBy: typeof import("./count-min-sketch/INCRBY"); + INFO: typeof import("./count-min-sketch/INFO"); + info: typeof import("./count-min-sketch/INFO"); + INITBYDIM: typeof import("./count-min-sketch/INITBYDIM"); + initByDim: typeof import("./count-min-sketch/INITBYDIM"); + INITBYPROB: typeof import("./count-min-sketch/INITBYPROB"); + initByProb: typeof import("./count-min-sketch/INITBYPROB"); + MERGE: typeof import("./count-min-sketch/MERGE"); + merge: typeof import("./count-min-sketch/MERGE"); + QUERY: typeof import("./count-min-sketch/QUERY"); + query: typeof import("./count-min-sketch/QUERY"); + }; + cf: { + ADD: typeof import("./cuckoo/ADD"); + add: typeof import("./cuckoo/ADD"); + ADDNX: typeof import("./cuckoo/ADDNX"); + addNX: typeof import("./cuckoo/ADDNX"); + COUNT: typeof import("./cuckoo/COUNT"); + count: typeof import("./cuckoo/COUNT"); + DEL: typeof import("./cuckoo/DEL"); + del: typeof import("./cuckoo/DEL"); + EXISTS: typeof 
import("./cuckoo/EXISTS"); + exists: typeof import("./cuckoo/EXISTS"); + INFO: typeof import("./cuckoo/INFO"); + info: typeof import("./cuckoo/INFO"); + INSERT: typeof import("./cuckoo/INSERT"); + insert: typeof import("./cuckoo/INSERT"); + INSERTNX: typeof import("./cuckoo/INSERTNX"); + insertNX: typeof import("./cuckoo/INSERTNX"); + LOADCHUNK: typeof import("./cuckoo/LOADCHUNK"); + loadChunk: typeof import("./cuckoo/LOADCHUNK"); + RESERVE: typeof import("./cuckoo/RESERVE"); + reserve: typeof import("./cuckoo/RESERVE"); + SCANDUMP: typeof import("./cuckoo/SCANDUMP"); + scanDump: typeof import("./cuckoo/SCANDUMP"); + }; + tDigest: { + ADD: typeof import("./t-digest/ADD"); + add: typeof import("./t-digest/ADD"); + BYRANK: typeof import("./t-digest/BYRANK"); + byRank: typeof import("./t-digest/BYRANK"); + BYREVRANK: typeof import("./t-digest/BYREVRANK"); + byRevRank: typeof import("./t-digest/BYREVRANK"); + CDF: typeof import("./t-digest/CDF"); + cdf: typeof import("./t-digest/CDF"); + CREATE: typeof import("./t-digest/CREATE"); + create: typeof import("./t-digest/CREATE"); + INFO: typeof import("./t-digest/INFO"); + info: typeof import("./t-digest/INFO"); + MAX: typeof import("./t-digest/MAX"); + max: typeof import("./t-digest/MAX"); + MERGE: typeof import("./t-digest/MERGE"); + merge: typeof import("./t-digest/MERGE"); + MIN: typeof import("./t-digest/MIN"); + min: typeof import("./t-digest/MIN"); + QUANTILE: typeof import("./t-digest/QUANTILE"); + quantile: typeof import("./t-digest/QUANTILE"); + RANK: typeof import("./t-digest/RANK"); + rank: typeof import("./t-digest/RANK"); + RESET: typeof import("./t-digest/RESET"); + reset: typeof import("./t-digest/RESET"); + REVRANK: typeof import("./t-digest/REVRANK"); + revRank: typeof import("./t-digest/REVRANK"); + TRIMMED_MEAN: typeof import("./t-digest/TRIMMED_MEAN"); + trimmedMean: typeof import("./t-digest/TRIMMED_MEAN"); + }; + topK: { + ADD: typeof import("./top-k/ADD"); + add: typeof import("./top-k/ADD"); + 
COUNT: typeof import("./top-k/COUNT"); + count: typeof import("./top-k/COUNT"); + INCRBY: typeof import("./top-k/INCRBY"); + incrBy: typeof import("./top-k/INCRBY"); + INFO: typeof import("./top-k/INFO"); + info: typeof import("./top-k/INFO"); + LIST_WITHCOUNT: typeof import("./top-k/LIST_WITHCOUNT"); + listWithCount: typeof import("./top-k/LIST_WITHCOUNT"); + LIST: typeof import("./top-k/LIST"); + list: typeof import("./top-k/LIST"); + QUERY: typeof import("./top-k/QUERY"); + query: typeof import("./top-k/QUERY"); + RESERVE: typeof import("./top-k/RESERVE"); + reserve: typeof import("./top-k/RESERVE"); + }; +}; +export default _default; diff --git a/node_modules/@redis/bloom/dist/commands/index.js b/node_modules/@redis/bloom/dist/commands/index.js new file mode 100644 index 0000000..c838edf --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/index.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const bloom_1 = require("./bloom"); +const count_min_sketch_1 = require("./count-min-sketch"); +const cuckoo_1 = require("./cuckoo"); +const t_digest_1 = require("./t-digest"); +const top_k_1 = require("./top-k"); +exports.default = { + bf: bloom_1.default, + cms: count_min_sketch_1.default, + cf: cuckoo_1.default, + tDigest: t_digest_1.default, + topK: top_k_1.default +}; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/ADD.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/ADD.d.ts new file mode 100644 index 0000000..19d0946 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/ADD.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, values: Array): RedisCommandArguments; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/ADD.js 
b/node_modules/@redis/bloom/dist/commands/t-digest/ADD.js new file mode 100644 index 0000000..d31bc9b --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/ADD.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, values) { + const args = ['TDIGEST.ADD', key]; + for (const item of values) { + args.push(item.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/BYRANK.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/BYRANK.d.ts new file mode 100644 index 0000000..c27a10f --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/BYRANK.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, ranks: Array): RedisCommandArguments; +export { transformDoublesReply as transformReply } from '.'; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/BYRANK.js b/node_modules/@redis/bloom/dist/commands/t-digest/BYRANK.js new file mode 100644 index 0000000..84a5069 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/BYRANK.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, ranks) { + const args = ['TDIGEST.BYRANK', key]; + for (const rank of ranks) { + args.push(rank.toString()); + } + return args; +} +exports.transformArguments = transformArguments; +var _1 = require("."); 
+Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _1.transformDoublesReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/BYREVRANK.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/BYREVRANK.d.ts new file mode 100644 index 0000000..c27a10f --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/BYREVRANK.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, ranks: Array): RedisCommandArguments; +export { transformDoublesReply as transformReply } from '.'; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/BYREVRANK.js b/node_modules/@redis/bloom/dist/commands/t-digest/BYREVRANK.js new file mode 100644 index 0000000..b886843 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/BYREVRANK.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, ranks) { + const args = ['TDIGEST.BYREVRANK', key]; + for (const rank of ranks) { + args.push(rank.toString()); + } + return args; +} +exports.transformArguments = transformArguments; +var _1 = require("."); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _1.transformDoublesReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/CDF.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/CDF.d.ts new file mode 100644 index 0000000..8652fde --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/CDF.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } 
from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, values: Array): RedisCommandArguments; +export { transformDoublesReply as transformReply } from '.'; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/CDF.js b/node_modules/@redis/bloom/dist/commands/t-digest/CDF.js new file mode 100644 index 0000000..6a64eeb --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/CDF.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, values) { + const args = ['TDIGEST.CDF', key]; + for (const item of values) { + args.push(item.toString()); + } + return args; +} +exports.transformArguments = transformArguments; +var _1 = require("."); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _1.transformDoublesReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/CREATE.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/CREATE.d.ts new file mode 100644 index 0000000..9a62bfc --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/CREATE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { CompressionOption } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, options?: CompressionOption): RedisCommandArguments; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/CREATE.js b/node_modules/@redis/bloom/dist/commands/t-digest/CREATE.js new file mode 100644 index 0000000..2fd980b --- /dev/null 
+++ b/node_modules/@redis/bloom/dist/commands/t-digest/CREATE.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, options) { + return (0, _1.pushCompressionArgument)(['TDIGEST.CREATE', key], options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/INFO.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/INFO.d.ts new file mode 100644 index 0000000..d2d9c5c --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/INFO.d.ts @@ -0,0 +1,31 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +type InfoRawReply = [ + 'Compression', + number, + 'Capacity', + number, + 'Merged nodes', + number, + 'Unmerged nodes', + number, + 'Merged weight', + string, + 'Unmerged weight', + string, + 'Total compressions', + number +]; +interface InfoReply { + comperssion: number; + capacity: number; + mergedNodes: number; + unmergedNodes: number; + mergedWeight: number; + unmergedWeight: number; + totalCompression: number; +} +export declare function transformReply(reply: InfoRawReply): InfoReply; +export {}; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/INFO.js b/node_modules/@redis/bloom/dist/commands/t-digest/INFO.js new file mode 100644 index 0000000..97ea9fc --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/INFO.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 
1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return [ + 'TDIGEST.INFO', + key + ]; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + comperssion: reply[1], + capacity: reply[3], + mergedNodes: reply[5], + unmergedNodes: reply[7], + mergedWeight: Number(reply[9]), + unmergedWeight: Number(reply[11]), + totalCompression: reply[13] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/MAX.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/MAX.d.ts new file mode 100644 index 0000000..24c6f01 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/MAX.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export { transformDoubleReply as transformReply } from '.'; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/MAX.js b/node_modules/@redis/bloom/dist/commands/t-digest/MAX.js new file mode 100644 index 0000000..80d2af4 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/MAX.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return [ + 'TDIGEST.MAX', + key + ]; +} +exports.transformArguments = transformArguments; +var _1 = require("."); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _1.transformDoubleReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/MERGE.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/MERGE.d.ts new 
file mode 100644 index 0000000..a5007ce --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/MERGE.d.ts @@ -0,0 +1,9 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { CompressionOption } from '.'; +export declare const FIRST_KEY_INDEX = 1; +interface MergeOptions extends CompressionOption { + OVERRIDE?: boolean; +} +export declare function transformArguments(destKey: RedisCommandArgument, srcKeys: RedisCommandArgument | Array, options?: MergeOptions): RedisCommandArguments; +export declare function transformReply(): 'OK'; +export {}; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/MERGE.js b/node_modules/@redis/bloom/dist/commands/t-digest/MERGE.js new file mode 100644 index 0000000..8e65759 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/MERGE.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(destKey, srcKeys, options) { + const args = (0, generic_transformers_1.pushVerdictArgument)(['TDIGEST.MERGE', destKey], srcKeys); + (0, _1.pushCompressionArgument)(args, options); + if (options?.OVERRIDE) { + args.push('OVERRIDE'); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/MIN.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/MIN.d.ts new file mode 100644 index 0000000..24c6f01 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/MIN.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function 
transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export { transformDoubleReply as transformReply } from '.'; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/MIN.js b/node_modules/@redis/bloom/dist/commands/t-digest/MIN.js new file mode 100644 index 0000000..3d8e234 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/MIN.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return [ + 'TDIGEST.MIN', + key + ]; +} +exports.transformArguments = transformArguments; +var _1 = require("."); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _1.transformDoubleReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/QUANTILE.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/QUANTILE.d.ts new file mode 100644 index 0000000..b684559 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/QUANTILE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, quantiles: Array): RedisCommandArguments; +export { transformDoublesReply as transformReply } from '.'; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/QUANTILE.js b/node_modules/@redis/bloom/dist/commands/t-digest/QUANTILE.js new file mode 100644 index 0000000..cd106ab --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/QUANTILE.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY 
= exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, quantiles) { + const args = [ + 'TDIGEST.QUANTILE', + key + ]; + for (const quantile of quantiles) { + args.push(quantile.toString()); + } + return args; +} +exports.transformArguments = transformArguments; +var _1 = require("."); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _1.transformDoublesReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/RANK.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/RANK.d.ts new file mode 100644 index 0000000..09eef2d --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/RANK.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, values: Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/RANK.js b/node_modules/@redis/bloom/dist/commands/t-digest/RANK.js new file mode 100644 index 0000000..0edb7bf --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/RANK.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, values) { + const args = ['TDIGEST.RANK', key]; + for (const item of values) { + args.push(item.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/RESET.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/RESET.d.ts new file mode 100644 index 0000000..710975b --- 
/dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/RESET.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/RESET.js b/node_modules/@redis/bloom/dist/commands/t-digest/RESET.js new file mode 100644 index 0000000..b2b2940 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/RESET.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['TDIGEST.RESET', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/REVRANK.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/REVRANK.d.ts new file mode 100644 index 0000000..09eef2d --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/REVRANK.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, values: Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/REVRANK.js b/node_modules/@redis/bloom/dist/commands/t-digest/REVRANK.js new file mode 100644 index 0000000..a97d27a --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/REVRANK.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = 
void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, values) { + const args = ['TDIGEST.REVRANK', key]; + for (const item of values) { + args.push(item.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/TRIMMED_MEAN.d.ts b/node_modules/@redis/bloom/dist/commands/t-digest/TRIMMED_MEAN.d.ts new file mode 100644 index 0000000..db7707b --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/TRIMMED_MEAN.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, lowCutPercentile: number, highCutPercentile: number): RedisCommandArguments; +export { transformDoubleReply as transformReply } from '.'; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/TRIMMED_MEAN.js b/node_modules/@redis/bloom/dist/commands/t-digest/TRIMMED_MEAN.js new file mode 100644 index 0000000..ee46eda --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/TRIMMED_MEAN.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, lowCutPercentile, highCutPercentile) { + return [ + 'TDIGEST.TRIMMED_MEAN', + key, + lowCutPercentile.toString(), + highCutPercentile.toString() + ]; +} +exports.transformArguments = transformArguments; +var _1 = require("."); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _1.transformDoubleReply; } }); diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/index.d.ts 
b/node_modules/@redis/bloom/dist/commands/t-digest/index.d.ts new file mode 100644 index 0000000..67a8ea7 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/index.d.ts @@ -0,0 +1,52 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import * as ADD from './ADD'; +import * as BYRANK from './BYRANK'; +import * as BYREVRANK from './BYREVRANK'; +import * as CDF from './CDF'; +import * as CREATE from './CREATE'; +import * as INFO from './INFO'; +import * as MAX from './MAX'; +import * as MERGE from './MERGE'; +import * as MIN from './MIN'; +import * as QUANTILE from './QUANTILE'; +import * as RANK from './RANK'; +import * as RESET from './RESET'; +import * as REVRANK from './REVRANK'; +import * as TRIMMED_MEAN from './TRIMMED_MEAN'; +declare const _default: { + ADD: typeof ADD; + add: typeof ADD; + BYRANK: typeof BYRANK; + byRank: typeof BYRANK; + BYREVRANK: typeof BYREVRANK; + byRevRank: typeof BYREVRANK; + CDF: typeof CDF; + cdf: typeof CDF; + CREATE: typeof CREATE; + create: typeof CREATE; + INFO: typeof INFO; + info: typeof INFO; + MAX: typeof MAX; + max: typeof MAX; + MERGE: typeof MERGE; + merge: typeof MERGE; + MIN: typeof MIN; + min: typeof MIN; + QUANTILE: typeof QUANTILE; + quantile: typeof QUANTILE; + RANK: typeof RANK; + rank: typeof RANK; + RESET: typeof RESET; + reset: typeof RESET; + REVRANK: typeof REVRANK; + revRank: typeof REVRANK; + TRIMMED_MEAN: typeof TRIMMED_MEAN; + trimmedMean: typeof TRIMMED_MEAN; +}; +export default _default; +export interface CompressionOption { + COMPRESSION?: number; +} +export declare function pushCompressionArgument(args: RedisCommandArguments, options?: CompressionOption): RedisCommandArguments; +export declare function transformDoubleReply(reply: string): number; +export declare function transformDoublesReply(reply: Array): Array; diff --git a/node_modules/@redis/bloom/dist/commands/t-digest/index.js b/node_modules/@redis/bloom/dist/commands/t-digest/index.js new file mode 
100644 index 0000000..862b27b --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/t-digest/index.js @@ -0,0 +1,71 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformDoublesReply = exports.transformDoubleReply = exports.pushCompressionArgument = void 0; +const ADD = require("./ADD"); +const BYRANK = require("./BYRANK"); +const BYREVRANK = require("./BYREVRANK"); +const CDF = require("./CDF"); +const CREATE = require("./CREATE"); +const INFO = require("./INFO"); +const MAX = require("./MAX"); +const MERGE = require("./MERGE"); +const MIN = require("./MIN"); +const QUANTILE = require("./QUANTILE"); +const RANK = require("./RANK"); +const RESET = require("./RESET"); +const REVRANK = require("./REVRANK"); +const TRIMMED_MEAN = require("./TRIMMED_MEAN"); +exports.default = { + ADD, + add: ADD, + BYRANK, + byRank: BYRANK, + BYREVRANK, + byRevRank: BYREVRANK, + CDF, + cdf: CDF, + CREATE, + create: CREATE, + INFO, + info: INFO, + MAX, + max: MAX, + MERGE, + merge: MERGE, + MIN, + min: MIN, + QUANTILE, + quantile: QUANTILE, + RANK, + rank: RANK, + RESET, + reset: RESET, + REVRANK, + revRank: REVRANK, + TRIMMED_MEAN, + trimmedMean: TRIMMED_MEAN +}; +function pushCompressionArgument(args, options) { + if (options?.COMPRESSION) { + args.push('COMPRESSION', options.COMPRESSION.toString()); + } + return args; +} +exports.pushCompressionArgument = pushCompressionArgument; +function transformDoubleReply(reply) { + switch (reply) { + case 'inf': + return Infinity; + case '-inf': + return -Infinity; + case 'nan': + return NaN; + default: + return parseFloat(reply); + } +} +exports.transformDoubleReply = transformDoubleReply; +function transformDoublesReply(reply) { + return reply.map(transformDoubleReply); +} +exports.transformDoublesReply = transformDoublesReply; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/ADD.d.ts b/node_modules/@redis/bloom/dist/commands/top-k/ADD.d.ts new file mode 100644 index 
0000000..f147189 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/ADD.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, items: string | Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/ADD.js b/node_modules/@redis/bloom/dist/commands/top-k/ADD.js new file mode 100644 index 0000000..794877e --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/ADD.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, items) { + return (0, generic_transformers_1.pushVerdictArguments)(['TOPK.ADD', key], items); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/COUNT.d.ts b/node_modules/@redis/bloom/dist/commands/top-k/COUNT.d.ts new file mode 100644 index 0000000..e419fe6 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/COUNT.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, items: string | Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/COUNT.js b/node_modules/@redis/bloom/dist/commands/top-k/COUNT.js new file mode 100644 index 0000000..38668d4 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/COUNT.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true 
}); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, items) { + return (0, generic_transformers_1.pushVerdictArguments)(['TOPK.COUNT', key], items); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/INCRBY.d.ts b/node_modules/@redis/bloom/dist/commands/top-k/INCRBY.d.ts new file mode 100644 index 0000000..dd04e05 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/INCRBY.d.ts @@ -0,0 +1,8 @@ +export declare const FIRST_KEY_INDEX = 1; +interface IncrByItem { + item: string; + incrementBy: number; +} +export declare function transformArguments(key: string, items: IncrByItem | Array): Array; +export declare function transformReply(): Array; +export {}; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/INCRBY.js b/node_modules/@redis/bloom/dist/commands/top-k/INCRBY.js new file mode 100644 index 0000000..ecee27b --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/INCRBY.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, items) { + const args = ['TOPK.INCRBY', key]; + if (Array.isArray(items)) { + for (const item of items) { + pushIncrByItem(args, item); + } + } + else { + pushIncrByItem(args, items); + } + return args; +} +exports.transformArguments = transformArguments; +function pushIncrByItem(args, { item, incrementBy }) { + args.push(item, incrementBy.toString()); +} diff --git a/node_modules/@redis/bloom/dist/commands/top-k/INFO.d.ts b/node_modules/@redis/bloom/dist/commands/top-k/INFO.d.ts new file mode 100644 index 0000000..5758a0e --- /dev/null +++ 
b/node_modules/@redis/bloom/dist/commands/top-k/INFO.d.ts @@ -0,0 +1,20 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string): Array; +export type InfoRawReply = [ + _: string, + k: number, + _: string, + width: number, + _: string, + depth: number, + _: string, + decay: string +]; +export interface InfoReply { + k: number; + width: number; + depth: number; + decay: number; +} +export declare function transformReply(reply: InfoRawReply): InfoReply; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/INFO.js b/node_modules/@redis/bloom/dist/commands/top-k/INFO.js new file mode 100644 index 0000000..244eb77 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/INFO.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['TOPK.INFO', key]; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + k: reply[1], + width: reply[3], + depth: reply[5], + decay: Number(reply[7]) + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/LIST.d.ts b/node_modules/@redis/bloom/dist/commands/top-k/LIST.d.ts new file mode 100644 index 0000000..473a30b --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/LIST.d.ts @@ -0,0 +1,4 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string): Array; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/LIST.js b/node_modules/@redis/bloom/dist/commands/top-k/LIST.js new file mode 100644 index 0000000..0a7728d --- /dev/null +++ 
b/node_modules/@redis/bloom/dist/commands/top-k/LIST.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['TOPK.LIST', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/LIST_WITHCOUNT.d.ts b/node_modules/@redis/bloom/dist/commands/top-k/LIST_WITHCOUNT.d.ts new file mode 100644 index 0000000..1307f15 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/LIST_WITHCOUNT.d.ts @@ -0,0 +1,10 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string): Array; +type ListWithCountRawReply = Array; +type ListWithCountReply = Array<{ + item: string; + count: number; +}>; +export declare function transformReply(rawReply: ListWithCountRawReply): ListWithCountReply; +export {}; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/LIST_WITHCOUNT.js b/node_modules/@redis/bloom/dist/commands/top-k/LIST_WITHCOUNT.js new file mode 100644 index 0000000..95736d0 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/LIST_WITHCOUNT.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['TOPK.LIST', key, 'WITHCOUNT']; +} +exports.transformArguments = transformArguments; +function transformReply(rawReply) { + const reply = []; + for (let i = 0; i < rawReply.length; i++) { + reply.push({ + item: rawReply[i], + count: rawReply[++i] + }); + } + return reply; +} +exports.transformReply = transformReply; diff --git 
a/node_modules/@redis/bloom/dist/commands/top-k/QUERY.d.ts b/node_modules/@redis/bloom/dist/commands/top-k/QUERY.d.ts new file mode 100644 index 0000000..e419fe6 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/QUERY.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, items: string | Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/QUERY.js b/node_modules/@redis/bloom/dist/commands/top-k/QUERY.js new file mode 100644 index 0000000..f24c075 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/QUERY.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, items) { + return (0, generic_transformers_1.pushVerdictArguments)(['TOPK.QUERY', key], items); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/RESERVE.d.ts b/node_modules/@redis/bloom/dist/commands/top-k/RESERVE.d.ts new file mode 100644 index 0000000..2662d21 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/RESERVE.d.ts @@ -0,0 +1,10 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +interface ReserveOptions { + width: number; + depth: number; + decay: number; +} +export declare function transformArguments(key: string, topK: number, options?: ReserveOptions): Array; +export declare function transformReply(): 'OK'; +export {}; diff --git 
a/node_modules/@redis/bloom/dist/commands/top-k/RESERVE.js b/node_modules/@redis/bloom/dist/commands/top-k/RESERVE.js new file mode 100644 index 0000000..ca3f9c9 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/RESERVE.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, topK, options) { + const args = ['TOPK.RESERVE', key, topK.toString()]; + if (options) { + args.push(options.width.toString(), options.depth.toString(), options.decay.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/index.d.ts b/node_modules/@redis/bloom/dist/commands/top-k/index.d.ts new file mode 100644 index 0000000..31d20b1 --- /dev/null +++ b/node_modules/@redis/bloom/dist/commands/top-k/index.d.ts @@ -0,0 +1,27 @@ +import * as ADD from './ADD'; +import * as COUNT from './COUNT'; +import * as INCRBY from './INCRBY'; +import * as INFO from './INFO'; +import * as LIST_WITHCOUNT from './LIST_WITHCOUNT'; +import * as LIST from './LIST'; +import * as QUERY from './QUERY'; +import * as RESERVE from './RESERVE'; +declare const _default: { + ADD: typeof ADD; + add: typeof ADD; + COUNT: typeof COUNT; + count: typeof COUNT; + INCRBY: typeof INCRBY; + incrBy: typeof INCRBY; + INFO: typeof INFO; + info: typeof INFO; + LIST_WITHCOUNT: typeof LIST_WITHCOUNT; + listWithCount: typeof LIST_WITHCOUNT; + LIST: typeof LIST; + list: typeof LIST; + QUERY: typeof QUERY; + query: typeof QUERY; + RESERVE: typeof RESERVE; + reserve: typeof RESERVE; +}; +export default _default; diff --git a/node_modules/@redis/bloom/dist/commands/top-k/index.js b/node_modules/@redis/bloom/dist/commands/top-k/index.js new file mode 100644 index 0000000..648cea5 --- /dev/null +++ 
b/node_modules/@redis/bloom/dist/commands/top-k/index.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const ADD = require("./ADD"); +const COUNT = require("./COUNT"); +const INCRBY = require("./INCRBY"); +const INFO = require("./INFO"); +const LIST_WITHCOUNT = require("./LIST_WITHCOUNT"); +const LIST = require("./LIST"); +const QUERY = require("./QUERY"); +const RESERVE = require("./RESERVE"); +exports.default = { + ADD, + add: ADD, + COUNT, + count: COUNT, + INCRBY, + incrBy: INCRBY, + INFO, + info: INFO, + LIST_WITHCOUNT, + listWithCount: LIST_WITHCOUNT, + LIST, + list: LIST, + QUERY, + query: QUERY, + RESERVE, + reserve: RESERVE +}; diff --git a/node_modules/@redis/bloom/dist/index.d.ts b/node_modules/@redis/bloom/dist/index.d.ts new file mode 100644 index 0000000..bc0e103 --- /dev/null +++ b/node_modules/@redis/bloom/dist/index.d.ts @@ -0,0 +1 @@ +export { default } from './commands'; diff --git a/node_modules/@redis/bloom/dist/index.js b/node_modules/@redis/bloom/dist/index.js new file mode 100644 index 0000000..992c58d --- /dev/null +++ b/node_modules/@redis/bloom/dist/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = void 0; +var commands_1 = require("./commands"); +Object.defineProperty(exports, "default", { enumerable: true, get: function () { return commands_1.default; } }); diff --git a/node_modules/@redis/bloom/package.json b/node_modules/@redis/bloom/package.json new file mode 100644 index 0000000..d6d5e47 --- /dev/null +++ b/node_modules/@redis/bloom/package.json @@ -0,0 +1,29 @@ +{ + "name": "@redis/bloom", + "version": "1.2.0", + "license": "MIT", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/" + ], + "scripts": { + "test": "nyc -r text-summary -r lcov mocha -r source-map-support/register -r ts-node/register './lib/**/*.spec.ts'", + "build": "tsc", + "documentation": "typedoc" + }, + 
"peerDependencies": { + "@redis/client": "^1.0.0" + }, + "devDependencies": { + "@istanbuljs/nyc-config-typescript": "^1.0.2", + "@redis/test-utils": "*", + "@types/node": "^18.11.18", + "nyc": "^15.1.0", + "release-it": "^15.6.0", + "source-map-support": "^0.5.21", + "ts-node": "^10.9.1", + "typedoc": "^0.23.24", + "typescript": "^4.9.4" + } +} diff --git a/node_modules/@redis/client/README.md b/node_modules/@redis/client/README.md new file mode 100644 index 0000000..4b5d150 --- /dev/null +++ b/node_modules/@redis/client/README.md @@ -0,0 +1,3 @@ +# @redis/client + +The source code and documentation for this package are in the main [node-redis](https://github.com/redis/node-redis) repo. diff --git a/node_modules/@redis/client/dist/index.d.ts b/node_modules/@redis/client/dist/index.d.ts new file mode 100644 index 0000000..de56486 --- /dev/null +++ b/node_modules/@redis/client/dist/index.d.ts @@ -0,0 +1,13 @@ +import RedisClient from './lib/client'; +import RedisCluster from './lib/cluster'; +export { RedisClientType, RedisClientOptions } from './lib/client'; +export { RedisModules, RedisFunctions, RedisScripts } from './lib/commands'; +export declare const createClient: typeof RedisClient.create; +export declare const commandOptions: typeof RedisClient.commandOptions; +export { RedisClusterType, RedisClusterOptions } from './lib/cluster'; +export declare const createCluster: typeof RedisCluster.create; +export { defineScript } from './lib/lua-script'; +export * from './lib/errors'; +export { GeoReplyWith } from './lib/commands/generic-transformers'; +export { SetOptions } from './lib/commands/SET'; +export { RedisFlushModes } from './lib/commands/FLUSHALL'; diff --git a/node_modules/@redis/client/dist/index.js b/node_modules/@redis/client/dist/index.js new file mode 100644 index 0000000..fe818c3 --- /dev/null +++ b/node_modules/@redis/client/dist/index.js @@ -0,0 +1,29 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.RedisFlushModes = exports.GeoReplyWith = exports.defineScript = exports.createCluster = exports.commandOptions = exports.createClient = void 0; +const client_1 = require("./lib/client"); +const cluster_1 = require("./lib/cluster"); +exports.createClient = client_1.default.create; +exports.commandOptions = client_1.default.commandOptions; +exports.createCluster = cluster_1.default.create; +var lua_script_1 = require("./lib/lua-script"); +Object.defineProperty(exports, "defineScript", { enumerable: true, get: function () { return lua_script_1.defineScript; } }); +__exportStar(require("./lib/errors"), exports); +var generic_transformers_1 = require("./lib/commands/generic-transformers"); +Object.defineProperty(exports, "GeoReplyWith", { enumerable: true, get: function () { return generic_transformers_1.GeoReplyWith; } }); +var FLUSHALL_1 = require("./lib/commands/FLUSHALL"); +Object.defineProperty(exports, "RedisFlushModes", { enumerable: true, get: function () { return FLUSHALL_1.RedisFlushModes; } }); diff --git a/node_modules/@redis/client/dist/lib/client/RESP2/composers/buffer.d.ts b/node_modules/@redis/client/dist/lib/client/RESP2/composers/buffer.d.ts new file mode 100644 index 0000000..e3978ae --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/RESP2/composers/buffer.d.ts @@ -0,0 +1,8 @@ +/// 
+import { Composer } from './interface'; +export default class BufferComposer implements Composer { + private chunks; + write(buffer: Buffer): void; + end(buffer: Buffer): Buffer; + reset(): void; +} diff --git a/node_modules/@redis/client/dist/lib/client/RESP2/composers/buffer.js b/node_modules/@redis/client/dist/lib/client/RESP2/composers/buffer.js new file mode 100644 index 0000000..3b3347b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/RESP2/composers/buffer.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +class BufferComposer { + constructor() { + Object.defineProperty(this, "chunks", { + enumerable: true, + configurable: true, + writable: true, + value: [] + }); + } + write(buffer) { + this.chunks.push(buffer); + } + end(buffer) { + this.write(buffer); + return Buffer.concat(this.chunks.splice(0)); + } + reset() { + this.chunks = []; + } +} +exports.default = BufferComposer; diff --git a/node_modules/@redis/client/dist/lib/client/RESP2/composers/interface.d.ts b/node_modules/@redis/client/dist/lib/client/RESP2/composers/interface.d.ts new file mode 100644 index 0000000..165cee3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/RESP2/composers/interface.d.ts @@ -0,0 +1,6 @@ +/// +export interface Composer { + write(buffer: Buffer): void; + end(buffer: Buffer): T; + reset(): void; +} diff --git a/node_modules/@redis/client/dist/lib/client/RESP2/composers/interface.js b/node_modules/@redis/client/dist/lib/client/RESP2/composers/interface.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/RESP2/composers/interface.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@redis/client/dist/lib/client/RESP2/composers/string.d.ts b/node_modules/@redis/client/dist/lib/client/RESP2/composers/string.d.ts new file mode 100644 index 0000000..9d7e87a --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/client/RESP2/composers/string.d.ts @@ -0,0 +1,9 @@ +/// +import { Composer } from './interface'; +export default class StringComposer implements Composer { + private decoder; + private string; + write(buffer: Buffer): void; + end(buffer: Buffer): string; + reset(): void; +} diff --git a/node_modules/@redis/client/dist/lib/client/RESP2/composers/string.js b/node_modules/@redis/client/dist/lib/client/RESP2/composers/string.js new file mode 100644 index 0000000..4be78e1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/RESP2/composers/string.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const string_decoder_1 = require("string_decoder"); +class StringComposer { + constructor() { + Object.defineProperty(this, "decoder", { + enumerable: true, + configurable: true, + writable: true, + value: new string_decoder_1.StringDecoder() + }); + Object.defineProperty(this, "string", { + enumerable: true, + configurable: true, + writable: true, + value: '' + }); + } + write(buffer) { + this.string += this.decoder.write(buffer); + } + end(buffer) { + const string = this.string + this.decoder.end(buffer); + this.string = ''; + return string; + } + reset() { + this.string = ''; + } +} +exports.default = StringComposer; diff --git a/node_modules/@redis/client/dist/lib/client/RESP2/decoder.d.ts b/node_modules/@redis/client/dist/lib/client/RESP2/decoder.d.ts new file mode 100644 index 0000000..0fc94cf --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/RESP2/decoder.d.ts @@ -0,0 +1,35 @@ +/// +import { ErrorReply } from '../../errors'; +export type Reply = string | Buffer | ErrorReply | number | null | Array; +export type ReturnStringsAsBuffers = () => boolean; +interface RESP2Options { + returnStringsAsBuffers: ReturnStringsAsBuffers; + onReply(reply: Reply): unknown; +} +export default class RESP2Decoder { + private options; + constructor(options: RESP2Options); + private 
cursor; + private type?; + private bufferComposer; + private stringComposer; + private currentStringComposer; + reset(): void; + write(chunk: Buffer): void; + private parseType; + private compose; + private parseSimpleString; + private parseError; + private integer; + private isNegativeInteger?; + private parseInteger; + private bulkStringRemainingLength?; + private parseBulkString; + private arraysInProcess; + private initializeArray; + private arrayItemType?; + private parseArray; + private returnArrayReply; + private pushArrayItem; +} +export {}; diff --git a/node_modules/@redis/client/dist/lib/client/RESP2/decoder.js b/node_modules/@redis/client/dist/lib/client/RESP2/decoder.js new file mode 100644 index 0000000..0f62fc8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/RESP2/decoder.js @@ -0,0 +1,250 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const errors_1 = require("../../errors"); +const buffer_1 = require("./composers/buffer"); +const string_1 = require("./composers/string"); +// RESP2 specification +// https://redis.io/topics/protocol +var Types; +(function (Types) { + Types[Types["SIMPLE_STRING"] = 43] = "SIMPLE_STRING"; + Types[Types["ERROR"] = 45] = "ERROR"; + Types[Types["INTEGER"] = 58] = "INTEGER"; + Types[Types["BULK_STRING"] = 36] = "BULK_STRING"; + Types[Types["ARRAY"] = 42] = "ARRAY"; // * +})(Types || (Types = {})); +var ASCII; +(function (ASCII) { + ASCII[ASCII["CR"] = 13] = "CR"; + ASCII[ASCII["ZERO"] = 48] = "ZERO"; + ASCII[ASCII["MINUS"] = 45] = "MINUS"; +})(ASCII || (ASCII = {})); +// Using TypeScript `private` and not the build-in `#` to avoid __classPrivateFieldGet and __classPrivateFieldSet +class RESP2Decoder { + constructor(options) { + Object.defineProperty(this, "options", { + enumerable: true, + configurable: true, + writable: true, + value: options + }); + Object.defineProperty(this, "cursor", { + enumerable: true, + configurable: true, + writable: true, + value: 0 + }); + 
Object.defineProperty(this, "type", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "bufferComposer", { + enumerable: true, + configurable: true, + writable: true, + value: new buffer_1.default() + }); + Object.defineProperty(this, "stringComposer", { + enumerable: true, + configurable: true, + writable: true, + value: new string_1.default() + }); + Object.defineProperty(this, "currentStringComposer", { + enumerable: true, + configurable: true, + writable: true, + value: this.stringComposer + }); + Object.defineProperty(this, "integer", { + enumerable: true, + configurable: true, + writable: true, + value: 0 + }); + Object.defineProperty(this, "isNegativeInteger", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "bulkStringRemainingLength", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "arraysInProcess", { + enumerable: true, + configurable: true, + writable: true, + value: [] + }); + Object.defineProperty(this, "initializeArray", { + enumerable: true, + configurable: true, + writable: true, + value: false + }); + Object.defineProperty(this, "arrayItemType", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + } + reset() { + this.cursor = 0; + this.type = undefined; + this.bufferComposer.reset(); + this.stringComposer.reset(); + this.currentStringComposer = this.stringComposer; + } + write(chunk) { + while (this.cursor < chunk.length) { + if (!this.type) { + this.currentStringComposer = this.options.returnStringsAsBuffers() ? 
+ this.bufferComposer : + this.stringComposer; + this.type = chunk[this.cursor]; + if (++this.cursor >= chunk.length) + break; + } + const reply = this.parseType(chunk, this.type); + if (reply === undefined) + break; + this.type = undefined; + this.options.onReply(reply); + } + this.cursor -= chunk.length; + } + parseType(chunk, type, arraysToKeep) { + switch (type) { + case Types.SIMPLE_STRING: + return this.parseSimpleString(chunk); + case Types.ERROR: + return this.parseError(chunk); + case Types.INTEGER: + return this.parseInteger(chunk); + case Types.BULK_STRING: + return this.parseBulkString(chunk); + case Types.ARRAY: + return this.parseArray(chunk, arraysToKeep); + } + } + compose(chunk, composer) { + for (let i = this.cursor; i < chunk.length; i++) { + if (chunk[i] === ASCII.CR) { + const reply = composer.end(chunk.subarray(this.cursor, i)); + this.cursor = i + 2; + return reply; + } + } + const toWrite = chunk.subarray(this.cursor); + composer.write(toWrite); + this.cursor = chunk.length; + } + parseSimpleString(chunk) { + return this.compose(chunk, this.currentStringComposer); + } + parseError(chunk) { + const message = this.compose(chunk, this.stringComposer); + if (message !== undefined) { + return new errors_1.ErrorReply(message); + } + } + parseInteger(chunk) { + if (this.isNegativeInteger === undefined) { + this.isNegativeInteger = chunk[this.cursor] === ASCII.MINUS; + if (this.isNegativeInteger && ++this.cursor === chunk.length) + return; + } + do { + const byte = chunk[this.cursor]; + if (byte === ASCII.CR) { + const integer = this.isNegativeInteger ? 
-this.integer : this.integer; + this.integer = 0; + this.isNegativeInteger = undefined; + this.cursor += 2; + return integer; + } + this.integer = this.integer * 10 + byte - ASCII.ZERO; + } while (++this.cursor < chunk.length); + } + parseBulkString(chunk) { + if (this.bulkStringRemainingLength === undefined) { + const length = this.parseInteger(chunk); + if (length === undefined) + return; + if (length === -1) + return null; + this.bulkStringRemainingLength = length; + if (this.cursor >= chunk.length) + return; + } + const end = this.cursor + this.bulkStringRemainingLength; + if (chunk.length >= end) { + const reply = this.currentStringComposer.end(chunk.subarray(this.cursor, end)); + this.bulkStringRemainingLength = undefined; + this.cursor = end + 2; + return reply; + } + const toWrite = chunk.subarray(this.cursor); + this.currentStringComposer.write(toWrite); + this.bulkStringRemainingLength -= toWrite.length; + this.cursor = chunk.length; + } + parseArray(chunk, arraysToKeep = 0) { + if (this.initializeArray || this.arraysInProcess.length === arraysToKeep) { + const length = this.parseInteger(chunk); + if (length === undefined) { + this.initializeArray = true; + return undefined; + } + this.initializeArray = false; + this.arrayItemType = undefined; + if (length === -1) { + return this.returnArrayReply(null, arraysToKeep, chunk); + } + else if (length === 0) { + return this.returnArrayReply([], arraysToKeep, chunk); + } + this.arraysInProcess.push({ + array: new Array(length), + pushCounter: 0 + }); + } + while (this.cursor < chunk.length) { + if (!this.arrayItemType) { + this.arrayItemType = chunk[this.cursor]; + if (++this.cursor >= chunk.length) + break; + } + const item = this.parseType(chunk, this.arrayItemType, arraysToKeep + 1); + if (item === undefined) + break; + this.arrayItemType = undefined; + const reply = this.pushArrayItem(item, arraysToKeep); + if (reply !== undefined) + return reply; + } + } + returnArrayReply(reply, arraysToKeep, chunk) { + if 
(this.arraysInProcess.length <= arraysToKeep) + return reply; + return this.pushArrayItem(reply, arraysToKeep, chunk); + } + pushArrayItem(item, arraysToKeep, chunk) { + const to = this.arraysInProcess[this.arraysInProcess.length - 1]; + to.array[to.pushCounter] = item; + if (++to.pushCounter === to.array.length) { + return this.returnArrayReply(this.arraysInProcess.pop().array, arraysToKeep, chunk); + } + else if (chunk && chunk.length > this.cursor) { + return this.parseArray(chunk, arraysToKeep); + } + } +} +exports.default = RESP2Decoder; diff --git a/node_modules/@redis/client/dist/lib/client/RESP2/encoder.d.ts b/node_modules/@redis/client/dist/lib/client/RESP2/encoder.d.ts new file mode 100644 index 0000000..ea0a62b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/RESP2/encoder.d.ts @@ -0,0 +1,2 @@ +import { RedisCommandArgument, RedisCommandArguments } from '../../commands'; +export default function encodeCommand(args: RedisCommandArguments): Array; diff --git a/node_modules/@redis/client/dist/lib/client/RESP2/encoder.js b/node_modules/@redis/client/dist/lib/client/RESP2/encoder.js new file mode 100644 index 0000000..ac713de --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/RESP2/encoder.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const CRLF = '\r\n'; +function encodeCommand(args) { + const toWrite = []; + let strings = '*' + args.length + CRLF; + for (let i = 0; i < args.length; i++) { + const arg = args[i]; + if (typeof arg === 'string') { + strings += '$' + Buffer.byteLength(arg) + CRLF + arg + CRLF; + } + else if (arg instanceof Buffer) { + toWrite.push(strings + '$' + arg.length.toString() + CRLF, arg); + strings = CRLF; + } + else { + throw new TypeError('Invalid argument type'); + } + } + toWrite.push(strings); + return toWrite; +} +exports.default = encodeCommand; diff --git a/node_modules/@redis/client/dist/lib/client/commands-queue.d.ts 
b/node_modules/@redis/client/dist/lib/client/commands-queue.d.ts new file mode 100644 index 0000000..25fa7e0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/commands-queue.d.ts @@ -0,0 +1,42 @@ +/// +/// +import { RedisCommandArguments, RedisCommandRawReply } from '../commands'; +import { ChannelListeners, PubSubListener, PubSubType, PubSubTypeListeners } from './pub-sub'; +export interface QueueCommandOptions { + asap?: boolean; + chainId?: symbol; + signal?: AbortSignal; + returnBuffers?: boolean; +} +export interface CommandWaitingToBeSent extends CommandWaitingForReply { + args: RedisCommandArguments; + chainId?: symbol; + abort?: { + signal: AbortSignal; + listener(): void; + }; +} +interface CommandWaitingForReply { + resolve(reply?: unknown): void; + reject(err: unknown): void; + channelsCounter?: number; + returnBuffers?: boolean; +} +export type OnShardedChannelMoved = (channel: string, listeners: ChannelListeners) => void; +export default class RedisCommandsQueue { + #private; + get isPubSubActive(): boolean; + constructor(maxLength: number | null | undefined, onShardedChannelMoved: OnShardedChannelMoved); + addCommand(args: RedisCommandArguments, options?: QueueCommandOptions): Promise; + subscribe(type: PubSubType, channels: string | Array, listener: PubSubListener, returnBuffers?: T): Promise | undefined; + unsubscribe(type: PubSubType, channels?: string | Array, listener?: PubSubListener, returnBuffers?: T): Promise | undefined; + resubscribe(): Promise | undefined; + extendPubSubChannelListeners(type: PubSubType, channel: string, listeners: ChannelListeners): Promise | undefined; + extendPubSubListeners(type: PubSubType, listeners: PubSubTypeListeners): Promise | undefined; + getPubSubListeners(type: PubSubType): PubSubTypeListeners; + getCommandToSend(): RedisCommandArguments | undefined; + onReplyChunk(chunk: Buffer): void; + flushWaitingForReply(err: Error): void; + flushAll(err: Error): void; +} +export {}; diff --git 
a/node_modules/@redis/client/dist/lib/client/commands-queue.js b/node_modules/@redis/client/dist/lib/client/commands-queue.js new file mode 100644 index 0000000..dbc8590 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/commands-queue.js @@ -0,0 +1,197 @@ +"use strict"; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var _RedisCommandsQueue_instances, _a, _RedisCommandsQueue_flushQueue, _RedisCommandsQueue_maxLength, _RedisCommandsQueue_waitingToBeSent, _RedisCommandsQueue_waitingForReply, _RedisCommandsQueue_onShardedChannelMoved, _RedisCommandsQueue_pubSub, _RedisCommandsQueue_chainInExecution, _RedisCommandsQueue_decoder, _RedisCommandsQueue_pushPubSubCommand; +Object.defineProperty(exports, "__esModule", { value: true }); +const LinkedList = require("yallist"); +const errors_1 = require("../errors"); +const decoder_1 = require("./RESP2/decoder"); +const encoder_1 = require("./RESP2/encoder"); +const pub_sub_1 = require("./pub-sub"); +const PONG = Buffer.from('pong'); +class RedisCommandsQueue { + get isPubSubActive() { + return __classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").isActive; + } + constructor(maxLength, onShardedChannelMoved) { + _RedisCommandsQueue_instances.add(this); + _RedisCommandsQueue_maxLength.set(this, void 0); + _RedisCommandsQueue_waitingToBeSent.set(this, new LinkedList()); + _RedisCommandsQueue_waitingForReply.set(this, new LinkedList()); + _RedisCommandsQueue_onShardedChannelMoved.set(this, void 0); + _RedisCommandsQueue_pubSub.set(this, new pub_sub_1.PubSub()); + _RedisCommandsQueue_chainInExecution.set(this, void 0); + _RedisCommandsQueue_decoder.set(this, new decoder_1.default({ + returnStringsAsBuffers: () => { + return !!__classPrivateFieldGet(this, _RedisCommandsQueue_waitingForReply, "f").head?.value.returnBuffers || + __classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").isActive; + }, + onReply: reply => { + if (__classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").isActive && Array.isArray(reply)) { + if (__classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").handleMessageReply(reply)) + return; + const isShardedUnsubscribe = pub_sub_1.PubSub.isShardedUnsubscribe(reply); + if (isShardedUnsubscribe && !__classPrivateFieldGet(this, 
_RedisCommandsQueue_waitingForReply, "f").length) { + const channel = reply[1].toString(); + __classPrivateFieldGet(this, _RedisCommandsQueue_onShardedChannelMoved, "f").call(this, channel, __classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").removeShardedListeners(channel)); + return; + } + else if (isShardedUnsubscribe || pub_sub_1.PubSub.isStatusReply(reply)) { + const head = __classPrivateFieldGet(this, _RedisCommandsQueue_waitingForReply, "f").head.value; + if ((Number.isNaN(head.channelsCounter) && reply[2] === 0) || + --head.channelsCounter === 0) { + __classPrivateFieldGet(this, _RedisCommandsQueue_waitingForReply, "f").shift().resolve(); + } + return; + } + if (PONG.equals(reply[0])) { + const { resolve, returnBuffers } = __classPrivateFieldGet(this, _RedisCommandsQueue_waitingForReply, "f").shift(), buffer = (reply[1].length === 0 ? reply[0] : reply[1]); + resolve(returnBuffers ? buffer : buffer.toString()); + return; + } + } + const { resolve, reject } = __classPrivateFieldGet(this, _RedisCommandsQueue_waitingForReply, "f").shift(); + if (reply instanceof errors_1.ErrorReply) { + reject(reply); + } + else { + resolve(reply); + } + } + })); + __classPrivateFieldSet(this, _RedisCommandsQueue_maxLength, maxLength, "f"); + __classPrivateFieldSet(this, _RedisCommandsQueue_onShardedChannelMoved, onShardedChannelMoved, "f"); + } + addCommand(args, options) { + if (__classPrivateFieldGet(this, _RedisCommandsQueue_maxLength, "f") && __classPrivateFieldGet(this, _RedisCommandsQueue_waitingToBeSent, "f").length + __classPrivateFieldGet(this, _RedisCommandsQueue_waitingForReply, "f").length >= __classPrivateFieldGet(this, _RedisCommandsQueue_maxLength, "f")) { + return Promise.reject(new Error('The queue is full')); + } + else if (options?.signal?.aborted) { + return Promise.reject(new errors_1.AbortError()); + } + return new Promise((resolve, reject) => { + const node = new LinkedList.Node({ + args, + chainId: options?.chainId, + returnBuffers: 
options?.returnBuffers, + resolve, + reject + }); + if (options?.signal) { + const listener = () => { + __classPrivateFieldGet(this, _RedisCommandsQueue_waitingToBeSent, "f").removeNode(node); + node.value.reject(new errors_1.AbortError()); + }; + node.value.abort = { + signal: options.signal, + listener + }; + // AbortSignal type is incorrent + options.signal.addEventListener('abort', listener, { + once: true + }); + } + if (options?.asap) { + __classPrivateFieldGet(this, _RedisCommandsQueue_waitingToBeSent, "f").unshiftNode(node); + } + else { + __classPrivateFieldGet(this, _RedisCommandsQueue_waitingToBeSent, "f").pushNode(node); + } + }); + } + subscribe(type, channels, listener, returnBuffers) { + return __classPrivateFieldGet(this, _RedisCommandsQueue_instances, "m", _RedisCommandsQueue_pushPubSubCommand).call(this, __classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").subscribe(type, channels, listener, returnBuffers)); + } + unsubscribe(type, channels, listener, returnBuffers) { + return __classPrivateFieldGet(this, _RedisCommandsQueue_instances, "m", _RedisCommandsQueue_pushPubSubCommand).call(this, __classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").unsubscribe(type, channels, listener, returnBuffers)); + } + resubscribe() { + const commands = __classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").resubscribe(); + if (!commands.length) + return; + return Promise.all(commands.map(command => __classPrivateFieldGet(this, _RedisCommandsQueue_instances, "m", _RedisCommandsQueue_pushPubSubCommand).call(this, command))); + } + extendPubSubChannelListeners(type, channel, listeners) { + return __classPrivateFieldGet(this, _RedisCommandsQueue_instances, "m", _RedisCommandsQueue_pushPubSubCommand).call(this, __classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").extendChannelListeners(type, channel, listeners)); + } + extendPubSubListeners(type, listeners) { + return __classPrivateFieldGet(this, _RedisCommandsQueue_instances, "m", 
_RedisCommandsQueue_pushPubSubCommand).call(this, __classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").extendTypeListeners(type, listeners)); + } + getPubSubListeners(type) { + return __classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").getTypeListeners(type); + } + getCommandToSend() { + const toSend = __classPrivateFieldGet(this, _RedisCommandsQueue_waitingToBeSent, "f").shift(); + if (!toSend) + return; + let encoded; + try { + encoded = (0, encoder_1.default)(toSend.args); + } + catch (err) { + toSend.reject(err); + return; + } + __classPrivateFieldGet(this, _RedisCommandsQueue_waitingForReply, "f").push({ + resolve: toSend.resolve, + reject: toSend.reject, + channelsCounter: toSend.channelsCounter, + returnBuffers: toSend.returnBuffers + }); + __classPrivateFieldSet(this, _RedisCommandsQueue_chainInExecution, toSend.chainId, "f"); + return encoded; + } + onReplyChunk(chunk) { + __classPrivateFieldGet(this, _RedisCommandsQueue_decoder, "f").write(chunk); + } + flushWaitingForReply(err) { + __classPrivateFieldGet(this, _RedisCommandsQueue_decoder, "f").reset(); + __classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").reset(); + __classPrivateFieldGet(_a, _a, "m", _RedisCommandsQueue_flushQueue).call(_a, __classPrivateFieldGet(this, _RedisCommandsQueue_waitingForReply, "f"), err); + if (!__classPrivateFieldGet(this, _RedisCommandsQueue_chainInExecution, "f")) + return; + while (__classPrivateFieldGet(this, _RedisCommandsQueue_waitingToBeSent, "f").head?.value.chainId === __classPrivateFieldGet(this, _RedisCommandsQueue_chainInExecution, "f")) { + __classPrivateFieldGet(this, _RedisCommandsQueue_waitingToBeSent, "f").shift(); + } + __classPrivateFieldSet(this, _RedisCommandsQueue_chainInExecution, undefined, "f"); + } + flushAll(err) { + __classPrivateFieldGet(this, _RedisCommandsQueue_decoder, "f").reset(); + __classPrivateFieldGet(this, _RedisCommandsQueue_pubSub, "f").reset(); + __classPrivateFieldGet(_a, _a, "m", 
_RedisCommandsQueue_flushQueue).call(_a, __classPrivateFieldGet(this, _RedisCommandsQueue_waitingForReply, "f"), err); + __classPrivateFieldGet(_a, _a, "m", _RedisCommandsQueue_flushQueue).call(_a, __classPrivateFieldGet(this, _RedisCommandsQueue_waitingToBeSent, "f"), err); + } +} +_a = RedisCommandsQueue, _RedisCommandsQueue_maxLength = new WeakMap(), _RedisCommandsQueue_waitingToBeSent = new WeakMap(), _RedisCommandsQueue_waitingForReply = new WeakMap(), _RedisCommandsQueue_onShardedChannelMoved = new WeakMap(), _RedisCommandsQueue_pubSub = new WeakMap(), _RedisCommandsQueue_chainInExecution = new WeakMap(), _RedisCommandsQueue_decoder = new WeakMap(), _RedisCommandsQueue_instances = new WeakSet(), _RedisCommandsQueue_flushQueue = function _RedisCommandsQueue_flushQueue(queue, err) { + while (queue.length) { + queue.shift().reject(err); + } +}, _RedisCommandsQueue_pushPubSubCommand = function _RedisCommandsQueue_pushPubSubCommand(command) { + if (command === undefined) + return; + return new Promise((resolve, reject) => { + __classPrivateFieldGet(this, _RedisCommandsQueue_waitingToBeSent, "f").push({ + args: command.args, + channelsCounter: command.channelsCounter, + returnBuffers: true, + resolve: () => { + command.resolve(); + resolve(); + }, + reject: err => { + command.reject?.(); + reject(err); + } + }); + }); +}; +exports.default = RedisCommandsQueue; diff --git a/node_modules/@redis/client/dist/lib/client/commands.d.ts b/node_modules/@redis/client/dist/lib/client/commands.d.ts new file mode 100644 index 0000000..ce9c15e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/commands.d.ts @@ -0,0 +1,816 @@ +import * as ACL_CAT from '../commands/ACL_CAT'; +import * as ACL_DELUSER from '../commands/ACL_DELUSER'; +import * as ACL_DRYRUN from '../commands/ACL_DRYRUN'; +import * as ACL_GENPASS from '../commands/ACL_GENPASS'; +import * as ACL_GETUSER from '../commands/ACL_GETUSER'; +import * as ACL_LIST from '../commands/ACL_LIST'; +import * as ACL_LOAD 
from '../commands/ACL_LOAD'; +import * as ACL_LOG_RESET from '../commands/ACL_LOG_RESET'; +import * as ACL_LOG from '../commands/ACL_LOG'; +import * as ACL_SAVE from '../commands/ACL_SAVE'; +import * as ACL_SETUSER from '../commands/ACL_SETUSER'; +import * as ACL_USERS from '../commands/ACL_USERS'; +import * as ACL_WHOAMI from '../commands/ACL_WHOAMI'; +import * as ASKING from '../commands/ASKING'; +import * as AUTH from '../commands/AUTH'; +import * as BGREWRITEAOF from '../commands/BGREWRITEAOF'; +import * as BGSAVE from '../commands/BGSAVE'; +import * as CLIENT_CACHING from '../commands/CLIENT_CACHING'; +import * as CLIENT_GETNAME from '../commands/CLIENT_GETNAME'; +import * as CLIENT_GETREDIR from '../commands/CLIENT_GETREDIR'; +import * as CLIENT_ID from '../commands/CLIENT_ID'; +import * as CLIENT_KILL from '../commands/CLIENT_KILL'; +import * as CLIENT_LIST from '../commands/CLIENT_LIST'; +import * as CLIENT_NO_EVICT from '../commands/CLIENT_NO-EVICT'; +import * as CLIENT_NO_TOUCH from '../commands/CLIENT_NO-TOUCH'; +import * as CLIENT_PAUSE from '../commands/CLIENT_PAUSE'; +import * as CLIENT_SETNAME from '../commands/CLIENT_SETNAME'; +import * as CLIENT_TRACKING from '../commands/CLIENT_TRACKING'; +import * as CLIENT_TRACKINGINFO from '../commands/CLIENT_TRACKINGINFO'; +import * as CLIENT_UNPAUSE from '../commands/CLIENT_UNPAUSE'; +import * as CLIENT_INFO from '../commands/CLIENT_INFO'; +import * as CLUSTER_ADDSLOTS from '../commands/CLUSTER_ADDSLOTS'; +import * as CLUSTER_ADDSLOTSRANGE from '../commands/CLUSTER_ADDSLOTSRANGE'; +import * as CLUSTER_BUMPEPOCH from '../commands/CLUSTER_BUMPEPOCH'; +import * as CLUSTER_COUNT_FAILURE_REPORTS from '../commands/CLUSTER_COUNT-FAILURE-REPORTS'; +import * as CLUSTER_COUNTKEYSINSLOT from '../commands/CLUSTER_COUNTKEYSINSLOT'; +import * as CLUSTER_DELSLOTS from '../commands/CLUSTER_DELSLOTS'; +import * as CLUSTER_DELSLOTSRANGE from '../commands/CLUSTER_DELSLOTSRANGE'; +import * as CLUSTER_FAILOVER from 
'../commands/CLUSTER_FAILOVER'; +import * as CLUSTER_FLUSHSLOTS from '../commands/CLUSTER_FLUSHSLOTS'; +import * as CLUSTER_FORGET from '../commands/CLUSTER_FORGET'; +import * as CLUSTER_GETKEYSINSLOT from '../commands/CLUSTER_GETKEYSINSLOT'; +import * as CLUSTER_INFO from '../commands/CLUSTER_INFO'; +import * as CLUSTER_KEYSLOT from '../commands/CLUSTER_KEYSLOT'; +import * as CLUSTER_LINKS from '../commands/CLUSTER_LINKS'; +import * as CLUSTER_MEET from '../commands/CLUSTER_MEET'; +import * as CLUSTER_MYID from '../commands/CLUSTER_MYID'; +import * as CLUSTER_MYSHARDID from '../commands/CLUSTER_MYSHARDID'; +import * as CLUSTER_NODES from '../commands/CLUSTER_NODES'; +import * as CLUSTER_REPLICAS from '../commands/CLUSTER_REPLICAS'; +import * as CLUSTER_REPLICATE from '../commands/CLUSTER_REPLICATE'; +import * as CLUSTER_RESET from '../commands/CLUSTER_RESET'; +import * as CLUSTER_SAVECONFIG from '../commands/CLUSTER_SAVECONFIG'; +import * as CLUSTER_SET_CONFIG_EPOCH from '../commands/CLUSTER_SET-CONFIG-EPOCH'; +import * as CLUSTER_SETSLOT from '../commands/CLUSTER_SETSLOT'; +import * as CLUSTER_SLOTS from '../commands/CLUSTER_SLOTS'; +import * as COMMAND_COUNT from '../commands/COMMAND_COUNT'; +import * as COMMAND_GETKEYS from '../commands/COMMAND_GETKEYS'; +import * as COMMAND_GETKEYSANDFLAGS from '../commands/COMMAND_GETKEYSANDFLAGS'; +import * as COMMAND_INFO from '../commands/COMMAND_INFO'; +import * as COMMAND_LIST from '../commands/COMMAND_LIST'; +import * as COMMAND from '../commands/COMMAND'; +import * as CONFIG_GET from '../commands/CONFIG_GET'; +import * as CONFIG_RESETASTAT from '../commands/CONFIG_RESETSTAT'; +import * as CONFIG_REWRITE from '../commands/CONFIG_REWRITE'; +import * as CONFIG_SET from '../commands/CONFIG_SET'; +import * as DBSIZE from '../commands/DBSIZE'; +import * as DISCARD from '../commands/DISCARD'; +import * as ECHO from '../commands/ECHO'; +import * as FAILOVER from '../commands/FAILOVER'; +import * as FLUSHALL from 
'../commands/FLUSHALL'; +import * as FLUSHDB from '../commands/FLUSHDB'; +import * as FUNCTION_DELETE from '../commands/FUNCTION_DELETE'; +import * as FUNCTION_DUMP from '../commands/FUNCTION_DUMP'; +import * as FUNCTION_FLUSH from '../commands/FUNCTION_FLUSH'; +import * as FUNCTION_KILL from '../commands/FUNCTION_KILL'; +import * as FUNCTION_LIST_WITHCODE from '../commands/FUNCTION_LIST_WITHCODE'; +import * as FUNCTION_LIST from '../commands/FUNCTION_LIST'; +import * as FUNCTION_LOAD from '../commands/FUNCTION_LOAD'; +import * as FUNCTION_RESTORE from '../commands/FUNCTION_RESTORE'; +import * as FUNCTION_STATS from '../commands/FUNCTION_STATS'; +import * as HELLO from '../commands/HELLO'; +import * as INFO from '../commands/INFO'; +import * as KEYS from '../commands/KEYS'; +import * as LASTSAVE from '../commands/LASTSAVE'; +import * as LATENCY_DOCTOR from '../commands/LATENCY_DOCTOR'; +import * as LATENCY_GRAPH from '../commands/LATENCY_GRAPH'; +import * as LATENCY_HISTORY from '../commands/LATENCY_HISTORY'; +import * as LATENCY_LATEST from '../commands/LATENCY_LATEST'; +import * as LOLWUT from '../commands/LOLWUT'; +import * as MEMORY_DOCTOR from '../commands/MEMORY_DOCTOR'; +import * as MEMORY_MALLOC_STATS from '../commands/MEMORY_MALLOC-STATS'; +import * as MEMORY_PURGE from '../commands/MEMORY_PURGE'; +import * as MEMORY_STATS from '../commands/MEMORY_STATS'; +import * as MEMORY_USAGE from '../commands/MEMORY_USAGE'; +import * as MODULE_LIST from '../commands/MODULE_LIST'; +import * as MODULE_LOAD from '../commands/MODULE_LOAD'; +import * as MODULE_UNLOAD from '../commands/MODULE_UNLOAD'; +import * as MOVE from '../commands/MOVE'; +import * as PING from '../commands/PING'; +import * as PUBSUB_CHANNELS from '../commands/PUBSUB_CHANNELS'; +import * as PUBSUB_NUMPAT from '../commands/PUBSUB_NUMPAT'; +import * as PUBSUB_NUMSUB from '../commands/PUBSUB_NUMSUB'; +import * as PUBSUB_SHARDCHANNELS from '../commands/PUBSUB_SHARDCHANNELS'; +import * as 
PUBSUB_SHARDNUMSUB from '../commands/PUBSUB_SHARDNUMSUB'; +import * as RANDOMKEY from '../commands/RANDOMKEY'; +import * as READONLY from '../commands/READONLY'; +import * as READWRITE from '../commands/READWRITE'; +import * as REPLICAOF from '../commands/REPLICAOF'; +import * as RESTORE_ASKING from '../commands/RESTORE-ASKING'; +import * as ROLE from '../commands/ROLE'; +import * as SAVE from '../commands/SAVE'; +import * as SCAN from '../commands/SCAN'; +import * as SCRIPT_DEBUG from '../commands/SCRIPT_DEBUG'; +import * as SCRIPT_EXISTS from '../commands/SCRIPT_EXISTS'; +import * as SCRIPT_FLUSH from '../commands/SCRIPT_FLUSH'; +import * as SCRIPT_KILL from '../commands/SCRIPT_KILL'; +import * as SCRIPT_LOAD from '../commands/SCRIPT_LOAD'; +import * as SHUTDOWN from '../commands/SHUTDOWN'; +import * as SWAPDB from '../commands/SWAPDB'; +import * as TIME from '../commands/TIME'; +import * as UNWATCH from '../commands/UNWATCH'; +import * as WAIT from '../commands/WAIT'; +declare const _default: { + ACL_CAT: typeof ACL_CAT; + aclCat: typeof ACL_CAT; + ACL_DELUSER: typeof ACL_DELUSER; + aclDelUser: typeof ACL_DELUSER; + ACL_DRYRUN: typeof ACL_DRYRUN; + aclDryRun: typeof ACL_DRYRUN; + ACL_GENPASS: typeof ACL_GENPASS; + aclGenPass: typeof ACL_GENPASS; + ACL_GETUSER: typeof ACL_GETUSER; + aclGetUser: typeof ACL_GETUSER; + ACL_LIST: typeof ACL_LIST; + aclList: typeof ACL_LIST; + ACL_LOAD: typeof ACL_LOAD; + aclLoad: typeof ACL_LOAD; + ACL_LOG_RESET: typeof ACL_LOG_RESET; + aclLogReset: typeof ACL_LOG_RESET; + ACL_LOG: typeof ACL_LOG; + aclLog: typeof ACL_LOG; + ACL_SAVE: typeof ACL_SAVE; + aclSave: typeof ACL_SAVE; + ACL_SETUSER: typeof ACL_SETUSER; + aclSetUser: typeof ACL_SETUSER; + ACL_USERS: typeof ACL_USERS; + aclUsers: typeof ACL_USERS; + ACL_WHOAMI: typeof ACL_WHOAMI; + aclWhoAmI: typeof ACL_WHOAMI; + ASKING: typeof ASKING; + asking: typeof ASKING; + AUTH: typeof AUTH; + auth: typeof AUTH; + BGREWRITEAOF: typeof BGREWRITEAOF; + bgRewriteAof: typeof BGREWRITEAOF; 
+ BGSAVE: typeof BGSAVE; + bgSave: typeof BGSAVE; + CLIENT_CACHING: typeof CLIENT_CACHING; + clientCaching: typeof CLIENT_CACHING; + CLIENT_GETNAME: typeof CLIENT_GETNAME; + clientGetName: typeof CLIENT_GETNAME; + CLIENT_GETREDIR: typeof CLIENT_GETREDIR; + clientGetRedir: typeof CLIENT_GETREDIR; + CLIENT_ID: typeof CLIENT_ID; + clientId: typeof CLIENT_ID; + CLIENT_KILL: typeof CLIENT_KILL; + clientKill: typeof CLIENT_KILL; + 'CLIENT_NO-EVICT': typeof CLIENT_NO_EVICT; + clientNoEvict: typeof CLIENT_NO_EVICT; + 'CLIENT_NO-TOUCH': typeof CLIENT_NO_TOUCH; + clientNoTouch: typeof CLIENT_NO_TOUCH; + CLIENT_LIST: typeof CLIENT_LIST; + clientList: typeof CLIENT_LIST; + CLIENT_PAUSE: typeof CLIENT_PAUSE; + clientPause: typeof CLIENT_PAUSE; + CLIENT_SETNAME: typeof CLIENT_SETNAME; + clientSetName: typeof CLIENT_SETNAME; + CLIENT_TRACKING: typeof CLIENT_TRACKING; + clientTracking: typeof CLIENT_TRACKING; + CLIENT_TRACKINGINFO: typeof CLIENT_TRACKINGINFO; + clientTrackingInfo: typeof CLIENT_TRACKINGINFO; + CLIENT_UNPAUSE: typeof CLIENT_UNPAUSE; + clientUnpause: typeof CLIENT_UNPAUSE; + CLIENT_INFO: typeof CLIENT_INFO; + clientInfo: typeof CLIENT_INFO; + CLUSTER_ADDSLOTS: typeof CLUSTER_ADDSLOTS; + clusterAddSlots: typeof CLUSTER_ADDSLOTS; + CLUSTER_ADDSLOTSRANGE: typeof CLUSTER_ADDSLOTSRANGE; + clusterAddSlotsRange: typeof CLUSTER_ADDSLOTSRANGE; + CLUSTER_BUMPEPOCH: typeof CLUSTER_BUMPEPOCH; + clusterBumpEpoch: typeof CLUSTER_BUMPEPOCH; + CLUSTER_COUNT_FAILURE_REPORTS: typeof CLUSTER_COUNT_FAILURE_REPORTS; + clusterCountFailureReports: typeof CLUSTER_COUNT_FAILURE_REPORTS; + CLUSTER_COUNTKEYSINSLOT: typeof CLUSTER_COUNTKEYSINSLOT; + clusterCountKeysInSlot: typeof CLUSTER_COUNTKEYSINSLOT; + CLUSTER_DELSLOTS: typeof CLUSTER_DELSLOTS; + clusterDelSlots: typeof CLUSTER_DELSLOTS; + CLUSTER_DELSLOTSRANGE: typeof CLUSTER_DELSLOTSRANGE; + clusterDelSlotsRange: typeof CLUSTER_DELSLOTSRANGE; + CLUSTER_FAILOVER: typeof CLUSTER_FAILOVER; + clusterFailover: typeof CLUSTER_FAILOVER; + 
CLUSTER_FLUSHSLOTS: typeof CLUSTER_FLUSHSLOTS; + clusterFlushSlots: typeof CLUSTER_FLUSHSLOTS; + CLUSTER_FORGET: typeof CLUSTER_FORGET; + clusterForget: typeof CLUSTER_FORGET; + CLUSTER_GETKEYSINSLOT: typeof CLUSTER_GETKEYSINSLOT; + clusterGetKeysInSlot: typeof CLUSTER_GETKEYSINSLOT; + CLUSTER_INFO: typeof CLUSTER_INFO; + clusterInfo: typeof CLUSTER_INFO; + CLUSTER_KEYSLOT: typeof CLUSTER_KEYSLOT; + clusterKeySlot: typeof CLUSTER_KEYSLOT; + CLUSTER_LINKS: typeof CLUSTER_LINKS; + clusterLinks: typeof CLUSTER_LINKS; + CLUSTER_MEET: typeof CLUSTER_MEET; + clusterMeet: typeof CLUSTER_MEET; + CLUSTER_MYID: typeof CLUSTER_MYID; + clusterMyId: typeof CLUSTER_MYID; + CLUSTER_MYSHARDID: typeof CLUSTER_MYSHARDID; + clusterMyShardId: typeof CLUSTER_MYSHARDID; + CLUSTER_NODES: typeof CLUSTER_NODES; + clusterNodes: typeof CLUSTER_NODES; + CLUSTER_REPLICAS: typeof CLUSTER_REPLICAS; + clusterReplicas: typeof CLUSTER_REPLICAS; + CLUSTER_REPLICATE: typeof CLUSTER_REPLICATE; + clusterReplicate: typeof CLUSTER_REPLICATE; + CLUSTER_RESET: typeof CLUSTER_RESET; + clusterReset: typeof CLUSTER_RESET; + CLUSTER_SAVECONFIG: typeof CLUSTER_SAVECONFIG; + clusterSaveConfig: typeof CLUSTER_SAVECONFIG; + CLUSTER_SET_CONFIG_EPOCH: typeof CLUSTER_SET_CONFIG_EPOCH; + clusterSetConfigEpoch: typeof CLUSTER_SET_CONFIG_EPOCH; + CLUSTER_SETSLOT: typeof CLUSTER_SETSLOT; + clusterSetSlot: typeof CLUSTER_SETSLOT; + CLUSTER_SLOTS: typeof CLUSTER_SLOTS; + clusterSlots: typeof CLUSTER_SLOTS; + COMMAND_COUNT: typeof COMMAND_COUNT; + commandCount: typeof COMMAND_COUNT; + COMMAND_GETKEYS: typeof COMMAND_GETKEYS; + commandGetKeys: typeof COMMAND_GETKEYS; + COMMAND_GETKEYSANDFLAGS: typeof COMMAND_GETKEYSANDFLAGS; + commandGetKeysAndFlags: typeof COMMAND_GETKEYSANDFLAGS; + COMMAND_INFO: typeof COMMAND_INFO; + commandInfo: typeof COMMAND_INFO; + COMMAND_LIST: typeof COMMAND_LIST; + commandList: typeof COMMAND_LIST; + COMMAND: typeof COMMAND; + command: typeof COMMAND; + CONFIG_GET: typeof CONFIG_GET; + configGet: 
typeof CONFIG_GET; + CONFIG_RESETASTAT: typeof CONFIG_RESETASTAT; + configResetStat: typeof CONFIG_RESETASTAT; + CONFIG_REWRITE: typeof CONFIG_REWRITE; + configRewrite: typeof CONFIG_REWRITE; + CONFIG_SET: typeof CONFIG_SET; + configSet: typeof CONFIG_SET; + DBSIZE: typeof DBSIZE; + dbSize: typeof DBSIZE; + DISCARD: typeof DISCARD; + discard: typeof DISCARD; + ECHO: typeof ECHO; + echo: typeof ECHO; + FAILOVER: typeof FAILOVER; + failover: typeof FAILOVER; + FLUSHALL: typeof FLUSHALL; + flushAll: typeof FLUSHALL; + FLUSHDB: typeof FLUSHDB; + flushDb: typeof FLUSHDB; + FUNCTION_DELETE: typeof FUNCTION_DELETE; + functionDelete: typeof FUNCTION_DELETE; + FUNCTION_DUMP: typeof FUNCTION_DUMP; + functionDump: typeof FUNCTION_DUMP; + FUNCTION_FLUSH: typeof FUNCTION_FLUSH; + functionFlush: typeof FUNCTION_FLUSH; + FUNCTION_KILL: typeof FUNCTION_KILL; + functionKill: typeof FUNCTION_KILL; + FUNCTION_LIST_WITHCODE: typeof FUNCTION_LIST_WITHCODE; + functionListWithCode: typeof FUNCTION_LIST_WITHCODE; + FUNCTION_LIST: typeof FUNCTION_LIST; + functionList: typeof FUNCTION_LIST; + FUNCTION_LOAD: typeof FUNCTION_LOAD; + functionLoad: typeof FUNCTION_LOAD; + FUNCTION_RESTORE: typeof FUNCTION_RESTORE; + functionRestore: typeof FUNCTION_RESTORE; + FUNCTION_STATS: typeof FUNCTION_STATS; + functionStats: typeof FUNCTION_STATS; + HELLO: typeof HELLO; + hello: typeof HELLO; + INFO: typeof INFO; + info: typeof INFO; + KEYS: typeof KEYS; + keys: typeof KEYS; + LASTSAVE: typeof LASTSAVE; + lastSave: typeof LASTSAVE; + LATENCY_DOCTOR: typeof LATENCY_DOCTOR; + latencyDoctor: typeof LATENCY_DOCTOR; + LATENCY_GRAPH: typeof LATENCY_GRAPH; + latencyGraph: typeof LATENCY_GRAPH; + LATENCY_HISTORY: typeof LATENCY_HISTORY; + latencyHistory: typeof LATENCY_HISTORY; + LATENCY_LATEST: typeof LATENCY_LATEST; + latencyLatest: typeof LATENCY_LATEST; + LOLWUT: typeof LOLWUT; + lolwut: typeof LOLWUT; + MEMORY_DOCTOR: typeof MEMORY_DOCTOR; + memoryDoctor: typeof MEMORY_DOCTOR; + 'MEMORY_MALLOC-STATS': typeof 
MEMORY_MALLOC_STATS; + memoryMallocStats: typeof MEMORY_MALLOC_STATS; + MEMORY_PURGE: typeof MEMORY_PURGE; + memoryPurge: typeof MEMORY_PURGE; + MEMORY_STATS: typeof MEMORY_STATS; + memoryStats: typeof MEMORY_STATS; + MEMORY_USAGE: typeof MEMORY_USAGE; + memoryUsage: typeof MEMORY_USAGE; + MODULE_LIST: typeof MODULE_LIST; + moduleList: typeof MODULE_LIST; + MODULE_LOAD: typeof MODULE_LOAD; + moduleLoad: typeof MODULE_LOAD; + MODULE_UNLOAD: typeof MODULE_UNLOAD; + moduleUnload: typeof MODULE_UNLOAD; + MOVE: typeof MOVE; + move: typeof MOVE; + PING: typeof PING; + ping: typeof PING; + PUBSUB_CHANNELS: typeof PUBSUB_CHANNELS; + pubSubChannels: typeof PUBSUB_CHANNELS; + PUBSUB_NUMPAT: typeof PUBSUB_NUMPAT; + pubSubNumPat: typeof PUBSUB_NUMPAT; + PUBSUB_NUMSUB: typeof PUBSUB_NUMSUB; + pubSubNumSub: typeof PUBSUB_NUMSUB; + PUBSUB_SHARDCHANNELS: typeof PUBSUB_SHARDCHANNELS; + pubSubShardChannels: typeof PUBSUB_SHARDCHANNELS; + PUBSUB_SHARDNUMSUB: typeof PUBSUB_SHARDNUMSUB; + pubSubShardNumSub: typeof PUBSUB_SHARDNUMSUB; + RANDOMKEY: typeof RANDOMKEY; + randomKey: typeof RANDOMKEY; + READONLY: typeof READONLY; + readonly: typeof READONLY; + READWRITE: typeof READWRITE; + readwrite: typeof READWRITE; + REPLICAOF: typeof REPLICAOF; + replicaOf: typeof REPLICAOF; + 'RESTORE-ASKING': typeof RESTORE_ASKING; + restoreAsking: typeof RESTORE_ASKING; + ROLE: typeof ROLE; + role: typeof ROLE; + SAVE: typeof SAVE; + save: typeof SAVE; + SCAN: typeof SCAN; + scan: typeof SCAN; + SCRIPT_DEBUG: typeof SCRIPT_DEBUG; + scriptDebug: typeof SCRIPT_DEBUG; + SCRIPT_EXISTS: typeof SCRIPT_EXISTS; + scriptExists: typeof SCRIPT_EXISTS; + SCRIPT_FLUSH: typeof SCRIPT_FLUSH; + scriptFlush: typeof SCRIPT_FLUSH; + SCRIPT_KILL: typeof SCRIPT_KILL; + scriptKill: typeof SCRIPT_KILL; + SCRIPT_LOAD: typeof SCRIPT_LOAD; + scriptLoad: typeof SCRIPT_LOAD; + SHUTDOWN: typeof SHUTDOWN; + shutdown: typeof SHUTDOWN; + SWAPDB: typeof SWAPDB; + swapDb: typeof SWAPDB; + TIME: typeof TIME; + time: typeof TIME; + 
UNWATCH: typeof UNWATCH; + unwatch: typeof UNWATCH; + WAIT: typeof WAIT; + wait: typeof WAIT; + APPEND: typeof import("../commands/APPEND"); + append: typeof import("../commands/APPEND"); + BITCOUNT: typeof import("../commands/BITCOUNT"); + bitCount: typeof import("../commands/BITCOUNT"); + BITFIELD_RO: typeof import("../commands/BITFIELD_RO"); + bitFieldRo: typeof import("../commands/BITFIELD_RO"); + BITFIELD: typeof import("../commands/BITFIELD"); + bitField: typeof import("../commands/BITFIELD"); + BITOP: typeof import("../commands/BITOP"); + bitOp: typeof import("../commands/BITOP"); + BITPOS: typeof import("../commands/BITPOS"); + bitPos: typeof import("../commands/BITPOS"); + BLMOVE: typeof import("../commands/BLMOVE"); + blMove: typeof import("../commands/BLMOVE"); + BLMPOP: typeof import("../commands/BLMPOP"); + blmPop: typeof import("../commands/BLMPOP"); + BLPOP: typeof import("../commands/BLPOP"); + blPop: typeof import("../commands/BLPOP"); + BRPOP: typeof import("../commands/BRPOP"); + brPop: typeof import("../commands/BRPOP"); + BRPOPLPUSH: typeof import("../commands/BRPOPLPUSH"); + brPopLPush: typeof import("../commands/BRPOPLPUSH"); + BZMPOP: typeof import("../commands/BZMPOP"); + bzmPop: typeof import("../commands/BZMPOP"); + BZPOPMAX: typeof import("../commands/BZPOPMAX"); + bzPopMax: typeof import("../commands/BZPOPMAX"); + BZPOPMIN: typeof import("../commands/BZPOPMIN"); + bzPopMin: typeof import("../commands/BZPOPMIN"); + COPY: typeof import("../commands/COPY"); + copy: typeof import("../commands/COPY"); + DECR: typeof import("../commands/DECR"); + decr: typeof import("../commands/DECR"); + DECRBY: typeof import("../commands/DECRBY"); + decrBy: typeof import("../commands/DECRBY"); + DEL: typeof import("../commands/DEL"); + del: typeof import("../commands/DEL"); + DUMP: typeof import("../commands/DUMP"); + dump: typeof import("../commands/DUMP"); + EVAL_RO: typeof import("../commands/EVAL_RO"); + evalRo: typeof import("../commands/EVAL_RO"); + 
EVAL: typeof import("../commands/EVAL"); + eval: typeof import("../commands/EVAL"); + EVALSHA: typeof import("../commands/EVALSHA"); + evalSha: typeof import("../commands/EVALSHA"); + EVALSHA_RO: typeof import("../commands/EVALSHA_RO"); + evalShaRo: typeof import("../commands/EVALSHA_RO"); + EXISTS: typeof import("../commands/EXISTS"); + exists: typeof import("../commands/EXISTS"); + EXPIRE: typeof import("../commands/EXPIRE"); + expire: typeof import("../commands/EXPIRE"); + EXPIREAT: typeof import("../commands/EXPIREAT"); + expireAt: typeof import("../commands/EXPIREAT"); + EXPIRETIME: typeof import("../commands/EXPIRETIME"); + expireTime: typeof import("../commands/EXPIRETIME"); + FCALL_RO: typeof import("../commands/FCALL_RO"); + fCallRo: typeof import("../commands/FCALL_RO"); + FCALL: typeof import("../commands/FCALL"); + fCall: typeof import("../commands/FCALL"); + GEOADD: typeof import("../commands/GEOADD"); + geoAdd: typeof import("../commands/GEOADD"); + GEODIST: typeof import("../commands/GEODIST"); + geoDist: typeof import("../commands/GEODIST"); + GEOHASH: typeof import("../commands/GEOHASH"); + geoHash: typeof import("../commands/GEOHASH"); + GEOPOS: typeof import("../commands/GEOPOS"); + geoPos: typeof import("../commands/GEOPOS"); + GEORADIUS_RO_WITH: typeof import("../commands/GEORADIUS_RO_WITH"); + geoRadiusRoWith: typeof import("../commands/GEORADIUS_RO_WITH"); + GEORADIUS_RO: typeof import("../commands/GEORADIUS_RO"); + geoRadiusRo: typeof import("../commands/GEORADIUS_RO"); + GEORADIUS_WITH: typeof import("../commands/GEORADIUS_WITH"); + geoRadiusWith: typeof import("../commands/GEORADIUS_WITH"); + GEORADIUS: typeof import("../commands/GEORADIUS"); + geoRadius: typeof import("../commands/GEORADIUS"); + GEORADIUSBYMEMBER_RO_WITH: typeof import("../commands/GEORADIUSBYMEMBER_RO_WITH"); + geoRadiusByMemberRoWith: typeof import("../commands/GEORADIUSBYMEMBER_RO_WITH"); + GEORADIUSBYMEMBER_RO: typeof import("../commands/GEORADIUSBYMEMBER_RO"); + 
geoRadiusByMemberRo: typeof import("../commands/GEORADIUSBYMEMBER_RO"); + GEORADIUSBYMEMBER_WITH: typeof import("../commands/GEORADIUSBYMEMBER_WITH"); + geoRadiusByMemberWith: typeof import("../commands/GEORADIUSBYMEMBER_WITH"); + GEORADIUSBYMEMBER: typeof import("../commands/GEORADIUSBYMEMBER"); + geoRadiusByMember: typeof import("../commands/GEORADIUSBYMEMBER"); + GEORADIUSBYMEMBERSTORE: typeof import("../commands/GEORADIUSBYMEMBERSTORE"); + geoRadiusByMemberStore: typeof import("../commands/GEORADIUSBYMEMBERSTORE"); + GEORADIUSSTORE: typeof import("../commands/GEORADIUSSTORE"); + geoRadiusStore: typeof import("../commands/GEORADIUSSTORE"); + GEOSEARCH_WITH: typeof import("../commands/GEOSEARCH_WITH"); + geoSearchWith: typeof import("../commands/GEOSEARCH_WITH"); + GEOSEARCH: typeof import("../commands/GEOSEARCH"); + geoSearch: typeof import("../commands/GEOSEARCH"); + GEOSEARCHSTORE: typeof import("../commands/GEOSEARCHSTORE"); + geoSearchStore: typeof import("../commands/GEOSEARCHSTORE"); + GET: typeof import("../commands/GET"); + get: typeof import("../commands/GET"); + GETBIT: typeof import("../commands/GETBIT"); + getBit: typeof import("../commands/GETBIT"); + GETDEL: typeof import("../commands/GETDEL"); + getDel: typeof import("../commands/GETDEL"); + GETEX: typeof import("../commands/GETEX"); + getEx: typeof import("../commands/GETEX"); + GETRANGE: typeof import("../commands/GETRANGE"); + getRange: typeof import("../commands/GETRANGE"); + GETSET: typeof import("../commands/GETSET"); + getSet: typeof import("../commands/GETSET"); + HDEL: typeof import("../commands/HDEL"); + hDel: typeof import("../commands/HDEL"); + HEXISTS: typeof import("../commands/HEXISTS"); + hExists: typeof import("../commands/HEXISTS"); + HEXPIRE: typeof import("../commands/HEXPIRE"); + hExpire: typeof import("../commands/HEXPIRE"); + HEXPIREAT: typeof import("../commands/HEXPIREAT"); + hExpireAt: typeof import("../commands/HEXPIREAT"); + HEXPIRETIME: typeof 
import("../commands/HEXPIRETIME"); + hExpireTime: typeof import("../commands/HEXPIRETIME"); + HGET: typeof import("../commands/HGET"); + hGet: typeof import("../commands/HGET"); + HGETALL: typeof import("../commands/HGETALL"); + hGetAll: typeof import("../commands/HGETALL"); + HINCRBY: typeof import("../commands/HINCRBY"); + hIncrBy: typeof import("../commands/HINCRBY"); + HINCRBYFLOAT: typeof import("../commands/HINCRBYFLOAT"); + hIncrByFloat: typeof import("../commands/HINCRBYFLOAT"); + HKEYS: typeof import("../commands/HKEYS"); + hKeys: typeof import("../commands/HKEYS"); + HLEN: typeof import("../commands/HLEN"); + hLen: typeof import("../commands/HLEN"); + HMGET: typeof import("../commands/HMGET"); + hmGet: typeof import("../commands/HMGET"); + HPERSIST: typeof import("../commands/HPERSIST"); + hPersist: typeof import("../commands/HPERSIST"); + HPEXPIRE: typeof import("../commands/HPEXPIRE"); + hpExpire: typeof import("../commands/HPEXPIRE"); + HPEXPIREAT: typeof import("../commands/HPEXPIREAT"); + hpExpireAt: typeof import("../commands/HPEXPIREAT"); + HPEXPIRETIME: typeof import("../commands/HPEXPIRETIME"); + hpExpireTime: typeof import("../commands/HPEXPIRETIME"); + HPTTL: typeof import("../commands/HPTTL"); + hpTTL: typeof import("../commands/HPTTL"); + HRANDFIELD_COUNT_WITHVALUES: typeof import("../commands/HRANDFIELD_COUNT_WITHVALUES"); + hRandFieldCountWithValues: typeof import("../commands/HRANDFIELD_COUNT_WITHVALUES"); + HRANDFIELD_COUNT: typeof import("../commands/HRANDFIELD_COUNT"); + hRandFieldCount: typeof import("../commands/HRANDFIELD_COUNT"); + HRANDFIELD: typeof import("../commands/HRANDFIELD"); + hRandField: typeof import("../commands/HRANDFIELD"); + HSCAN: typeof import("../commands/HSCAN"); + hScan: typeof import("../commands/HSCAN"); + HSCAN_NOVALUES: typeof import("../commands/HSCAN_NOVALUES"); + hScanNoValues: typeof import("../commands/HSCAN_NOVALUES"); + HSET: typeof import("../commands/HSET"); + hSet: typeof import("../commands/HSET"); 
+ HSETNX: typeof import("../commands/HSETNX"); + hSetNX: typeof import("../commands/HSETNX"); + HSTRLEN: typeof import("../commands/HSTRLEN"); + hStrLen: typeof import("../commands/HSTRLEN"); + HTTL: typeof import("../commands/HTTL"); + hTTL: typeof import("../commands/HTTL"); + HVALS: typeof import("../commands/HVALS"); + hVals: typeof import("../commands/HVALS"); + INCR: typeof import("../commands/INCR"); + incr: typeof import("../commands/INCR"); + INCRBY: typeof import("../commands/INCRBY"); + incrBy: typeof import("../commands/INCRBY"); + INCRBYFLOAT: typeof import("../commands/INCRBYFLOAT"); + incrByFloat: typeof import("../commands/INCRBYFLOAT"); + LCS_IDX_WITHMATCHLEN: typeof import("../commands/LCS_IDX_WITHMATCHLEN"); + lcsIdxWithMatchLen: typeof import("../commands/LCS_IDX_WITHMATCHLEN"); + LCS_IDX: typeof import("../commands/LCS_IDX"); + lcsIdx: typeof import("../commands/LCS_IDX"); + LCS_LEN: typeof import("../commands/LCS_LEN"); + lcsLen: typeof import("../commands/LCS_LEN"); + LCS: typeof import("../commands/LCS"); + lcs: typeof import("../commands/LCS"); + LINDEX: typeof import("../commands/LINDEX"); + lIndex: typeof import("../commands/LINDEX"); + LINSERT: typeof import("../commands/LINSERT"); + lInsert: typeof import("../commands/LINSERT"); + LLEN: typeof import("../commands/LLEN"); + lLen: typeof import("../commands/LLEN"); + LMOVE: typeof import("../commands/LMOVE"); + lMove: typeof import("../commands/LMOVE"); + LMPOP: typeof import("../commands/LMPOP"); + lmPop: typeof import("../commands/LMPOP"); + LPOP_COUNT: typeof import("../commands/LPOP_COUNT"); + lPopCount: typeof import("../commands/LPOP_COUNT"); + LPOP: typeof import("../commands/LPOP"); + lPop: typeof import("../commands/LPOP"); + LPOS_COUNT: typeof import("../commands/LPOS_COUNT"); + lPosCount: typeof import("../commands/LPOS_COUNT"); + LPOS: typeof import("../commands/LPOS"); + lPos: typeof import("../commands/LPOS"); + LPUSH: typeof import("../commands/LPUSH"); + lPush: typeof 
import("../commands/LPUSH"); + LPUSHX: typeof import("../commands/LPUSHX"); + lPushX: typeof import("../commands/LPUSHX"); + LRANGE: typeof import("../commands/LRANGE"); + lRange: typeof import("../commands/LRANGE"); + LREM: typeof import("../commands/LREM"); + lRem: typeof import("../commands/LREM"); + LSET: typeof import("../commands/LSET"); + lSet: typeof import("../commands/LSET"); + LTRIM: typeof import("../commands/LTRIM"); + lTrim: typeof import("../commands/LTRIM"); + MGET: typeof import("../commands/MGET"); + mGet: typeof import("../commands/MGET"); + MIGRATE: typeof import("../commands/MIGRATE"); + migrate: typeof import("../commands/MIGRATE"); + MSET: typeof import("../commands/MSET"); + mSet: typeof import("../commands/MSET"); + MSETNX: typeof import("../commands/MSETNX"); + mSetNX: typeof import("../commands/MSETNX"); + OBJECT_ENCODING: typeof import("../commands/OBJECT_ENCODING"); + objectEncoding: typeof import("../commands/OBJECT_ENCODING"); + OBJECT_FREQ: typeof import("../commands/OBJECT_FREQ"); + objectFreq: typeof import("../commands/OBJECT_FREQ"); + OBJECT_IDLETIME: typeof import("../commands/OBJECT_IDLETIME"); + objectIdleTime: typeof import("../commands/OBJECT_IDLETIME"); + OBJECT_REFCOUNT: typeof import("../commands/OBJECT_REFCOUNT"); + objectRefCount: typeof import("../commands/OBJECT_REFCOUNT"); + PERSIST: typeof import("../commands/PERSIST"); + persist: typeof import("../commands/PERSIST"); + PEXPIRE: typeof import("../commands/PEXPIRE"); + pExpire: typeof import("../commands/PEXPIRE"); + PEXPIREAT: typeof import("../commands/PEXPIREAT"); + pExpireAt: typeof import("../commands/PEXPIREAT"); + PEXPIRETIME: typeof import("../commands/PEXPIRETIME"); + pExpireTime: typeof import("../commands/PEXPIRETIME"); + PFADD: typeof import("../commands/PFADD"); + pfAdd: typeof import("../commands/PFADD"); + PFCOUNT: typeof import("../commands/PFCOUNT"); + pfCount: typeof import("../commands/PFCOUNT"); + PFMERGE: typeof import("../commands/PFMERGE"); + 
pfMerge: typeof import("../commands/PFMERGE"); + PSETEX: typeof import("../commands/PSETEX"); + pSetEx: typeof import("../commands/PSETEX"); + PTTL: typeof import("../commands/PTTL"); + pTTL: typeof import("../commands/PTTL"); + PUBLISH: typeof import("../commands/PUBLISH"); + publish: typeof import("../commands/PUBLISH"); + RENAME: typeof import("../commands/RENAME"); + rename: typeof import("../commands/RENAME"); + RENAMENX: typeof import("../commands/RENAMENX"); + renameNX: typeof import("../commands/RENAMENX"); + RESTORE: typeof import("../commands/RESTORE"); + restore: typeof import("../commands/RESTORE"); + RPOP_COUNT: typeof import("../commands/RPOP_COUNT"); + rPopCount: typeof import("../commands/RPOP_COUNT"); + RPOP: typeof import("../commands/RPOP"); + rPop: typeof import("../commands/RPOP"); + RPOPLPUSH: typeof import("../commands/RPOPLPUSH"); + rPopLPush: typeof import("../commands/RPOPLPUSH"); + RPUSH: typeof import("../commands/RPUSH"); + rPush: typeof import("../commands/RPUSH"); + RPUSHX: typeof import("../commands/RPUSHX"); + rPushX: typeof import("../commands/RPUSHX"); + SADD: typeof import("../commands/SADD"); + sAdd: typeof import("../commands/SADD"); + SCARD: typeof import("../commands/SCARD"); + sCard: typeof import("../commands/SCARD"); + SDIFF: typeof import("../commands/SDIFF"); + sDiff: typeof import("../commands/SDIFF"); + SDIFFSTORE: typeof import("../commands/SDIFFSTORE"); + sDiffStore: typeof import("../commands/SDIFFSTORE"); + SINTER: typeof import("../commands/SINTER"); + sInter: typeof import("../commands/SINTER"); + SINTERCARD: typeof import("../commands/SINTERCARD"); + sInterCard: typeof import("../commands/SINTERCARD"); + SINTERSTORE: typeof import("../commands/SINTERSTORE"); + sInterStore: typeof import("../commands/SINTERSTORE"); + SET: typeof import("../commands/SET"); + set: typeof import("../commands/SET"); + SETBIT: typeof import("../commands/SETBIT"); + setBit: typeof import("../commands/SETBIT"); + SETEX: typeof 
import("../commands/SETEX"); + setEx: typeof import("../commands/SETEX"); + SETNX: typeof import("../commands/SETNX"); + setNX: typeof import("../commands/SETNX"); + SETRANGE: typeof import("../commands/SETRANGE"); + setRange: typeof import("../commands/SETRANGE"); + SISMEMBER: typeof import("../commands/SISMEMBER"); + sIsMember: typeof import("../commands/SISMEMBER"); + SMEMBERS: typeof import("../commands/SMEMBERS"); + sMembers: typeof import("../commands/SMEMBERS"); + SMISMEMBER: typeof import("../commands/SMISMEMBER"); + smIsMember: typeof import("../commands/SMISMEMBER"); + SMOVE: typeof import("../commands/SMOVE"); + sMove: typeof import("../commands/SMOVE"); + SORT_RO: typeof import("../commands/SORT_RO"); + sortRo: typeof import("../commands/SORT_RO"); + SORT_STORE: typeof import("../commands/SORT_STORE"); + sortStore: typeof import("../commands/SORT_STORE"); + SORT: typeof import("../commands/SORT"); + sort: typeof import("../commands/SORT"); + SPOP: typeof import("../commands/SPOP"); + sPop: typeof import("../commands/SPOP"); + SPUBLISH: typeof import("../commands/SPUBLISH"); + sPublish: typeof import("../commands/SPUBLISH"); + SRANDMEMBER_COUNT: typeof import("../commands/SRANDMEMBER_COUNT"); + sRandMemberCount: typeof import("../commands/SRANDMEMBER_COUNT"); + SRANDMEMBER: typeof import("../commands/SRANDMEMBER"); + sRandMember: typeof import("../commands/SRANDMEMBER"); + SREM: typeof import("../commands/SREM"); + sRem: typeof import("../commands/SREM"); + SSCAN: typeof import("../commands/SSCAN"); + sScan: typeof import("../commands/SSCAN"); + STRLEN: typeof import("../commands/STRLEN"); + strLen: typeof import("../commands/STRLEN"); + SUNION: typeof import("../commands/SUNION"); + sUnion: typeof import("../commands/SUNION"); + SUNIONSTORE: typeof import("../commands/SUNIONSTORE"); + sUnionStore: typeof import("../commands/SUNIONSTORE"); + TOUCH: typeof import("../commands/TOUCH"); + touch: typeof import("../commands/TOUCH"); + TTL: typeof 
import("../commands/TTL"); + ttl: typeof import("../commands/TTL"); + TYPE: typeof import("../commands/TYPE"); + type: typeof import("../commands/TYPE"); + UNLINK: typeof import("../commands/UNLINK"); + unlink: typeof import("../commands/UNLINK"); + WATCH: typeof import("../commands/WATCH"); + watch: typeof import("../commands/WATCH"); + XACK: typeof import("../commands/XACK"); + xAck: typeof import("../commands/XACK"); + XADD: typeof import("../commands/XADD"); + xAdd: typeof import("../commands/XADD"); + XAUTOCLAIM_JUSTID: typeof import("../commands/XAUTOCLAIM_JUSTID"); + xAutoClaimJustId: typeof import("../commands/XAUTOCLAIM_JUSTID"); + XAUTOCLAIM: typeof import("../commands/XAUTOCLAIM"); + xAutoClaim: typeof import("../commands/XAUTOCLAIM"); + XCLAIM: typeof import("../commands/XCLAIM"); + xClaim: typeof import("../commands/XCLAIM"); + XCLAIM_JUSTID: typeof import("../commands/XCLAIM_JUSTID"); + xClaimJustId: typeof import("../commands/XCLAIM_JUSTID"); + XDEL: typeof import("../commands/XDEL"); + xDel: typeof import("../commands/XDEL"); + XGROUP_CREATE: typeof import("../commands/XGROUP_CREATE"); + xGroupCreate: typeof import("../commands/XGROUP_CREATE"); + XGROUP_CREATECONSUMER: typeof import("../commands/XGROUP_CREATECONSUMER"); + xGroupCreateConsumer: typeof import("../commands/XGROUP_CREATECONSUMER"); + XGROUP_DELCONSUMER: typeof import("../commands/XGROUP_DELCONSUMER"); + xGroupDelConsumer: typeof import("../commands/XGROUP_DELCONSUMER"); + XGROUP_DESTROY: typeof import("../commands/XGROUP_DESTROY"); + xGroupDestroy: typeof import("../commands/XGROUP_DESTROY"); + XGROUP_SETID: typeof import("../commands/XGROUP_SETID"); + xGroupSetId: typeof import("../commands/XGROUP_SETID"); + XINFO_CONSUMERS: typeof import("../commands/XINFO_CONSUMERS"); + xInfoConsumers: typeof import("../commands/XINFO_CONSUMERS"); + XINFO_GROUPS: typeof import("../commands/XINFO_GROUPS"); + xInfoGroups: typeof import("../commands/XINFO_GROUPS"); + XINFO_STREAM: typeof 
import("../commands/XINFO_STREAM"); + xInfoStream: typeof import("../commands/XINFO_STREAM"); + XLEN: typeof import("../commands/XLEN"); + xLen: typeof import("../commands/XLEN"); + XPENDING_RANGE: typeof import("../commands/XPENDING_RANGE"); + xPendingRange: typeof import("../commands/XPENDING_RANGE"); + XPENDING: typeof import("../commands/XPENDING"); + xPending: typeof import("../commands/XPENDING"); + XRANGE: typeof import("../commands/XRANGE"); + xRange: typeof import("../commands/XRANGE"); + XREAD: typeof import("../commands/XREAD"); + xRead: typeof import("../commands/XREAD"); + XREADGROUP: typeof import("../commands/XREADGROUP"); + xReadGroup: typeof import("../commands/XREADGROUP"); + XREVRANGE: typeof import("../commands/XREVRANGE"); + xRevRange: typeof import("../commands/XREVRANGE"); + XSETID: typeof import("../commands/XSETID"); + xSetId: typeof import("../commands/XSETID"); + XTRIM: typeof import("../commands/XTRIM"); + xTrim: typeof import("../commands/XTRIM"); + ZADD: typeof import("../commands/ZADD"); + zAdd: typeof import("../commands/ZADD"); + ZCARD: typeof import("../commands/ZCARD"); + zCard: typeof import("../commands/ZCARD"); + ZCOUNT: typeof import("../commands/ZCOUNT"); + zCount: typeof import("../commands/ZCOUNT"); + ZDIFF_WITHSCORES: typeof import("../commands/ZDIFF_WITHSCORES"); + zDiffWithScores: typeof import("../commands/ZDIFF_WITHSCORES"); + ZDIFF: typeof import("../commands/ZDIFF"); + zDiff: typeof import("../commands/ZDIFF"); + ZDIFFSTORE: typeof import("../commands/ZDIFFSTORE"); + zDiffStore: typeof import("../commands/ZDIFFSTORE"); + ZINCRBY: typeof import("../commands/ZINCRBY"); + zIncrBy: typeof import("../commands/ZINCRBY"); + ZINTER_WITHSCORES: typeof import("../commands/ZINTER_WITHSCORES"); + zInterWithScores: typeof import("../commands/ZINTER_WITHSCORES"); + ZINTER: typeof import("../commands/ZINTER"); + zInter: typeof import("../commands/ZINTER"); + ZINTERCARD: typeof import("../commands/ZINTERCARD"); + zInterCard: typeof 
import("../commands/ZINTERCARD"); + ZINTERSTORE: typeof import("../commands/ZINTERSTORE"); + zInterStore: typeof import("../commands/ZINTERSTORE"); + ZLEXCOUNT: typeof import("../commands/ZLEXCOUNT"); + zLexCount: typeof import("../commands/ZLEXCOUNT"); + ZMPOP: typeof import("../commands/ZMPOP"); + zmPop: typeof import("../commands/ZMPOP"); + ZMSCORE: typeof import("../commands/ZMSCORE"); + zmScore: typeof import("../commands/ZMSCORE"); + ZPOPMAX_COUNT: typeof import("../commands/ZPOPMAX_COUNT"); + zPopMaxCount: typeof import("../commands/ZPOPMAX_COUNT"); + ZPOPMAX: typeof import("../commands/ZPOPMAX"); + zPopMax: typeof import("../commands/ZPOPMAX"); + ZPOPMIN_COUNT: typeof import("../commands/ZPOPMIN_COUNT"); + zPopMinCount: typeof import("../commands/ZPOPMIN_COUNT"); + ZPOPMIN: typeof import("../commands/ZPOPMIN"); + zPopMin: typeof import("../commands/ZPOPMIN"); + ZRANDMEMBER_COUNT_WITHSCORES: typeof import("../commands/ZRANDMEMBER_COUNT_WITHSCORES"); + zRandMemberCountWithScores: typeof import("../commands/ZRANDMEMBER_COUNT_WITHSCORES"); + ZRANDMEMBER_COUNT: typeof import("../commands/ZRANDMEMBER_COUNT"); + zRandMemberCount: typeof import("../commands/ZRANDMEMBER_COUNT"); + ZRANDMEMBER: typeof import("../commands/ZRANDMEMBER"); + zRandMember: typeof import("../commands/ZRANDMEMBER"); + ZRANGE_WITHSCORES: typeof import("../commands/ZRANGE_WITHSCORES"); + zRangeWithScores: typeof import("../commands/ZRANGE_WITHSCORES"); + ZRANGE: typeof import("../commands/ZRANGE"); + zRange: typeof import("../commands/ZRANGE"); + ZRANGEBYLEX: typeof import("../commands/ZRANGEBYLEX"); + zRangeByLex: typeof import("../commands/ZRANGEBYLEX"); + ZRANGEBYSCORE_WITHSCORES: typeof import("../commands/ZRANGEBYSCORE_WITHSCORES"); + zRangeByScoreWithScores: typeof import("../commands/ZRANGEBYSCORE_WITHSCORES"); + ZRANGEBYSCORE: typeof import("../commands/ZRANGEBYSCORE"); + zRangeByScore: typeof import("../commands/ZRANGEBYSCORE"); + ZRANGESTORE: typeof import("../commands/ZRANGESTORE"); 
+ zRangeStore: typeof import("../commands/ZRANGESTORE"); + ZRANK: typeof import("../commands/ZRANK"); + zRank: typeof import("../commands/ZRANK"); + ZREM: typeof import("../commands/ZREM"); + zRem: typeof import("../commands/ZREM"); + ZREMRANGEBYLEX: typeof import("../commands/ZREMRANGEBYLEX"); + zRemRangeByLex: typeof import("../commands/ZREMRANGEBYLEX"); + ZREMRANGEBYRANK: typeof import("../commands/ZREMRANGEBYRANK"); + zRemRangeByRank: typeof import("../commands/ZREMRANGEBYRANK"); + ZREMRANGEBYSCORE: typeof import("../commands/ZREMRANGEBYSCORE"); + zRemRangeByScore: typeof import("../commands/ZREMRANGEBYSCORE"); + ZREVRANK: typeof import("../commands/ZREVRANK"); + zRevRank: typeof import("../commands/ZREVRANK"); + ZSCAN: typeof import("../commands/ZSCAN"); + zScan: typeof import("../commands/ZSCAN"); + ZSCORE: typeof import("../commands/ZSCORE"); + zScore: typeof import("../commands/ZSCORE"); + ZUNION_WITHSCORES: typeof import("../commands/ZUNION_WITHSCORES"); + zUnionWithScores: typeof import("../commands/ZUNION_WITHSCORES"); + ZUNION: typeof import("../commands/ZUNION"); + zUnion: typeof import("../commands/ZUNION"); + ZUNIONSTORE: typeof import("../commands/ZUNIONSTORE"); + zUnionStore: typeof import("../commands/ZUNIONSTORE"); +}; +export default _default; diff --git a/node_modules/@redis/client/dist/lib/client/commands.js b/node_modules/@redis/client/dist/lib/client/commands.js new file mode 100644 index 0000000..4b4dfb6 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/commands.js @@ -0,0 +1,375 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const commands_1 = require("../cluster/commands"); +const ACL_CAT = require("../commands/ACL_CAT"); +const ACL_DELUSER = require("../commands/ACL_DELUSER"); +const ACL_DRYRUN = require("../commands/ACL_DRYRUN"); +const ACL_GENPASS = require("../commands/ACL_GENPASS"); +const ACL_GETUSER = require("../commands/ACL_GETUSER"); +const ACL_LIST = 
require("../commands/ACL_LIST"); +const ACL_LOAD = require("../commands/ACL_LOAD"); +const ACL_LOG_RESET = require("../commands/ACL_LOG_RESET"); +const ACL_LOG = require("../commands/ACL_LOG"); +const ACL_SAVE = require("../commands/ACL_SAVE"); +const ACL_SETUSER = require("../commands/ACL_SETUSER"); +const ACL_USERS = require("../commands/ACL_USERS"); +const ACL_WHOAMI = require("../commands/ACL_WHOAMI"); +const ASKING = require("../commands/ASKING"); +const AUTH = require("../commands/AUTH"); +const BGREWRITEAOF = require("../commands/BGREWRITEAOF"); +const BGSAVE = require("../commands/BGSAVE"); +const CLIENT_CACHING = require("../commands/CLIENT_CACHING"); +const CLIENT_GETNAME = require("../commands/CLIENT_GETNAME"); +const CLIENT_GETREDIR = require("../commands/CLIENT_GETREDIR"); +const CLIENT_ID = require("../commands/CLIENT_ID"); +const CLIENT_KILL = require("../commands/CLIENT_KILL"); +const CLIENT_LIST = require("../commands/CLIENT_LIST"); +const CLIENT_NO_EVICT = require("../commands/CLIENT_NO-EVICT"); +const CLIENT_NO_TOUCH = require("../commands/CLIENT_NO-TOUCH"); +const CLIENT_PAUSE = require("../commands/CLIENT_PAUSE"); +const CLIENT_SETNAME = require("../commands/CLIENT_SETNAME"); +const CLIENT_TRACKING = require("../commands/CLIENT_TRACKING"); +const CLIENT_TRACKINGINFO = require("../commands/CLIENT_TRACKINGINFO"); +const CLIENT_UNPAUSE = require("../commands/CLIENT_UNPAUSE"); +const CLIENT_INFO = require("../commands/CLIENT_INFO"); +const CLUSTER_ADDSLOTS = require("../commands/CLUSTER_ADDSLOTS"); +const CLUSTER_ADDSLOTSRANGE = require("../commands/CLUSTER_ADDSLOTSRANGE"); +const CLUSTER_BUMPEPOCH = require("../commands/CLUSTER_BUMPEPOCH"); +const CLUSTER_COUNT_FAILURE_REPORTS = require("../commands/CLUSTER_COUNT-FAILURE-REPORTS"); +const CLUSTER_COUNTKEYSINSLOT = require("../commands/CLUSTER_COUNTKEYSINSLOT"); +const CLUSTER_DELSLOTS = require("../commands/CLUSTER_DELSLOTS"); +const CLUSTER_DELSLOTSRANGE = 
require("../commands/CLUSTER_DELSLOTSRANGE"); +const CLUSTER_FAILOVER = require("../commands/CLUSTER_FAILOVER"); +const CLUSTER_FLUSHSLOTS = require("../commands/CLUSTER_FLUSHSLOTS"); +const CLUSTER_FORGET = require("../commands/CLUSTER_FORGET"); +const CLUSTER_GETKEYSINSLOT = require("../commands/CLUSTER_GETKEYSINSLOT"); +const CLUSTER_INFO = require("../commands/CLUSTER_INFO"); +const CLUSTER_KEYSLOT = require("../commands/CLUSTER_KEYSLOT"); +const CLUSTER_LINKS = require("../commands/CLUSTER_LINKS"); +const CLUSTER_MEET = require("../commands/CLUSTER_MEET"); +const CLUSTER_MYID = require("../commands/CLUSTER_MYID"); +const CLUSTER_MYSHARDID = require("../commands/CLUSTER_MYSHARDID"); +const CLUSTER_NODES = require("../commands/CLUSTER_NODES"); +const CLUSTER_REPLICAS = require("../commands/CLUSTER_REPLICAS"); +const CLUSTER_REPLICATE = require("../commands/CLUSTER_REPLICATE"); +const CLUSTER_RESET = require("../commands/CLUSTER_RESET"); +const CLUSTER_SAVECONFIG = require("../commands/CLUSTER_SAVECONFIG"); +const CLUSTER_SET_CONFIG_EPOCH = require("../commands/CLUSTER_SET-CONFIG-EPOCH"); +const CLUSTER_SETSLOT = require("../commands/CLUSTER_SETSLOT"); +const CLUSTER_SLOTS = require("../commands/CLUSTER_SLOTS"); +const COMMAND_COUNT = require("../commands/COMMAND_COUNT"); +const COMMAND_GETKEYS = require("../commands/COMMAND_GETKEYS"); +const COMMAND_GETKEYSANDFLAGS = require("../commands/COMMAND_GETKEYSANDFLAGS"); +const COMMAND_INFO = require("../commands/COMMAND_INFO"); +const COMMAND_LIST = require("../commands/COMMAND_LIST"); +const COMMAND = require("../commands/COMMAND"); +const CONFIG_GET = require("../commands/CONFIG_GET"); +const CONFIG_RESETASTAT = require("../commands/CONFIG_RESETSTAT"); +const CONFIG_REWRITE = require("../commands/CONFIG_REWRITE"); +const CONFIG_SET = require("../commands/CONFIG_SET"); +const DBSIZE = require("../commands/DBSIZE"); +const DISCARD = require("../commands/DISCARD"); +const ECHO = require("../commands/ECHO"); +const 
FAILOVER = require("../commands/FAILOVER"); +const FLUSHALL = require("../commands/FLUSHALL"); +const FLUSHDB = require("../commands/FLUSHDB"); +const FUNCTION_DELETE = require("../commands/FUNCTION_DELETE"); +const FUNCTION_DUMP = require("../commands/FUNCTION_DUMP"); +const FUNCTION_FLUSH = require("../commands/FUNCTION_FLUSH"); +const FUNCTION_KILL = require("../commands/FUNCTION_KILL"); +const FUNCTION_LIST_WITHCODE = require("../commands/FUNCTION_LIST_WITHCODE"); +const FUNCTION_LIST = require("../commands/FUNCTION_LIST"); +const FUNCTION_LOAD = require("../commands/FUNCTION_LOAD"); +const FUNCTION_RESTORE = require("../commands/FUNCTION_RESTORE"); +const FUNCTION_STATS = require("../commands/FUNCTION_STATS"); +const HELLO = require("../commands/HELLO"); +const INFO = require("../commands/INFO"); +const KEYS = require("../commands/KEYS"); +const LASTSAVE = require("../commands/LASTSAVE"); +const LATENCY_DOCTOR = require("../commands/LATENCY_DOCTOR"); +const LATENCY_GRAPH = require("../commands/LATENCY_GRAPH"); +const LATENCY_HISTORY = require("../commands/LATENCY_HISTORY"); +const LATENCY_LATEST = require("../commands/LATENCY_LATEST"); +const LOLWUT = require("../commands/LOLWUT"); +const MEMORY_DOCTOR = require("../commands/MEMORY_DOCTOR"); +const MEMORY_MALLOC_STATS = require("../commands/MEMORY_MALLOC-STATS"); +const MEMORY_PURGE = require("../commands/MEMORY_PURGE"); +const MEMORY_STATS = require("../commands/MEMORY_STATS"); +const MEMORY_USAGE = require("../commands/MEMORY_USAGE"); +const MODULE_LIST = require("../commands/MODULE_LIST"); +const MODULE_LOAD = require("../commands/MODULE_LOAD"); +const MODULE_UNLOAD = require("../commands/MODULE_UNLOAD"); +const MOVE = require("../commands/MOVE"); +const PING = require("../commands/PING"); +const PUBSUB_CHANNELS = require("../commands/PUBSUB_CHANNELS"); +const PUBSUB_NUMPAT = require("../commands/PUBSUB_NUMPAT"); +const PUBSUB_NUMSUB = require("../commands/PUBSUB_NUMSUB"); +const PUBSUB_SHARDCHANNELS = 
require("../commands/PUBSUB_SHARDCHANNELS"); +const PUBSUB_SHARDNUMSUB = require("../commands/PUBSUB_SHARDNUMSUB"); +const RANDOMKEY = require("../commands/RANDOMKEY"); +const READONLY = require("../commands/READONLY"); +const READWRITE = require("../commands/READWRITE"); +const REPLICAOF = require("../commands/REPLICAOF"); +const RESTORE_ASKING = require("../commands/RESTORE-ASKING"); +const ROLE = require("../commands/ROLE"); +const SAVE = require("../commands/SAVE"); +const SCAN = require("../commands/SCAN"); +const SCRIPT_DEBUG = require("../commands/SCRIPT_DEBUG"); +const SCRIPT_EXISTS = require("../commands/SCRIPT_EXISTS"); +const SCRIPT_FLUSH = require("../commands/SCRIPT_FLUSH"); +const SCRIPT_KILL = require("../commands/SCRIPT_KILL"); +const SCRIPT_LOAD = require("../commands/SCRIPT_LOAD"); +const SHUTDOWN = require("../commands/SHUTDOWN"); +const SWAPDB = require("../commands/SWAPDB"); +const TIME = require("../commands/TIME"); +const UNWATCH = require("../commands/UNWATCH"); +const WAIT = require("../commands/WAIT"); +exports.default = { + ...commands_1.default, + ACL_CAT, + aclCat: ACL_CAT, + ACL_DELUSER, + aclDelUser: ACL_DELUSER, + ACL_DRYRUN, + aclDryRun: ACL_DRYRUN, + ACL_GENPASS, + aclGenPass: ACL_GENPASS, + ACL_GETUSER, + aclGetUser: ACL_GETUSER, + ACL_LIST, + aclList: ACL_LIST, + ACL_LOAD, + aclLoad: ACL_LOAD, + ACL_LOG_RESET, + aclLogReset: ACL_LOG_RESET, + ACL_LOG, + aclLog: ACL_LOG, + ACL_SAVE, + aclSave: ACL_SAVE, + ACL_SETUSER, + aclSetUser: ACL_SETUSER, + ACL_USERS, + aclUsers: ACL_USERS, + ACL_WHOAMI, + aclWhoAmI: ACL_WHOAMI, + ASKING, + asking: ASKING, + AUTH, + auth: AUTH, + BGREWRITEAOF, + bgRewriteAof: BGREWRITEAOF, + BGSAVE, + bgSave: BGSAVE, + CLIENT_CACHING, + clientCaching: CLIENT_CACHING, + CLIENT_GETNAME, + clientGetName: CLIENT_GETNAME, + CLIENT_GETREDIR, + clientGetRedir: CLIENT_GETREDIR, + CLIENT_ID, + clientId: CLIENT_ID, + CLIENT_KILL, + clientKill: CLIENT_KILL, + 'CLIENT_NO-EVICT': CLIENT_NO_EVICT, + clientNoEvict: 
CLIENT_NO_EVICT, + 'CLIENT_NO-TOUCH': CLIENT_NO_TOUCH, + clientNoTouch: CLIENT_NO_TOUCH, + CLIENT_LIST, + clientList: CLIENT_LIST, + CLIENT_PAUSE, + clientPause: CLIENT_PAUSE, + CLIENT_SETNAME, + clientSetName: CLIENT_SETNAME, + CLIENT_TRACKING, + clientTracking: CLIENT_TRACKING, + CLIENT_TRACKINGINFO, + clientTrackingInfo: CLIENT_TRACKINGINFO, + CLIENT_UNPAUSE, + clientUnpause: CLIENT_UNPAUSE, + CLIENT_INFO, + clientInfo: CLIENT_INFO, + CLUSTER_ADDSLOTS, + clusterAddSlots: CLUSTER_ADDSLOTS, + CLUSTER_ADDSLOTSRANGE, + clusterAddSlotsRange: CLUSTER_ADDSLOTSRANGE, + CLUSTER_BUMPEPOCH, + clusterBumpEpoch: CLUSTER_BUMPEPOCH, + CLUSTER_COUNT_FAILURE_REPORTS, + clusterCountFailureReports: CLUSTER_COUNT_FAILURE_REPORTS, + CLUSTER_COUNTKEYSINSLOT, + clusterCountKeysInSlot: CLUSTER_COUNTKEYSINSLOT, + CLUSTER_DELSLOTS, + clusterDelSlots: CLUSTER_DELSLOTS, + CLUSTER_DELSLOTSRANGE, + clusterDelSlotsRange: CLUSTER_DELSLOTSRANGE, + CLUSTER_FAILOVER, + clusterFailover: CLUSTER_FAILOVER, + CLUSTER_FLUSHSLOTS, + clusterFlushSlots: CLUSTER_FLUSHSLOTS, + CLUSTER_FORGET, + clusterForget: CLUSTER_FORGET, + CLUSTER_GETKEYSINSLOT, + clusterGetKeysInSlot: CLUSTER_GETKEYSINSLOT, + CLUSTER_INFO, + clusterInfo: CLUSTER_INFO, + CLUSTER_KEYSLOT, + clusterKeySlot: CLUSTER_KEYSLOT, + CLUSTER_LINKS, + clusterLinks: CLUSTER_LINKS, + CLUSTER_MEET, + clusterMeet: CLUSTER_MEET, + CLUSTER_MYID, + clusterMyId: CLUSTER_MYID, + CLUSTER_MYSHARDID, + clusterMyShardId: CLUSTER_MYSHARDID, + CLUSTER_NODES, + clusterNodes: CLUSTER_NODES, + CLUSTER_REPLICAS, + clusterReplicas: CLUSTER_REPLICAS, + CLUSTER_REPLICATE, + clusterReplicate: CLUSTER_REPLICATE, + CLUSTER_RESET, + clusterReset: CLUSTER_RESET, + CLUSTER_SAVECONFIG, + clusterSaveConfig: CLUSTER_SAVECONFIG, + CLUSTER_SET_CONFIG_EPOCH, + clusterSetConfigEpoch: CLUSTER_SET_CONFIG_EPOCH, + CLUSTER_SETSLOT, + clusterSetSlot: CLUSTER_SETSLOT, + CLUSTER_SLOTS, + clusterSlots: CLUSTER_SLOTS, + COMMAND_COUNT, + commandCount: COMMAND_COUNT, + COMMAND_GETKEYS, + 
commandGetKeys: COMMAND_GETKEYS, + COMMAND_GETKEYSANDFLAGS, + commandGetKeysAndFlags: COMMAND_GETKEYSANDFLAGS, + COMMAND_INFO, + commandInfo: COMMAND_INFO, + COMMAND_LIST, + commandList: COMMAND_LIST, + COMMAND, + command: COMMAND, + CONFIG_GET, + configGet: CONFIG_GET, + CONFIG_RESETASTAT, + configResetStat: CONFIG_RESETASTAT, + CONFIG_REWRITE, + configRewrite: CONFIG_REWRITE, + CONFIG_SET, + configSet: CONFIG_SET, + DBSIZE, + dbSize: DBSIZE, + DISCARD, + discard: DISCARD, + ECHO, + echo: ECHO, + FAILOVER, + failover: FAILOVER, + FLUSHALL, + flushAll: FLUSHALL, + FLUSHDB, + flushDb: FLUSHDB, + FUNCTION_DELETE, + functionDelete: FUNCTION_DELETE, + FUNCTION_DUMP, + functionDump: FUNCTION_DUMP, + FUNCTION_FLUSH, + functionFlush: FUNCTION_FLUSH, + FUNCTION_KILL, + functionKill: FUNCTION_KILL, + FUNCTION_LIST_WITHCODE, + functionListWithCode: FUNCTION_LIST_WITHCODE, + FUNCTION_LIST, + functionList: FUNCTION_LIST, + FUNCTION_LOAD, + functionLoad: FUNCTION_LOAD, + FUNCTION_RESTORE, + functionRestore: FUNCTION_RESTORE, + FUNCTION_STATS, + functionStats: FUNCTION_STATS, + HELLO, + hello: HELLO, + INFO, + info: INFO, + KEYS, + keys: KEYS, + LASTSAVE, + lastSave: LASTSAVE, + LATENCY_DOCTOR, + latencyDoctor: LATENCY_DOCTOR, + LATENCY_GRAPH, + latencyGraph: LATENCY_GRAPH, + LATENCY_HISTORY, + latencyHistory: LATENCY_HISTORY, + LATENCY_LATEST, + latencyLatest: LATENCY_LATEST, + LOLWUT, + lolwut: LOLWUT, + MEMORY_DOCTOR, + memoryDoctor: MEMORY_DOCTOR, + 'MEMORY_MALLOC-STATS': MEMORY_MALLOC_STATS, + memoryMallocStats: MEMORY_MALLOC_STATS, + MEMORY_PURGE, + memoryPurge: MEMORY_PURGE, + MEMORY_STATS, + memoryStats: MEMORY_STATS, + MEMORY_USAGE, + memoryUsage: MEMORY_USAGE, + MODULE_LIST, + moduleList: MODULE_LIST, + MODULE_LOAD, + moduleLoad: MODULE_LOAD, + MODULE_UNLOAD, + moduleUnload: MODULE_UNLOAD, + MOVE, + move: MOVE, + PING, + ping: PING, + PUBSUB_CHANNELS, + pubSubChannels: PUBSUB_CHANNELS, + PUBSUB_NUMPAT, + pubSubNumPat: PUBSUB_NUMPAT, + PUBSUB_NUMSUB, + pubSubNumSub: 
PUBSUB_NUMSUB, + PUBSUB_SHARDCHANNELS, + pubSubShardChannels: PUBSUB_SHARDCHANNELS, + PUBSUB_SHARDNUMSUB, + pubSubShardNumSub: PUBSUB_SHARDNUMSUB, + RANDOMKEY, + randomKey: RANDOMKEY, + READONLY, + readonly: READONLY, + READWRITE, + readwrite: READWRITE, + REPLICAOF, + replicaOf: REPLICAOF, + 'RESTORE-ASKING': RESTORE_ASKING, + restoreAsking: RESTORE_ASKING, + ROLE, + role: ROLE, + SAVE, + save: SAVE, + SCAN, + scan: SCAN, + SCRIPT_DEBUG, + scriptDebug: SCRIPT_DEBUG, + SCRIPT_EXISTS, + scriptExists: SCRIPT_EXISTS, + SCRIPT_FLUSH, + scriptFlush: SCRIPT_FLUSH, + SCRIPT_KILL, + scriptKill: SCRIPT_KILL, + SCRIPT_LOAD, + scriptLoad: SCRIPT_LOAD, + SHUTDOWN, + shutdown: SHUTDOWN, + SWAPDB, + swapDb: SWAPDB, + TIME, + time: TIME, + UNWATCH, + unwatch: UNWATCH, + WAIT, + wait: WAIT +}; diff --git a/node_modules/@redis/client/dist/lib/client/index.d.ts b/node_modules/@redis/client/dist/lib/client/index.d.ts new file mode 100644 index 0000000..e4f2202 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/index.d.ts @@ -0,0 +1,148 @@ +/// +import COMMANDS from './commands'; +import { RedisCommand, RedisCommandArgument, RedisCommandArguments, RedisCommandRawReply, RedisCommandReply, RedisFunctions, RedisModules, RedisExtensions, RedisScript, RedisScripts, RedisCommandSignature, ConvertArgumentType, RedisFunction, ExcludeMappedString } from '../commands'; +import { RedisSocketOptions } from './socket'; +import { QueueCommandOptions } from './commands-queue'; +import { RedisClientMultiCommandType } from './multi-command'; +import { RedisMultiQueuedCommand } from '../multi-command'; +import { EventEmitter } from 'events'; +import { CommandOptions } from '../command-options'; +import { ScanOptions, ZMember } from '../commands/generic-transformers'; +import { ScanCommandOptions } from '../commands/SCAN'; +import { HScanTuple } from '../commands/HSCAN'; +import { Options as PoolOptions } from 'generic-pool'; +import { PubSubType, PubSubListener, PubSubTypeListeners, 
ChannelListeners } from './pub-sub'; +export interface RedisClientOptions extends RedisExtensions { + /** + * `redis[s]://[[username][:password]@][host][:port][/db-number]` + * See [`redis`](https://www.iana.org/assignments/uri-schemes/prov/redis) and [`rediss`](https://www.iana.org/assignments/uri-schemes/prov/rediss) IANA registration for more details + */ + url?: string; + /** + * Socket connection properties + */ + socket?: RedisSocketOptions; + /** + * ACL username ([see ACL guide](https://redis.io/topics/acl)) + */ + username?: string; + /** + * ACL password or the old "--requirepass" password + */ + password?: string; + /** + * Client name ([see `CLIENT SETNAME`](https://redis.io/commands/client-setname)) + */ + name?: string; + /** + * Redis database number (see [`SELECT`](https://redis.io/commands/select) command) + */ + database?: number; + /** + * Maximum length of the client's internal command queue + */ + commandsQueueMaxLength?: number; + /** + * When `true`, commands are rejected when the client is reconnecting. + * When `false`, commands are queued for execution after reconnection. + */ + disableOfflineQueue?: boolean; + /** + * Connect in [`READONLY`](https://redis.io/commands/readonly) mode + */ + readonly?: boolean; + legacyMode?: boolean; + isolationPoolOptions?: PoolOptions; + /** + * Send `PING` command at interval (in ms). + * Useful with Redis deployments that do not use TCP Keep-Alive. + */ + pingInterval?: number; + /** + * If set to true, disables sending client identifier (user-agent like message) to the redis server + */ + disableClientInfo?: boolean; + /** + * Tag to append to library name that is sent to the Redis server + */ + clientInfoTag?: string; +} +type WithCommands = { + [P in keyof typeof COMMANDS]: RedisCommandSignature<(typeof COMMANDS)[P]>; +}; +export type WithModules = { + [P in keyof M as ExcludeMappedString

]: { + [C in keyof M[P] as ExcludeMappedString]: RedisCommandSignature; + }; +}; +export type WithFunctions = { + [P in keyof F as ExcludeMappedString

]: { + [FF in keyof F[P] as ExcludeMappedString]: RedisCommandSignature; + }; +}; +export type WithScripts = { + [P in keyof S as ExcludeMappedString

]: RedisCommandSignature; +}; +export type RedisClientType, F extends RedisFunctions = Record, S extends RedisScripts = Record> = RedisClient & WithCommands & WithModules & WithFunctions & WithScripts; +export type InstantiableRedisClient = new (options?: RedisClientOptions) => RedisClientType; +export interface ClientCommandOptions extends QueueCommandOptions { + isolated?: boolean; +} +export default class RedisClient extends EventEmitter { + #private; + static commandOptions(options: T): CommandOptions; + commandOptions: typeof RedisClient.commandOptions; + static extend(extensions?: RedisExtensions): InstantiableRedisClient; + static create(options?: RedisClientOptions): RedisClientType; + static parseURL(url: string): RedisClientOptions; + get options(): RedisClientOptions | undefined; + get isOpen(): boolean; + get isReady(): boolean; + get isPubSubActive(): boolean; + get v4(): Record; + constructor(options?: RedisClientOptions); + duplicate(overrides?: Partial>): RedisClientType; + connect(): Promise>; + commandsExecutor(command: C, args: Array): Promise>; + sendCommand(args: RedisCommandArguments, options?: ClientCommandOptions): Promise; + functionsExecuter(fn: F, args: Array, name: string): Promise>; + executeFunction(name: string, fn: RedisFunction, args: RedisCommandArguments, options?: ClientCommandOptions): Promise; + scriptsExecuter(script: S, args: Array): Promise>; + executeScript(script: RedisScript, args: RedisCommandArguments, options?: ClientCommandOptions): Promise; + SELECT(db: number): Promise; + SELECT(options: CommandOptions, db: number): Promise; + select: { + (db: number): Promise; + (options: CommandOptions, db: number): Promise; + }; + SUBSCRIBE(channels: string | Array, listener: PubSubListener, bufferMode?: T): Promise; + subscribe: (channels: string | Array, listener: PubSubListener, bufferMode?: T | undefined) => Promise; + UNSUBSCRIBE(channels?: string | Array, listener?: PubSubListener, bufferMode?: T): Promise; + unsubscribe: 
(channels?: string | Array, listener?: PubSubListener | undefined, bufferMode?: T | undefined) => Promise; + PSUBSCRIBE(patterns: string | Array, listener: PubSubListener, bufferMode?: T): Promise; + pSubscribe: (patterns: string | Array, listener: PubSubListener, bufferMode?: T | undefined) => Promise; + PUNSUBSCRIBE(patterns?: string | Array, listener?: PubSubListener, bufferMode?: T): Promise; + pUnsubscribe: (patterns?: string | Array, listener?: PubSubListener | undefined, bufferMode?: T | undefined) => Promise; + SSUBSCRIBE(channels: string | Array, listener: PubSubListener, bufferMode?: T): Promise; + sSubscribe: (channels: string | Array, listener: PubSubListener, bufferMode?: T | undefined) => Promise; + SUNSUBSCRIBE(channels?: string | Array, listener?: PubSubListener, bufferMode?: T): Promise; + sUnsubscribe: (channels?: string | Array, listener?: PubSubListener | undefined, bufferMode?: T | undefined) => Promise; + getPubSubListeners(type: PubSubType): PubSubTypeListeners; + extendPubSubChannelListeners(type: PubSubType, channel: string, listeners: ChannelListeners): Promise; + extendPubSubListeners(type: PubSubType, listeners: PubSubTypeListeners): Promise; + QUIT(): Promise; + quit: () => Promise; + executeIsolated(fn: (client: RedisClientType) => T | Promise): Promise; + MULTI(): RedisClientMultiCommandType; + multi: () => RedisClientMultiCommandType; + multiExecutor(commands: Array, selectedDB?: number, chainId?: symbol): Promise>; + scanIterator(options?: ScanCommandOptions): AsyncIterable; + hScanIterator(key: string, options?: ScanOptions): AsyncIterable>; + hScanNoValuesIterator(key: string, options?: ScanOptions): AsyncIterable>; + sScanIterator(key: string, options?: ScanOptions): AsyncIterable; + zScanIterator(key: string, options?: ScanOptions): AsyncIterable>; + disconnect(): Promise; + ref(): void; + unref(): void; +} +export {}; diff --git a/node_modules/@redis/client/dist/lib/client/index.js 
b/node_modules/@redis/client/dist/lib/client/index.js new file mode 100644 index 0000000..5b7d58e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/index.js @@ -0,0 +1,563 @@ +"use strict"; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var _RedisClient_instances, _a, _RedisClient_options, _RedisClient_socket, _RedisClient_queue, _RedisClient_isolationPool, _RedisClient_v4, _RedisClient_selectedDB, _RedisClient_initiateOptions, _RedisClient_initiateQueue, _RedisClient_initiateSocket, _RedisClient_initiateIsolationPool, _RedisClient_legacyMode, _RedisClient_legacySendCommand, _RedisClient_defineLegacyCommand, _RedisClient_pingTimer, _RedisClient_setPingTimer, _RedisClient_sendCommand, _RedisClient_pubSubCommand, _RedisClient_tick, _RedisClient_addMultiCommands, _RedisClient_destroyIsolationPool; +Object.defineProperty(exports, "__esModule", { value: true }); +const commands_1 = require("./commands"); +const socket_1 = require("./socket"); +const commands_queue_1 = require("./commands-queue"); +const multi_command_1 = require("./multi-command"); +const events_1 = require("events"); +const command_options_1 = require("../command-options"); +const commander_1 = require("../commander"); +const generic_pool_1 = require("generic-pool"); +const errors_1 = require("../errors"); +const url_1 = require("url"); +const pub_sub_1 = require("./pub-sub"); +const package_json_1 = require("../../package.json"); +class RedisClient extends events_1.EventEmitter { + static commandOptions(options) { + return (0, command_options_1.commandOptions)(options); + } + static extend(extensions) { + const Client = (0, commander_1.attachExtensions)({ + BaseClass: _a, + modulesExecutor: _a.prototype.commandsExecutor, + modules: extensions?.modules, + functionsExecutor: _a.prototype.functionsExecuter, + functions: extensions?.functions, + scriptsExecutor: _a.prototype.scriptsExecuter, + scripts: extensions?.scripts + }); + if (Client !== _a) { + Client.prototype.Multi = multi_command_1.default.extend(extensions); + } + return Client; + } + static create(options) { + return new (_a.extend(options))(options); + } + static parseURL(url) { + // 
https://www.iana.org/assignments/uri-schemes/prov/redis + const { hostname, port, protocol, username, password, pathname } = new url_1.URL(url), parsed = { + socket: { + host: hostname + } + }; + if (protocol === 'rediss:') { + parsed.socket.tls = true; + } + else if (protocol !== 'redis:') { + throw new TypeError('Invalid protocol'); + } + if (port) { + parsed.socket.port = Number(port); + } + if (username) { + parsed.username = decodeURIComponent(username); + } + if (password) { + parsed.password = decodeURIComponent(password); + } + if (pathname.length > 1) { + const database = Number(pathname.substring(1)); + if (isNaN(database)) { + throw new TypeError('Invalid pathname'); + } + parsed.database = database; + } + return parsed; + } + get options() { + return __classPrivateFieldGet(this, _RedisClient_options, "f"); + } + get isOpen() { + return __classPrivateFieldGet(this, _RedisClient_socket, "f").isOpen; + } + get isReady() { + return __classPrivateFieldGet(this, _RedisClient_socket, "f").isReady; + } + get isPubSubActive() { + return __classPrivateFieldGet(this, _RedisClient_queue, "f").isPubSubActive; + } + get v4() { + if (!__classPrivateFieldGet(this, _RedisClient_options, "f")?.legacyMode) { + throw new Error('the client is not in "legacy mode"'); + } + return __classPrivateFieldGet(this, _RedisClient_v4, "f"); + } + constructor(options) { + super(); + _RedisClient_instances.add(this); + Object.defineProperty(this, "commandOptions", { + enumerable: true, + configurable: true, + writable: true, + value: _a.commandOptions + }); + _RedisClient_options.set(this, void 0); + _RedisClient_socket.set(this, void 0); + _RedisClient_queue.set(this, void 0); + _RedisClient_isolationPool.set(this, void 0); + _RedisClient_v4.set(this, {}); + _RedisClient_selectedDB.set(this, 0); + _RedisClient_pingTimer.set(this, void 0); + Object.defineProperty(this, "select", { + enumerable: true, + configurable: true, + writable: true, + value: this.SELECT + }); + 
Object.defineProperty(this, "subscribe", { + enumerable: true, + configurable: true, + writable: true, + value: this.SUBSCRIBE + }); + Object.defineProperty(this, "unsubscribe", { + enumerable: true, + configurable: true, + writable: true, + value: this.UNSUBSCRIBE + }); + Object.defineProperty(this, "pSubscribe", { + enumerable: true, + configurable: true, + writable: true, + value: this.PSUBSCRIBE + }); + Object.defineProperty(this, "pUnsubscribe", { + enumerable: true, + configurable: true, + writable: true, + value: this.PUNSUBSCRIBE + }); + Object.defineProperty(this, "sSubscribe", { + enumerable: true, + configurable: true, + writable: true, + value: this.SSUBSCRIBE + }); + Object.defineProperty(this, "sUnsubscribe", { + enumerable: true, + configurable: true, + writable: true, + value: this.SUNSUBSCRIBE + }); + Object.defineProperty(this, "quit", { + enumerable: true, + configurable: true, + writable: true, + value: this.QUIT + }); + Object.defineProperty(this, "multi", { + enumerable: true, + configurable: true, + writable: true, + value: this.MULTI + }); + __classPrivateFieldSet(this, _RedisClient_options, __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_initiateOptions).call(this, options), "f"); + __classPrivateFieldSet(this, _RedisClient_queue, __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_initiateQueue).call(this), "f"); + __classPrivateFieldSet(this, _RedisClient_socket, __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_initiateSocket).call(this), "f"); + // should be initiated in connect, not here + // TODO: consider breaking in v5 + __classPrivateFieldSet(this, _RedisClient_isolationPool, __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_initiateIsolationPool).call(this), "f"); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_legacyMode).call(this); + } + duplicate(overrides) { + return new (Object.getPrototypeOf(this).constructor)({ + 
...__classPrivateFieldGet(this, _RedisClient_options, "f"), + ...overrides + }); + } + async connect() { + // see comment in constructor + __classPrivateFieldSet(this, _RedisClient_isolationPool, __classPrivateFieldGet(this, _RedisClient_isolationPool, "f") ?? __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_initiateIsolationPool).call(this), "f"); + await __classPrivateFieldGet(this, _RedisClient_socket, "f").connect(); + return this; + } + async commandsExecutor(command, args) { + const { args: redisArgs, options } = (0, commander_1.transformCommandArguments)(command, args); + return (0, commander_1.transformCommandReply)(command, await __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_sendCommand).call(this, redisArgs, options), redisArgs.preserve); + } + sendCommand(args, options) { + return __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_sendCommand).call(this, args, options); + } + async functionsExecuter(fn, args, name) { + const { args: redisArgs, options } = (0, commander_1.transformCommandArguments)(fn, args); + return (0, commander_1.transformCommandReply)(fn, await this.executeFunction(name, fn, redisArgs, options), redisArgs.preserve); + } + executeFunction(name, fn, args, options) { + return __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_sendCommand).call(this, (0, commander_1.fCallArguments)(name, fn, args), options); + } + async scriptsExecuter(script, args) { + const { args: redisArgs, options } = (0, commander_1.transformCommandArguments)(script, args); + return (0, commander_1.transformCommandReply)(script, await this.executeScript(script, redisArgs, options), redisArgs.preserve); + } + async executeScript(script, args, options) { + const redisArgs = ['EVALSHA', script.SHA1]; + if (script.NUMBER_OF_KEYS !== undefined) { + redisArgs.push(script.NUMBER_OF_KEYS.toString()); + } + redisArgs.push(...args); + try { + return await __classPrivateFieldGet(this, 
_RedisClient_instances, "m", _RedisClient_sendCommand).call(this, redisArgs, options); + } + catch (err) { + if (!err?.message?.startsWith?.('NOSCRIPT')) { + throw err; + } + redisArgs[0] = 'EVAL'; + redisArgs[1] = script.SCRIPT; + return __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_sendCommand).call(this, redisArgs, options); + } + } + async SELECT(options, db) { + if (!(0, command_options_1.isCommandOptions)(options)) { + db = options; + options = null; + } + await __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_sendCommand).call(this, ['SELECT', db.toString()], options); + __classPrivateFieldSet(this, _RedisClient_selectedDB, db, "f"); + } + SUBSCRIBE(channels, listener, bufferMode) { + return __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_pubSubCommand).call(this, __classPrivateFieldGet(this, _RedisClient_queue, "f").subscribe(pub_sub_1.PubSubType.CHANNELS, channels, listener, bufferMode)); + } + UNSUBSCRIBE(channels, listener, bufferMode) { + return __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_pubSubCommand).call(this, __classPrivateFieldGet(this, _RedisClient_queue, "f").unsubscribe(pub_sub_1.PubSubType.CHANNELS, channels, listener, bufferMode)); + } + PSUBSCRIBE(patterns, listener, bufferMode) { + return __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_pubSubCommand).call(this, __classPrivateFieldGet(this, _RedisClient_queue, "f").subscribe(pub_sub_1.PubSubType.PATTERNS, patterns, listener, bufferMode)); + } + PUNSUBSCRIBE(patterns, listener, bufferMode) { + return __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_pubSubCommand).call(this, __classPrivateFieldGet(this, _RedisClient_queue, "f").unsubscribe(pub_sub_1.PubSubType.PATTERNS, patterns, listener, bufferMode)); + } + SSUBSCRIBE(channels, listener, bufferMode) { + return __classPrivateFieldGet(this, _RedisClient_instances, "m", 
_RedisClient_pubSubCommand).call(this, __classPrivateFieldGet(this, _RedisClient_queue, "f").subscribe(pub_sub_1.PubSubType.SHARDED, channels, listener, bufferMode)); + } + SUNSUBSCRIBE(channels, listener, bufferMode) { + return __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_pubSubCommand).call(this, __classPrivateFieldGet(this, _RedisClient_queue, "f").unsubscribe(pub_sub_1.PubSubType.SHARDED, channels, listener, bufferMode)); + } + getPubSubListeners(type) { + return __classPrivateFieldGet(this, _RedisClient_queue, "f").getPubSubListeners(type); + } + extendPubSubChannelListeners(type, channel, listeners) { + return __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_pubSubCommand).call(this, __classPrivateFieldGet(this, _RedisClient_queue, "f").extendPubSubChannelListeners(type, channel, listeners)); + } + extendPubSubListeners(type, listeners) { + return __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_pubSubCommand).call(this, __classPrivateFieldGet(this, _RedisClient_queue, "f").extendPubSubListeners(type, listeners)); + } + QUIT() { + return __classPrivateFieldGet(this, _RedisClient_socket, "f").quit(async () => { + if (__classPrivateFieldGet(this, _RedisClient_pingTimer, "f")) + clearTimeout(__classPrivateFieldGet(this, _RedisClient_pingTimer, "f")); + const quitPromise = __classPrivateFieldGet(this, _RedisClient_queue, "f").addCommand(['QUIT']); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_tick).call(this); + const [reply] = await Promise.all([ + quitPromise, + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_destroyIsolationPool).call(this) + ]); + return reply; + }); + } + executeIsolated(fn) { + if (!__classPrivateFieldGet(this, _RedisClient_isolationPool, "f")) + return Promise.reject(new errors_1.ClientClosedError()); + return __classPrivateFieldGet(this, _RedisClient_isolationPool, "f").use(fn); + } + MULTI() { + return new 
this.Multi(this.multiExecutor.bind(this), __classPrivateFieldGet(this, _RedisClient_options, "f")?.legacyMode); + } + async multiExecutor(commands, selectedDB, chainId) { + if (!__classPrivateFieldGet(this, _RedisClient_socket, "f").isOpen) { + return Promise.reject(new errors_1.ClientClosedError()); + } + const promise = chainId ? + // if `chainId` has a value, it's a `MULTI` (and not "pipeline") - need to add the `MULTI` and `EXEC` commands + Promise.all([ + __classPrivateFieldGet(this, _RedisClient_queue, "f").addCommand(['MULTI'], { chainId }), + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_addMultiCommands).call(this, commands, chainId), + __classPrivateFieldGet(this, _RedisClient_queue, "f").addCommand(['EXEC'], { chainId }) + ]) : + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_addMultiCommands).call(this, commands); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_tick).call(this); + const results = await promise; + if (selectedDB !== undefined) { + __classPrivateFieldSet(this, _RedisClient_selectedDB, selectedDB, "f"); + } + return results; + } + async *scanIterator(options) { + let cursor = 0; + do { + const reply = await this.scan(cursor, options); + cursor = reply.cursor; + for (const key of reply.keys) { + yield key; + } + } while (cursor !== 0); + } + async *hScanIterator(key, options) { + let cursor = 0; + do { + const reply = await this.hScan(key, cursor, options); + cursor = reply.cursor; + for (const tuple of reply.tuples) { + yield tuple; + } + } while (cursor !== 0); + } + async *hScanNoValuesIterator(key, options) { + let cursor = 0; + do { + const reply = await this.hScanNoValues(key, cursor, options); + cursor = reply.cursor; + for (const k of reply.keys) { + yield k; + } + } while (cursor !== 0); + } + async *sScanIterator(key, options) { + let cursor = 0; + do { + const reply = await this.sScan(key, cursor, options); + cursor = reply.cursor; + for (const member of 
reply.members) { + yield member; + } + } while (cursor !== 0); + } + async *zScanIterator(key, options) { + let cursor = 0; + do { + const reply = await this.zScan(key, cursor, options); + cursor = reply.cursor; + for (const member of reply.members) { + yield member; + } + } while (cursor !== 0); + } + async disconnect() { + if (__classPrivateFieldGet(this, _RedisClient_pingTimer, "f")) + clearTimeout(__classPrivateFieldGet(this, _RedisClient_pingTimer, "f")); + __classPrivateFieldGet(this, _RedisClient_queue, "f").flushAll(new errors_1.DisconnectsClientError()); + __classPrivateFieldGet(this, _RedisClient_socket, "f").disconnect(); + await __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_destroyIsolationPool).call(this); + } + ref() { + __classPrivateFieldGet(this, _RedisClient_socket, "f").ref(); + } + unref() { + __classPrivateFieldGet(this, _RedisClient_socket, "f").unref(); + } +} +_a = RedisClient, _RedisClient_options = new WeakMap(), _RedisClient_socket = new WeakMap(), _RedisClient_queue = new WeakMap(), _RedisClient_isolationPool = new WeakMap(), _RedisClient_v4 = new WeakMap(), _RedisClient_selectedDB = new WeakMap(), _RedisClient_pingTimer = new WeakMap(), _RedisClient_instances = new WeakSet(), _RedisClient_initiateOptions = function _RedisClient_initiateOptions(options) { + if (options?.url) { + const parsed = _a.parseURL(options.url); + if (options.socket) { + parsed.socket = Object.assign(options.socket, parsed.socket); + } + Object.assign(options, parsed); + } + if (options?.database) { + __classPrivateFieldSet(this, _RedisClient_selectedDB, options.database, "f"); + } + return options; +}, _RedisClient_initiateQueue = function _RedisClient_initiateQueue() { + return new commands_queue_1.default(__classPrivateFieldGet(this, _RedisClient_options, "f")?.commandsQueueMaxLength, (channel, listeners) => this.emit('sharded-channel-moved', channel, listeners)); +}, _RedisClient_initiateSocket = function _RedisClient_initiateSocket() 
{ + const socketInitiator = async () => { + const promises = []; + if (__classPrivateFieldGet(this, _RedisClient_selectedDB, "f") !== 0) { + promises.push(__classPrivateFieldGet(this, _RedisClient_queue, "f").addCommand(['SELECT', __classPrivateFieldGet(this, _RedisClient_selectedDB, "f").toString()], { asap: true })); + } + if (__classPrivateFieldGet(this, _RedisClient_options, "f")?.readonly) { + promises.push(__classPrivateFieldGet(this, _RedisClient_queue, "f").addCommand(commands_1.default.READONLY.transformArguments(), { asap: true })); + } + if (!__classPrivateFieldGet(this, _RedisClient_options, "f")?.disableClientInfo) { + promises.push(__classPrivateFieldGet(this, _RedisClient_queue, "f").addCommand(['CLIENT', 'SETINFO', 'LIB-VER', package_json_1.version], { asap: true }).catch(err => { + if (!(err instanceof errors_1.ErrorReply)) { + throw err; + } + })); + promises.push(__classPrivateFieldGet(this, _RedisClient_queue, "f").addCommand([ + 'CLIENT', 'SETINFO', 'LIB-NAME', + __classPrivateFieldGet(this, _RedisClient_options, "f")?.clientInfoTag ? 
`node-redis(${__classPrivateFieldGet(this, _RedisClient_options, "f").clientInfoTag})` : 'node-redis' + ], { asap: true }).catch(err => { + if (!(err instanceof errors_1.ErrorReply)) { + throw err; + } + })); + } + if (__classPrivateFieldGet(this, _RedisClient_options, "f")?.name) { + promises.push(__classPrivateFieldGet(this, _RedisClient_queue, "f").addCommand(commands_1.default.CLIENT_SETNAME.transformArguments(__classPrivateFieldGet(this, _RedisClient_options, "f").name), { asap: true })); + } + if (__classPrivateFieldGet(this, _RedisClient_options, "f")?.username || __classPrivateFieldGet(this, _RedisClient_options, "f")?.password) { + promises.push(__classPrivateFieldGet(this, _RedisClient_queue, "f").addCommand(commands_1.default.AUTH.transformArguments({ + username: __classPrivateFieldGet(this, _RedisClient_options, "f").username, + password: __classPrivateFieldGet(this, _RedisClient_options, "f").password ?? '' + }), { asap: true })); + } + const resubscribePromise = __classPrivateFieldGet(this, _RedisClient_queue, "f").resubscribe(); + if (resubscribePromise) { + promises.push(resubscribePromise); + } + if (promises.length) { + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_tick).call(this, true); + await Promise.all(promises); + } + }; + return new socket_1.default(socketInitiator, __classPrivateFieldGet(this, _RedisClient_options, "f")?.socket) + .on('data', chunk => __classPrivateFieldGet(this, _RedisClient_queue, "f").onReplyChunk(chunk)) + .on('error', err => { + this.emit('error', err); + if (__classPrivateFieldGet(this, _RedisClient_socket, "f").isOpen && !__classPrivateFieldGet(this, _RedisClient_options, "f")?.disableOfflineQueue) { + __classPrivateFieldGet(this, _RedisClient_queue, "f").flushWaitingForReply(err); + } + else { + __classPrivateFieldGet(this, _RedisClient_queue, "f").flushAll(err); + } + }) + .on('connect', () => { + this.emit('connect'); + }) + .on('ready', () => { + this.emit('ready'); + 
__classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_setPingTimer).call(this); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_tick).call(this); + }) + .on('reconnecting', () => this.emit('reconnecting')) + .on('drain', () => __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_tick).call(this)) + .on('end', () => this.emit('end')); +}, _RedisClient_initiateIsolationPool = function _RedisClient_initiateIsolationPool() { + return (0, generic_pool_1.createPool)({ + create: async () => { + const duplicate = this.duplicate({ + isolationPoolOptions: undefined + }).on('error', err => this.emit('error', err)); + await duplicate.connect(); + return duplicate; + }, + destroy: client => client.disconnect() + }, __classPrivateFieldGet(this, _RedisClient_options, "f")?.isolationPoolOptions); +}, _RedisClient_legacyMode = function _RedisClient_legacyMode() { + var _b, _c; + if (!__classPrivateFieldGet(this, _RedisClient_options, "f")?.legacyMode) + return; + __classPrivateFieldGet(this, _RedisClient_v4, "f").sendCommand = __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_sendCommand).bind(this); + this.sendCommand = (...args) => { + const result = __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_legacySendCommand).call(this, ...args); + if (result) { + result.promise + .then(reply => result.callback(null, reply)) + .catch(err => result.callback(err)); + } + }; + for (const [name, command] of Object.entries(commands_1.default)) { + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, name, command); + (_b = this)[_c = name.toLowerCase()] ?? 
(_b[_c] = this[name]); + } + // hard coded commands + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, 'SELECT'); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, 'select'); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, 'SUBSCRIBE'); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, 'subscribe'); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, 'PSUBSCRIBE'); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, 'pSubscribe'); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, 'UNSUBSCRIBE'); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, 'unsubscribe'); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, 'PUNSUBSCRIBE'); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, 'pUnsubscribe'); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, 'QUIT'); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_defineLegacyCommand).call(this, 'quit'); +}, _RedisClient_legacySendCommand = function _RedisClient_legacySendCommand(...args) { + const callback = typeof args[args.length - 1] === 'function' ? 
+ args.pop() : + undefined; + const promise = __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_sendCommand).call(this, (0, commander_1.transformLegacyCommandArguments)(args)); + if (callback) + return { + promise, + callback + }; + promise.catch(err => this.emit('error', err)); +}, _RedisClient_defineLegacyCommand = function _RedisClient_defineLegacyCommand(name, command) { + __classPrivateFieldGet(this, _RedisClient_v4, "f")[name] = this[name].bind(this); + this[name] = command && command.TRANSFORM_LEGACY_REPLY && command.transformReply ? + (...args) => { + const result = __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_legacySendCommand).call(this, name, ...args); + if (result) { + result.promise + .then(reply => result.callback(null, command.transformReply(reply))) + .catch(err => result.callback(err)); + } + } : + (...args) => this.sendCommand(name, ...args); +}, _RedisClient_setPingTimer = function _RedisClient_setPingTimer() { + if (!__classPrivateFieldGet(this, _RedisClient_options, "f")?.pingInterval || !__classPrivateFieldGet(this, _RedisClient_socket, "f").isReady) + return; + clearTimeout(__classPrivateFieldGet(this, _RedisClient_pingTimer, "f")); + __classPrivateFieldSet(this, _RedisClient_pingTimer, setTimeout(() => { + if (!__classPrivateFieldGet(this, _RedisClient_socket, "f").isReady) + return; + // using #sendCommand to support legacy mode + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_sendCommand).call(this, ['PING']) + .then(reply => this.emit('ping-interval', reply)) + .catch(err => this.emit('error', err)) + .finally(() => __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_setPingTimer).call(this)); + }, __classPrivateFieldGet(this, _RedisClient_options, "f").pingInterval), "f"); +}, _RedisClient_sendCommand = function _RedisClient_sendCommand(args, options) { + if (!__classPrivateFieldGet(this, _RedisClient_socket, "f").isOpen) { + return 
Promise.reject(new errors_1.ClientClosedError()); + } + else if (options?.isolated) { + return this.executeIsolated(isolatedClient => isolatedClient.sendCommand(args, { + ...options, + isolated: false + })); + } + else if (!__classPrivateFieldGet(this, _RedisClient_socket, "f").isReady && __classPrivateFieldGet(this, _RedisClient_options, "f")?.disableOfflineQueue) { + return Promise.reject(new errors_1.ClientOfflineError()); + } + const promise = __classPrivateFieldGet(this, _RedisClient_queue, "f").addCommand(args, options); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_tick).call(this); + return promise; +}, _RedisClient_pubSubCommand = function _RedisClient_pubSubCommand(promise) { + if (promise === undefined) + return Promise.resolve(); + __classPrivateFieldGet(this, _RedisClient_instances, "m", _RedisClient_tick).call(this); + return promise; +}, _RedisClient_tick = function _RedisClient_tick(force = false) { + if (__classPrivateFieldGet(this, _RedisClient_socket, "f").writableNeedDrain || (!force && !__classPrivateFieldGet(this, _RedisClient_socket, "f").isReady)) { + return; + } + __classPrivateFieldGet(this, _RedisClient_socket, "f").cork(); + while (!__classPrivateFieldGet(this, _RedisClient_socket, "f").writableNeedDrain) { + const args = __classPrivateFieldGet(this, _RedisClient_queue, "f").getCommandToSend(); + if (args === undefined) + break; + __classPrivateFieldGet(this, _RedisClient_socket, "f").writeCommand(args); + } +}, _RedisClient_addMultiCommands = function _RedisClient_addMultiCommands(commands, chainId) { + return Promise.all(commands.map(({ args }) => __classPrivateFieldGet(this, _RedisClient_queue, "f").addCommand(args, { chainId }))); +}, _RedisClient_destroyIsolationPool = async function _RedisClient_destroyIsolationPool() { + await __classPrivateFieldGet(this, _RedisClient_isolationPool, "f").drain(); + await __classPrivateFieldGet(this, _RedisClient_isolationPool, "f").clear(); + __classPrivateFieldSet(this, 
_RedisClient_isolationPool, undefined, "f"); +}; +exports.default = RedisClient; +(0, commander_1.attachCommands)({ + BaseClass: RedisClient, + commands: commands_1.default, + executor: RedisClient.prototype.commandsExecutor +}); +RedisClient.prototype.Multi = multi_command_1.default; diff --git a/node_modules/@redis/client/dist/lib/client/multi-command.d.ts b/node_modules/@redis/client/dist/lib/client/multi-command.d.ts new file mode 100644 index 0000000..0893589 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/multi-command.d.ts @@ -0,0 +1,39 @@ +import COMMANDS from './commands'; +import { RedisCommand, RedisCommandArguments, RedisCommandRawReply, RedisFunctions, RedisModules, RedisExtensions, RedisScript, RedisScripts, ExcludeMappedString, RedisFunction } from '../commands'; +import { RedisMultiQueuedCommand } from '../multi-command'; +type CommandSignature = (...args: Parameters) => RedisClientMultiCommandType; +type WithCommands = { + [P in keyof typeof COMMANDS]: CommandSignature<(typeof COMMANDS)[P], M, F, S>; +}; +type WithModules = { + [P in keyof M as ExcludeMappedString

]: { + [C in keyof M[P] as ExcludeMappedString]: CommandSignature; + }; +}; +type WithFunctions = { + [P in keyof F as ExcludeMappedString

]: { + [FF in keyof F[P] as ExcludeMappedString]: CommandSignature; + }; +}; +type WithScripts = { + [P in keyof S as ExcludeMappedString

]: CommandSignature; +}; +export type RedisClientMultiCommandType = RedisClientMultiCommand & WithCommands & WithModules & WithFunctions & WithScripts; +type InstantiableRedisMultiCommand = new (...args: ConstructorParameters) => RedisClientMultiCommandType; +export type RedisClientMultiExecutor = (queue: Array, selectedDB?: number, chainId?: symbol) => Promise>; +export default class RedisClientMultiCommand { + #private; + static extend(extensions?: RedisExtensions): InstantiableRedisMultiCommand; + readonly v4: Record; + constructor(executor: RedisClientMultiExecutor, legacyMode?: boolean); + commandsExecutor(command: RedisCommand, args: Array): this; + SELECT(db: number, transformReply?: RedisCommand['transformReply']): this; + select: (db: number, transformReply?: RedisCommand['transformReply']) => this; + addCommand(args: RedisCommandArguments, transformReply?: RedisCommand['transformReply']): this; + functionsExecutor(fn: RedisFunction, args: Array, name: string): this; + scriptsExecutor(script: RedisScript, args: Array): this; + exec(execAsPipeline?: boolean): Promise>; + EXEC: (execAsPipeline?: boolean) => Promise>; + execAsPipeline(): Promise>; +} +export {}; diff --git a/node_modules/@redis/client/dist/lib/client/multi-command.js b/node_modules/@redis/client/dist/lib/client/multi-command.js new file mode 100644 index 0000000..4d43c8f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/multi-command.js @@ -0,0 +1,130 @@ +"use strict"; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? 
f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _RedisClientMultiCommand_instances, _RedisClientMultiCommand_multi, _RedisClientMultiCommand_executor, _RedisClientMultiCommand_selectedDB, _RedisClientMultiCommand_legacyMode, _RedisClientMultiCommand_defineLegacyCommand; +Object.defineProperty(exports, "__esModule", { value: true }); +const commands_1 = require("./commands"); +const multi_command_1 = require("../multi-command"); +const commander_1 = require("../commander"); +class RedisClientMultiCommand { + static extend(extensions) { + return (0, commander_1.attachExtensions)({ + BaseClass: RedisClientMultiCommand, + modulesExecutor: RedisClientMultiCommand.prototype.commandsExecutor, + modules: extensions?.modules, + functionsExecutor: RedisClientMultiCommand.prototype.functionsExecutor, + functions: extensions?.functions, + scriptsExecutor: RedisClientMultiCommand.prototype.scriptsExecutor, + scripts: extensions?.scripts + }); + } + constructor(executor, legacyMode = false) { + _RedisClientMultiCommand_instances.add(this); + _RedisClientMultiCommand_multi.set(this, new multi_command_1.default()); + _RedisClientMultiCommand_executor.set(this, void 0); + Object.defineProperty(this, "v4", { + enumerable: true, + configurable: true, + writable: true, + value: {} + }); + _RedisClientMultiCommand_selectedDB.set(this, void 0); + Object.defineProperty(this, "select", { + enumerable: true, + configurable: true, + writable: true, + value: 
this.SELECT + }); + Object.defineProperty(this, "EXEC", { + enumerable: true, + configurable: true, + writable: true, + value: this.exec + }); + __classPrivateFieldSet(this, _RedisClientMultiCommand_executor, executor, "f"); + if (legacyMode) { + __classPrivateFieldGet(this, _RedisClientMultiCommand_instances, "m", _RedisClientMultiCommand_legacyMode).call(this); + } + } + commandsExecutor(command, args) { + return this.addCommand(command.transformArguments(...args), command.transformReply); + } + SELECT(db, transformReply) { + __classPrivateFieldSet(this, _RedisClientMultiCommand_selectedDB, db, "f"); + return this.addCommand(['SELECT', db.toString()], transformReply); + } + addCommand(args, transformReply) { + __classPrivateFieldGet(this, _RedisClientMultiCommand_multi, "f").addCommand(args, transformReply); + return this; + } + functionsExecutor(fn, args, name) { + __classPrivateFieldGet(this, _RedisClientMultiCommand_multi, "f").addFunction(name, fn, args); + return this; + } + scriptsExecutor(script, args) { + __classPrivateFieldGet(this, _RedisClientMultiCommand_multi, "f").addScript(script, args); + return this; + } + async exec(execAsPipeline = false) { + if (execAsPipeline) { + return this.execAsPipeline(); + } + return __classPrivateFieldGet(this, _RedisClientMultiCommand_multi, "f").handleExecReplies(await __classPrivateFieldGet(this, _RedisClientMultiCommand_executor, "f").call(this, __classPrivateFieldGet(this, _RedisClientMultiCommand_multi, "f").queue, __classPrivateFieldGet(this, _RedisClientMultiCommand_selectedDB, "f"), multi_command_1.default.generateChainId())); + } + async execAsPipeline() { + if (__classPrivateFieldGet(this, _RedisClientMultiCommand_multi, "f").queue.length === 0) + return []; + return __classPrivateFieldGet(this, _RedisClientMultiCommand_multi, "f").transformReplies(await __classPrivateFieldGet(this, _RedisClientMultiCommand_executor, "f").call(this, __classPrivateFieldGet(this, _RedisClientMultiCommand_multi, "f").queue, 
__classPrivateFieldGet(this, _RedisClientMultiCommand_selectedDB, "f"))); + } +} +_RedisClientMultiCommand_multi = new WeakMap(), _RedisClientMultiCommand_executor = new WeakMap(), _RedisClientMultiCommand_selectedDB = new WeakMap(), _RedisClientMultiCommand_instances = new WeakSet(), _RedisClientMultiCommand_legacyMode = function _RedisClientMultiCommand_legacyMode() { + var _a, _b; + this.v4.addCommand = this.addCommand.bind(this); + this.addCommand = (...args) => { + __classPrivateFieldGet(this, _RedisClientMultiCommand_multi, "f").addCommand((0, commander_1.transformLegacyCommandArguments)(args)); + return this; + }; + this.v4.exec = this.exec.bind(this); + this.exec = (callback) => { + this.v4.exec() + .then((reply) => { + if (!callback) + return; + callback(null, reply); + }) + .catch((err) => { + if (!callback) { + // this.emit('error', err); + return; + } + callback(err); + }); + }; + for (const [name, command] of Object.entries(commands_1.default)) { + __classPrivateFieldGet(this, _RedisClientMultiCommand_instances, "m", _RedisClientMultiCommand_defineLegacyCommand).call(this, name, command); + (_a = this)[_b = name.toLowerCase()] ?? (_a[_b] = this[name]); + } +}, _RedisClientMultiCommand_defineLegacyCommand = function _RedisClientMultiCommand_defineLegacyCommand(name, command) { + this.v4[name] = this[name].bind(this.v4); + this[name] = command && command.TRANSFORM_LEGACY_REPLY && command.transformReply ? 
+ (...args) => { + __classPrivateFieldGet(this, _RedisClientMultiCommand_multi, "f").addCommand([name, ...(0, commander_1.transformLegacyCommandArguments)(args)], command.transformReply); + return this; + } : + (...args) => this.addCommand(name, ...args); +}; +exports.default = RedisClientMultiCommand; +(0, commander_1.attachCommands)({ + BaseClass: RedisClientMultiCommand, + commands: commands_1.default, + executor: RedisClientMultiCommand.prototype.commandsExecutor +}); diff --git a/node_modules/@redis/client/dist/lib/client/pub-sub.d.ts b/node_modules/@redis/client/dist/lib/client/pub-sub.d.ts new file mode 100644 index 0000000..ffd2783 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/pub-sub.d.ts @@ -0,0 +1,50 @@ +/// +import { RedisCommandArgument } from "../commands"; +export declare enum PubSubType { + CHANNELS = "CHANNELS", + PATTERNS = "PATTERNS", + SHARDED = "SHARDED" +} +export type PubSubListener = (message: T, channel: T) => unknown; +export interface ChannelListeners { + unsubscribing: boolean; + buffers: Set>; + strings: Set>; +} +export type PubSubTypeListeners = Map; +export type PubSubCommand = ReturnType; +export declare class PubSub { + #private; + static isStatusReply(reply: Array): boolean; + static isShardedUnsubscribe(reply: Array): boolean; + get isActive(): boolean; + subscribe(type: PubSubType, channels: string | Array, listener: PubSubListener, returnBuffers?: T): { + args: RedisCommandArgument[]; + channelsCounter: number; + resolve: () => void; + reject: () => void; + } | undefined; + extendChannelListeners(type: PubSubType, channel: string, listeners: ChannelListeners): { + args: (string | Buffer)[]; + channelsCounter: number; + resolve: () => number; + reject: () => void; + } | undefined; + extendTypeListeners(type: PubSubType, listeners: PubSubTypeListeners): { + args: RedisCommandArgument[]; + channelsCounter: number; + resolve: () => number; + reject: () => void; + } | undefined; + unsubscribe(type: PubSubType, 
channels?: string | Array, listener?: PubSubListener, returnBuffers?: T): { + args: RedisCommandArgument[]; + channelsCounter: number; + resolve: () => void; + reject: undefined; + } | undefined; + reset(): void; + resubscribe(): Array; + handleMessageReply(reply: Array): boolean; + removeShardedListeners(channel: string): ChannelListeners; + getTypeListeners(type: PubSubType): PubSubTypeListeners; +} diff --git a/node_modules/@redis/client/dist/lib/client/pub-sub.js b/node_modules/@redis/client/dist/lib/client/pub-sub.js new file mode 100644 index 0000000..fabaa99 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/pub-sub.js @@ -0,0 +1,306 @@ +"use strict"; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var _PubSub_instances, _a, _PubSub_channelsArray, _PubSub_listenersSet, _PubSub_subscribing, _PubSub_isActive, _PubSub_listeners, _PubSub_extendChannelListeners, _PubSub_unsubscribeCommand, _PubSub_updateIsActive, _PubSub_emitPubSubMessage; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PubSub = exports.PubSubType = void 0; +var PubSubType; +(function (PubSubType) { + PubSubType["CHANNELS"] = "CHANNELS"; + PubSubType["PATTERNS"] = "PATTERNS"; + PubSubType["SHARDED"] = "SHARDED"; +})(PubSubType || (exports.PubSubType = PubSubType = {})); +const COMMANDS = { + [PubSubType.CHANNELS]: { + subscribe: Buffer.from('subscribe'), + unsubscribe: Buffer.from('unsubscribe'), + message: Buffer.from('message') + }, + [PubSubType.PATTERNS]: { + subscribe: Buffer.from('psubscribe'), + unsubscribe: Buffer.from('punsubscribe'), + message: Buffer.from('pmessage') + }, + [PubSubType.SHARDED]: { + subscribe: Buffer.from('ssubscribe'), + unsubscribe: Buffer.from('sunsubscribe'), + message: Buffer.from('smessage') + } +}; +class PubSub { + constructor() { + _PubSub_instances.add(this); + _PubSub_subscribing.set(this, 0); + _PubSub_isActive.set(this, false); + _PubSub_listeners.set(this, { + [PubSubType.CHANNELS]: new Map(), + [PubSubType.PATTERNS]: new Map(), + [PubSubType.SHARDED]: new Map() + }); + } + static isStatusReply(reply) { + return (COMMANDS[PubSubType.CHANNELS].subscribe.equals(reply[0]) || + COMMANDS[PubSubType.CHANNELS].unsubscribe.equals(reply[0]) || + COMMANDS[PubSubType.PATTERNS].subscribe.equals(reply[0]) || + COMMANDS[PubSubType.PATTERNS].unsubscribe.equals(reply[0]) || + COMMANDS[PubSubType.SHARDED].subscribe.equals(reply[0])); + } + static isShardedUnsubscribe(reply) { + return COMMANDS[PubSubType.SHARDED].unsubscribe.equals(reply[0]); + } + get isActive() { + return __classPrivateFieldGet(this, _PubSub_isActive, "f"); + } + subscribe(type, channels, listener, returnBuffers) { + var 
_b; + const args = [COMMANDS[type].subscribe], channelsArray = __classPrivateFieldGet(_a, _a, "m", _PubSub_channelsArray).call(_a, channels); + for (const channel of channelsArray) { + let channelListeners = __classPrivateFieldGet(this, _PubSub_listeners, "f")[type].get(channel); + if (!channelListeners || channelListeners.unsubscribing) { + args.push(channel); + } + } + if (args.length === 1) { + // all channels are already subscribed, add listeners without issuing a command + for (const channel of channelsArray) { + __classPrivateFieldGet(_a, _a, "m", _PubSub_listenersSet).call(_a, __classPrivateFieldGet(this, _PubSub_listeners, "f")[type].get(channel), returnBuffers).add(listener); + } + return; + } + __classPrivateFieldSet(this, _PubSub_isActive, true, "f"); + __classPrivateFieldSet(this, _PubSub_subscribing, (_b = __classPrivateFieldGet(this, _PubSub_subscribing, "f"), _b++, _b), "f"); + return { + args, + channelsCounter: args.length - 1, + resolve: () => { + var _b; + __classPrivateFieldSet(this, _PubSub_subscribing, (_b = __classPrivateFieldGet(this, _PubSub_subscribing, "f"), _b--, _b), "f"); + for (const channel of channelsArray) { + let listeners = __classPrivateFieldGet(this, _PubSub_listeners, "f")[type].get(channel); + if (!listeners) { + listeners = { + unsubscribing: false, + buffers: new Set(), + strings: new Set() + }; + __classPrivateFieldGet(this, _PubSub_listeners, "f")[type].set(channel, listeners); + } + __classPrivateFieldGet(_a, _a, "m", _PubSub_listenersSet).call(_a, listeners, returnBuffers).add(listener); + } + }, + reject: () => { + var _b; + __classPrivateFieldSet(this, _PubSub_subscribing, (_b = __classPrivateFieldGet(this, _PubSub_subscribing, "f"), _b--, _b), "f"); + __classPrivateFieldGet(this, _PubSub_instances, "m", _PubSub_updateIsActive).call(this); + } + }; + } + extendChannelListeners(type, channel, listeners) { + var _b; + if (!__classPrivateFieldGet(this, _PubSub_instances, "m", _PubSub_extendChannelListeners).call(this, 
type, channel, listeners)) + return; + __classPrivateFieldSet(this, _PubSub_isActive, true, "f"); + __classPrivateFieldSet(this, _PubSub_subscribing, (_b = __classPrivateFieldGet(this, _PubSub_subscribing, "f"), _b++, _b), "f"); + return { + args: [ + COMMANDS[type].subscribe, + channel + ], + channelsCounter: 1, + resolve: () => { var _b, _c; return __classPrivateFieldSet(this, _PubSub_subscribing, (_c = __classPrivateFieldGet(this, _PubSub_subscribing, "f"), _b = _c--, _c), "f"), _b; }, + reject: () => { + var _b; + __classPrivateFieldSet(this, _PubSub_subscribing, (_b = __classPrivateFieldGet(this, _PubSub_subscribing, "f"), _b--, _b), "f"); + __classPrivateFieldGet(this, _PubSub_instances, "m", _PubSub_updateIsActive).call(this); + } + }; + } + extendTypeListeners(type, listeners) { + var _b; + const args = [COMMANDS[type].subscribe]; + for (const [channel, channelListeners] of listeners) { + if (__classPrivateFieldGet(this, _PubSub_instances, "m", _PubSub_extendChannelListeners).call(this, type, channel, channelListeners)) { + args.push(channel); + } + } + if (args.length === 1) + return; + __classPrivateFieldSet(this, _PubSub_isActive, true, "f"); + __classPrivateFieldSet(this, _PubSub_subscribing, (_b = __classPrivateFieldGet(this, _PubSub_subscribing, "f"), _b++, _b), "f"); + return { + args, + channelsCounter: args.length - 1, + resolve: () => { var _b, _c; return __classPrivateFieldSet(this, _PubSub_subscribing, (_c = __classPrivateFieldGet(this, _PubSub_subscribing, "f"), _b = _c--, _c), "f"), _b; }, + reject: () => { + var _b; + __classPrivateFieldSet(this, _PubSub_subscribing, (_b = __classPrivateFieldGet(this, _PubSub_subscribing, "f"), _b--, _b), "f"); + __classPrivateFieldGet(this, _PubSub_instances, "m", _PubSub_updateIsActive).call(this); + } + }; + } + unsubscribe(type, channels, listener, returnBuffers) { + const listeners = __classPrivateFieldGet(this, _PubSub_listeners, "f")[type]; + if (!channels) { + return __classPrivateFieldGet(this, 
_PubSub_instances, "m", _PubSub_unsubscribeCommand).call(this, [COMMANDS[type].unsubscribe], + // cannot use `this.#subscribed` because there might be some `SUBSCRIBE` commands in the queue + // cannot use `this.#subscribed + this.#subscribing` because some `SUBSCRIBE` commands might fail + NaN, () => listeners.clear()); + } + const channelsArray = __classPrivateFieldGet(_a, _a, "m", _PubSub_channelsArray).call(_a, channels); + if (!listener) { + return __classPrivateFieldGet(this, _PubSub_instances, "m", _PubSub_unsubscribeCommand).call(this, [COMMANDS[type].unsubscribe, ...channelsArray], channelsArray.length, () => { + for (const channel of channelsArray) { + listeners.delete(channel); + } + }); + } + const args = [COMMANDS[type].unsubscribe]; + for (const channel of channelsArray) { + const sets = listeners.get(channel); + if (sets) { + let current, other; + if (returnBuffers) { + current = sets.buffers; + other = sets.strings; + } + else { + current = sets.strings; + other = sets.buffers; + } + const currentSize = current.has(listener) ? current.size - 1 : current.size; + if (currentSize !== 0 || other.size !== 0) + continue; + sets.unsubscribing = true; + } + args.push(channel); + } + if (args.length === 1) { + // all channels has other listeners, + // delete the listeners without issuing a command + for (const channel of channelsArray) { + __classPrivateFieldGet(_a, _a, "m", _PubSub_listenersSet).call(_a, listeners.get(channel), returnBuffers).delete(listener); + } + return; + } + return __classPrivateFieldGet(this, _PubSub_instances, "m", _PubSub_unsubscribeCommand).call(this, args, args.length - 1, () => { + for (const channel of channelsArray) { + const sets = listeners.get(channel); + if (!sets) + continue; + (returnBuffers ? 
sets.buffers : sets.strings).delete(listener); + if (sets.buffers.size === 0 && sets.strings.size === 0) { + listeners.delete(channel); + } + } + }); + } + reset() { + __classPrivateFieldSet(this, _PubSub_isActive, false, "f"); + __classPrivateFieldSet(this, _PubSub_subscribing, 0, "f"); + } + resubscribe() { + var _b; + const commands = []; + for (const [type, listeners] of Object.entries(__classPrivateFieldGet(this, _PubSub_listeners, "f"))) { + if (!listeners.size) + continue; + __classPrivateFieldSet(this, _PubSub_isActive, true, "f"); + __classPrivateFieldSet(this, _PubSub_subscribing, (_b = __classPrivateFieldGet(this, _PubSub_subscribing, "f"), _b++, _b), "f"); + const callback = () => { var _b, _c; return __classPrivateFieldSet(this, _PubSub_subscribing, (_c = __classPrivateFieldGet(this, _PubSub_subscribing, "f"), _b = _c--, _c), "f"), _b; }; + commands.push({ + args: [ + COMMANDS[type].subscribe, + ...listeners.keys() + ], + channelsCounter: listeners.size, + resolve: callback, + reject: callback + }); + } + return commands; + } + handleMessageReply(reply) { + if (COMMANDS[PubSubType.CHANNELS].message.equals(reply[0])) { + __classPrivateFieldGet(this, _PubSub_instances, "m", _PubSub_emitPubSubMessage).call(this, PubSubType.CHANNELS, reply[2], reply[1]); + return true; + } + else if (COMMANDS[PubSubType.PATTERNS].message.equals(reply[0])) { + __classPrivateFieldGet(this, _PubSub_instances, "m", _PubSub_emitPubSubMessage).call(this, PubSubType.PATTERNS, reply[3], reply[2], reply[1]); + return true; + } + else if (COMMANDS[PubSubType.SHARDED].message.equals(reply[0])) { + __classPrivateFieldGet(this, _PubSub_instances, "m", _PubSub_emitPubSubMessage).call(this, PubSubType.SHARDED, reply[2], reply[1]); + return true; + } + return false; + } + removeShardedListeners(channel) { + const listeners = __classPrivateFieldGet(this, _PubSub_listeners, "f")[PubSubType.SHARDED].get(channel); + __classPrivateFieldGet(this, _PubSub_listeners, 
"f")[PubSubType.SHARDED].delete(channel); + __classPrivateFieldGet(this, _PubSub_instances, "m", _PubSub_updateIsActive).call(this); + return listeners; + } + getTypeListeners(type) { + return __classPrivateFieldGet(this, _PubSub_listeners, "f")[type]; + } +} +exports.PubSub = PubSub; +_a = PubSub, _PubSub_subscribing = new WeakMap(), _PubSub_isActive = new WeakMap(), _PubSub_listeners = new WeakMap(), _PubSub_instances = new WeakSet(), _PubSub_channelsArray = function _PubSub_channelsArray(channels) { + return (Array.isArray(channels) ? channels : [channels]); +}, _PubSub_listenersSet = function _PubSub_listenersSet(listeners, returnBuffers) { + return (returnBuffers ? listeners.buffers : listeners.strings); +}, _PubSub_extendChannelListeners = function _PubSub_extendChannelListeners(type, channel, listeners) { + const existingListeners = __classPrivateFieldGet(this, _PubSub_listeners, "f")[type].get(channel); + if (!existingListeners) { + __classPrivateFieldGet(this, _PubSub_listeners, "f")[type].set(channel, listeners); + return true; + } + for (const listener of listeners.buffers) { + existingListeners.buffers.add(listener); + } + for (const listener of listeners.strings) { + existingListeners.strings.add(listener); + } + return false; +}, _PubSub_unsubscribeCommand = function _PubSub_unsubscribeCommand(args, channelsCounter, removeListeners) { + return { + args, + channelsCounter, + resolve: () => { + removeListeners(); + __classPrivateFieldGet(this, _PubSub_instances, "m", _PubSub_updateIsActive).call(this); + }, + reject: undefined // use the same structure as `subscribe` + }; +}, _PubSub_updateIsActive = function _PubSub_updateIsActive() { + __classPrivateFieldSet(this, _PubSub_isActive, (__classPrivateFieldGet(this, _PubSub_listeners, "f")[PubSubType.CHANNELS].size !== 0 || + __classPrivateFieldGet(this, _PubSub_listeners, "f")[PubSubType.PATTERNS].size !== 0 || + __classPrivateFieldGet(this, _PubSub_listeners, "f")[PubSubType.SHARDED].size !== 0 || + 
__classPrivateFieldGet(this, _PubSub_subscribing, "f") !== 0), "f"); +}, _PubSub_emitPubSubMessage = function _PubSub_emitPubSubMessage(type, message, channel, pattern) { + const keyString = (pattern ?? channel).toString(), listeners = __classPrivateFieldGet(this, _PubSub_listeners, "f")[type].get(keyString); + if (!listeners) + return; + for (const listener of listeners.buffers) { + listener(message, channel); + } + if (!listeners.strings.size) + return; + const channelString = pattern ? channel.toString() : keyString, messageString = channelString === '__redis__:invalidate' ? + // https://github.com/redis/redis/pull/7469 + // https://github.com/redis/redis/issues/7463 + (message === null ? null : message.map(x => x.toString())) : + message.toString(); + for (const listener of listeners.strings) { + listener(messageString, channelString); + } +}; diff --git a/node_modules/@redis/client/dist/lib/client/socket.d.ts b/node_modules/@redis/client/dist/lib/client/socket.d.ts new file mode 100644 index 0000000..e793807 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/socket.d.ts @@ -0,0 +1,52 @@ +/// +/// +/// +import { EventEmitter } from 'events'; +import * as net from 'net'; +import * as tls from 'tls'; +import { RedisCommandArguments } from '../commands'; +export interface RedisSocketCommonOptions { + /** + * Connection Timeout (in milliseconds) + */ + connectTimeout?: number; + /** + * Toggle [`Nagle's algorithm`](https://nodejs.org/api/net.html#net_socket_setnodelay_nodelay) + */ + noDelay?: boolean; + /** + * Toggle [`keep-alive`](https://nodejs.org/api/net.html#net_socket_setkeepalive_enable_initialdelay) + */ + keepAlive?: number | false; + /** + * When the socket closes unexpectedly (without calling `.quit()`/`.disconnect()`), the client uses `reconnectStrategy` to decide what to do. The following values are supported: + * 1. `false` -> do not reconnect, close the client and flush the command queue. + * 2. 
`number` -> wait for `X` milliseconds before reconnecting. + * 3. `(retries: number, cause: Error) => false | number | Error` -> `number` is the same as configuring a `number` directly, `Error` is the same as `false`, but with a custom error. + * Defaults to `retries => Math.min(retries * 50, 500)` + */ + reconnectStrategy?: false | number | ((retries: number, cause: Error) => false | Error | number); +} +type RedisNetSocketOptions = Partial & { + tls?: false; +}; +export interface RedisTlsSocketOptions extends tls.ConnectionOptions { + tls: true; +} +export type RedisSocketOptions = RedisSocketCommonOptions & (RedisNetSocketOptions | RedisTlsSocketOptions); +export type RedisSocketInitiator = () => Promise; +export default class RedisSocket extends EventEmitter { + #private; + get isOpen(): boolean; + get isReady(): boolean; + get writableNeedDrain(): boolean; + constructor(initiator: RedisSocketInitiator, options?: RedisSocketOptions); + connect(): Promise; + writeCommand(args: RedisCommandArguments): void; + disconnect(): void; + quit(fn: () => Promise): Promise; + cork(): void; + ref(): void; + unref(): void; +} +export {}; diff --git a/node_modules/@redis/client/dist/lib/client/socket.js b/node_modules/@redis/client/dist/lib/client/socket.js new file mode 100644 index 0000000..3baa08b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/client/socket.js @@ -0,0 +1,233 @@ +"use strict"; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _RedisSocket_instances, _a, _RedisSocket_initiateOptions, _RedisSocket_isTlsSocket, _RedisSocket_initiator, _RedisSocket_options, _RedisSocket_socket, _RedisSocket_isOpen, _RedisSocket_isReady, _RedisSocket_writableNeedDrain, _RedisSocket_isSocketUnrefed, _RedisSocket_reconnectStrategy, _RedisSocket_shouldReconnect, _RedisSocket_connect, _RedisSocket_createSocket, _RedisSocket_createNetSocket, _RedisSocket_createTlsSocket, _RedisSocket_onSocketError, _RedisSocket_disconnect, _RedisSocket_isCorked; +Object.defineProperty(exports, "__esModule", { value: true }); +const events_1 = require("events"); +const net = require("net"); +const tls = require("tls"); +const errors_1 = require("../errors"); +const utils_1 = require("../utils"); +class RedisSocket extends events_1.EventEmitter { + get isOpen() { + return __classPrivateFieldGet(this, _RedisSocket_isOpen, "f"); + } + get isReady() { + return __classPrivateFieldGet(this, _RedisSocket_isReady, "f"); + } + get writableNeedDrain() { + return __classPrivateFieldGet(this, _RedisSocket_writableNeedDrain, "f"); + } + constructor(initiator, options) { + super(); + _RedisSocket_instances.add(this); + _RedisSocket_initiator.set(this, void 0); + _RedisSocket_options.set(this, void 0); + _RedisSocket_socket.set(this, void 0); + _RedisSocket_isOpen.set(this, false); + _RedisSocket_isReady.set(this, false); + // 
`writable.writableNeedDrain` was added in v15.2.0 and therefore can't be used + // https://nodejs.org/api/stream.html#stream_writable_writableneeddrain + _RedisSocket_writableNeedDrain.set(this, false); + _RedisSocket_isSocketUnrefed.set(this, false); + _RedisSocket_isCorked.set(this, false); + __classPrivateFieldSet(this, _RedisSocket_initiator, initiator, "f"); + __classPrivateFieldSet(this, _RedisSocket_options, __classPrivateFieldGet(_a, _a, "m", _RedisSocket_initiateOptions).call(_a, options), "f"); + } + async connect() { + if (__classPrivateFieldGet(this, _RedisSocket_isOpen, "f")) { + throw new Error('Socket already opened'); + } + __classPrivateFieldSet(this, _RedisSocket_isOpen, true, "f"); + return __classPrivateFieldGet(this, _RedisSocket_instances, "m", _RedisSocket_connect).call(this); + } + writeCommand(args) { + if (!__classPrivateFieldGet(this, _RedisSocket_socket, "f")) { + throw new errors_1.ClientClosedError(); + } + for (const toWrite of args) { + __classPrivateFieldSet(this, _RedisSocket_writableNeedDrain, !__classPrivateFieldGet(this, _RedisSocket_socket, "f").write(toWrite), "f"); + } + } + disconnect() { + if (!__classPrivateFieldGet(this, _RedisSocket_isOpen, "f")) { + throw new errors_1.ClientClosedError(); + } + __classPrivateFieldSet(this, _RedisSocket_isOpen, false, "f"); + __classPrivateFieldGet(this, _RedisSocket_instances, "m", _RedisSocket_disconnect).call(this); + } + async quit(fn) { + if (!__classPrivateFieldGet(this, _RedisSocket_isOpen, "f")) { + throw new errors_1.ClientClosedError(); + } + __classPrivateFieldSet(this, _RedisSocket_isOpen, false, "f"); + const reply = await fn(); + __classPrivateFieldGet(this, _RedisSocket_instances, "m", _RedisSocket_disconnect).call(this); + return reply; + } + cork() { + if (!__classPrivateFieldGet(this, _RedisSocket_socket, "f") || __classPrivateFieldGet(this, _RedisSocket_isCorked, "f")) { + return; + } + __classPrivateFieldGet(this, _RedisSocket_socket, "f").cork(); + 
__classPrivateFieldSet(this, _RedisSocket_isCorked, true, "f"); + setImmediate(() => { + __classPrivateFieldGet(this, _RedisSocket_socket, "f")?.uncork(); + __classPrivateFieldSet(this, _RedisSocket_isCorked, false, "f"); + }); + } + ref() { + __classPrivateFieldSet(this, _RedisSocket_isSocketUnrefed, false, "f"); + __classPrivateFieldGet(this, _RedisSocket_socket, "f")?.ref(); + } + unref() { + __classPrivateFieldSet(this, _RedisSocket_isSocketUnrefed, true, "f"); + __classPrivateFieldGet(this, _RedisSocket_socket, "f")?.unref(); + } +} +_a = RedisSocket, _RedisSocket_initiator = new WeakMap(), _RedisSocket_options = new WeakMap(), _RedisSocket_socket = new WeakMap(), _RedisSocket_isOpen = new WeakMap(), _RedisSocket_isReady = new WeakMap(), _RedisSocket_writableNeedDrain = new WeakMap(), _RedisSocket_isSocketUnrefed = new WeakMap(), _RedisSocket_isCorked = new WeakMap(), _RedisSocket_instances = new WeakSet(), _RedisSocket_initiateOptions = function _RedisSocket_initiateOptions(options) { + var _b, _c; + options ?? (options = {}); + if (!options.path) { + (_b = options).port ?? (_b.port = 6379); + (_c = options).host ?? (_c.host = 'localhost'); + } + options.connectTimeout ?? (options.connectTimeout = 5000); + options.keepAlive ?? (options.keepAlive = 5000); + options.noDelay ?? 
(options.noDelay = true); + return options; +}, _RedisSocket_isTlsSocket = function _RedisSocket_isTlsSocket(options) { + return options.tls === true; +}, _RedisSocket_reconnectStrategy = function _RedisSocket_reconnectStrategy(retries, cause) { + if (__classPrivateFieldGet(this, _RedisSocket_options, "f").reconnectStrategy === false) { + return false; + } + else if (typeof __classPrivateFieldGet(this, _RedisSocket_options, "f").reconnectStrategy === 'number') { + return __classPrivateFieldGet(this, _RedisSocket_options, "f").reconnectStrategy; + } + else if (__classPrivateFieldGet(this, _RedisSocket_options, "f").reconnectStrategy) { + try { + const retryIn = __classPrivateFieldGet(this, _RedisSocket_options, "f").reconnectStrategy(retries, cause); + if (retryIn !== false && !(retryIn instanceof Error) && typeof retryIn !== 'number') { + throw new TypeError(`Reconnect strategy should return \`false | Error | number\`, got ${retryIn} instead`); + } + return retryIn; + } + catch (err) { + this.emit('error', err); + } + } + return Math.min(retries * 50, 500); +}, _RedisSocket_shouldReconnect = function _RedisSocket_shouldReconnect(retries, cause) { + const retryIn = __classPrivateFieldGet(this, _RedisSocket_instances, "m", _RedisSocket_reconnectStrategy).call(this, retries, cause); + if (retryIn === false) { + __classPrivateFieldSet(this, _RedisSocket_isOpen, false, "f"); + this.emit('error', cause); + return cause; + } + else if (retryIn instanceof Error) { + __classPrivateFieldSet(this, _RedisSocket_isOpen, false, "f"); + this.emit('error', cause); + return new errors_1.ReconnectStrategyError(retryIn, cause); + } + return retryIn; +}, _RedisSocket_connect = async function _RedisSocket_connect() { + let retries = 0; + do { + try { + __classPrivateFieldSet(this, _RedisSocket_socket, await __classPrivateFieldGet(this, _RedisSocket_instances, "m", _RedisSocket_createSocket).call(this), "f"); + __classPrivateFieldSet(this, _RedisSocket_writableNeedDrain, false, "f"); + 
this.emit('connect'); + try { + await __classPrivateFieldGet(this, _RedisSocket_initiator, "f").call(this); + } + catch (err) { + __classPrivateFieldGet(this, _RedisSocket_socket, "f").destroy(); + __classPrivateFieldSet(this, _RedisSocket_socket, undefined, "f"); + throw err; + } + __classPrivateFieldSet(this, _RedisSocket_isReady, true, "f"); + this.emit('ready'); + } + catch (err) { + const retryIn = __classPrivateFieldGet(this, _RedisSocket_instances, "m", _RedisSocket_shouldReconnect).call(this, retries++, err); + if (typeof retryIn !== 'number') { + throw retryIn; + } + this.emit('error', err); + await (0, utils_1.promiseTimeout)(retryIn); + this.emit('reconnecting'); + } + } while (__classPrivateFieldGet(this, _RedisSocket_isOpen, "f") && !__classPrivateFieldGet(this, _RedisSocket_isReady, "f")); +}, _RedisSocket_createSocket = function _RedisSocket_createSocket() { + return new Promise((resolve, reject) => { + const { connectEvent, socket } = __classPrivateFieldGet(_a, _a, "m", _RedisSocket_isTlsSocket).call(_a, __classPrivateFieldGet(this, _RedisSocket_options, "f")) ? 
+ __classPrivateFieldGet(this, _RedisSocket_instances, "m", _RedisSocket_createTlsSocket).call(this) : + __classPrivateFieldGet(this, _RedisSocket_instances, "m", _RedisSocket_createNetSocket).call(this); + if (__classPrivateFieldGet(this, _RedisSocket_options, "f").connectTimeout) { + socket.setTimeout(__classPrivateFieldGet(this, _RedisSocket_options, "f").connectTimeout, () => socket.destroy(new errors_1.ConnectionTimeoutError())); + } + if (__classPrivateFieldGet(this, _RedisSocket_isSocketUnrefed, "f")) { + socket.unref(); + } + socket + .setNoDelay(__classPrivateFieldGet(this, _RedisSocket_options, "f").noDelay) + .once('error', reject) + .once(connectEvent, () => { + socket + .setTimeout(0) + // https://github.com/nodejs/node/issues/31663 + .setKeepAlive(__classPrivateFieldGet(this, _RedisSocket_options, "f").keepAlive !== false, __classPrivateFieldGet(this, _RedisSocket_options, "f").keepAlive || 0) + .off('error', reject) + .once('error', (err) => __classPrivateFieldGet(this, _RedisSocket_instances, "m", _RedisSocket_onSocketError).call(this, err)) + .once('close', hadError => { + if (!hadError && __classPrivateFieldGet(this, _RedisSocket_isOpen, "f") && __classPrivateFieldGet(this, _RedisSocket_socket, "f") === socket) { + __classPrivateFieldGet(this, _RedisSocket_instances, "m", _RedisSocket_onSocketError).call(this, new errors_1.SocketClosedUnexpectedlyError()); + } + }) + .on('drain', () => { + __classPrivateFieldSet(this, _RedisSocket_writableNeedDrain, false, "f"); + this.emit('drain'); + }) + .on('data', data => this.emit('data', data)); + resolve(socket); + }); + }); +}, _RedisSocket_createNetSocket = function _RedisSocket_createNetSocket() { + return { + connectEvent: 'connect', + socket: net.connect(__classPrivateFieldGet(this, _RedisSocket_options, "f")) // TODO + }; +}, _RedisSocket_createTlsSocket = function _RedisSocket_createTlsSocket() { + return { + connectEvent: 'secureConnect', + socket: tls.connect(__classPrivateFieldGet(this, 
_RedisSocket_options, "f")) // TODO + }; +}, _RedisSocket_onSocketError = function _RedisSocket_onSocketError(err) { + const wasReady = __classPrivateFieldGet(this, _RedisSocket_isReady, "f"); + __classPrivateFieldSet(this, _RedisSocket_isReady, false, "f"); + this.emit('error', err); + if (!wasReady || !__classPrivateFieldGet(this, _RedisSocket_isOpen, "f") || typeof __classPrivateFieldGet(this, _RedisSocket_instances, "m", _RedisSocket_shouldReconnect).call(this, 0, err) !== 'number') + return; + this.emit('reconnecting'); + __classPrivateFieldGet(this, _RedisSocket_instances, "m", _RedisSocket_connect).call(this).catch(() => { + // the error was already emitted, silently ignore it + }); +}, _RedisSocket_disconnect = function _RedisSocket_disconnect() { + __classPrivateFieldSet(this, _RedisSocket_isReady, false, "f"); + if (__classPrivateFieldGet(this, _RedisSocket_socket, "f")) { + __classPrivateFieldGet(this, _RedisSocket_socket, "f").destroy(); + __classPrivateFieldSet(this, _RedisSocket_socket, undefined, "f"); + } + this.emit('end'); +}; +exports.default = RedisSocket; diff --git a/node_modules/@redis/client/dist/lib/cluster/cluster-slots.d.ts b/node_modules/@redis/client/dist/lib/cluster/cluster-slots.d.ts new file mode 100644 index 0000000..1a2d153 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/cluster/cluster-slots.d.ts @@ -0,0 +1,60 @@ +/// +import { RedisClientType } from '../client'; +import { RedisClusterOptions } from '.'; +import { RedisCommandArgument, RedisFunctions, RedisModules, RedisScripts } from '../commands'; +import { ChannelListeners } from '../client/pub-sub'; +import { EventEmitter } from 'stream'; +interface NodeAddress { + host: string; + port: number; +} +export type NodeAddressMap = { + [address: string]: NodeAddress; +} | ((address: string) => NodeAddress | undefined); +type ValueOrPromise = T | Promise; +type ClientOrPromise = ValueOrPromise>; +export interface Node { + address: string; + client?: ClientOrPromise; +} 
+export interface ShardNode extends Node { + id: string; + host: string; + port: number; + readonly: boolean; +} +export interface MasterNode extends ShardNode { + pubSubClient?: ClientOrPromise; +} +export interface Shard { + master: MasterNode; + replicas?: Array>; + nodesIterator?: IterableIterator>; +} +export type PubSubNode = Required>; +export type OnShardedChannelMovedError = (err: unknown, channel: string, listeners?: ChannelListeners) => void; +export default class RedisClusterSlots { + #private; + slots: Shard[]; + shards: Shard[]; + masters: ShardNode[]; + replicas: ShardNode[]; + readonly nodeByAddress: Map | MasterNode>; + pubSubNode?: PubSubNode; + get isOpen(): boolean; + constructor(options: RedisClusterOptions, emit: EventEmitter['emit']); + connect(): Promise; + nodeClient(node: ShardNode): ClientOrPromise; + rediscover(startWith: RedisClientType): Promise; + quit(): Promise; + disconnect(): Promise; + getClient(firstKey: RedisCommandArgument | undefined, isReadonly: boolean | undefined): ClientOrPromise; + getRandomNode(): ShardNode; + getSlotRandomNode(slotNumber: number): ShardNode; + getMasterByAddress(address: string): ClientOrPromise | undefined; + getPubSubClient(): ClientOrPromise; + executeUnsubscribeCommand(unsubscribe: (client: RedisClientType) => Promise): Promise; + getShardedPubSubClient(channel: string): ClientOrPromise; + executeShardedUnsubscribeCommand(channel: string, unsubscribe: (client: RedisClientType) => Promise): Promise; +} +export {}; diff --git a/node_modules/@redis/client/dist/lib/cluster/cluster-slots.js b/node_modules/@redis/client/dist/lib/cluster/cluster-slots.js new file mode 100644 index 0000000..d1862e5 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/cluster/cluster-slots.js @@ -0,0 +1,434 @@ +"use strict"; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a 
getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var _RedisClusterSlots_instances, _a, _RedisClusterSlots_SLOTS, _RedisClusterSlots_options, _RedisClusterSlots_Client, _RedisClusterSlots_emit, _RedisClusterSlots_isOpen, _RedisClusterSlots_discoverWithRootNodes, _RedisClusterSlots_resetSlots, _RedisClusterSlots_discover, _RedisClusterSlots_getShards, _RedisClusterSlots_getNodeAddress, _RedisClusterSlots_clientOptionsDefaults, _RedisClusterSlots_initiateSlotNode, _RedisClusterSlots_createClient, _RedisClusterSlots_createNodeClient, _RedisClusterSlots_runningRediscoverPromise, _RedisClusterSlots_rediscover, _RedisClusterSlots_destroy, _RedisClusterSlots_execOnNodeClient, _RedisClusterSlots_iterateAllNodes, _RedisClusterSlots_randomNodeIterator, _RedisClusterSlots_slotNodesIterator, _RedisClusterSlots_initiatePubSubClient, _RedisClusterSlots_initiateShardedPubSubClient; +Object.defineProperty(exports, "__esModule", { value: true }); +const client_1 = require("../client"); +const errors_1 = require("../errors"); +const util_1 = require("util"); +const pub_sub_1 = require("../client/pub-sub"); +// We need to use 'require', because it's not possible with 
Typescript to import +// function that are exported as 'module.exports = function`, without esModuleInterop +// set to true. +const calculateSlot = require('cluster-key-slot'); +class RedisClusterSlots { + get isOpen() { + return __classPrivateFieldGet(this, _RedisClusterSlots_isOpen, "f"); + } + constructor(options, emit) { + _RedisClusterSlots_instances.add(this); + _RedisClusterSlots_options.set(this, void 0); + _RedisClusterSlots_Client.set(this, void 0); + _RedisClusterSlots_emit.set(this, void 0); + Object.defineProperty(this, "slots", { + enumerable: true, + configurable: true, + writable: true, + value: new Array(__classPrivateFieldGet(_a, _a, "f", _RedisClusterSlots_SLOTS)) + }); + Object.defineProperty(this, "shards", { + enumerable: true, + configurable: true, + writable: true, + value: new Array() + }); + Object.defineProperty(this, "masters", { + enumerable: true, + configurable: true, + writable: true, + value: new Array() + }); + Object.defineProperty(this, "replicas", { + enumerable: true, + configurable: true, + writable: true, + value: new Array() + }); + Object.defineProperty(this, "nodeByAddress", { + enumerable: true, + configurable: true, + writable: true, + value: new Map() + }); + Object.defineProperty(this, "pubSubNode", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + _RedisClusterSlots_isOpen.set(this, false); + _RedisClusterSlots_runningRediscoverPromise.set(this, void 0); + _RedisClusterSlots_randomNodeIterator.set(this, void 0); + __classPrivateFieldSet(this, _RedisClusterSlots_options, options, "f"); + __classPrivateFieldSet(this, _RedisClusterSlots_Client, client_1.default.extend(options), "f"); + __classPrivateFieldSet(this, _RedisClusterSlots_emit, emit, "f"); + } + async connect() { + if (__classPrivateFieldGet(this, _RedisClusterSlots_isOpen, "f")) { + throw new Error('Cluster already open'); + } + __classPrivateFieldSet(this, _RedisClusterSlots_isOpen, true, "f"); + try { + await 
__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_discoverWithRootNodes).call(this); + } + catch (err) { + __classPrivateFieldSet(this, _RedisClusterSlots_isOpen, false, "f"); + throw err; + } + } + nodeClient(node) { + return node.client ?? __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_createNodeClient).call(this, node); + } + async rediscover(startWith) { + __classPrivateFieldSet(this, _RedisClusterSlots_runningRediscoverPromise, __classPrivateFieldGet(this, _RedisClusterSlots_runningRediscoverPromise, "f") ?? __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_rediscover).call(this, startWith) + .finally(() => __classPrivateFieldSet(this, _RedisClusterSlots_runningRediscoverPromise, undefined, "f")), "f"); + return __classPrivateFieldGet(this, _RedisClusterSlots_runningRediscoverPromise, "f"); + } + quit() { + return __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_destroy).call(this, client => client.quit()); + } + disconnect() { + return __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_destroy).call(this, client => client.disconnect()); + } + getClient(firstKey, isReadonly) { + if (!firstKey) { + return this.nodeClient(this.getRandomNode()); + } + const slotNumber = calculateSlot(firstKey); + if (!isReadonly) { + return this.nodeClient(this.slots[slotNumber].master); + } + return this.nodeClient(this.getSlotRandomNode(slotNumber)); + } + getRandomNode() { + __classPrivateFieldSet(this, _RedisClusterSlots_randomNodeIterator, __classPrivateFieldGet(this, _RedisClusterSlots_randomNodeIterator, "f") ?? 
__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_iterateAllNodes).call(this), "f"); + return __classPrivateFieldGet(this, _RedisClusterSlots_randomNodeIterator, "f").next().value; + } + getSlotRandomNode(slotNumber) { + const slot = this.slots[slotNumber]; + if (!slot.replicas?.length) { + return slot.master; + } + slot.nodesIterator ?? (slot.nodesIterator = __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_slotNodesIterator).call(this, slot)); + return slot.nodesIterator.next().value; + } + getMasterByAddress(address) { + const master = this.nodeByAddress.get(address); + if (!master) + return; + return this.nodeClient(master); + } + getPubSubClient() { + return this.pubSubNode ? + this.pubSubNode.client : + __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_initiatePubSubClient).call(this); + } + async executeUnsubscribeCommand(unsubscribe) { + const client = await this.getPubSubClient(); + await unsubscribe(client); + if (!client.isPubSubActive && client.isOpen) { + await client.disconnect(); + this.pubSubNode = undefined; + } + } + getShardedPubSubClient(channel) { + const { master } = this.slots[calculateSlot(channel)]; + return master.pubSubClient ?? 
__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_initiateShardedPubSubClient).call(this, master); + } + async executeShardedUnsubscribeCommand(channel, unsubscribe) { + const { master } = this.slots[calculateSlot(channel)]; + if (!master.pubSubClient) + return Promise.resolve(); + const client = await master.pubSubClient; + await unsubscribe(client); + if (!client.isPubSubActive && client.isOpen) { + await client.disconnect(); + master.pubSubClient = undefined; + } + } +} +_a = RedisClusterSlots, _RedisClusterSlots_options = new WeakMap(), _RedisClusterSlots_Client = new WeakMap(), _RedisClusterSlots_emit = new WeakMap(), _RedisClusterSlots_isOpen = new WeakMap(), _RedisClusterSlots_runningRediscoverPromise = new WeakMap(), _RedisClusterSlots_randomNodeIterator = new WeakMap(), _RedisClusterSlots_instances = new WeakSet(), _RedisClusterSlots_discoverWithRootNodes = async function _RedisClusterSlots_discoverWithRootNodes() { + let start = Math.floor(Math.random() * __classPrivateFieldGet(this, _RedisClusterSlots_options, "f").rootNodes.length); + for (let i = start; i < __classPrivateFieldGet(this, _RedisClusterSlots_options, "f").rootNodes.length; i++) { + if (await __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_discover).call(this, __classPrivateFieldGet(this, _RedisClusterSlots_options, "f").rootNodes[i])) + return; + } + for (let i = 0; i < start; i++) { + if (await __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_discover).call(this, __classPrivateFieldGet(this, _RedisClusterSlots_options, "f").rootNodes[i])) + return; + } + throw new errors_1.RootNodesUnavailableError(); +}, _RedisClusterSlots_resetSlots = function _RedisClusterSlots_resetSlots() { + this.slots = new Array(__classPrivateFieldGet(_a, _a, "f", _RedisClusterSlots_SLOTS)); + this.shards = []; + this.masters = []; + this.replicas = []; + __classPrivateFieldSet(this, 
_RedisClusterSlots_randomNodeIterator, undefined, "f"); +}, _RedisClusterSlots_discover = async function _RedisClusterSlots_discover(rootNode) { + const addressesInUse = new Set(); + try { + const shards = await __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_getShards).call(this, rootNode), promises = [], eagerConnect = __classPrivateFieldGet(this, _RedisClusterSlots_options, "f").minimizeConnections !== true; + __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_resetSlots).call(this); + for (const { from, to, master, replicas } of shards) { + const shard = { + master: __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_initiateSlotNode).call(this, master, false, eagerConnect, addressesInUse, promises) + }; + if (__classPrivateFieldGet(this, _RedisClusterSlots_options, "f").useReplicas) { + shard.replicas = replicas.map(replica => __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_initiateSlotNode).call(this, replica, true, eagerConnect, addressesInUse, promises)); + } + this.shards.push(shard); + for (let i = from; i <= to; i++) { + this.slots[i] = shard; + } + } + if (this.pubSubNode && !addressesInUse.has(this.pubSubNode.address)) { + if (util_1.types.isPromise(this.pubSubNode.client)) { + promises.push(this.pubSubNode.client.then(client => client.disconnect())); + this.pubSubNode = undefined; + } + else { + promises.push(this.pubSubNode.client.disconnect()); + const channelsListeners = this.pubSubNode.client.getPubSubListeners(pub_sub_1.PubSubType.CHANNELS), patternsListeners = this.pubSubNode.client.getPubSubListeners(pub_sub_1.PubSubType.PATTERNS); + if (channelsListeners.size || patternsListeners.size) { + promises.push(__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_initiatePubSubClient).call(this, { + [pub_sub_1.PubSubType.CHANNELS]: channelsListeners, + [pub_sub_1.PubSubType.PATTERNS]: 
patternsListeners + })); + } + } + } + for (const [address, node] of this.nodeByAddress.entries()) { + if (addressesInUse.has(address)) + continue; + if (node.client) { + promises.push(__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_execOnNodeClient).call(this, node.client, client => client.disconnect())); + } + const { pubSubClient } = node; + if (pubSubClient) { + promises.push(__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_execOnNodeClient).call(this, pubSubClient, client => client.disconnect())); + } + this.nodeByAddress.delete(address); + } + await Promise.all(promises); + return true; + } + catch (err) { + __classPrivateFieldGet(this, _RedisClusterSlots_emit, "f").call(this, 'error', err); + return false; + } +}, _RedisClusterSlots_getShards = async function _RedisClusterSlots_getShards(rootNode) { + const client = new (__classPrivateFieldGet(this, _RedisClusterSlots_Client, "f"))(__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_clientOptionsDefaults).call(this, rootNode, true)); + client.on('error', err => __classPrivateFieldGet(this, _RedisClusterSlots_emit, "f").call(this, 'error', err)); + await client.connect(); + try { + // using `CLUSTER SLOTS` and not `CLUSTER SHARDS` to support older versions + return await client.clusterSlots(); + } + finally { + await client.disconnect(); + } +}, _RedisClusterSlots_getNodeAddress = function _RedisClusterSlots_getNodeAddress(address) { + switch (typeof __classPrivateFieldGet(this, _RedisClusterSlots_options, "f").nodeAddressMap) { + case 'object': + return __classPrivateFieldGet(this, _RedisClusterSlots_options, "f").nodeAddressMap[address]; + case 'function': + return __classPrivateFieldGet(this, _RedisClusterSlots_options, "f").nodeAddressMap(address); + } +}, _RedisClusterSlots_clientOptionsDefaults = function _RedisClusterSlots_clientOptionsDefaults(options, disableReconnect) { + let result; + if 
(__classPrivateFieldGet(this, _RedisClusterSlots_options, "f").defaults) { + let socket; + if (__classPrivateFieldGet(this, _RedisClusterSlots_options, "f").defaults.socket) { + socket = { + ...__classPrivateFieldGet(this, _RedisClusterSlots_options, "f").defaults.socket, + ...options?.socket + }; + } + else { + socket = options?.socket; + } + result = { + ...__classPrivateFieldGet(this, _RedisClusterSlots_options, "f").defaults, + ...options, + socket + }; + } + else { + result = options; + } + if (disableReconnect) { + result ?? (result = {}); + result.socket ?? (result.socket = {}); + result.socket.reconnectStrategy = false; + } + return result; +}, _RedisClusterSlots_initiateSlotNode = function _RedisClusterSlots_initiateSlotNode({ id, ip, port }, readonly, eagerConnent, addressesInUse, promises) { + const address = `${ip}:${port}`; + addressesInUse.add(address); + let node = this.nodeByAddress.get(address); + if (!node) { + node = { + id, + host: ip, + port, + address, + readonly, + client: undefined + }; + if (eagerConnent) { + promises.push(__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_createNodeClient).call(this, node)); + } + this.nodeByAddress.set(address, node); + } + (readonly ? this.replicas : this.masters).push(node); + return node; +}, _RedisClusterSlots_createClient = async function _RedisClusterSlots_createClient(node, readonly = node.readonly) { + const client = new (__classPrivateFieldGet(this, _RedisClusterSlots_Client, "f"))(__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_clientOptionsDefaults).call(this, { + socket: __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_getNodeAddress).call(this, node.address) ?? 
{ + host: node.host, + port: node.port + }, + readonly + })); + client.on('error', err => __classPrivateFieldGet(this, _RedisClusterSlots_emit, "f").call(this, 'error', err)); + await client.connect(); + return client; +}, _RedisClusterSlots_createNodeClient = function _RedisClusterSlots_createNodeClient(node) { + const promise = __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_createClient).call(this, node) + .then(client => { + node.client = client; + return client; + }) + .catch(err => { + node.client = undefined; + throw err; + }); + node.client = promise; + return promise; +}, _RedisClusterSlots_rediscover = async function _RedisClusterSlots_rediscover(startWith) { + if (await __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_discover).call(this, startWith.options)) + return; + return __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_discoverWithRootNodes).call(this); +}, _RedisClusterSlots_destroy = async function _RedisClusterSlots_destroy(fn) { + __classPrivateFieldSet(this, _RedisClusterSlots_isOpen, false, "f"); + const promises = []; + for (const { master, replicas } of this.shards) { + if (master.client) { + promises.push(__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_execOnNodeClient).call(this, master.client, fn)); + } + if (master.pubSubClient) { + promises.push(__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_execOnNodeClient).call(this, master.pubSubClient, fn)); + } + if (replicas) { + for (const { client } of replicas) { + if (client) { + promises.push(__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_execOnNodeClient).call(this, client, fn)); + } + } + } + } + if (this.pubSubNode) { + promises.push(__classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_execOnNodeClient).call(this, this.pubSubNode.client, fn)); + 
this.pubSubNode = undefined; + } + __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_resetSlots).call(this); + this.nodeByAddress.clear(); + await Promise.allSettled(promises); +}, _RedisClusterSlots_execOnNodeClient = function _RedisClusterSlots_execOnNodeClient(client, fn) { + return util_1.types.isPromise(client) ? + client.then(fn) : + fn(client); +}, _RedisClusterSlots_iterateAllNodes = function* _RedisClusterSlots_iterateAllNodes() { + let i = Math.floor(Math.random() * (this.masters.length + this.replicas.length)); + if (i < this.masters.length) { + do { + yield this.masters[i]; + } while (++i < this.masters.length); + for (const replica of this.replicas) { + yield replica; + } + } + else { + i -= this.masters.length; + do { + yield this.replicas[i]; + } while (++i < this.replicas.length); + } + while (true) { + for (const master of this.masters) { + yield master; + } + for (const replica of this.replicas) { + yield replica; + } + } +}, _RedisClusterSlots_slotNodesIterator = function* _RedisClusterSlots_slotNodesIterator(slot) { + let i = Math.floor(Math.random() * (1 + slot.replicas.length)); + if (i < slot.replicas.length) { + do { + yield slot.replicas[i]; + } while (++i < slot.replicas.length); + } + while (true) { + yield slot.master; + for (const replica of slot.replicas) { + yield replica; + } + } +}, _RedisClusterSlots_initiatePubSubClient = async function _RedisClusterSlots_initiatePubSubClient(toResubscribe) { + const index = Math.floor(Math.random() * (this.masters.length + this.replicas.length)), node = index < this.masters.length ? 
+ this.masters[index] : + this.replicas[index - this.masters.length]; + this.pubSubNode = { + address: node.address, + client: __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_createClient).call(this, node, false) + .then(async (client) => { + if (toResubscribe) { + await Promise.all([ + client.extendPubSubListeners(pub_sub_1.PubSubType.CHANNELS, toResubscribe[pub_sub_1.PubSubType.CHANNELS]), + client.extendPubSubListeners(pub_sub_1.PubSubType.PATTERNS, toResubscribe[pub_sub_1.PubSubType.PATTERNS]) + ]); + } + this.pubSubNode.client = client; + return client; + }) + .catch(err => { + this.pubSubNode = undefined; + throw err; + }) + }; + return this.pubSubNode.client; +}, _RedisClusterSlots_initiateShardedPubSubClient = function _RedisClusterSlots_initiateShardedPubSubClient(master) { + const promise = __classPrivateFieldGet(this, _RedisClusterSlots_instances, "m", _RedisClusterSlots_createClient).call(this, master, false) + .then(client => { + client.on('server-sunsubscribe', async (channel, listeners) => { + try { + await this.rediscover(client); + const redirectTo = await this.getShardedPubSubClient(channel); + redirectTo.extendPubSubChannelListeners(pub_sub_1.PubSubType.SHARDED, channel, listeners); + } + catch (err) { + __classPrivateFieldGet(this, _RedisClusterSlots_emit, "f").call(this, 'sharded-shannel-moved-error', err, channel, listeners); + } + }); + master.pubSubClient = client; + return client; + }) + .catch(err => { + master.pubSubClient = undefined; + throw err; + }); + master.pubSubClient = promise; + return promise; +}; +_RedisClusterSlots_SLOTS = { value: 16384 }; +exports.default = RedisClusterSlots; diff --git a/node_modules/@redis/client/dist/lib/cluster/commands.d.ts b/node_modules/@redis/client/dist/lib/cluster/commands.d.ts new file mode 100644 index 0000000..a07f0d4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/cluster/commands.d.ts @@ -0,0 +1,669 @@ +import * as APPEND from '../commands/APPEND'; 
+import * as BITCOUNT from '../commands/BITCOUNT'; +import * as BITFIELD_RO from '../commands/BITFIELD_RO'; +import * as BITFIELD from '../commands/BITFIELD'; +import * as BITOP from '../commands/BITOP'; +import * as BITPOS from '../commands/BITPOS'; +import * as BLMOVE from '../commands/BLMOVE'; +import * as BLMPOP from '../commands/BLMPOP'; +import * as BLPOP from '../commands/BLPOP'; +import * as BRPOP from '../commands/BRPOP'; +import * as BRPOPLPUSH from '../commands/BRPOPLPUSH'; +import * as BZMPOP from '../commands/BZMPOP'; +import * as BZPOPMAX from '../commands/BZPOPMAX'; +import * as BZPOPMIN from '../commands/BZPOPMIN'; +import * as COPY from '../commands/COPY'; +import * as DECR from '../commands/DECR'; +import * as DECRBY from '../commands/DECRBY'; +import * as DEL from '../commands/DEL'; +import * as DUMP from '../commands/DUMP'; +import * as EVAL_RO from '../commands/EVAL_RO'; +import * as EVAL from '../commands/EVAL'; +import * as EVALSHA_RO from '../commands/EVALSHA_RO'; +import * as EVALSHA from '../commands/EVALSHA'; +import * as EXISTS from '../commands/EXISTS'; +import * as EXPIRE from '../commands/EXPIRE'; +import * as EXPIREAT from '../commands/EXPIREAT'; +import * as EXPIRETIME from '../commands/EXPIRETIME'; +import * as FCALL_RO from '../commands/FCALL_RO'; +import * as FCALL from '../commands/FCALL'; +import * as GEOADD from '../commands/GEOADD'; +import * as GEODIST from '../commands/GEODIST'; +import * as GEOHASH from '../commands/GEOHASH'; +import * as GEOPOS from '../commands/GEOPOS'; +import * as GEORADIUS_RO_WITH from '../commands/GEORADIUS_RO_WITH'; +import * as GEORADIUS_RO from '../commands/GEORADIUS_RO'; +import * as GEORADIUS_WITH from '../commands/GEORADIUS_WITH'; +import * as GEORADIUS from '../commands/GEORADIUS'; +import * as GEORADIUSBYMEMBER_RO_WITH from '../commands/GEORADIUSBYMEMBER_RO_WITH'; +import * as GEORADIUSBYMEMBER_RO from '../commands/GEORADIUSBYMEMBER_RO'; +import * as GEORADIUSBYMEMBER_WITH from 
'../commands/GEORADIUSBYMEMBER_WITH'; +import * as GEORADIUSBYMEMBER from '../commands/GEORADIUSBYMEMBER'; +import * as GEORADIUSBYMEMBERSTORE from '../commands/GEORADIUSBYMEMBERSTORE'; +import * as GEORADIUSSTORE from '../commands/GEORADIUSSTORE'; +import * as GEOSEARCH_WITH from '../commands/GEOSEARCH_WITH'; +import * as GEOSEARCH from '../commands/GEOSEARCH'; +import * as GEOSEARCHSTORE from '../commands/GEOSEARCHSTORE'; +import * as GET from '../commands/GET'; +import * as GETBIT from '../commands/GETBIT'; +import * as GETDEL from '../commands/GETDEL'; +import * as GETEX from '../commands/GETEX'; +import * as GETRANGE from '../commands/GETRANGE'; +import * as GETSET from '../commands/GETSET'; +import * as HDEL from '../commands/HDEL'; +import * as HEXISTS from '../commands/HEXISTS'; +import * as HEXPIRE from '../commands/HEXPIRE'; +import * as HEXPIREAT from '../commands/HEXPIREAT'; +import * as HEXPIRETIME from '../commands/HEXPIRETIME'; +import * as HGET from '../commands/HGET'; +import * as HGETALL from '../commands/HGETALL'; +import * as HINCRBY from '../commands/HINCRBY'; +import * as HINCRBYFLOAT from '../commands/HINCRBYFLOAT'; +import * as HKEYS from '../commands/HKEYS'; +import * as HLEN from '../commands/HLEN'; +import * as HMGET from '../commands/HMGET'; +import * as HPERSIST from '../commands/HPERSIST'; +import * as HPEXPIRE from '../commands/HPEXPIRE'; +import * as HPEXPIREAT from '../commands/HPEXPIREAT'; +import * as HPEXPIRETIME from '../commands/HPEXPIRETIME'; +import * as HPTTL from '../commands/HPTTL'; +import * as HRANDFIELD_COUNT_WITHVALUES from '../commands/HRANDFIELD_COUNT_WITHVALUES'; +import * as HRANDFIELD_COUNT from '../commands/HRANDFIELD_COUNT'; +import * as HRANDFIELD from '../commands/HRANDFIELD'; +import * as HSCAN from '../commands/HSCAN'; +import * as HSCAN_NOVALUES from '../commands/HSCAN_NOVALUES'; +import * as HSET from '../commands/HSET'; +import * as HSETNX from '../commands/HSETNX'; +import * as HSTRLEN from 
'../commands/HSTRLEN'; +import * as HTTL from '../commands/HTTL'; +import * as HVALS from '../commands/HVALS'; +import * as INCR from '../commands/INCR'; +import * as INCRBY from '../commands/INCRBY'; +import * as INCRBYFLOAT from '../commands/INCRBYFLOAT'; +import * as LCS_IDX_WITHMATCHLEN from '../commands/LCS_IDX_WITHMATCHLEN'; +import * as LCS_IDX from '../commands/LCS_IDX'; +import * as LCS_LEN from '../commands/LCS_LEN'; +import * as LCS from '../commands/LCS'; +import * as LINDEX from '../commands/LINDEX'; +import * as LINSERT from '../commands/LINSERT'; +import * as LLEN from '../commands/LLEN'; +import * as LMOVE from '../commands/LMOVE'; +import * as LMPOP from '../commands/LMPOP'; +import * as LPOP_COUNT from '../commands/LPOP_COUNT'; +import * as LPOP from '../commands/LPOP'; +import * as LPOS_COUNT from '../commands/LPOS_COUNT'; +import * as LPOS from '../commands/LPOS'; +import * as LPUSH from '../commands/LPUSH'; +import * as LPUSHX from '../commands/LPUSHX'; +import * as LRANGE from '../commands/LRANGE'; +import * as LREM from '../commands/LREM'; +import * as LSET from '../commands/LSET'; +import * as LTRIM from '../commands/LTRIM'; +import * as MGET from '../commands/MGET'; +import * as MIGRATE from '../commands/MIGRATE'; +import * as MSET from '../commands/MSET'; +import * as MSETNX from '../commands/MSETNX'; +import * as OBJECT_ENCODING from '../commands/OBJECT_ENCODING'; +import * as OBJECT_FREQ from '../commands/OBJECT_FREQ'; +import * as OBJECT_IDLETIME from '../commands/OBJECT_IDLETIME'; +import * as OBJECT_REFCOUNT from '../commands/OBJECT_REFCOUNT'; +import * as PERSIST from '../commands/PERSIST'; +import * as PEXPIRE from '../commands/PEXPIRE'; +import * as PEXPIREAT from '../commands/PEXPIREAT'; +import * as PEXPIRETIME from '../commands/PEXPIRETIME'; +import * as PFADD from '../commands/PFADD'; +import * as PFCOUNT from '../commands/PFCOUNT'; +import * as PFMERGE from '../commands/PFMERGE'; +import * as PSETEX from '../commands/PSETEX'; 
+import * as PTTL from '../commands/PTTL'; +import * as PUBLISH from '../commands/PUBLISH'; +import * as RENAME from '../commands/RENAME'; +import * as RENAMENX from '../commands/RENAMENX'; +import * as RESTORE from '../commands/RESTORE'; +import * as RPOP_COUNT from '../commands/RPOP_COUNT'; +import * as RPOP from '../commands/RPOP'; +import * as RPOPLPUSH from '../commands/RPOPLPUSH'; +import * as RPUSH from '../commands/RPUSH'; +import * as RPUSHX from '../commands/RPUSHX'; +import * as SADD from '../commands/SADD'; +import * as SCARD from '../commands/SCARD'; +import * as SDIFF from '../commands/SDIFF'; +import * as SDIFFSTORE from '../commands/SDIFFSTORE'; +import * as SET from '../commands/SET'; +import * as SETBIT from '../commands/SETBIT'; +import * as SETEX from '../commands/SETEX'; +import * as SETNX from '../commands/SETNX'; +import * as SETRANGE from '../commands/SETRANGE'; +import * as SINTER from '../commands/SINTER'; +import * as SINTERCARD from '../commands/SINTERCARD'; +import * as SINTERSTORE from '../commands/SINTERSTORE'; +import * as SISMEMBER from '../commands/SISMEMBER'; +import * as SMEMBERS from '../commands/SMEMBERS'; +import * as SMISMEMBER from '../commands/SMISMEMBER'; +import * as SMOVE from '../commands/SMOVE'; +import * as SORT_RO from '../commands/SORT_RO'; +import * as SORT_STORE from '../commands/SORT_STORE'; +import * as SORT from '../commands/SORT'; +import * as SPOP from '../commands/SPOP'; +import * as SPUBLISH from '../commands/SPUBLISH'; +import * as SRANDMEMBER_COUNT from '../commands/SRANDMEMBER_COUNT'; +import * as SRANDMEMBER from '../commands/SRANDMEMBER'; +import * as SREM from '../commands/SREM'; +import * as SSCAN from '../commands/SSCAN'; +import * as STRLEN from '../commands/STRLEN'; +import * as SUNION from '../commands/SUNION'; +import * as SUNIONSTORE from '../commands/SUNIONSTORE'; +import * as TOUCH from '../commands/TOUCH'; +import * as TTL from '../commands/TTL'; +import * as TYPE from '../commands/TYPE'; 
+import * as UNLINK from '../commands/UNLINK'; +import * as WATCH from '../commands/WATCH'; +import * as XACK from '../commands/XACK'; +import * as XADD from '../commands/XADD'; +import * as XAUTOCLAIM_JUSTID from '../commands/XAUTOCLAIM_JUSTID'; +import * as XAUTOCLAIM from '../commands/XAUTOCLAIM'; +import * as XCLAIM_JUSTID from '../commands/XCLAIM_JUSTID'; +import * as XCLAIM from '../commands/XCLAIM'; +import * as XDEL from '../commands/XDEL'; +import * as XGROUP_CREATE from '../commands/XGROUP_CREATE'; +import * as XGROUP_CREATECONSUMER from '../commands/XGROUP_CREATECONSUMER'; +import * as XGROUP_DELCONSUMER from '../commands/XGROUP_DELCONSUMER'; +import * as XGROUP_DESTROY from '../commands/XGROUP_DESTROY'; +import * as XGROUP_SETID from '../commands/XGROUP_SETID'; +import * as XINFO_CONSUMERS from '../commands/XINFO_CONSUMERS'; +import * as XINFO_GROUPS from '../commands/XINFO_GROUPS'; +import * as XINFO_STREAM from '../commands/XINFO_STREAM'; +import * as XLEN from '../commands/XLEN'; +import * as XPENDING_RANGE from '../commands/XPENDING_RANGE'; +import * as XPENDING from '../commands/XPENDING'; +import * as XRANGE from '../commands/XRANGE'; +import * as XREAD from '../commands/XREAD'; +import * as XREADGROUP from '../commands/XREADGROUP'; +import * as XREVRANGE from '../commands/XREVRANGE'; +import * as XSETID from '../commands/XSETID'; +import * as XTRIM from '../commands/XTRIM'; +import * as ZADD from '../commands/ZADD'; +import * as ZCARD from '../commands/ZCARD'; +import * as ZCOUNT from '../commands/ZCOUNT'; +import * as ZDIFF_WITHSCORES from '../commands/ZDIFF_WITHSCORES'; +import * as ZDIFF from '../commands/ZDIFF'; +import * as ZDIFFSTORE from '../commands/ZDIFFSTORE'; +import * as ZINCRBY from '../commands/ZINCRBY'; +import * as ZINTER_WITHSCORES from '../commands/ZINTER_WITHSCORES'; +import * as ZINTER from '../commands/ZINTER'; +import * as ZINTERCARD from '../commands/ZINTERCARD'; +import * as ZINTERSTORE from '../commands/ZINTERSTORE'; 
+import * as ZLEXCOUNT from '../commands/ZLEXCOUNT'; +import * as ZMPOP from '../commands/ZMPOP'; +import * as ZMSCORE from '../commands/ZMSCORE'; +import * as ZPOPMAX_COUNT from '../commands/ZPOPMAX_COUNT'; +import * as ZPOPMAX from '../commands/ZPOPMAX'; +import * as ZPOPMIN_COUNT from '../commands/ZPOPMIN_COUNT'; +import * as ZPOPMIN from '../commands/ZPOPMIN'; +import * as ZRANDMEMBER_COUNT_WITHSCORES from '../commands/ZRANDMEMBER_COUNT_WITHSCORES'; +import * as ZRANDMEMBER_COUNT from '../commands/ZRANDMEMBER_COUNT'; +import * as ZRANDMEMBER from '../commands/ZRANDMEMBER'; +import * as ZRANGE_WITHSCORES from '../commands/ZRANGE_WITHSCORES'; +import * as ZRANGE from '../commands/ZRANGE'; +import * as ZRANGEBYLEX from '../commands/ZRANGEBYLEX'; +import * as ZRANGEBYSCORE_WITHSCORES from '../commands/ZRANGEBYSCORE_WITHSCORES'; +import * as ZRANGEBYSCORE from '../commands/ZRANGEBYSCORE'; +import * as ZRANGESTORE from '../commands/ZRANGESTORE'; +import * as ZRANK from '../commands/ZRANK'; +import * as ZREM from '../commands/ZREM'; +import * as ZREMRANGEBYLEX from '../commands/ZREMRANGEBYLEX'; +import * as ZREMRANGEBYRANK from '../commands/ZREMRANGEBYRANK'; +import * as ZREMRANGEBYSCORE from '../commands/ZREMRANGEBYSCORE'; +import * as ZREVRANK from '../commands/ZREVRANK'; +import * as ZSCAN from '../commands/ZSCAN'; +import * as ZSCORE from '../commands/ZSCORE'; +import * as ZUNION_WITHSCORES from '../commands/ZUNION_WITHSCORES'; +import * as ZUNION from '../commands/ZUNION'; +import * as ZUNIONSTORE from '../commands/ZUNIONSTORE'; +declare const _default: { + APPEND: typeof APPEND; + append: typeof APPEND; + BITCOUNT: typeof BITCOUNT; + bitCount: typeof BITCOUNT; + BITFIELD_RO: typeof BITFIELD_RO; + bitFieldRo: typeof BITFIELD_RO; + BITFIELD: typeof BITFIELD; + bitField: typeof BITFIELD; + BITOP: typeof BITOP; + bitOp: typeof BITOP; + BITPOS: typeof BITPOS; + bitPos: typeof BITPOS; + BLMOVE: typeof BLMOVE; + blMove: typeof BLMOVE; + BLMPOP: typeof BLMPOP; + blmPop: 
typeof BLMPOP; + BLPOP: typeof BLPOP; + blPop: typeof BLPOP; + BRPOP: typeof BRPOP; + brPop: typeof BRPOP; + BRPOPLPUSH: typeof BRPOPLPUSH; + brPopLPush: typeof BRPOPLPUSH; + BZMPOP: typeof BZMPOP; + bzmPop: typeof BZMPOP; + BZPOPMAX: typeof BZPOPMAX; + bzPopMax: typeof BZPOPMAX; + BZPOPMIN: typeof BZPOPMIN; + bzPopMin: typeof BZPOPMIN; + COPY: typeof COPY; + copy: typeof COPY; + DECR: typeof DECR; + decr: typeof DECR; + DECRBY: typeof DECRBY; + decrBy: typeof DECRBY; + DEL: typeof DEL; + del: typeof DEL; + DUMP: typeof DUMP; + dump: typeof DUMP; + EVAL_RO: typeof EVAL_RO; + evalRo: typeof EVAL_RO; + EVAL: typeof EVAL; + eval: typeof EVAL; + EVALSHA: typeof EVALSHA; + evalSha: typeof EVALSHA; + EVALSHA_RO: typeof EVALSHA_RO; + evalShaRo: typeof EVALSHA_RO; + EXISTS: typeof EXISTS; + exists: typeof EXISTS; + EXPIRE: typeof EXPIRE; + expire: typeof EXPIRE; + EXPIREAT: typeof EXPIREAT; + expireAt: typeof EXPIREAT; + EXPIRETIME: typeof EXPIRETIME; + expireTime: typeof EXPIRETIME; + FCALL_RO: typeof FCALL_RO; + fCallRo: typeof FCALL_RO; + FCALL: typeof FCALL; + fCall: typeof FCALL; + GEOADD: typeof GEOADD; + geoAdd: typeof GEOADD; + GEODIST: typeof GEODIST; + geoDist: typeof GEODIST; + GEOHASH: typeof GEOHASH; + geoHash: typeof GEOHASH; + GEOPOS: typeof GEOPOS; + geoPos: typeof GEOPOS; + GEORADIUS_RO_WITH: typeof GEORADIUS_RO_WITH; + geoRadiusRoWith: typeof GEORADIUS_RO_WITH; + GEORADIUS_RO: typeof GEORADIUS_RO; + geoRadiusRo: typeof GEORADIUS_RO; + GEORADIUS_WITH: typeof GEORADIUS_WITH; + geoRadiusWith: typeof GEORADIUS_WITH; + GEORADIUS: typeof GEORADIUS; + geoRadius: typeof GEORADIUS; + GEORADIUSBYMEMBER_RO_WITH: typeof GEORADIUSBYMEMBER_RO_WITH; + geoRadiusByMemberRoWith: typeof GEORADIUSBYMEMBER_RO_WITH; + GEORADIUSBYMEMBER_RO: typeof GEORADIUSBYMEMBER_RO; + geoRadiusByMemberRo: typeof GEORADIUSBYMEMBER_RO; + GEORADIUSBYMEMBER_WITH: typeof GEORADIUSBYMEMBER_WITH; + geoRadiusByMemberWith: typeof GEORADIUSBYMEMBER_WITH; + GEORADIUSBYMEMBER: typeof GEORADIUSBYMEMBER; 
+ geoRadiusByMember: typeof GEORADIUSBYMEMBER; + GEORADIUSBYMEMBERSTORE: typeof GEORADIUSBYMEMBERSTORE; + geoRadiusByMemberStore: typeof GEORADIUSBYMEMBERSTORE; + GEORADIUSSTORE: typeof GEORADIUSSTORE; + geoRadiusStore: typeof GEORADIUSSTORE; + GEOSEARCH_WITH: typeof GEOSEARCH_WITH; + geoSearchWith: typeof GEOSEARCH_WITH; + GEOSEARCH: typeof GEOSEARCH; + geoSearch: typeof GEOSEARCH; + GEOSEARCHSTORE: typeof GEOSEARCHSTORE; + geoSearchStore: typeof GEOSEARCHSTORE; + GET: typeof GET; + get: typeof GET; + GETBIT: typeof GETBIT; + getBit: typeof GETBIT; + GETDEL: typeof GETDEL; + getDel: typeof GETDEL; + GETEX: typeof GETEX; + getEx: typeof GETEX; + GETRANGE: typeof GETRANGE; + getRange: typeof GETRANGE; + GETSET: typeof GETSET; + getSet: typeof GETSET; + HDEL: typeof HDEL; + hDel: typeof HDEL; + HEXISTS: typeof HEXISTS; + hExists: typeof HEXISTS; + HEXPIRE: typeof HEXPIRE; + hExpire: typeof HEXPIRE; + HEXPIREAT: typeof HEXPIREAT; + hExpireAt: typeof HEXPIREAT; + HEXPIRETIME: typeof HEXPIRETIME; + hExpireTime: typeof HEXPIRETIME; + HGET: typeof HGET; + hGet: typeof HGET; + HGETALL: typeof HGETALL; + hGetAll: typeof HGETALL; + HINCRBY: typeof HINCRBY; + hIncrBy: typeof HINCRBY; + HINCRBYFLOAT: typeof HINCRBYFLOAT; + hIncrByFloat: typeof HINCRBYFLOAT; + HKEYS: typeof HKEYS; + hKeys: typeof HKEYS; + HLEN: typeof HLEN; + hLen: typeof HLEN; + HMGET: typeof HMGET; + hmGet: typeof HMGET; + HPERSIST: typeof HPERSIST; + hPersist: typeof HPERSIST; + HPEXPIRE: typeof HPEXPIRE; + hpExpire: typeof HPEXPIRE; + HPEXPIREAT: typeof HPEXPIREAT; + hpExpireAt: typeof HPEXPIREAT; + HPEXPIRETIME: typeof HPEXPIRETIME; + hpExpireTime: typeof HPEXPIRETIME; + HPTTL: typeof HPTTL; + hpTTL: typeof HPTTL; + HRANDFIELD_COUNT_WITHVALUES: typeof HRANDFIELD_COUNT_WITHVALUES; + hRandFieldCountWithValues: typeof HRANDFIELD_COUNT_WITHVALUES; + HRANDFIELD_COUNT: typeof HRANDFIELD_COUNT; + hRandFieldCount: typeof HRANDFIELD_COUNT; + HRANDFIELD: typeof HRANDFIELD; + hRandField: typeof HRANDFIELD; + HSCAN: 
typeof HSCAN; + hScan: typeof HSCAN; + HSCAN_NOVALUES: typeof HSCAN_NOVALUES; + hScanNoValues: typeof HSCAN_NOVALUES; + HSET: typeof HSET; + hSet: typeof HSET; + HSETNX: typeof HSETNX; + hSetNX: typeof HSETNX; + HSTRLEN: typeof HSTRLEN; + hStrLen: typeof HSTRLEN; + HTTL: typeof HTTL; + hTTL: typeof HTTL; + HVALS: typeof HVALS; + hVals: typeof HVALS; + INCR: typeof INCR; + incr: typeof INCR; + INCRBY: typeof INCRBY; + incrBy: typeof INCRBY; + INCRBYFLOAT: typeof INCRBYFLOAT; + incrByFloat: typeof INCRBYFLOAT; + LCS_IDX_WITHMATCHLEN: typeof LCS_IDX_WITHMATCHLEN; + lcsIdxWithMatchLen: typeof LCS_IDX_WITHMATCHLEN; + LCS_IDX: typeof LCS_IDX; + lcsIdx: typeof LCS_IDX; + LCS_LEN: typeof LCS_LEN; + lcsLen: typeof LCS_LEN; + LCS: typeof LCS; + lcs: typeof LCS; + LINDEX: typeof LINDEX; + lIndex: typeof LINDEX; + LINSERT: typeof LINSERT; + lInsert: typeof LINSERT; + LLEN: typeof LLEN; + lLen: typeof LLEN; + LMOVE: typeof LMOVE; + lMove: typeof LMOVE; + LMPOP: typeof LMPOP; + lmPop: typeof LMPOP; + LPOP_COUNT: typeof LPOP_COUNT; + lPopCount: typeof LPOP_COUNT; + LPOP: typeof LPOP; + lPop: typeof LPOP; + LPOS_COUNT: typeof LPOS_COUNT; + lPosCount: typeof LPOS_COUNT; + LPOS: typeof LPOS; + lPos: typeof LPOS; + LPUSH: typeof LPUSH; + lPush: typeof LPUSH; + LPUSHX: typeof LPUSHX; + lPushX: typeof LPUSHX; + LRANGE: typeof LRANGE; + lRange: typeof LRANGE; + LREM: typeof LREM; + lRem: typeof LREM; + LSET: typeof LSET; + lSet: typeof LSET; + LTRIM: typeof LTRIM; + lTrim: typeof LTRIM; + MGET: typeof MGET; + mGet: typeof MGET; + MIGRATE: typeof MIGRATE; + migrate: typeof MIGRATE; + MSET: typeof MSET; + mSet: typeof MSET; + MSETNX: typeof MSETNX; + mSetNX: typeof MSETNX; + OBJECT_ENCODING: typeof OBJECT_ENCODING; + objectEncoding: typeof OBJECT_ENCODING; + OBJECT_FREQ: typeof OBJECT_FREQ; + objectFreq: typeof OBJECT_FREQ; + OBJECT_IDLETIME: typeof OBJECT_IDLETIME; + objectIdleTime: typeof OBJECT_IDLETIME; + OBJECT_REFCOUNT: typeof OBJECT_REFCOUNT; + objectRefCount: typeof 
OBJECT_REFCOUNT; + PERSIST: typeof PERSIST; + persist: typeof PERSIST; + PEXPIRE: typeof PEXPIRE; + pExpire: typeof PEXPIRE; + PEXPIREAT: typeof PEXPIREAT; + pExpireAt: typeof PEXPIREAT; + PEXPIRETIME: typeof PEXPIRETIME; + pExpireTime: typeof PEXPIRETIME; + PFADD: typeof PFADD; + pfAdd: typeof PFADD; + PFCOUNT: typeof PFCOUNT; + pfCount: typeof PFCOUNT; + PFMERGE: typeof PFMERGE; + pfMerge: typeof PFMERGE; + PSETEX: typeof PSETEX; + pSetEx: typeof PSETEX; + PTTL: typeof PTTL; + pTTL: typeof PTTL; + PUBLISH: typeof PUBLISH; + publish: typeof PUBLISH; + RENAME: typeof RENAME; + rename: typeof RENAME; + RENAMENX: typeof RENAMENX; + renameNX: typeof RENAMENX; + RESTORE: typeof RESTORE; + restore: typeof RESTORE; + RPOP_COUNT: typeof RPOP_COUNT; + rPopCount: typeof RPOP_COUNT; + RPOP: typeof RPOP; + rPop: typeof RPOP; + RPOPLPUSH: typeof RPOPLPUSH; + rPopLPush: typeof RPOPLPUSH; + RPUSH: typeof RPUSH; + rPush: typeof RPUSH; + RPUSHX: typeof RPUSHX; + rPushX: typeof RPUSHX; + SADD: typeof SADD; + sAdd: typeof SADD; + SCARD: typeof SCARD; + sCard: typeof SCARD; + SDIFF: typeof SDIFF; + sDiff: typeof SDIFF; + SDIFFSTORE: typeof SDIFFSTORE; + sDiffStore: typeof SDIFFSTORE; + SINTER: typeof SINTER; + sInter: typeof SINTER; + SINTERCARD: typeof SINTERCARD; + sInterCard: typeof SINTERCARD; + SINTERSTORE: typeof SINTERSTORE; + sInterStore: typeof SINTERSTORE; + SET: typeof SET; + set: typeof SET; + SETBIT: typeof SETBIT; + setBit: typeof SETBIT; + SETEX: typeof SETEX; + setEx: typeof SETEX; + SETNX: typeof SETNX; + setNX: typeof SETNX; + SETRANGE: typeof SETRANGE; + setRange: typeof SETRANGE; + SISMEMBER: typeof SISMEMBER; + sIsMember: typeof SISMEMBER; + SMEMBERS: typeof SMEMBERS; + sMembers: typeof SMEMBERS; + SMISMEMBER: typeof SMISMEMBER; + smIsMember: typeof SMISMEMBER; + SMOVE: typeof SMOVE; + sMove: typeof SMOVE; + SORT_RO: typeof SORT_RO; + sortRo: typeof SORT_RO; + SORT_STORE: typeof SORT_STORE; + sortStore: typeof SORT_STORE; + SORT: typeof SORT; + sort: typeof SORT; 
+ SPOP: typeof SPOP; + sPop: typeof SPOP; + SPUBLISH: typeof SPUBLISH; + sPublish: typeof SPUBLISH; + SRANDMEMBER_COUNT: typeof SRANDMEMBER_COUNT; + sRandMemberCount: typeof SRANDMEMBER_COUNT; + SRANDMEMBER: typeof SRANDMEMBER; + sRandMember: typeof SRANDMEMBER; + SREM: typeof SREM; + sRem: typeof SREM; + SSCAN: typeof SSCAN; + sScan: typeof SSCAN; + STRLEN: typeof STRLEN; + strLen: typeof STRLEN; + SUNION: typeof SUNION; + sUnion: typeof SUNION; + SUNIONSTORE: typeof SUNIONSTORE; + sUnionStore: typeof SUNIONSTORE; + TOUCH: typeof TOUCH; + touch: typeof TOUCH; + TTL: typeof TTL; + ttl: typeof TTL; + TYPE: typeof TYPE; + type: typeof TYPE; + UNLINK: typeof UNLINK; + unlink: typeof UNLINK; + WATCH: typeof WATCH; + watch: typeof WATCH; + XACK: typeof XACK; + xAck: typeof XACK; + XADD: typeof XADD; + xAdd: typeof XADD; + XAUTOCLAIM_JUSTID: typeof XAUTOCLAIM_JUSTID; + xAutoClaimJustId: typeof XAUTOCLAIM_JUSTID; + XAUTOCLAIM: typeof XAUTOCLAIM; + xAutoClaim: typeof XAUTOCLAIM; + XCLAIM: typeof XCLAIM; + xClaim: typeof XCLAIM; + XCLAIM_JUSTID: typeof XCLAIM_JUSTID; + xClaimJustId: typeof XCLAIM_JUSTID; + XDEL: typeof XDEL; + xDel: typeof XDEL; + XGROUP_CREATE: typeof XGROUP_CREATE; + xGroupCreate: typeof XGROUP_CREATE; + XGROUP_CREATECONSUMER: typeof XGROUP_CREATECONSUMER; + xGroupCreateConsumer: typeof XGROUP_CREATECONSUMER; + XGROUP_DELCONSUMER: typeof XGROUP_DELCONSUMER; + xGroupDelConsumer: typeof XGROUP_DELCONSUMER; + XGROUP_DESTROY: typeof XGROUP_DESTROY; + xGroupDestroy: typeof XGROUP_DESTROY; + XGROUP_SETID: typeof XGROUP_SETID; + xGroupSetId: typeof XGROUP_SETID; + XINFO_CONSUMERS: typeof XINFO_CONSUMERS; + xInfoConsumers: typeof XINFO_CONSUMERS; + XINFO_GROUPS: typeof XINFO_GROUPS; + xInfoGroups: typeof XINFO_GROUPS; + XINFO_STREAM: typeof XINFO_STREAM; + xInfoStream: typeof XINFO_STREAM; + XLEN: typeof XLEN; + xLen: typeof XLEN; + XPENDING_RANGE: typeof XPENDING_RANGE; + xPendingRange: typeof XPENDING_RANGE; + XPENDING: typeof XPENDING; + xPending: typeof 
XPENDING; + XRANGE: typeof XRANGE; + xRange: typeof XRANGE; + XREAD: typeof XREAD; + xRead: typeof XREAD; + XREADGROUP: typeof XREADGROUP; + xReadGroup: typeof XREADGROUP; + XREVRANGE: typeof XREVRANGE; + xRevRange: typeof XREVRANGE; + XSETID: typeof XSETID; + xSetId: typeof XSETID; + XTRIM: typeof XTRIM; + xTrim: typeof XTRIM; + ZADD: typeof ZADD; + zAdd: typeof ZADD; + ZCARD: typeof ZCARD; + zCard: typeof ZCARD; + ZCOUNT: typeof ZCOUNT; + zCount: typeof ZCOUNT; + ZDIFF_WITHSCORES: typeof ZDIFF_WITHSCORES; + zDiffWithScores: typeof ZDIFF_WITHSCORES; + ZDIFF: typeof ZDIFF; + zDiff: typeof ZDIFF; + ZDIFFSTORE: typeof ZDIFFSTORE; + zDiffStore: typeof ZDIFFSTORE; + ZINCRBY: typeof ZINCRBY; + zIncrBy: typeof ZINCRBY; + ZINTER_WITHSCORES: typeof ZINTER_WITHSCORES; + zInterWithScores: typeof ZINTER_WITHSCORES; + ZINTER: typeof ZINTER; + zInter: typeof ZINTER; + ZINTERCARD: typeof ZINTERCARD; + zInterCard: typeof ZINTERCARD; + ZINTERSTORE: typeof ZINTERSTORE; + zInterStore: typeof ZINTERSTORE; + ZLEXCOUNT: typeof ZLEXCOUNT; + zLexCount: typeof ZLEXCOUNT; + ZMPOP: typeof ZMPOP; + zmPop: typeof ZMPOP; + ZMSCORE: typeof ZMSCORE; + zmScore: typeof ZMSCORE; + ZPOPMAX_COUNT: typeof ZPOPMAX_COUNT; + zPopMaxCount: typeof ZPOPMAX_COUNT; + ZPOPMAX: typeof ZPOPMAX; + zPopMax: typeof ZPOPMAX; + ZPOPMIN_COUNT: typeof ZPOPMIN_COUNT; + zPopMinCount: typeof ZPOPMIN_COUNT; + ZPOPMIN: typeof ZPOPMIN; + zPopMin: typeof ZPOPMIN; + ZRANDMEMBER_COUNT_WITHSCORES: typeof ZRANDMEMBER_COUNT_WITHSCORES; + zRandMemberCountWithScores: typeof ZRANDMEMBER_COUNT_WITHSCORES; + ZRANDMEMBER_COUNT: typeof ZRANDMEMBER_COUNT; + zRandMemberCount: typeof ZRANDMEMBER_COUNT; + ZRANDMEMBER: typeof ZRANDMEMBER; + zRandMember: typeof ZRANDMEMBER; + ZRANGE_WITHSCORES: typeof ZRANGE_WITHSCORES; + zRangeWithScores: typeof ZRANGE_WITHSCORES; + ZRANGE: typeof ZRANGE; + zRange: typeof ZRANGE; + ZRANGEBYLEX: typeof ZRANGEBYLEX; + zRangeByLex: typeof ZRANGEBYLEX; + ZRANGEBYSCORE_WITHSCORES: typeof ZRANGEBYSCORE_WITHSCORES; 
+ zRangeByScoreWithScores: typeof ZRANGEBYSCORE_WITHSCORES; + ZRANGEBYSCORE: typeof ZRANGEBYSCORE; + zRangeByScore: typeof ZRANGEBYSCORE; + ZRANGESTORE: typeof ZRANGESTORE; + zRangeStore: typeof ZRANGESTORE; + ZRANK: typeof ZRANK; + zRank: typeof ZRANK; + ZREM: typeof ZREM; + zRem: typeof ZREM; + ZREMRANGEBYLEX: typeof ZREMRANGEBYLEX; + zRemRangeByLex: typeof ZREMRANGEBYLEX; + ZREMRANGEBYRANK: typeof ZREMRANGEBYRANK; + zRemRangeByRank: typeof ZREMRANGEBYRANK; + ZREMRANGEBYSCORE: typeof ZREMRANGEBYSCORE; + zRemRangeByScore: typeof ZREMRANGEBYSCORE; + ZREVRANK: typeof ZREVRANK; + zRevRank: typeof ZREVRANK; + ZSCAN: typeof ZSCAN; + zScan: typeof ZSCAN; + ZSCORE: typeof ZSCORE; + zScore: typeof ZSCORE; + ZUNION_WITHSCORES: typeof ZUNION_WITHSCORES; + zUnionWithScores: typeof ZUNION_WITHSCORES; + ZUNION: typeof ZUNION; + zUnion: typeof ZUNION; + ZUNIONSTORE: typeof ZUNIONSTORE; + zUnionStore: typeof ZUNIONSTORE; +}; +export default _default; diff --git a/node_modules/@redis/client/dist/lib/cluster/commands.js b/node_modules/@redis/client/dist/lib/cluster/commands.js new file mode 100644 index 0000000..76719ed --- /dev/null +++ b/node_modules/@redis/client/dist/lib/cluster/commands.js @@ -0,0 +1,670 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const APPEND = require("../commands/APPEND"); +const BITCOUNT = require("../commands/BITCOUNT"); +const BITFIELD_RO = require("../commands/BITFIELD_RO"); +const BITFIELD = require("../commands/BITFIELD"); +const BITOP = require("../commands/BITOP"); +const BITPOS = require("../commands/BITPOS"); +const BLMOVE = require("../commands/BLMOVE"); +const BLMPOP = require("../commands/BLMPOP"); +const BLPOP = require("../commands/BLPOP"); +const BRPOP = require("../commands/BRPOP"); +const BRPOPLPUSH = require("../commands/BRPOPLPUSH"); +const BZMPOP = require("../commands/BZMPOP"); +const BZPOPMAX = require("../commands/BZPOPMAX"); +const BZPOPMIN = require("../commands/BZPOPMIN"); +const COPY = 
require("../commands/COPY"); +const DECR = require("../commands/DECR"); +const DECRBY = require("../commands/DECRBY"); +const DEL = require("../commands/DEL"); +const DUMP = require("../commands/DUMP"); +const EVAL_RO = require("../commands/EVAL_RO"); +const EVAL = require("../commands/EVAL"); +const EVALSHA_RO = require("../commands/EVALSHA_RO"); +const EVALSHA = require("../commands/EVALSHA"); +const EXISTS = require("../commands/EXISTS"); +const EXPIRE = require("../commands/EXPIRE"); +const EXPIREAT = require("../commands/EXPIREAT"); +const EXPIRETIME = require("../commands/EXPIRETIME"); +const FCALL_RO = require("../commands/FCALL_RO"); +const FCALL = require("../commands/FCALL"); +const GEOADD = require("../commands/GEOADD"); +const GEODIST = require("../commands/GEODIST"); +const GEOHASH = require("../commands/GEOHASH"); +const GEOPOS = require("../commands/GEOPOS"); +const GEORADIUS_RO_WITH = require("../commands/GEORADIUS_RO_WITH"); +const GEORADIUS_RO = require("../commands/GEORADIUS_RO"); +const GEORADIUS_WITH = require("../commands/GEORADIUS_WITH"); +const GEORADIUS = require("../commands/GEORADIUS"); +const GEORADIUSBYMEMBER_RO_WITH = require("../commands/GEORADIUSBYMEMBER_RO_WITH"); +const GEORADIUSBYMEMBER_RO = require("../commands/GEORADIUSBYMEMBER_RO"); +const GEORADIUSBYMEMBER_WITH = require("../commands/GEORADIUSBYMEMBER_WITH"); +const GEORADIUSBYMEMBER = require("../commands/GEORADIUSBYMEMBER"); +const GEORADIUSBYMEMBERSTORE = require("../commands/GEORADIUSBYMEMBERSTORE"); +const GEORADIUSSTORE = require("../commands/GEORADIUSSTORE"); +const GEOSEARCH_WITH = require("../commands/GEOSEARCH_WITH"); +const GEOSEARCH = require("../commands/GEOSEARCH"); +const GEOSEARCHSTORE = require("../commands/GEOSEARCHSTORE"); +const GET = require("../commands/GET"); +const GETBIT = require("../commands/GETBIT"); +const GETDEL = require("../commands/GETDEL"); +const GETEX = require("../commands/GETEX"); +const GETRANGE = require("../commands/GETRANGE"); +const 
GETSET = require("../commands/GETSET"); +const HDEL = require("../commands/HDEL"); +const HEXISTS = require("../commands/HEXISTS"); +const HEXPIRE = require("../commands/HEXPIRE"); +const HEXPIREAT = require("../commands/HEXPIREAT"); +const HEXPIRETIME = require("../commands/HEXPIRETIME"); +const HGET = require("../commands/HGET"); +const HGETALL = require("../commands/HGETALL"); +const HINCRBY = require("../commands/HINCRBY"); +const HINCRBYFLOAT = require("../commands/HINCRBYFLOAT"); +const HKEYS = require("../commands/HKEYS"); +const HLEN = require("../commands/HLEN"); +const HMGET = require("../commands/HMGET"); +const HPERSIST = require("../commands/HPERSIST"); +const HPEXPIRE = require("../commands/HPEXPIRE"); +const HPEXPIREAT = require("../commands/HPEXPIREAT"); +const HPEXPIRETIME = require("../commands/HPEXPIRETIME"); +const HPTTL = require("../commands/HPTTL"); +const HRANDFIELD_COUNT_WITHVALUES = require("../commands/HRANDFIELD_COUNT_WITHVALUES"); +const HRANDFIELD_COUNT = require("../commands/HRANDFIELD_COUNT"); +const HRANDFIELD = require("../commands/HRANDFIELD"); +const HSCAN = require("../commands/HSCAN"); +const HSCAN_NOVALUES = require("../commands/HSCAN_NOVALUES"); +const HSET = require("../commands/HSET"); +const HSETNX = require("../commands/HSETNX"); +const HSTRLEN = require("../commands/HSTRLEN"); +const HTTL = require("../commands/HTTL"); +const HVALS = require("../commands/HVALS"); +const INCR = require("../commands/INCR"); +const INCRBY = require("../commands/INCRBY"); +const INCRBYFLOAT = require("../commands/INCRBYFLOAT"); +const LCS_IDX_WITHMATCHLEN = require("../commands/LCS_IDX_WITHMATCHLEN"); +const LCS_IDX = require("../commands/LCS_IDX"); +const LCS_LEN = require("../commands/LCS_LEN"); +const LCS = require("../commands/LCS"); +const LINDEX = require("../commands/LINDEX"); +const LINSERT = require("../commands/LINSERT"); +const LLEN = require("../commands/LLEN"); +const LMOVE = require("../commands/LMOVE"); +const LMPOP = 
require("../commands/LMPOP"); +const LPOP_COUNT = require("../commands/LPOP_COUNT"); +const LPOP = require("../commands/LPOP"); +const LPOS_COUNT = require("../commands/LPOS_COUNT"); +const LPOS = require("../commands/LPOS"); +const LPUSH = require("../commands/LPUSH"); +const LPUSHX = require("../commands/LPUSHX"); +const LRANGE = require("../commands/LRANGE"); +const LREM = require("../commands/LREM"); +const LSET = require("../commands/LSET"); +const LTRIM = require("../commands/LTRIM"); +const MGET = require("../commands/MGET"); +const MIGRATE = require("../commands/MIGRATE"); +const MSET = require("../commands/MSET"); +const MSETNX = require("../commands/MSETNX"); +const OBJECT_ENCODING = require("../commands/OBJECT_ENCODING"); +const OBJECT_FREQ = require("../commands/OBJECT_FREQ"); +const OBJECT_IDLETIME = require("../commands/OBJECT_IDLETIME"); +const OBJECT_REFCOUNT = require("../commands/OBJECT_REFCOUNT"); +const PERSIST = require("../commands/PERSIST"); +const PEXPIRE = require("../commands/PEXPIRE"); +const PEXPIREAT = require("../commands/PEXPIREAT"); +const PEXPIRETIME = require("../commands/PEXPIRETIME"); +const PFADD = require("../commands/PFADD"); +const PFCOUNT = require("../commands/PFCOUNT"); +const PFMERGE = require("../commands/PFMERGE"); +const PSETEX = require("../commands/PSETEX"); +const PTTL = require("../commands/PTTL"); +const PUBLISH = require("../commands/PUBLISH"); +const RENAME = require("../commands/RENAME"); +const RENAMENX = require("../commands/RENAMENX"); +const RESTORE = require("../commands/RESTORE"); +const RPOP_COUNT = require("../commands/RPOP_COUNT"); +const RPOP = require("../commands/RPOP"); +const RPOPLPUSH = require("../commands/RPOPLPUSH"); +const RPUSH = require("../commands/RPUSH"); +const RPUSHX = require("../commands/RPUSHX"); +const SADD = require("../commands/SADD"); +const SCARD = require("../commands/SCARD"); +const SDIFF = require("../commands/SDIFF"); +const SDIFFSTORE = require("../commands/SDIFFSTORE"); 
+const SET = require("../commands/SET"); +const SETBIT = require("../commands/SETBIT"); +const SETEX = require("../commands/SETEX"); +const SETNX = require("../commands/SETNX"); +const SETRANGE = require("../commands/SETRANGE"); +const SINTER = require("../commands/SINTER"); +const SINTERCARD = require("../commands/SINTERCARD"); +const SINTERSTORE = require("../commands/SINTERSTORE"); +const SISMEMBER = require("../commands/SISMEMBER"); +const SMEMBERS = require("../commands/SMEMBERS"); +const SMISMEMBER = require("../commands/SMISMEMBER"); +const SMOVE = require("../commands/SMOVE"); +const SORT_RO = require("../commands/SORT_RO"); +const SORT_STORE = require("../commands/SORT_STORE"); +const SORT = require("../commands/SORT"); +const SPOP = require("../commands/SPOP"); +const SPUBLISH = require("../commands/SPUBLISH"); +const SRANDMEMBER_COUNT = require("../commands/SRANDMEMBER_COUNT"); +const SRANDMEMBER = require("../commands/SRANDMEMBER"); +const SREM = require("../commands/SREM"); +const SSCAN = require("../commands/SSCAN"); +const STRLEN = require("../commands/STRLEN"); +const SUNION = require("../commands/SUNION"); +const SUNIONSTORE = require("../commands/SUNIONSTORE"); +const TOUCH = require("../commands/TOUCH"); +const TTL = require("../commands/TTL"); +const TYPE = require("../commands/TYPE"); +const UNLINK = require("../commands/UNLINK"); +const WATCH = require("../commands/WATCH"); +const XACK = require("../commands/XACK"); +const XADD = require("../commands/XADD"); +const XAUTOCLAIM_JUSTID = require("../commands/XAUTOCLAIM_JUSTID"); +const XAUTOCLAIM = require("../commands/XAUTOCLAIM"); +const XCLAIM_JUSTID = require("../commands/XCLAIM_JUSTID"); +const XCLAIM = require("../commands/XCLAIM"); +const XDEL = require("../commands/XDEL"); +const XGROUP_CREATE = require("../commands/XGROUP_CREATE"); +const XGROUP_CREATECONSUMER = require("../commands/XGROUP_CREATECONSUMER"); +const XGROUP_DELCONSUMER = require("../commands/XGROUP_DELCONSUMER"); +const 
XGROUP_DESTROY = require("../commands/XGROUP_DESTROY"); +const XGROUP_SETID = require("../commands/XGROUP_SETID"); +const XINFO_CONSUMERS = require("../commands/XINFO_CONSUMERS"); +const XINFO_GROUPS = require("../commands/XINFO_GROUPS"); +const XINFO_STREAM = require("../commands/XINFO_STREAM"); +const XLEN = require("../commands/XLEN"); +const XPENDING_RANGE = require("../commands/XPENDING_RANGE"); +const XPENDING = require("../commands/XPENDING"); +const XRANGE = require("../commands/XRANGE"); +const XREAD = require("../commands/XREAD"); +const XREADGROUP = require("../commands/XREADGROUP"); +const XREVRANGE = require("../commands/XREVRANGE"); +const XSETID = require("../commands/XSETID"); +const XTRIM = require("../commands/XTRIM"); +const ZADD = require("../commands/ZADD"); +const ZCARD = require("../commands/ZCARD"); +const ZCOUNT = require("../commands/ZCOUNT"); +const ZDIFF_WITHSCORES = require("../commands/ZDIFF_WITHSCORES"); +const ZDIFF = require("../commands/ZDIFF"); +const ZDIFFSTORE = require("../commands/ZDIFFSTORE"); +const ZINCRBY = require("../commands/ZINCRBY"); +const ZINTER_WITHSCORES = require("../commands/ZINTER_WITHSCORES"); +const ZINTER = require("../commands/ZINTER"); +const ZINTERCARD = require("../commands/ZINTERCARD"); +const ZINTERSTORE = require("../commands/ZINTERSTORE"); +const ZLEXCOUNT = require("../commands/ZLEXCOUNT"); +const ZMPOP = require("../commands/ZMPOP"); +const ZMSCORE = require("../commands/ZMSCORE"); +const ZPOPMAX_COUNT = require("../commands/ZPOPMAX_COUNT"); +const ZPOPMAX = require("../commands/ZPOPMAX"); +const ZPOPMIN_COUNT = require("../commands/ZPOPMIN_COUNT"); +const ZPOPMIN = require("../commands/ZPOPMIN"); +const ZRANDMEMBER_COUNT_WITHSCORES = require("../commands/ZRANDMEMBER_COUNT_WITHSCORES"); +const ZRANDMEMBER_COUNT = require("../commands/ZRANDMEMBER_COUNT"); +const ZRANDMEMBER = require("../commands/ZRANDMEMBER"); +const ZRANGE_WITHSCORES = require("../commands/ZRANGE_WITHSCORES"); +const ZRANGE = 
require("../commands/ZRANGE"); +const ZRANGEBYLEX = require("../commands/ZRANGEBYLEX"); +const ZRANGEBYSCORE_WITHSCORES = require("../commands/ZRANGEBYSCORE_WITHSCORES"); +const ZRANGEBYSCORE = require("../commands/ZRANGEBYSCORE"); +const ZRANGESTORE = require("../commands/ZRANGESTORE"); +const ZRANK = require("../commands/ZRANK"); +const ZREM = require("../commands/ZREM"); +const ZREMRANGEBYLEX = require("../commands/ZREMRANGEBYLEX"); +const ZREMRANGEBYRANK = require("../commands/ZREMRANGEBYRANK"); +const ZREMRANGEBYSCORE = require("../commands/ZREMRANGEBYSCORE"); +const ZREVRANK = require("../commands/ZREVRANK"); +const ZSCAN = require("../commands/ZSCAN"); +const ZSCORE = require("../commands/ZSCORE"); +const ZUNION_WITHSCORES = require("../commands/ZUNION_WITHSCORES"); +const ZUNION = require("../commands/ZUNION"); +const ZUNIONSTORE = require("../commands/ZUNIONSTORE"); +exports.default = { + APPEND, + append: APPEND, + BITCOUNT, + bitCount: BITCOUNT, + BITFIELD_RO, + bitFieldRo: BITFIELD_RO, + BITFIELD, + bitField: BITFIELD, + BITOP, + bitOp: BITOP, + BITPOS, + bitPos: BITPOS, + BLMOVE, + blMove: BLMOVE, + BLMPOP, + blmPop: BLMPOP, + BLPOP, + blPop: BLPOP, + BRPOP, + brPop: BRPOP, + BRPOPLPUSH, + brPopLPush: BRPOPLPUSH, + BZMPOP, + bzmPop: BZMPOP, + BZPOPMAX, + bzPopMax: BZPOPMAX, + BZPOPMIN, + bzPopMin: BZPOPMIN, + COPY, + copy: COPY, + DECR, + decr: DECR, + DECRBY, + decrBy: DECRBY, + DEL, + del: DEL, + DUMP, + dump: DUMP, + EVAL_RO, + evalRo: EVAL_RO, + EVAL, + eval: EVAL, + EVALSHA, + evalSha: EVALSHA, + EVALSHA_RO, + evalShaRo: EVALSHA_RO, + EXISTS, + exists: EXISTS, + EXPIRE, + expire: EXPIRE, + EXPIREAT, + expireAt: EXPIREAT, + EXPIRETIME, + expireTime: EXPIRETIME, + FCALL_RO, + fCallRo: FCALL_RO, + FCALL, + fCall: FCALL, + GEOADD, + geoAdd: GEOADD, + GEODIST, + geoDist: GEODIST, + GEOHASH, + geoHash: GEOHASH, + GEOPOS, + geoPos: GEOPOS, + GEORADIUS_RO_WITH, + geoRadiusRoWith: GEORADIUS_RO_WITH, + GEORADIUS_RO, + geoRadiusRo: GEORADIUS_RO, + 
GEORADIUS_WITH, + geoRadiusWith: GEORADIUS_WITH, + GEORADIUS, + geoRadius: GEORADIUS, + GEORADIUSBYMEMBER_RO_WITH, + geoRadiusByMemberRoWith: GEORADIUSBYMEMBER_RO_WITH, + GEORADIUSBYMEMBER_RO, + geoRadiusByMemberRo: GEORADIUSBYMEMBER_RO, + GEORADIUSBYMEMBER_WITH, + geoRadiusByMemberWith: GEORADIUSBYMEMBER_WITH, + GEORADIUSBYMEMBER, + geoRadiusByMember: GEORADIUSBYMEMBER, + GEORADIUSBYMEMBERSTORE, + geoRadiusByMemberStore: GEORADIUSBYMEMBERSTORE, + GEORADIUSSTORE, + geoRadiusStore: GEORADIUSSTORE, + GEOSEARCH_WITH, + geoSearchWith: GEOSEARCH_WITH, + GEOSEARCH, + geoSearch: GEOSEARCH, + GEOSEARCHSTORE, + geoSearchStore: GEOSEARCHSTORE, + GET, + get: GET, + GETBIT, + getBit: GETBIT, + GETDEL, + getDel: GETDEL, + GETEX, + getEx: GETEX, + GETRANGE, + getRange: GETRANGE, + GETSET, + getSet: GETSET, + HDEL, + hDel: HDEL, + HEXISTS, + hExists: HEXISTS, + HEXPIRE, + hExpire: HEXPIRE, + HEXPIREAT, + hExpireAt: HEXPIREAT, + HEXPIRETIME, + hExpireTime: HEXPIRETIME, + HGET, + hGet: HGET, + HGETALL, + hGetAll: HGETALL, + HINCRBY, + hIncrBy: HINCRBY, + HINCRBYFLOAT, + hIncrByFloat: HINCRBYFLOAT, + HKEYS, + hKeys: HKEYS, + HLEN, + hLen: HLEN, + HMGET, + hmGet: HMGET, + HPERSIST, + hPersist: HPERSIST, + HPEXPIRE, + hpExpire: HPEXPIRE, + HPEXPIREAT, + hpExpireAt: HPEXPIREAT, + HPEXPIRETIME, + hpExpireTime: HPEXPIRETIME, + HPTTL, + hpTTL: HPTTL, + HRANDFIELD_COUNT_WITHVALUES, + hRandFieldCountWithValues: HRANDFIELD_COUNT_WITHVALUES, + HRANDFIELD_COUNT, + hRandFieldCount: HRANDFIELD_COUNT, + HRANDFIELD, + hRandField: HRANDFIELD, + HSCAN, + hScan: HSCAN, + HSCAN_NOVALUES, + hScanNoValues: HSCAN_NOVALUES, + HSET, + hSet: HSET, + HSETNX, + hSetNX: HSETNX, + HSTRLEN, + hStrLen: HSTRLEN, + HTTL, + hTTL: HTTL, + HVALS, + hVals: HVALS, + INCR, + incr: INCR, + INCRBY, + incrBy: INCRBY, + INCRBYFLOAT, + incrByFloat: INCRBYFLOAT, + LCS_IDX_WITHMATCHLEN, + lcsIdxWithMatchLen: LCS_IDX_WITHMATCHLEN, + LCS_IDX, + lcsIdx: LCS_IDX, + LCS_LEN, + lcsLen: LCS_LEN, + LCS, + lcs: LCS, + LINDEX, + lIndex: 
LINDEX, + LINSERT, + lInsert: LINSERT, + LLEN, + lLen: LLEN, + LMOVE, + lMove: LMOVE, + LMPOP, + lmPop: LMPOP, + LPOP_COUNT, + lPopCount: LPOP_COUNT, + LPOP, + lPop: LPOP, + LPOS_COUNT, + lPosCount: LPOS_COUNT, + LPOS, + lPos: LPOS, + LPUSH, + lPush: LPUSH, + LPUSHX, + lPushX: LPUSHX, + LRANGE, + lRange: LRANGE, + LREM, + lRem: LREM, + LSET, + lSet: LSET, + LTRIM, + lTrim: LTRIM, + MGET, + mGet: MGET, + MIGRATE, + migrate: MIGRATE, + MSET, + mSet: MSET, + MSETNX, + mSetNX: MSETNX, + OBJECT_ENCODING, + objectEncoding: OBJECT_ENCODING, + OBJECT_FREQ, + objectFreq: OBJECT_FREQ, + OBJECT_IDLETIME, + objectIdleTime: OBJECT_IDLETIME, + OBJECT_REFCOUNT, + objectRefCount: OBJECT_REFCOUNT, + PERSIST, + persist: PERSIST, + PEXPIRE, + pExpire: PEXPIRE, + PEXPIREAT, + pExpireAt: PEXPIREAT, + PEXPIRETIME, + pExpireTime: PEXPIRETIME, + PFADD, + pfAdd: PFADD, + PFCOUNT, + pfCount: PFCOUNT, + PFMERGE, + pfMerge: PFMERGE, + PSETEX, + pSetEx: PSETEX, + PTTL, + pTTL: PTTL, + PUBLISH, + publish: PUBLISH, + RENAME, + rename: RENAME, + RENAMENX, + renameNX: RENAMENX, + RESTORE, + restore: RESTORE, + RPOP_COUNT, + rPopCount: RPOP_COUNT, + RPOP, + rPop: RPOP, + RPOPLPUSH, + rPopLPush: RPOPLPUSH, + RPUSH, + rPush: RPUSH, + RPUSHX, + rPushX: RPUSHX, + SADD, + sAdd: SADD, + SCARD, + sCard: SCARD, + SDIFF, + sDiff: SDIFF, + SDIFFSTORE, + sDiffStore: SDIFFSTORE, + SINTER, + sInter: SINTER, + SINTERCARD, + sInterCard: SINTERCARD, + SINTERSTORE, + sInterStore: SINTERSTORE, + SET, + set: SET, + SETBIT, + setBit: SETBIT, + SETEX, + setEx: SETEX, + SETNX, + setNX: SETNX, + SETRANGE, + setRange: SETRANGE, + SISMEMBER, + sIsMember: SISMEMBER, + SMEMBERS, + sMembers: SMEMBERS, + SMISMEMBER, + smIsMember: SMISMEMBER, + SMOVE, + sMove: SMOVE, + SORT_RO, + sortRo: SORT_RO, + SORT_STORE, + sortStore: SORT_STORE, + SORT, + sort: SORT, + SPOP, + sPop: SPOP, + SPUBLISH, + sPublish: SPUBLISH, + SRANDMEMBER_COUNT, + sRandMemberCount: SRANDMEMBER_COUNT, + SRANDMEMBER, + sRandMember: SRANDMEMBER, + SREM, + sRem: 
SREM, + SSCAN, + sScan: SSCAN, + STRLEN, + strLen: STRLEN, + SUNION, + sUnion: SUNION, + SUNIONSTORE, + sUnionStore: SUNIONSTORE, + TOUCH, + touch: TOUCH, + TTL, + ttl: TTL, + TYPE, + type: TYPE, + UNLINK, + unlink: UNLINK, + WATCH, + watch: WATCH, + XACK, + xAck: XACK, + XADD, + xAdd: XADD, + XAUTOCLAIM_JUSTID, + xAutoClaimJustId: XAUTOCLAIM_JUSTID, + XAUTOCLAIM, + xAutoClaim: XAUTOCLAIM, + XCLAIM, + xClaim: XCLAIM, + XCLAIM_JUSTID, + xClaimJustId: XCLAIM_JUSTID, + XDEL, + xDel: XDEL, + XGROUP_CREATE, + xGroupCreate: XGROUP_CREATE, + XGROUP_CREATECONSUMER, + xGroupCreateConsumer: XGROUP_CREATECONSUMER, + XGROUP_DELCONSUMER, + xGroupDelConsumer: XGROUP_DELCONSUMER, + XGROUP_DESTROY, + xGroupDestroy: XGROUP_DESTROY, + XGROUP_SETID, + xGroupSetId: XGROUP_SETID, + XINFO_CONSUMERS, + xInfoConsumers: XINFO_CONSUMERS, + XINFO_GROUPS, + xInfoGroups: XINFO_GROUPS, + XINFO_STREAM, + xInfoStream: XINFO_STREAM, + XLEN, + xLen: XLEN, + XPENDING_RANGE, + xPendingRange: XPENDING_RANGE, + XPENDING, + xPending: XPENDING, + XRANGE, + xRange: XRANGE, + XREAD, + xRead: XREAD, + XREADGROUP, + xReadGroup: XREADGROUP, + XREVRANGE, + xRevRange: XREVRANGE, + XSETID, + xSetId: XSETID, + XTRIM, + xTrim: XTRIM, + ZADD, + zAdd: ZADD, + ZCARD, + zCard: ZCARD, + ZCOUNT, + zCount: ZCOUNT, + ZDIFF_WITHSCORES, + zDiffWithScores: ZDIFF_WITHSCORES, + ZDIFF, + zDiff: ZDIFF, + ZDIFFSTORE, + zDiffStore: ZDIFFSTORE, + ZINCRBY, + zIncrBy: ZINCRBY, + ZINTER_WITHSCORES, + zInterWithScores: ZINTER_WITHSCORES, + ZINTER, + zInter: ZINTER, + ZINTERCARD, + zInterCard: ZINTERCARD, + ZINTERSTORE, + zInterStore: ZINTERSTORE, + ZLEXCOUNT, + zLexCount: ZLEXCOUNT, + ZMPOP, + zmPop: ZMPOP, + ZMSCORE, + zmScore: ZMSCORE, + ZPOPMAX_COUNT, + zPopMaxCount: ZPOPMAX_COUNT, + ZPOPMAX, + zPopMax: ZPOPMAX, + ZPOPMIN_COUNT, + zPopMinCount: ZPOPMIN_COUNT, + ZPOPMIN, + zPopMin: ZPOPMIN, + ZRANDMEMBER_COUNT_WITHSCORES, + zRandMemberCountWithScores: ZRANDMEMBER_COUNT_WITHSCORES, + ZRANDMEMBER_COUNT, + zRandMemberCount: 
ZRANDMEMBER_COUNT, + ZRANDMEMBER, + zRandMember: ZRANDMEMBER, + ZRANGE_WITHSCORES, + zRangeWithScores: ZRANGE_WITHSCORES, + ZRANGE, + zRange: ZRANGE, + ZRANGEBYLEX, + zRangeByLex: ZRANGEBYLEX, + ZRANGEBYSCORE_WITHSCORES, + zRangeByScoreWithScores: ZRANGEBYSCORE_WITHSCORES, + ZRANGEBYSCORE, + zRangeByScore: ZRANGEBYSCORE, + ZRANGESTORE, + zRangeStore: ZRANGESTORE, + ZRANK, + zRank: ZRANK, + ZREM, + zRem: ZREM, + ZREMRANGEBYLEX, + zRemRangeByLex: ZREMRANGEBYLEX, + ZREMRANGEBYRANK, + zRemRangeByRank: ZREMRANGEBYRANK, + ZREMRANGEBYSCORE, + zRemRangeByScore: ZREMRANGEBYSCORE, + ZREVRANK, + zRevRank: ZREVRANK, + ZSCAN, + zScan: ZSCAN, + ZSCORE, + zScore: ZSCORE, + ZUNION_WITHSCORES, + zUnionWithScores: ZUNION_WITHSCORES, + ZUNION, + zUnion: ZUNION, + ZUNIONSTORE, + zUnionStore: ZUNIONSTORE +}; diff --git a/node_modules/@redis/client/dist/lib/cluster/index.d.ts b/node_modules/@redis/client/dist/lib/cluster/index.d.ts new file mode 100644 index 0000000..195a30c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/cluster/index.d.ts @@ -0,0 +1,93 @@ +/// +import COMMANDS from './commands'; +import { RedisCommand, RedisCommandArgument, RedisCommandArguments, RedisCommandRawReply, RedisCommandReply, RedisFunctions, RedisModules, RedisExtensions, RedisScript, RedisScripts, RedisCommandSignature, RedisFunction } from '../commands'; +import { ClientCommandOptions, RedisClientOptions, RedisClientType, WithFunctions, WithModules, WithScripts } from '../client'; +import { NodeAddressMap, ShardNode } from './cluster-slots'; +import { EventEmitter } from 'events'; +import { RedisClusterMultiCommandType } from './multi-command'; +import { PubSubListener } from '../client/pub-sub'; +export type RedisClusterClientOptions = Omit; +export interface RedisClusterOptions, F extends RedisFunctions = Record, S extends RedisScripts = Record> extends RedisExtensions { + /** + * Should contain details for some of the cluster nodes that the client will use to discover + * the "cluster 
topology". We recommend including details for at least 3 nodes here. + */ + rootNodes: Array; + /** + * Default values used for every client in the cluster. Use this to specify global values, + * for example: ACL credentials, timeouts, TLS configuration etc. + */ + defaults?: Partial; + /** + * When `true`, `.connect()` will only discover the cluster topology, without actually connecting to all the nodes. + * Useful for short-term or PubSub-only connections. + */ + minimizeConnections?: boolean; + /** + * When `true`, distribute load by executing readonly commands (such as `GET`, `GEOSEARCH`, etc.) across all cluster nodes. When `false`, only use master nodes. + */ + useReplicas?: boolean; + /** + * The maximum number of times a command will be redirected due to `MOVED` or `ASK` errors. + */ + maxCommandRedirections?: number; + /** + * Mapping between the addresses in the cluster (see `CLUSTER SHARDS`) and the addresses the client should connect to + * Useful when the cluster is running on another network + * + */ + nodeAddressMap?: NodeAddressMap; +} +type WithCommands = { + [P in keyof typeof COMMANDS]: RedisCommandSignature<(typeof COMMANDS)[P]>; +}; +export type RedisClusterType, F extends RedisFunctions = Record, S extends RedisScripts = Record> = RedisCluster & WithCommands & WithModules & WithFunctions & WithScripts; +export default class RedisCluster extends EventEmitter { + #private; + static extractFirstKey(command: RedisCommand, originalArgs: Array, redisArgs: RedisCommandArguments): RedisCommandArgument | undefined; + static create(options?: RedisClusterOptions): RedisClusterType; + get slots(): import("./cluster-slots").Shard[]; + get shards(): import("./cluster-slots").Shard[]; + get masters(): ShardNode[]; + get replicas(): ShardNode[]; + get nodeByAddress(): Map | import("./cluster-slots").MasterNode>; + get pubSubNode(): Required> | undefined; + get isOpen(): boolean; + constructor(options: RedisClusterOptions); + duplicate(overrides?: Partial>): 
RedisClusterType; + connect(): Promise; + commandsExecutor(command: C, args: Array): Promise>; + sendCommand(firstKey: RedisCommandArgument | undefined, isReadonly: boolean | undefined, args: RedisCommandArguments, options?: ClientCommandOptions): Promise; + functionsExecutor(fn: F, args: Array, name: string): Promise>; + executeFunction(name: string, fn: RedisFunction, originalArgs: Array, redisArgs: RedisCommandArguments, options?: ClientCommandOptions): Promise; + scriptsExecutor(script: S, args: Array): Promise>; + executeScript(script: RedisScript, originalArgs: Array, redisArgs: RedisCommandArguments, options?: ClientCommandOptions): Promise; + MULTI(routing?: RedisCommandArgument): RedisClusterMultiCommandType; + multi: (routing?: RedisCommandArgument) => RedisClusterMultiCommandType; + SUBSCRIBE(channels: string | Array, listener: PubSubListener, bufferMode?: T): Promise; + subscribe: (channels: string | Array, listener: PubSubListener, bufferMode?: T | undefined) => Promise; + UNSUBSCRIBE(channels?: string | Array, listener?: PubSubListener, bufferMode?: T): Promise; + unsubscribe: (channels?: string | Array, listener?: PubSubListener, bufferMode?: T | undefined) => Promise; + PSUBSCRIBE(patterns: string | Array, listener: PubSubListener, bufferMode?: T): Promise; + pSubscribe: (patterns: string | Array, listener: PubSubListener, bufferMode?: T | undefined) => Promise; + PUNSUBSCRIBE(patterns?: string | Array, listener?: PubSubListener, bufferMode?: T): Promise; + pUnsubscribe: (patterns?: string | Array, listener?: PubSubListener | undefined, bufferMode?: T | undefined) => Promise; + SSUBSCRIBE(channels: string | Array, listener: PubSubListener, bufferMode?: T): Promise; + sSubscribe: (channels: string | Array, listener: PubSubListener, bufferMode?: T | undefined) => Promise; + SUNSUBSCRIBE(channels: string | Array, listener?: PubSubListener, bufferMode?: T): Promise; + sUnsubscribe: (channels: string | Array, listener?: PubSubListener | undefined, 
bufferMode?: T | undefined) => Promise; + quit(): Promise; + disconnect(): Promise; + nodeClient(node: ShardNode): RedisClientType | Promise>; + getRandomNode(): ShardNode; + getSlotRandomNode(slot: number): ShardNode; + /** + * @deprecated use `.masters` instead + */ + getMasters(): ShardNode[]; + /** + * @deprecated use `.slots[]` instead + */ + getSlotMaster(slot: number): import("./cluster-slots").MasterNode; +} +export {}; diff --git a/node_modules/@redis/client/dist/lib/cluster/index.js b/node_modules/@redis/client/dist/lib/cluster/index.js new file mode 100644 index 0000000..826aa75 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/cluster/index.js @@ -0,0 +1,254 @@ +"use strict"; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var _RedisCluster_instances, _RedisCluster_options, _RedisCluster_slots, _RedisCluster_Multi, _RedisCluster_execute; +Object.defineProperty(exports, "__esModule", { value: true }); +const commands_1 = require("./commands"); +const cluster_slots_1 = require("./cluster-slots"); +const commander_1 = require("../commander"); +const events_1 = require("events"); +const multi_command_1 = require("./multi-command"); +const errors_1 = require("../errors"); +class RedisCluster extends events_1.EventEmitter { + static extractFirstKey(command, originalArgs, redisArgs) { + if (command.FIRST_KEY_INDEX === undefined) { + return undefined; + } + else if (typeof command.FIRST_KEY_INDEX === 'number') { + return redisArgs[command.FIRST_KEY_INDEX]; + } + return command.FIRST_KEY_INDEX(...originalArgs); + } + static create(options) { + return new ((0, commander_1.attachExtensions)({ + BaseClass: RedisCluster, + modulesExecutor: RedisCluster.prototype.commandsExecutor, + modules: options?.modules, + functionsExecutor: RedisCluster.prototype.functionsExecutor, + functions: options?.functions, + scriptsExecutor: RedisCluster.prototype.scriptsExecutor, + scripts: options?.scripts + }))(options); + } + get slots() { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").slots; + } + get shards() { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").shards; + } + get masters() { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").masters; + } + get replicas() { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").replicas; + } + get nodeByAddress() { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").nodeByAddress; + } + get pubSubNode() { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").pubSubNode; + } + get isOpen() { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").isOpen; + } + constructor(options) { + super(); + 
_RedisCluster_instances.add(this); + _RedisCluster_options.set(this, void 0); + _RedisCluster_slots.set(this, void 0); + _RedisCluster_Multi.set(this, void 0); + Object.defineProperty(this, "multi", { + enumerable: true, + configurable: true, + writable: true, + value: this.MULTI + }); + Object.defineProperty(this, "subscribe", { + enumerable: true, + configurable: true, + writable: true, + value: this.SUBSCRIBE + }); + Object.defineProperty(this, "unsubscribe", { + enumerable: true, + configurable: true, + writable: true, + value: this.UNSUBSCRIBE + }); + Object.defineProperty(this, "pSubscribe", { + enumerable: true, + configurable: true, + writable: true, + value: this.PSUBSCRIBE + }); + Object.defineProperty(this, "pUnsubscribe", { + enumerable: true, + configurable: true, + writable: true, + value: this.PUNSUBSCRIBE + }); + Object.defineProperty(this, "sSubscribe", { + enumerable: true, + configurable: true, + writable: true, + value: this.SSUBSCRIBE + }); + Object.defineProperty(this, "sUnsubscribe", { + enumerable: true, + configurable: true, + writable: true, + value: this.SUNSUBSCRIBE + }); + __classPrivateFieldSet(this, _RedisCluster_options, options, "f"); + __classPrivateFieldSet(this, _RedisCluster_slots, new cluster_slots_1.default(options, this.emit.bind(this)), "f"); + __classPrivateFieldSet(this, _RedisCluster_Multi, multi_command_1.default.extend(options), "f"); + } + duplicate(overrides) { + return new (Object.getPrototypeOf(this).constructor)({ + ...__classPrivateFieldGet(this, _RedisCluster_options, "f"), + ...overrides + }); + } + connect() { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").connect(); + } + async commandsExecutor(command, args) { + const { jsArgs, args: redisArgs, options } = (0, commander_1.transformCommandArguments)(command, args); + return (0, commander_1.transformCommandReply)(command, await this.sendCommand(RedisCluster.extractFirstKey(command, jsArgs, redisArgs), command.IS_READ_ONLY, redisArgs, options), 
redisArgs.preserve); + } + async sendCommand(firstKey, isReadonly, args, options) { + return __classPrivateFieldGet(this, _RedisCluster_instances, "m", _RedisCluster_execute).call(this, firstKey, isReadonly, client => client.sendCommand(args, options)); + } + async functionsExecutor(fn, args, name) { + const { args: redisArgs, options } = (0, commander_1.transformCommandArguments)(fn, args); + return (0, commander_1.transformCommandReply)(fn, await this.executeFunction(name, fn, args, redisArgs, options), redisArgs.preserve); + } + async executeFunction(name, fn, originalArgs, redisArgs, options) { + return __classPrivateFieldGet(this, _RedisCluster_instances, "m", _RedisCluster_execute).call(this, RedisCluster.extractFirstKey(fn, originalArgs, redisArgs), fn.IS_READ_ONLY, client => client.executeFunction(name, fn, redisArgs, options)); + } + async scriptsExecutor(script, args) { + const { args: redisArgs, options } = (0, commander_1.transformCommandArguments)(script, args); + return (0, commander_1.transformCommandReply)(script, await this.executeScript(script, args, redisArgs, options), redisArgs.preserve); + } + async executeScript(script, originalArgs, redisArgs, options) { + return __classPrivateFieldGet(this, _RedisCluster_instances, "m", _RedisCluster_execute).call(this, RedisCluster.extractFirstKey(script, originalArgs, redisArgs), script.IS_READ_ONLY, client => client.executeScript(script, redisArgs, options)); + } + MULTI(routing) { + return new (__classPrivateFieldGet(this, _RedisCluster_Multi, "f"))((commands, firstKey, chainId) => { + return __classPrivateFieldGet(this, _RedisCluster_instances, "m", _RedisCluster_execute).call(this, firstKey, false, client => client.multiExecutor(commands, undefined, chainId)); + }, routing); + } + async SUBSCRIBE(channels, listener, bufferMode) { + return (await __classPrivateFieldGet(this, _RedisCluster_slots, "f").getPubSubClient()) + .SUBSCRIBE(channels, listener, bufferMode); + } + async UNSUBSCRIBE(channels, 
listener, bufferMode) { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").executeUnsubscribeCommand(client => client.UNSUBSCRIBE(channels, listener, bufferMode)); + } + async PSUBSCRIBE(patterns, listener, bufferMode) { + return (await __classPrivateFieldGet(this, _RedisCluster_slots, "f").getPubSubClient()) + .PSUBSCRIBE(patterns, listener, bufferMode); + } + async PUNSUBSCRIBE(patterns, listener, bufferMode) { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").executeUnsubscribeCommand(client => client.PUNSUBSCRIBE(patterns, listener, bufferMode)); + } + async SSUBSCRIBE(channels, listener, bufferMode) { + const maxCommandRedirections = __classPrivateFieldGet(this, _RedisCluster_options, "f").maxCommandRedirections ?? 16, firstChannel = Array.isArray(channels) ? channels[0] : channels; + let client = await __classPrivateFieldGet(this, _RedisCluster_slots, "f").getShardedPubSubClient(firstChannel); + for (let i = 0;; i++) { + try { + return await client.SSUBSCRIBE(channels, listener, bufferMode); + } + catch (err) { + if (++i > maxCommandRedirections || !(err instanceof errors_1.ErrorReply)) { + throw err; + } + if (err.message.startsWith('MOVED')) { + await __classPrivateFieldGet(this, _RedisCluster_slots, "f").rediscover(client); + client = await __classPrivateFieldGet(this, _RedisCluster_slots, "f").getShardedPubSubClient(firstChannel); + continue; + } + throw err; + } + } + } + SUNSUBSCRIBE(channels, listener, bufferMode) { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").executeShardedUnsubscribeCommand(Array.isArray(channels) ? 
channels[0] : channels, client => client.SUNSUBSCRIBE(channels, listener, bufferMode)); + } + quit() { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").quit(); + } + disconnect() { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").disconnect(); + } + nodeClient(node) { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").nodeClient(node); + } + getRandomNode() { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").getRandomNode(); + } + getSlotRandomNode(slot) { + return __classPrivateFieldGet(this, _RedisCluster_slots, "f").getSlotRandomNode(slot); + } + /** + * @deprecated use `.masters` instead + */ + getMasters() { + return this.masters; + } + /** + * @deprecated use `.slots[]` instead + */ + getSlotMaster(slot) { + return this.slots[slot].master; + } +} +_RedisCluster_options = new WeakMap(), _RedisCluster_slots = new WeakMap(), _RedisCluster_Multi = new WeakMap(), _RedisCluster_instances = new WeakSet(), _RedisCluster_execute = async function _RedisCluster_execute(firstKey, isReadonly, executor) { + const maxCommandRedirections = __classPrivateFieldGet(this, _RedisCluster_options, "f").maxCommandRedirections ?? 
16; + let client = await __classPrivateFieldGet(this, _RedisCluster_slots, "f").getClient(firstKey, isReadonly); + for (let i = 0;; i++) { + try { + return await executor(client); + } + catch (err) { + if (++i > maxCommandRedirections || !(err instanceof errors_1.ErrorReply)) { + throw err; + } + if (err.message.startsWith('ASK')) { + const address = err.message.substring(err.message.lastIndexOf(' ') + 1); + let redirectTo = await __classPrivateFieldGet(this, _RedisCluster_slots, "f").getMasterByAddress(address); + if (!redirectTo) { + await __classPrivateFieldGet(this, _RedisCluster_slots, "f").rediscover(client); + redirectTo = await __classPrivateFieldGet(this, _RedisCluster_slots, "f").getMasterByAddress(address); + } + if (!redirectTo) { + throw new Error(`Cannot find node ${address}`); + } + await redirectTo.asking(); + client = redirectTo; + continue; + } + else if (err.message.startsWith('MOVED')) { + await __classPrivateFieldGet(this, _RedisCluster_slots, "f").rediscover(client); + client = await __classPrivateFieldGet(this, _RedisCluster_slots, "f").getClient(firstKey, isReadonly); + continue; + } + throw err; + } + } +}; +exports.default = RedisCluster; +(0, commander_1.attachCommands)({ + BaseClass: RedisCluster, + commands: commands_1.default, + executor: RedisCluster.prototype.commandsExecutor +}); diff --git a/node_modules/@redis/client/dist/lib/cluster/multi-command.d.ts b/node_modules/@redis/client/dist/lib/cluster/multi-command.d.ts new file mode 100644 index 0000000..84a02c9 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/cluster/multi-command.d.ts @@ -0,0 +1,36 @@ +import COMMANDS from './commands'; +import { RedisCommand, RedisCommandArgument, RedisCommandArguments, RedisCommandRawReply, RedisFunctions, RedisModules, RedisExtensions, RedisScript, RedisScripts, ExcludeMappedString, RedisFunction } from '../commands'; +import { RedisMultiQueuedCommand } from '../multi-command'; +type RedisClusterMultiCommandSignature = (...args: 
Parameters) => RedisClusterMultiCommandType; +type WithCommands = { + [P in keyof typeof COMMANDS]: RedisClusterMultiCommandSignature<(typeof COMMANDS)[P], M, F, S>; +}; +type WithModules = { + [P in keyof M as ExcludeMappedString

]: { + [C in keyof M[P] as ExcludeMappedString]: RedisClusterMultiCommandSignature; + }; +}; +type WithFunctions = { + [P in keyof F as ExcludeMappedString

]: { + [FF in keyof F[P] as ExcludeMappedString]: RedisClusterMultiCommandSignature; + }; +}; +type WithScripts = { + [P in keyof S as ExcludeMappedString

]: RedisClusterMultiCommandSignature; +}; +export type RedisClusterMultiCommandType = RedisClusterMultiCommand & WithCommands & WithModules & WithFunctions & WithScripts; +export type InstantiableRedisClusterMultiCommandType = new (...args: ConstructorParameters) => RedisClusterMultiCommandType; +export type RedisClusterMultiExecutor = (queue: Array, firstKey?: RedisCommandArgument, chainId?: symbol) => Promise>; +export default class RedisClusterMultiCommand { + #private; + static extend(extensions?: RedisExtensions): InstantiableRedisClusterMultiCommandType; + constructor(executor: RedisClusterMultiExecutor, firstKey?: RedisCommandArgument); + commandsExecutor(command: RedisCommand, args: Array): this; + addCommand(firstKey: RedisCommandArgument | undefined, args: RedisCommandArguments, transformReply?: RedisCommand['transformReply']): this; + functionsExecutor(fn: RedisFunction, args: Array, name: string): this; + scriptsExecutor(script: RedisScript, args: Array): this; + exec(execAsPipeline?: boolean): Promise>; + EXEC: (execAsPipeline?: boolean) => Promise>; + execAsPipeline(): Promise>; +} +export {}; diff --git a/node_modules/@redis/client/dist/lib/cluster/multi-command.js b/node_modules/@redis/client/dist/lib/cluster/multi-command.js new file mode 100644 index 0000000..1e8e797 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/cluster/multi-command.js @@ -0,0 +1,80 @@ +"use strict"; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); +}; +var _RedisClusterMultiCommand_multi, _RedisClusterMultiCommand_executor, _RedisClusterMultiCommand_firstKey; +Object.defineProperty(exports, "__esModule", { value: true }); +const commands_1 = require("./commands"); +const multi_command_1 = require("../multi-command"); +const commander_1 = require("../commander"); +const _1 = require("."); +class RedisClusterMultiCommand { + static extend(extensions) { + return (0, commander_1.attachExtensions)({ + BaseClass: RedisClusterMultiCommand, + modulesExecutor: RedisClusterMultiCommand.prototype.commandsExecutor, + modules: extensions?.modules, + functionsExecutor: RedisClusterMultiCommand.prototype.functionsExecutor, + functions: extensions?.functions, + scriptsExecutor: RedisClusterMultiCommand.prototype.scriptsExecutor, + scripts: extensions?.scripts + }); + } + constructor(executor, firstKey) { + _RedisClusterMultiCommand_multi.set(this, new multi_command_1.default()); + _RedisClusterMultiCommand_executor.set(this, void 0); + _RedisClusterMultiCommand_firstKey.set(this, void 0); + Object.defineProperty(this, "EXEC", { + enumerable: true, + configurable: true, + writable: true, + value: this.exec + }); + __classPrivateFieldSet(this, _RedisClusterMultiCommand_executor, executor, "f"); + __classPrivateFieldSet(this, _RedisClusterMultiCommand_firstKey, firstKey, "f"); + } + commandsExecutor(command, args) { + const transformedArguments = command.transformArguments(...args); + 
__classPrivateFieldSet(this, _RedisClusterMultiCommand_firstKey, __classPrivateFieldGet(this, _RedisClusterMultiCommand_firstKey, "f") ?? _1.default.extractFirstKey(command, args, transformedArguments), "f"); + return this.addCommand(undefined, transformedArguments, command.transformReply); + } + addCommand(firstKey, args, transformReply) { + __classPrivateFieldSet(this, _RedisClusterMultiCommand_firstKey, __classPrivateFieldGet(this, _RedisClusterMultiCommand_firstKey, "f") ?? firstKey, "f"); + __classPrivateFieldGet(this, _RedisClusterMultiCommand_multi, "f").addCommand(args, transformReply); + return this; + } + functionsExecutor(fn, args, name) { + const transformedArguments = __classPrivateFieldGet(this, _RedisClusterMultiCommand_multi, "f").addFunction(name, fn, args); + __classPrivateFieldSet(this, _RedisClusterMultiCommand_firstKey, __classPrivateFieldGet(this, _RedisClusterMultiCommand_firstKey, "f") ?? _1.default.extractFirstKey(fn, args, transformedArguments), "f"); + return this; + } + scriptsExecutor(script, args) { + const transformedArguments = __classPrivateFieldGet(this, _RedisClusterMultiCommand_multi, "f").addScript(script, args); + __classPrivateFieldSet(this, _RedisClusterMultiCommand_firstKey, __classPrivateFieldGet(this, _RedisClusterMultiCommand_firstKey, "f") ?? 
_1.default.extractFirstKey(script, args, transformedArguments), "f"); + return this; + } + async exec(execAsPipeline = false) { + if (execAsPipeline) { + return this.execAsPipeline(); + } + return __classPrivateFieldGet(this, _RedisClusterMultiCommand_multi, "f").handleExecReplies(await __classPrivateFieldGet(this, _RedisClusterMultiCommand_executor, "f").call(this, __classPrivateFieldGet(this, _RedisClusterMultiCommand_multi, "f").queue, __classPrivateFieldGet(this, _RedisClusterMultiCommand_firstKey, "f"), multi_command_1.default.generateChainId())); + } + async execAsPipeline() { + return __classPrivateFieldGet(this, _RedisClusterMultiCommand_multi, "f").transformReplies(await __classPrivateFieldGet(this, _RedisClusterMultiCommand_executor, "f").call(this, __classPrivateFieldGet(this, _RedisClusterMultiCommand_multi, "f").queue, __classPrivateFieldGet(this, _RedisClusterMultiCommand_firstKey, "f"))); + } +} +_RedisClusterMultiCommand_multi = new WeakMap(), _RedisClusterMultiCommand_executor = new WeakMap(), _RedisClusterMultiCommand_firstKey = new WeakMap(); +exports.default = RedisClusterMultiCommand; +(0, commander_1.attachCommands)({ + BaseClass: RedisClusterMultiCommand, + commands: commands_1.default, + executor: RedisClusterMultiCommand.prototype.commandsExecutor +}); diff --git a/node_modules/@redis/client/dist/lib/command-options.d.ts b/node_modules/@redis/client/dist/lib/command-options.d.ts new file mode 100644 index 0000000..58cbd46 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/command-options.d.ts @@ -0,0 +1,7 @@ +declare const symbol: unique symbol; +export type CommandOptions = T & { + readonly [symbol]: true; +}; +export declare function commandOptions(options: T): CommandOptions; +export declare function isCommandOptions(options: any): options is CommandOptions; +export {}; diff --git a/node_modules/@redis/client/dist/lib/command-options.js b/node_modules/@redis/client/dist/lib/command-options.js new file mode 100644 index 
0000000..d337994 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/command-options.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.isCommandOptions = exports.commandOptions = void 0; +const symbol = Symbol('Command Options'); +function commandOptions(options) { + options[symbol] = true; + return options; +} +exports.commandOptions = commandOptions; +function isCommandOptions(options) { + return options?.[symbol] === true; +} +exports.isCommandOptions = isCommandOptions; diff --git a/node_modules/@redis/client/dist/lib/commander.d.ts b/node_modules/@redis/client/dist/lib/commander.d.ts new file mode 100644 index 0000000..207fd12 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commander.d.ts @@ -0,0 +1,30 @@ +import { ClientCommandOptions } from './client'; +import { CommandOptions } from './command-options'; +import { RedisCommand, RedisCommandArgument, RedisCommandArguments, RedisCommandReply, RedisFunction, RedisFunctions, RedisModules, RedisScript, RedisScripts } from './commands'; +type Instantiable = new (...args: Array) => T; +type CommandsExecutor = (command: C, args: Array, name: string) => unknown; +interface AttachCommandsConfig { + BaseClass: Instantiable; + commands: Record; + executor: CommandsExecutor; +} +export declare function attachCommands({ BaseClass, commands, executor }: AttachCommandsConfig): void; +interface AttachExtensionsConfig { + BaseClass: T; + modulesExecutor: CommandsExecutor; + modules?: RedisModules; + functionsExecutor: CommandsExecutor; + functions?: RedisFunctions; + scriptsExecutor: CommandsExecutor; + scripts?: RedisScripts; +} +export declare function attachExtensions(config: AttachExtensionsConfig): any; +export declare function transformCommandArguments(command: RedisCommand, args: Array): { + jsArgs: Array; + args: RedisCommandArguments; + options: CommandOptions | undefined; +}; +export declare function transformLegacyCommandArguments(args: Array): 
Array; +export declare function transformCommandReply(command: C, rawReply: unknown, preserved: unknown): RedisCommandReply; +export declare function fCallArguments(name: RedisCommandArgument, fn: RedisFunction, args: RedisCommandArguments): RedisCommandArguments; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commander.js b/node_modules/@redis/client/dist/lib/commander.js new file mode 100644 index 0000000..28c15dd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commander.js @@ -0,0 +1,103 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fCallArguments = exports.transformCommandReply = exports.transformLegacyCommandArguments = exports.transformCommandArguments = exports.attachExtensions = exports.attachCommands = void 0; +const command_options_1 = require("./command-options"); +function attachCommands({ BaseClass, commands, executor }) { + for (const [name, command] of Object.entries(commands)) { + BaseClass.prototype[name] = function (...args) { + return executor.call(this, command, args, name); + }; + } +} +exports.attachCommands = attachCommands; +function attachExtensions(config) { + let Commander; + if (config.modules) { + Commander = attachWithNamespaces({ + BaseClass: config.BaseClass, + namespaces: config.modules, + executor: config.modulesExecutor + }); + } + if (config.functions) { + Commander = attachWithNamespaces({ + BaseClass: Commander ?? config.BaseClass, + namespaces: config.functions, + executor: config.functionsExecutor + }); + } + if (config.scripts) { + Commander ?? (Commander = class extends config.BaseClass { + }); + attachCommands({ + BaseClass: Commander, + commands: config.scripts, + executor: config.scriptsExecutor + }); + } + return Commander ?? 
config.BaseClass; +} +exports.attachExtensions = attachExtensions; +function attachWithNamespaces({ BaseClass, namespaces, executor }) { + const Commander = class extends BaseClass { + constructor(...args) { + super(...args); + for (const namespace of Object.keys(namespaces)) { + this[namespace] = Object.create(this[namespace], { + self: { + value: this + } + }); + } + } + }; + for (const [namespace, commands] of Object.entries(namespaces)) { + Commander.prototype[namespace] = {}; + for (const [name, command] of Object.entries(commands)) { + Commander.prototype[namespace][name] = function (...args) { + return executor.call(this.self, command, args, name); + }; + } + } + return Commander; +} +function transformCommandArguments(command, args) { + let options; + if ((0, command_options_1.isCommandOptions)(args[0])) { + options = args[0]; + args = args.slice(1); + } + return { + jsArgs: args, + args: command.transformArguments(...args), + options + }; +} +exports.transformCommandArguments = transformCommandArguments; +function transformLegacyCommandArguments(args) { + return args.flat().map(arg => { + return typeof arg === 'number' || arg instanceof Date ? + arg.toString() : + arg; + }); +} +exports.transformLegacyCommandArguments = transformLegacyCommandArguments; +function transformCommandReply(command, rawReply, preserved) { + if (!command.transformReply) { + return rawReply; + } + return command.transformReply(rawReply, preserved); +} +exports.transformCommandReply = transformCommandReply; +function fCallArguments(name, fn, args) { + const actualArgs = [ + fn.IS_READ_ONLY ? 
'FCALL_RO' : 'FCALL', + name + ]; + if (fn.NUMBER_OF_KEYS !== undefined) { + actualArgs.push(fn.NUMBER_OF_KEYS.toString()); + } + actualArgs.push(...args); + return actualArgs; +} +exports.fCallArguments = fCallArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_CAT.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_CAT.d.ts new file mode 100644 index 0000000..8df4390 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_CAT.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(categoryName?: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_CAT.js b/node_modules/@redis/client/dist/lib/commands/ACL_CAT.js new file mode 100644 index 0000000..b6fce00 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_CAT.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(categoryName) { + const args = ['ACL', 'CAT']; + if (categoryName) { + args.push(categoryName); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_DELUSER.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_DELUSER.d.ts new file mode 100644 index 0000000..d4abe17 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_DELUSER.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(username: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_DELUSER.js b/node_modules/@redis/client/dist/lib/commands/ACL_DELUSER.js new file mode 100644 index 0000000..9a43ae3 --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/ACL_DELUSER.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +const generic_transformers_1 = require("./generic-transformers"); +function transformArguments(username) { + return (0, generic_transformers_1.pushVerdictArguments)(['ACL', 'DELUSER'], username); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_DRYRUN.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_DRYRUN.d.ts new file mode 100644 index 0000000..11f1d9a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_DRYRUN.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(username: RedisCommandArgument, command: Array): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_DRYRUN.js b/node_modules/@redis/client/dist/lib/commands/ACL_DRYRUN.js new file mode 100644 index 0000000..0e4f0f6 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_DRYRUN.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments(username, command) { + return [ + 'ACL', + 'DRYRUN', + username, + ...command + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_GENPASS.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_GENPASS.d.ts new file mode 100644 index 0000000..5f85023 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_GENPASS.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function 
transformArguments(bits?: number): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_GENPASS.js b/node_modules/@redis/client/dist/lib/commands/ACL_GENPASS.js new file mode 100644 index 0000000..cd2a3a7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_GENPASS.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(bits) { + const args = ['ACL', 'GENPASS']; + if (bits) { + args.push(bits.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_GETUSER.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_GETUSER.d.ts new file mode 100644 index 0000000..2574a35 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_GETUSER.d.ts @@ -0,0 +1,26 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(username: RedisCommandArgument): RedisCommandArguments; +type AclGetUserRawReply = [ + 'flags', + Array, + 'passwords', + Array, + 'commands', + RedisCommandArgument, + 'keys', + Array | RedisCommandArgument, + 'channels', + Array | RedisCommandArgument, + 'selectors' | undefined, + Array> | undefined +]; +interface AclUser { + flags: Array; + passwords: Array; + commands: RedisCommandArgument; + keys: Array | RedisCommandArgument; + channels: Array | RedisCommandArgument; + selectors?: Array>; +} +export declare function transformReply(reply: AclGetUserRawReply): AclUser; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_GETUSER.js b/node_modules/@redis/client/dist/lib/commands/ACL_GETUSER.js new file mode 100644 index 0000000..6e58582 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_GETUSER.js @@ -0,0 +1,18 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments(username) { + return ['ACL', 'GETUSER', username]; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + flags: reply[1], + passwords: reply[3], + commands: reply[5], + keys: reply[7], + channels: reply[9], + selectors: reply[11] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_LIST.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_LIST.d.ts new file mode 100644 index 0000000..5e7c41d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_LIST.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_LIST.js b/node_modules/@redis/client/dist/lib/commands/ACL_LIST.js new file mode 100644 index 0000000..fa806d8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_LIST.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['ACL', 'LIST']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_LOAD.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_LOAD.d.ts new file mode 100644 index 0000000..2cb83d8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_LOAD.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_LOAD.js 
b/node_modules/@redis/client/dist/lib/commands/ACL_LOAD.js new file mode 100644 index 0000000..d433e3b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_LOAD.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['ACL', 'LOAD']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_LOG.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_LOG.d.ts new file mode 100644 index 0000000..1534451 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_LOG.d.ts @@ -0,0 +1,29 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(count?: number): RedisCommandArguments; +type AclLogRawReply = [ + _: RedisCommandArgument, + count: number, + _: RedisCommandArgument, + reason: RedisCommandArgument, + _: RedisCommandArgument, + context: RedisCommandArgument, + _: RedisCommandArgument, + object: RedisCommandArgument, + _: RedisCommandArgument, + username: RedisCommandArgument, + _: RedisCommandArgument, + ageSeconds: RedisCommandArgument, + _: RedisCommandArgument, + clientInfo: RedisCommandArgument +]; +interface AclLog { + count: number; + reason: RedisCommandArgument; + context: RedisCommandArgument; + object: RedisCommandArgument; + username: RedisCommandArgument; + ageSeconds: number; + clientInfo: RedisCommandArgument; +} +export declare function transformReply(reply: Array): Array; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_LOG.js b/node_modules/@redis/client/dist/lib/commands/ACL_LOG.js new file mode 100644 index 0000000..e943bee --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_LOG.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; 
+function transformArguments(count) { + const args = ['ACL', 'LOG']; + if (count) { + args.push(count.toString()); + } + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply.map(log => ({ + count: log[1], + reason: log[3], + context: log[5], + object: log[7], + username: log[9], + ageSeconds: Number(log[11]), + clientInfo: log[13] + })); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_LOG_RESET.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_LOG_RESET.d.ts new file mode 100644 index 0000000..2cb83d8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_LOG_RESET.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_LOG_RESET.js b/node_modules/@redis/client/dist/lib/commands/ACL_LOG_RESET.js new file mode 100644 index 0000000..1a57c0b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_LOG_RESET.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['ACL', 'LOG', 'RESET']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_SAVE.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_SAVE.d.ts new file mode 100644 index 0000000..2cb83d8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_SAVE.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_SAVE.js 
b/node_modules/@redis/client/dist/lib/commands/ACL_SAVE.js new file mode 100644 index 0000000..e793f9d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_SAVE.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['ACL', 'SAVE']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_SETUSER.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_SETUSER.d.ts new file mode 100644 index 0000000..2a9b744 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_SETUSER.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(username: RedisCommandArgument, rule: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_SETUSER.js b/node_modules/@redis/client/dist/lib/commands/ACL_SETUSER.js new file mode 100644 index 0000000..2421845 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_SETUSER.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +const generic_transformers_1 = require("./generic-transformers"); +function transformArguments(username, rule) { + return (0, generic_transformers_1.pushVerdictArguments)(['ACL', 'SETUSER', username], rule); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_USERS.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_USERS.d.ts new file mode 100644 index 0000000..5e7c41d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_USERS.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function 
transformArguments(): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_USERS.js b/node_modules/@redis/client/dist/lib/commands/ACL_USERS.js new file mode 100644 index 0000000..3895e5c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_USERS.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['ACL', 'USERS']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_WHOAMI.d.ts b/node_modules/@redis/client/dist/lib/commands/ACL_WHOAMI.d.ts new file mode 100644 index 0000000..2cb83d8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_WHOAMI.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/ACL_WHOAMI.js b/node_modules/@redis/client/dist/lib/commands/ACL_WHOAMI.js new file mode 100644 index 0000000..38996a7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ACL_WHOAMI.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['ACL', 'WHOAMI']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/APPEND.d.ts b/node_modules/@redis/client/dist/lib/commands/APPEND.d.ts new file mode 100644 index 0000000..0db828a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/APPEND.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: 
RedisCommandArgument, value: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/APPEND.js b/node_modules/@redis/client/dist/lib/commands/APPEND.js new file mode 100644 index 0000000..93dc4bb --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/APPEND.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, value) { + return ['APPEND', key, value]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ASKING.d.ts b/node_modules/@redis/client/dist/lib/commands/ASKING.d.ts new file mode 100644 index 0000000..fdf0501 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ASKING.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArguments, RedisCommandArgument } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/ASKING.js b/node_modules/@redis/client/dist/lib/commands/ASKING.js new file mode 100644 index 0000000..3485abd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ASKING.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['ASKING']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/AUTH.d.ts b/node_modules/@redis/client/dist/lib/commands/AUTH.d.ts new file mode 100644 index 0000000..befe182 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/AUTH.d.ts @@ -0,0 +1,7 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export interface AuthOptions { + 
username?: RedisCommandArgument; + password: RedisCommandArgument; +} +export declare function transformArguments({ username, password }: AuthOptions): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/AUTH.js b/node_modules/@redis/client/dist/lib/commands/AUTH.js new file mode 100644 index 0000000..35ec3b0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/AUTH.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments({ username, password }) { + if (!username) { + return ['AUTH', password]; + } + return ['AUTH', username, password]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/BGREWRITEAOF.d.ts b/node_modules/@redis/client/dist/lib/commands/BGREWRITEAOF.d.ts new file mode 100644 index 0000000..2cb83d8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BGREWRITEAOF.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/BGREWRITEAOF.js b/node_modules/@redis/client/dist/lib/commands/BGREWRITEAOF.js new file mode 100644 index 0000000..dc7fd6d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BGREWRITEAOF.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['BGREWRITEAOF']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/BGSAVE.d.ts b/node_modules/@redis/client/dist/lib/commands/BGSAVE.d.ts new file mode 100644 index 0000000..4631dcc --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/BGSAVE.d.ts @@ -0,0 +1,7 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +interface BgSaveOptions { + SCHEDULE?: true; +} +export declare function transformArguments(options?: BgSaveOptions): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/BGSAVE.js b/node_modules/@redis/client/dist/lib/commands/BGSAVE.js new file mode 100644 index 0000000..899e2b9 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BGSAVE.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(options) { + const args = ['BGSAVE']; + if (options?.SCHEDULE) { + args.push('SCHEDULE'); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/BITCOUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/BITCOUNT.d.ts new file mode 100644 index 0000000..4d94255 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BITCOUNT.d.ts @@ -0,0 +1,11 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +interface BitCountRange { + start: number; + end: number; + mode?: 'BYTE' | 'BIT'; +} +export declare function transformArguments(key: RedisCommandArgument, range?: BitCountRange): RedisCommandArguments; +export declare function transformReply(): number; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/BITCOUNT.js b/node_modules/@redis/client/dist/lib/commands/BITCOUNT.js new file mode 100644 index 0000000..73131b7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BITCOUNT.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = 
exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, range) { + const args = ['BITCOUNT', key]; + if (range) { + args.push(range.start.toString(), range.end.toString()); + if (range.mode) { + args.push(range.mode); + } + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/BITFIELD.d.ts b/node_modules/@redis/client/dist/lib/commands/BITFIELD.d.ts new file mode 100644 index 0000000..f7932fd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BITFIELD.d.ts @@ -0,0 +1,26 @@ +export declare const FIRST_KEY_INDEX = 1; +export type BitFieldEncoding = `${'i' | 'u'}${number}`; +export interface BitFieldOperation { + operation: S; +} +export interface BitFieldGetOperation extends BitFieldOperation<'GET'> { + encoding: BitFieldEncoding; + offset: number | string; +} +interface BitFieldSetOperation extends BitFieldOperation<'SET'> { + encoding: BitFieldEncoding; + offset: number | string; + value: number; +} +interface BitFieldIncrByOperation extends BitFieldOperation<'INCRBY'> { + encoding: BitFieldEncoding; + offset: number | string; + increment: number; +} +interface BitFieldOverflowOperation extends BitFieldOperation<'OVERFLOW'> { + behavior: string; +} +type BitFieldOperations = Array; +export declare function transformArguments(key: string, operations: BitFieldOperations): Array; +export declare function transformReply(): Array; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/BITFIELD.js b/node_modules/@redis/client/dist/lib/commands/BITFIELD.js new file mode 100644 index 0000000..0550f21 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BITFIELD.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function 
transformArguments(key, operations) { + const args = ['BITFIELD', key]; + for (const options of operations) { + switch (options.operation) { + case 'GET': + args.push('GET', options.encoding, options.offset.toString()); + break; + case 'SET': + args.push('SET', options.encoding, options.offset.toString(), options.value.toString()); + break; + case 'INCRBY': + args.push('INCRBY', options.encoding, options.offset.toString(), options.increment.toString()); + break; + case 'OVERFLOW': + args.push('OVERFLOW', options.behavior); + break; + } + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/BITFIELD_RO.d.ts b/node_modules/@redis/client/dist/lib/commands/BITFIELD_RO.d.ts new file mode 100644 index 0000000..72c8a79 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BITFIELD_RO.d.ts @@ -0,0 +1,7 @@ +import { BitFieldGetOperation } from './BITFIELD'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +type BitFieldRoOperations = Array & Partial>>; +export declare function transformArguments(key: string, operations: BitFieldRoOperations): Array; +export declare function transformReply(): Array; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/BITFIELD_RO.js b/node_modules/@redis/client/dist/lib/commands/BITFIELD_RO.js new file mode 100644 index 0000000..5021750 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BITFIELD_RO.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, operations) { + const args = ['BITFIELD_RO', key]; + for (const operation of operations) { + args.push('GET', operation.encoding, operation.offset.toString()); + } + return args; +} +exports.transformArguments = 
transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/BITOP.d.ts b/node_modules/@redis/client/dist/lib/commands/BITOP.d.ts new file mode 100644 index 0000000..12d506d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BITOP.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +type BitOperations = 'AND' | 'OR' | 'XOR' | 'NOT'; +export declare function transformArguments(operation: BitOperations, destKey: RedisCommandArgument, key: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/BITOP.js b/node_modules/@redis/client/dist/lib/commands/BITOP.js new file mode 100644 index 0000000..1cab80b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BITOP.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 2; +function transformArguments(operation, destKey, key) { + return (0, generic_transformers_1.pushVerdictArguments)(['BITOP', operation, destKey], key); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/BITPOS.d.ts b/node_modules/@redis/client/dist/lib/commands/BITPOS.d.ts new file mode 100644 index 0000000..4e32356 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BITPOS.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { BitValue } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, bit: BitValue, start?: number, end?: number, mode?: 'BYTE' | 'BIT'): 
RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/BITPOS.js b/node_modules/@redis/client/dist/lib/commands/BITPOS.js new file mode 100644 index 0000000..bd880ea --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BITPOS.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, bit, start, end, mode) { + const args = ['BITPOS', key, bit.toString()]; + if (typeof start === 'number') { + args.push(start.toString()); + } + if (typeof end === 'number') { + args.push(end.toString()); + } + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/BLMOVE.d.ts b/node_modules/@redis/client/dist/lib/commands/BLMOVE.d.ts new file mode 100644 index 0000000..e641a59 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BLMOVE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { ListSide } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(source: RedisCommandArgument, destination: RedisCommandArgument, sourceDirection: ListSide, destinationDirection: ListSide, timeout: number): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/BLMOVE.js b/node_modules/@redis/client/dist/lib/commands/BLMOVE.js new file mode 100644 index 0000000..cfa4233 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BLMOVE.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = 
exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(source, destination, sourceDirection, destinationDirection, timeout) { + return [ + 'BLMOVE', + source, + destination, + sourceDirection, + destinationDirection, + timeout.toString() + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/BLMPOP.d.ts b/node_modules/@redis/client/dist/lib/commands/BLMPOP.d.ts new file mode 100644 index 0000000..157bbcb --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BLMPOP.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { LMPopOptions, ListSide } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 3; +export declare function transformArguments(timeout: number, keys: RedisCommandArgument | Array, side: ListSide, options?: LMPopOptions): RedisCommandArguments; +export { transformReply } from './LMPOP'; diff --git a/node_modules/@redis/client/dist/lib/commands/BLMPOP.js b/node_modules/@redis/client/dist/lib/commands/BLMPOP.js new file mode 100644 index 0000000..046bb86 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BLMPOP.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 3; +function transformArguments(timeout, keys, side, options) { + return (0, generic_transformers_1.transformLMPopArguments)(['BLMPOP', timeout.toString()], keys, side, options); +} +exports.transformArguments = transformArguments; +var LMPOP_1 = require("./LMPOP"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return LMPOP_1.transformReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/BLPOP.d.ts 
b/node_modules/@redis/client/dist/lib/commands/BLPOP.d.ts new file mode 100644 index 0000000..8195564 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BLPOP.d.ts @@ -0,0 +1,10 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(keys: RedisCommandArgument | Array, timeout: number): RedisCommandArguments; +type BLPopRawReply = null | [RedisCommandArgument, RedisCommandArgument]; +type BLPopReply = null | { + key: RedisCommandArgument; + element: RedisCommandArgument; +}; +export declare function transformReply(reply: BLPopRawReply): BLPopReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/BLPOP.js b/node_modules/@redis/client/dist/lib/commands/BLPOP.js new file mode 100644 index 0000000..46682ac --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BLPOP.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(keys, timeout) { + const args = (0, generic_transformers_1.pushVerdictArguments)(['BLPOP'], keys); + args.push(timeout.toString()); + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + if (reply === null) + return null; + return { + key: reply[0], + element: reply[1] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/BRPOP.d.ts b/node_modules/@redis/client/dist/lib/commands/BRPOP.d.ts new file mode 100644 index 0000000..7d3c79f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BRPOP.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function 
transformArguments(key: RedisCommandArgument | Array, timeout: number): RedisCommandArguments; +export { transformReply } from './BLPOP'; diff --git a/node_modules/@redis/client/dist/lib/commands/BRPOP.js b/node_modules/@redis/client/dist/lib/commands/BRPOP.js new file mode 100644 index 0000000..64b7f2c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BRPOP.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, timeout) { + const args = (0, generic_transformers_1.pushVerdictArguments)(['BRPOP'], key); + args.push(timeout.toString()); + return args; +} +exports.transformArguments = transformArguments; +var BLPOP_1 = require("./BLPOP"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return BLPOP_1.transformReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/BRPOPLPUSH.d.ts b/node_modules/@redis/client/dist/lib/commands/BRPOPLPUSH.d.ts new file mode 100644 index 0000000..72cf017 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BRPOPLPUSH.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(source: RedisCommandArgument, destination: RedisCommandArgument, timeout: number): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/BRPOPLPUSH.js b/node_modules/@redis/client/dist/lib/commands/BRPOPLPUSH.js new file mode 100644 index 0000000..31e3e1b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BRPOPLPUSH.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: 
true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(source, destination, timeout) { + return ['BRPOPLPUSH', source, destination, timeout.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/BZMPOP.d.ts b/node_modules/@redis/client/dist/lib/commands/BZMPOP.d.ts new file mode 100644 index 0000000..a79d87b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BZMPOP.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { SortedSetSide, ZMPopOptions } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 3; +export declare function transformArguments(timeout: number, keys: RedisCommandArgument | Array, side: SortedSetSide, options?: ZMPopOptions): RedisCommandArguments; +export { transformReply } from './ZMPOP'; diff --git a/node_modules/@redis/client/dist/lib/commands/BZMPOP.js b/node_modules/@redis/client/dist/lib/commands/BZMPOP.js new file mode 100644 index 0000000..d219eb2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BZMPOP.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 3; +function transformArguments(timeout, keys, side, options) { + return (0, generic_transformers_1.transformZMPopArguments)(['BZMPOP', timeout.toString()], keys, side, options); +} +exports.transformArguments = transformArguments; +var ZMPOP_1 = require("./ZMPOP"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return ZMPOP_1.transformReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/BZPOPMAX.d.ts b/node_modules/@redis/client/dist/lib/commands/BZPOPMAX.d.ts new file 
mode 100644 index 0000000..f619c17 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BZPOPMAX.d.ts @@ -0,0 +1,10 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { ZMember } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument | Array, timeout: number): RedisCommandArguments; +type ZMemberRawReply = [key: RedisCommandArgument, value: RedisCommandArgument, score: RedisCommandArgument] | null; +type BZPopMaxReply = (ZMember & { + key: RedisCommandArgument; +}) | null; +export declare function transformReply(reply: ZMemberRawReply): BZPopMaxReply | null; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/BZPOPMAX.js b/node_modules/@redis/client/dist/lib/commands/BZPOPMAX.js new file mode 100644 index 0000000..ef9a41e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BZPOPMAX.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, timeout) { + const args = (0, generic_transformers_1.pushVerdictArguments)(['BZPOPMAX'], key); + args.push(timeout.toString()); + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + if (!reply) + return null; + return { + key: reply[0], + value: reply[1], + score: (0, generic_transformers_1.transformNumberInfinityReply)(reply[2]) + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/BZPOPMIN.d.ts b/node_modules/@redis/client/dist/lib/commands/BZPOPMIN.d.ts new file mode 100644 index 0000000..5e88b8f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BZPOPMIN.d.ts @@ -0,0 +1,4 @@ +import { 
RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument | Array, timeout: number): RedisCommandArguments; +export { transformReply } from './BZPOPMAX'; diff --git a/node_modules/@redis/client/dist/lib/commands/BZPOPMIN.js b/node_modules/@redis/client/dist/lib/commands/BZPOPMIN.js new file mode 100644 index 0000000..b55ee96 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/BZPOPMIN.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, timeout) { + const args = (0, generic_transformers_1.pushVerdictArguments)(['BZPOPMIN'], key); + args.push(timeout.toString()); + return args; +} +exports.transformArguments = transformArguments; +var BZPOPMAX_1 = require("./BZPOPMAX"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return BZPOPMAX_1.transformReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_CACHING.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_CACHING.d.ts new file mode 100644 index 0000000..be851a4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_CACHING.d.ts @@ -0,0 +1,4 @@ +/// +import { RedisCommandArguments } from '.'; +export declare function transformArguments(value: boolean): RedisCommandArguments; +export declare function transformReply(): 'OK' | Buffer; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_CACHING.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_CACHING.js new file mode 100644 index 0000000..fe230a2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_CACHING.js @@ -0,0 +1,11 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(value) { + return [ + 'CLIENT', + 'CACHING', + value ? 'YES' : 'NO' + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_GETNAME.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_GETNAME.d.ts new file mode 100644 index 0000000..658ceee --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_GETNAME.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): string | null; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_GETNAME.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_GETNAME.js new file mode 100644 index 0000000..ee67ae8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_GETNAME.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['CLIENT', 'GETNAME']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_GETREDIR.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_GETREDIR.d.ts new file mode 100644 index 0000000..a911ea1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_GETREDIR.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_GETREDIR.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_GETREDIR.js new file mode 100644 index 0000000..b4285c9 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_GETREDIR.js @@ -0,0 +1,7 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['CLIENT', 'GETREDIR']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_ID.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_ID.d.ts new file mode 100644 index 0000000..14244d2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_ID.d.ts @@ -0,0 +1,3 @@ +export declare const IS_READ_ONLY = true; +export declare function transformArguments(): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_ID.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_ID.js new file mode 100644 index 0000000..1429e4b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_ID.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments() { + return ['CLIENT', 'ID']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_INFO.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_INFO.d.ts new file mode 100644 index 0000000..04f28d1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_INFO.d.ts @@ -0,0 +1,33 @@ +export declare const IS_READ_ONLY = true; +export declare function transformArguments(): Array; +export interface ClientInfoReply { + id: number; + addr: string; + laddr?: string; + fd: number; + name: string; + age: number; + idle: number; + flags: string; + db: number; + sub: number; + psub: number; + ssub?: number; + multi: number; + qbuf: number; + qbufFree: number; + argvMem?: number; + multiMem?: number; + obl: number; + oll: number; + omem: number; + totMem?: number; + events: string; + cmd: string; + user?: 
string; + redir?: number; + resp?: number; + libName?: string; + libVer?: string; +} +export declare function transformReply(rawReply: string): ClientInfoReply; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_INFO.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_INFO.js new file mode 100644 index 0000000..b46cb47 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_INFO.js @@ -0,0 +1,57 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments() { + return ['CLIENT', 'INFO']; +} +exports.transformArguments = transformArguments; +const CLIENT_INFO_REGEX = /([^\s=]+)=([^\s]*)/g; +function transformReply(rawReply) { + const map = {}; + for (const item of rawReply.matchAll(CLIENT_INFO_REGEX)) { + map[item[1]] = item[2]; + } + const reply = { + id: Number(map.id), + addr: map.addr, + fd: Number(map.fd), + name: map.name, + age: Number(map.age), + idle: Number(map.idle), + flags: map.flags, + db: Number(map.db), + sub: Number(map.sub), + psub: Number(map.psub), + multi: Number(map.multi), + qbuf: Number(map.qbuf), + qbufFree: Number(map['qbuf-free']), + argvMem: Number(map['argv-mem']), + obl: Number(map.obl), + oll: Number(map.oll), + omem: Number(map.omem), + totMem: Number(map['tot-mem']), + events: map.events, + cmd: map.cmd, + user: map.user, + libName: map['lib-name'], + libVer: map['lib-ver'], + }; + if (map.laddr !== undefined) { + reply.laddr = map.laddr; + } + if (map.redir !== undefined) { + reply.redir = Number(map.redir); + } + if (map.ssub !== undefined) { + reply.ssub = Number(map.ssub); + } + if (map['multi-mem'] !== undefined) { + reply.multiMem = Number(map['multi-mem']); + } + if (map.resp !== undefined) { + reply.resp = Number(map.resp); + } + return reply; +} +exports.transformReply = transformReply; diff --git 
a/node_modules/@redis/client/dist/lib/commands/CLIENT_KILL.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_KILL.d.ts new file mode 100644 index 0000000..ddc8af1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_KILL.d.ts @@ -0,0 +1,38 @@ +import { RedisCommandArguments } from '.'; +export declare enum ClientKillFilters { + ADDRESS = "ADDR", + LOCAL_ADDRESS = "LADDR", + ID = "ID", + TYPE = "TYPE", + USER = "USER", + SKIP_ME = "SKIPME", + MAXAGE = "MAXAGE" +} +interface KillFilter { + filter: T; +} +interface KillAddress extends KillFilter { + address: `${string}:${number}`; +} +interface KillLocalAddress extends KillFilter { + localAddress: `${string}:${number}`; +} +interface KillId extends KillFilter { + id: number | `${number}`; +} +interface KillType extends KillFilter { + type: 'normal' | 'master' | 'replica' | 'pubsub'; +} +interface KillUser extends KillFilter { + username: string; +} +type KillSkipMe = ClientKillFilters.SKIP_ME | (KillFilter & { + skipMe: boolean; +}); +interface KillMaxAge extends KillFilter { + maxAge: number; +} +type KillFilters = KillAddress | KillLocalAddress | KillId | KillType | KillUser | KillSkipMe | KillMaxAge; +export declare function transformArguments(filters: KillFilters | Array): RedisCommandArguments; +export declare function transformReply(): number; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_KILL.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_KILL.js new file mode 100644 index 0000000..eaa2b89 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_KILL.js @@ -0,0 +1,58 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.ClientKillFilters = void 0; +var ClientKillFilters; +(function (ClientKillFilters) { + ClientKillFilters["ADDRESS"] = "ADDR"; + ClientKillFilters["LOCAL_ADDRESS"] = "LADDR"; + ClientKillFilters["ID"] = "ID"; + ClientKillFilters["TYPE"] = "TYPE"; + 
ClientKillFilters["USER"] = "USER"; + ClientKillFilters["SKIP_ME"] = "SKIPME"; + ClientKillFilters["MAXAGE"] = "MAXAGE"; +})(ClientKillFilters || (exports.ClientKillFilters = ClientKillFilters = {})); +function transformArguments(filters) { + const args = ['CLIENT', 'KILL']; + if (Array.isArray(filters)) { + for (const filter of filters) { + pushFilter(args, filter); + } + } + else { + pushFilter(args, filters); + } + return args; +} +exports.transformArguments = transformArguments; +function pushFilter(args, filter) { + if (filter === ClientKillFilters.SKIP_ME) { + args.push('SKIPME'); + return; + } + args.push(filter.filter); + switch (filter.filter) { + case ClientKillFilters.ADDRESS: + args.push(filter.address); + break; + case ClientKillFilters.LOCAL_ADDRESS: + args.push(filter.localAddress); + break; + case ClientKillFilters.ID: + args.push(typeof filter.id === 'number' ? + filter.id.toString() : + filter.id); + break; + case ClientKillFilters.TYPE: + args.push(filter.type); + break; + case ClientKillFilters.USER: + args.push(filter.username); + break; + case ClientKillFilters.SKIP_ME: + args.push(filter.skipMe ? 
'yes' : 'no'); + break; + case ClientKillFilters.MAXAGE: + args.push(filter.maxAge.toString()); + break; + } +} diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_LIST.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_LIST.d.ts new file mode 100644 index 0000000..7c2d4a8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_LIST.d.ts @@ -0,0 +1,15 @@ +import { RedisCommandArguments, RedisCommandArgument } from '.'; +import { ClientInfoReply } from './CLIENT_INFO'; +interface ListFilterType { + TYPE: 'NORMAL' | 'MASTER' | 'REPLICA' | 'PUBSUB'; + ID?: never; +} +interface ListFilterId { + ID: Array; + TYPE?: never; +} +export type ListFilter = ListFilterType | ListFilterId; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(filter?: ListFilter): RedisCommandArguments; +export declare function transformReply(rawReply: string): Array; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_LIST.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_LIST.js new file mode 100644 index 0000000..bdf8694 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_LIST.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const generic_transformers_1 = require("./generic-transformers"); +const CLIENT_INFO_1 = require("./CLIENT_INFO"); +exports.IS_READ_ONLY = true; +function transformArguments(filter) { + let args = ['CLIENT', 'LIST']; + if (filter) { + if (filter.TYPE !== undefined) { + args.push('TYPE', filter.TYPE); + } + else { + args.push('ID'); + args = (0, generic_transformers_1.pushVerdictArguments)(args, filter.ID); + } + } + return args; +} +exports.transformArguments = transformArguments; +function transformReply(rawReply) { + const split = rawReply.split('\n'), length = split.length - 1, reply = []; + for (let i = 0; i < 
length; i++) { + reply.push((0, CLIENT_INFO_1.transformReply)(split[i])); + } + return reply; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_NO-EVICT.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_NO-EVICT.d.ts new file mode 100644 index 0000000..be851a4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_NO-EVICT.d.ts @@ -0,0 +1,4 @@ +/// +import { RedisCommandArguments } from '.'; +export declare function transformArguments(value: boolean): RedisCommandArguments; +export declare function transformReply(): 'OK' | Buffer; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_NO-EVICT.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_NO-EVICT.js new file mode 100644 index 0000000..f2331e7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_NO-EVICT.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(value) { + return [ + 'CLIENT', + 'NO-EVICT', + value ? 
'ON' : 'OFF' + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_NO-TOUCH.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_NO-TOUCH.d.ts new file mode 100644 index 0000000..be851a4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_NO-TOUCH.d.ts @@ -0,0 +1,4 @@ +/// +import { RedisCommandArguments } from '.'; +export declare function transformArguments(value: boolean): RedisCommandArguments; +export declare function transformReply(): 'OK' | Buffer; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_NO-TOUCH.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_NO-TOUCH.js new file mode 100644 index 0000000..2c72957 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_NO-TOUCH.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(value) { + return [ + 'CLIENT', + 'NO-TOUCH', + value ? 
'ON' : 'OFF' + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_PAUSE.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_PAUSE.d.ts new file mode 100644 index 0000000..09294c0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_PAUSE.d.ts @@ -0,0 +1,4 @@ +/// +import { RedisCommandArguments } from '.'; +export declare function transformArguments(timeout: number, mode?: 'WRITE' | 'ALL'): RedisCommandArguments; +export declare function transformReply(): 'OK' | Buffer; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_PAUSE.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_PAUSE.js new file mode 100644 index 0000000..127c0d5 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_PAUSE.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(timeout, mode) { + const args = [ + 'CLIENT', + 'PAUSE', + timeout.toString() + ]; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_SETNAME.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_SETNAME.d.ts new file mode 100644 index 0000000..11c70f3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_SETNAME.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(name: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_SETNAME.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_SETNAME.js new file mode 100644 index 0000000..2dacf4d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_SETNAME.js @@ -0,0 +1,7 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(name) { + return ['CLIENT', 'SETNAME', name]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKING.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKING.d.ts new file mode 100644 index 0000000..072d4c5 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKING.d.ts @@ -0,0 +1,20 @@ +/// +import { RedisCommandArgument, RedisCommandArguments } from '.'; +interface CommonOptions { + REDIRECT?: number; + NOLOOP?: boolean; +} +interface BroadcastOptions { + BCAST?: boolean; + PREFIX?: RedisCommandArgument | Array; +} +interface OptInOptions { + OPTIN?: boolean; +} +interface OptOutOptions { + OPTOUT?: boolean; +} +type ClientTrackingOptions = CommonOptions & (BroadcastOptions | OptInOptions | OptOutOptions); +export declare function transformArguments(mode: M, options?: M extends true ? ClientTrackingOptions : undefined): RedisCommandArguments; +export declare function transformReply(): 'OK' | Buffer; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKING.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKING.js new file mode 100644 index 0000000..24817a8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKING.js @@ -0,0 +1,48 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(mode, options) { + const args = [ + 'CLIENT', + 'TRACKING', + mode ? 
'ON' : 'OFF' + ]; + if (mode) { + if (options?.REDIRECT) { + args.push('REDIRECT', options.REDIRECT.toString()); + } + if (isBroadcast(options)) { + args.push('BCAST'); + if (options?.PREFIX) { + if (Array.isArray(options.PREFIX)) { + for (const prefix of options.PREFIX) { + args.push('PREFIX', prefix); + } + } + else { + args.push('PREFIX', options.PREFIX); + } + } + } + else if (isOptIn(options)) { + args.push('OPTIN'); + } + else if (isOptOut(options)) { + args.push('OPTOUT'); + } + if (options?.NOLOOP) { + args.push('NOLOOP'); + } + } + return args; +} +exports.transformArguments = transformArguments; +function isBroadcast(options) { + return options?.BCAST === true; +} +function isOptIn(options) { + return options?.OPTIN === true; +} +function isOptOut(options) { + return options?.OPTOUT === true; +} diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKINGINFO.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKINGINFO.d.ts new file mode 100644 index 0000000..2c7519a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKINGINFO.d.ts @@ -0,0 +1,17 @@ +import { RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +type RawReply = [ + 'flags', + Array, + 'redirect', + number, + 'prefixes', + Array +]; +interface Reply { + flags: Set; + redirect: number; + prefixes: Array; +} +export declare function transformReply(reply: RawReply): Reply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKINGINFO.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKINGINFO.js new file mode 100644 index 0000000..8b38d3e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_TRACKINGINFO.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments() { + return ['CLIENT', 'TRACKINGINFO']; +} 
+exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + flags: new Set(reply[1]), + redirect: reply[3], + prefixes: reply[5] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_UNPAUSE.d.ts b/node_modules/@redis/client/dist/lib/commands/CLIENT_UNPAUSE.d.ts new file mode 100644 index 0000000..85ed25f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_UNPAUSE.d.ts @@ -0,0 +1,4 @@ +/// +import { RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): 'OK' | Buffer; diff --git a/node_modules/@redis/client/dist/lib/commands/CLIENT_UNPAUSE.js b/node_modules/@redis/client/dist/lib/commands/CLIENT_UNPAUSE.js new file mode 100644 index 0000000..8b34bec --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLIENT_UNPAUSE.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['CLIENT', 'UNPAUSE']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTS.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTS.d.ts new file mode 100644 index 0000000..a7379a2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTS.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArguments } from '.'; +export declare function transformArguments(slots: number | Array): RedisCommandArguments; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTS.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTS.js new file mode 100644 index 0000000..be3c57f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTS.js @@ -0,0 +1,8 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +const generic_transformers_1 = require("./generic-transformers"); +function transformArguments(slots) { + return (0, generic_transformers_1.pushVerdictNumberArguments)(['CLUSTER', 'ADDSLOTS'], slots); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTSRANGE.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTSRANGE.d.ts new file mode 100644 index 0000000..d300f0a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTSRANGE.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArguments } from '.'; +import { SlotRange } from './generic-transformers'; +export declare function transformArguments(ranges: SlotRange | Array): RedisCommandArguments; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTSRANGE.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTSRANGE.js new file mode 100644 index 0000000..2f5a581 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_ADDSLOTSRANGE.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +const generic_transformers_1 = require("./generic-transformers"); +function transformArguments(ranges) { + return (0, generic_transformers_1.pushSlotRangesArguments)(['CLUSTER', 'ADDSLOTSRANGE'], ranges); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_BUMPEPOCH.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_BUMPEPOCH.d.ts new file mode 100644 index 0000000..4c0c07c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_BUMPEPOCH.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): 'BUMPED' | 
'STILL'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_BUMPEPOCH.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_BUMPEPOCH.js new file mode 100644 index 0000000..7d9928a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_BUMPEPOCH.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['CLUSTER', 'BUMPEPOCH']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.d.ts new file mode 100644 index 0000000..fe4cef2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(nodeId: string): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.js new file mode 100644 index 0000000..84d3e42 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNT-FAILURE-REPORTS.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(nodeId) { + return ['CLUSTER', 'COUNT-FAILURE-REPORTS', nodeId]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNTKEYSINSLOT.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNTKEYSINSLOT.d.ts new file mode 100644 index 0000000..8b2d0da --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNTKEYSINSLOT.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(slot: number): Array; +export declare function 
transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNTKEYSINSLOT.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNTKEYSINSLOT.js new file mode 100644 index 0000000..de29644 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_COUNTKEYSINSLOT.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(slot) { + return ['CLUSTER', 'COUNTKEYSINSLOT', slot.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTS.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTS.d.ts new file mode 100644 index 0000000..7fd38c2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTS.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArguments } from '.'; +export declare function transformArguments(slots: number | Array): RedisCommandArguments; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTS.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTS.js new file mode 100644 index 0000000..ba06017 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTS.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +const generic_transformers_1 = require("./generic-transformers"); +function transformArguments(slots) { + return (0, generic_transformers_1.pushVerdictNumberArguments)(['CLUSTER', 'DELSLOTS'], slots); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTSRANGE.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTSRANGE.d.ts new file mode 100644 index 0000000..d300f0a --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTSRANGE.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArguments } from '.'; +import { SlotRange } from './generic-transformers'; +export declare function transformArguments(ranges: SlotRange | Array): RedisCommandArguments; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTSRANGE.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTSRANGE.js new file mode 100644 index 0000000..bf57903 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_DELSLOTSRANGE.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +const generic_transformers_1 = require("./generic-transformers"); +function transformArguments(ranges) { + return (0, generic_transformers_1.pushSlotRangesArguments)(['CLUSTER', 'DELSLOTSRANGE'], ranges); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_FAILOVER.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_FAILOVER.d.ts new file mode 100644 index 0000000..83cd679 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_FAILOVER.d.ts @@ -0,0 +1,6 @@ +export declare enum FailoverModes { + FORCE = "FORCE", + TAKEOVER = "TAKEOVER" +} +export declare function transformArguments(mode?: FailoverModes): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_FAILOVER.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_FAILOVER.js new file mode 100644 index 0000000..2709cfe --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_FAILOVER.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FailoverModes = void 0; +var FailoverModes; +(function (FailoverModes) { + 
FailoverModes["FORCE"] = "FORCE"; + FailoverModes["TAKEOVER"] = "TAKEOVER"; +})(FailoverModes || (exports.FailoverModes = FailoverModes = {})); +function transformArguments(mode) { + const args = ['CLUSTER', 'FAILOVER']; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_FLUSHSLOTS.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_FLUSHSLOTS.d.ts new file mode 100644 index 0000000..42b48b6 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_FLUSHSLOTS.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_FLUSHSLOTS.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_FLUSHSLOTS.js new file mode 100644 index 0000000..3ce25ee --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_FLUSHSLOTS.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['CLUSTER', 'FLUSHSLOTS']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_FORGET.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_FORGET.d.ts new file mode 100644 index 0000000..f820b9f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_FORGET.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(nodeId: string): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_FORGET.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_FORGET.js new file mode 100644 index 0000000..177eb52 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_FORGET.js @@ -0,0 +1,7 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(nodeId) { + return ['CLUSTER', 'FORGET', nodeId]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_GETKEYSINSLOT.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_GETKEYSINSLOT.d.ts new file mode 100644 index 0000000..b234842 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_GETKEYSINSLOT.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(slot: number, count: number): Array; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_GETKEYSINSLOT.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_GETKEYSINSLOT.js new file mode 100644 index 0000000..a151bda --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_GETKEYSINSLOT.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(slot, count) { + return ['CLUSTER', 'GETKEYSINSLOT', slot.toString(), count.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_INFO.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_INFO.d.ts new file mode 100644 index 0000000..7f6d965 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_INFO.d.ts @@ -0,0 +1,21 @@ +export declare function transformArguments(): Array; +interface ClusterInfoReply { + state: string; + slots: { + assigned: number; + ok: number; + pfail: number; + fail: number; + }; + knownNodes: number; + size: number; + currentEpoch: number; + myEpoch: number; + stats: { + messagesSent: number; + messagesReceived: number; + }; +} +export declare function transformReply(reply: string): ClusterInfoReply; +export declare function 
extractLineValue(line: string): string; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_INFO.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_INFO.js new file mode 100644 index 0000000..3a81925 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_INFO.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.extractLineValue = exports.transformReply = exports.transformArguments = void 0; +function transformArguments() { + return ['CLUSTER', 'INFO']; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + const lines = reply.split('\r\n'); + return { + state: extractLineValue(lines[0]), + slots: { + assigned: Number(extractLineValue(lines[1])), + ok: Number(extractLineValue(lines[2])), + pfail: Number(extractLineValue(lines[3])), + fail: Number(extractLineValue(lines[4])) + }, + knownNodes: Number(extractLineValue(lines[5])), + size: Number(extractLineValue(lines[6])), + currentEpoch: Number(extractLineValue(lines[7])), + myEpoch: Number(extractLineValue(lines[8])), + stats: { + messagesSent: Number(extractLineValue(lines[9])), + messagesReceived: Number(extractLineValue(lines[10])) + } + }; +} +exports.transformReply = transformReply; +function extractLineValue(line) { + return line.substring(line.indexOf(':') + 1); +} +exports.extractLineValue = extractLineValue; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_KEYSLOT.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_KEYSLOT.d.ts new file mode 100644 index 0000000..181d809 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_KEYSLOT.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(key: string): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_KEYSLOT.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_KEYSLOT.js new file mode 100644 
index 0000000..d4ba8d7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_KEYSLOT.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(key) { + return ['CLUSTER', 'KEYSLOT', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_LINKS.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_LINKS.d.ts new file mode 100644 index 0000000..13bb777 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_LINKS.d.ts @@ -0,0 +1,25 @@ +export declare function transformArguments(): Array; +type ClusterLinksRawReply = Array<[ + 'direction', + string, + 'node', + string, + 'createTime', + number, + 'events', + string, + 'send-buffer-allocated', + number, + 'send-buffer-used', + number +]>; +type ClusterLinksReply = Array<{ + direction: string; + node: string; + createTime: number; + events: string; + sendBufferAllocated: number; + sendBufferUsed: number; +}>; +export declare function transformReply(reply: ClusterLinksRawReply): ClusterLinksReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_LINKS.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_LINKS.js new file mode 100644 index 0000000..815fd7a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_LINKS.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments() { + return ['CLUSTER', 'LINKS']; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply.map(peerLink => ({ + direction: peerLink[1], + node: peerLink[3], + createTime: Number(peerLink[5]), + events: peerLink[7], + sendBufferAllocated: Number(peerLink[9]), + sendBufferUsed: Number(peerLink[11]) + })); +} 
+exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_MEET.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_MEET.d.ts new file mode 100644 index 0000000..00af6a9 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_MEET.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(ip: string, port: number): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_MEET.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_MEET.js new file mode 100644 index 0000000..5a32515 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_MEET.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(ip, port) { + return ['CLUSTER', 'MEET', ip, port.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_MYID.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_MYID.d.ts new file mode 100644 index 0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_MYID.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_MYID.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_MYID.js new file mode 100644 index 0000000..184b4fa --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_MYID.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['CLUSTER', 'MYID']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_MYSHARDID.d.ts 
b/node_modules/@redis/client/dist/lib/commands/CLUSTER_MYSHARDID.d.ts new file mode 100644 index 0000000..2e0d31e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_MYSHARDID.d.ts @@ -0,0 +1,4 @@ +/// +export declare const IS_READ_ONLY = true; +export declare function transformArguments(): string[]; +export declare function transformReply(): string | Buffer; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_MYSHARDID.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_MYSHARDID.js new file mode 100644 index 0000000..269a2a4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_MYSHARDID.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments() { + return ['CLUSTER', 'MYSHARDID']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_NODES.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_NODES.d.ts new file mode 100644 index 0000000..52d4e3d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_NODES.d.ts @@ -0,0 +1,28 @@ +export declare function transformArguments(): Array; +export declare enum RedisClusterNodeLinkStates { + CONNECTED = "connected", + DISCONNECTED = "disconnected" +} +interface RedisClusterNodeAddress { + host: string; + port: number; + cport: number | null; +} +export interface RedisClusterReplicaNode extends RedisClusterNodeAddress { + id: string; + address: string; + flags: Array; + pingSent: number; + pongRecv: number; + configEpoch: number; + linkState: RedisClusterNodeLinkStates; +} +export interface RedisClusterMasterNode extends RedisClusterReplicaNode { + slots: Array<{ + from: number; + to: number; + }>; + replicas: Array; +} +export declare function transformReply(reply: string): Array; +export {}; diff --git 
a/node_modules/@redis/client/dist/lib/commands/CLUSTER_NODES.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_NODES.js new file mode 100644 index 0000000..a88989e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_NODES.js @@ -0,0 +1,74 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.RedisClusterNodeLinkStates = exports.transformArguments = void 0; +function transformArguments() { + return ['CLUSTER', 'NODES']; +} +exports.transformArguments = transformArguments; +var RedisClusterNodeLinkStates; +(function (RedisClusterNodeLinkStates) { + RedisClusterNodeLinkStates["CONNECTED"] = "connected"; + RedisClusterNodeLinkStates["DISCONNECTED"] = "disconnected"; +})(RedisClusterNodeLinkStates || (exports.RedisClusterNodeLinkStates = RedisClusterNodeLinkStates = {})); +function transformReply(reply) { + const lines = reply.split('\n'); + lines.pop(); // last line is empty + const mastersMap = new Map(), replicasMap = new Map(); + for (const line of lines) { + const [id, address, flags, masterId, pingSent, pongRecv, configEpoch, linkState, ...slots] = line.split(' '), node = { + id, + address, + ...transformNodeAddress(address), + flags: flags.split(','), + pingSent: Number(pingSent), + pongRecv: Number(pongRecv), + configEpoch: Number(configEpoch), + linkState: linkState + }; + if (masterId === '-') { + let replicas = replicasMap.get(id); + if (!replicas) { + replicas = []; + replicasMap.set(id, replicas); + } + mastersMap.set(id, { + ...node, + slots: slots.map(slot => { + // TODO: importing & exporting (https://redis.io/commands/cluster-nodes#special-slot-entries) + const [fromString, toString] = slot.split('-', 2), from = Number(fromString); + return { + from, + to: toString ? 
Number(toString) : from + }; + }), + replicas + }); + } + else { + const replicas = replicasMap.get(masterId); + if (!replicas) { + replicasMap.set(masterId, [node]); + } + else { + replicas.push(node); + } + } + } + return [...mastersMap.values()]; +} +exports.transformReply = transformReply; +function transformNodeAddress(address) { + const indexOfColon = address.lastIndexOf(':'), indexOfAt = address.indexOf('@', indexOfColon), host = address.substring(0, indexOfColon); + if (indexOfAt === -1) { + return { + host, + port: Number(address.substring(indexOfColon + 1)), + cport: null + }; + } + return { + host: address.substring(0, indexOfColon), + port: Number(address.substring(indexOfColon + 1, indexOfAt)), + cport: Number(address.substring(indexOfAt + 1)) + }; +} diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICAS.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICAS.d.ts new file mode 100644 index 0000000..73e9e4b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICAS.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(nodeId: string): Array; +export { transformReply } from './CLUSTER_NODES'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICAS.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICAS.js new file mode 100644 index 0000000..bc83f4e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICAS.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments(nodeId) { + return ['CLUSTER', 'REPLICAS', nodeId]; +} +exports.transformArguments = transformArguments; +var CLUSTER_NODES_1 = require("./CLUSTER_NODES"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return CLUSTER_NODES_1.transformReply; } }); diff --git 
a/node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICATE.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICATE.d.ts new file mode 100644 index 0000000..f820b9f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICATE.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(nodeId: string): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICATE.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICATE.js new file mode 100644 index 0000000..931b74c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_REPLICATE.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(nodeId) { + return ['CLUSTER', 'REPLICATE', nodeId]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_RESET.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_RESET.d.ts new file mode 100644 index 0000000..dfb60c3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_RESET.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(mode?: 'HARD' | 'SOFT'): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_RESET.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_RESET.js new file mode 100644 index 0000000..da95cd0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_RESET.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(mode) { + const args = ['CLUSTER', 'RESET']; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; diff --git 
a/node_modules/@redis/client/dist/lib/commands/CLUSTER_SAVECONFIG.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SAVECONFIG.d.ts new file mode 100644 index 0000000..42b48b6 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SAVECONFIG.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_SAVECONFIG.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SAVECONFIG.js new file mode 100644 index 0000000..2742c94 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SAVECONFIG.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['CLUSTER', 'SAVECONFIG']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_SET-CONFIG-EPOCH.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SET-CONFIG-EPOCH.d.ts new file mode 100644 index 0000000..f580d42 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SET-CONFIG-EPOCH.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(configEpoch: number): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_SET-CONFIG-EPOCH.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SET-CONFIG-EPOCH.js new file mode 100644 index 0000000..b4d3b73 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SET-CONFIG-EPOCH.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(configEpoch) { + return ['CLUSTER', 'SET-CONFIG-EPOCH', configEpoch.toString()]; +} +exports.transformArguments = transformArguments; diff --git 
a/node_modules/@redis/client/dist/lib/commands/CLUSTER_SETSLOT.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SETSLOT.d.ts new file mode 100644 index 0000000..aae29e0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SETSLOT.d.ts @@ -0,0 +1,8 @@ +export declare enum ClusterSlotStates { + IMPORTING = "IMPORTING", + MIGRATING = "MIGRATING", + STABLE = "STABLE", + NODE = "NODE" +} +export declare function transformArguments(slot: number, state: ClusterSlotStates, nodeId?: string): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_SETSLOT.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SETSLOT.js new file mode 100644 index 0000000..bf726e1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SETSLOT.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.ClusterSlotStates = void 0; +var ClusterSlotStates; +(function (ClusterSlotStates) { + ClusterSlotStates["IMPORTING"] = "IMPORTING"; + ClusterSlotStates["MIGRATING"] = "MIGRATING"; + ClusterSlotStates["STABLE"] = "STABLE"; + ClusterSlotStates["NODE"] = "NODE"; +})(ClusterSlotStates || (exports.ClusterSlotStates = ClusterSlotStates = {})); +function transformArguments(slot, state, nodeId) { + const args = ['CLUSTER', 'SETSLOT', slot.toString(), state]; + if (nodeId) { + args.push(nodeId); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_SLOTS.d.ts b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SLOTS.d.ts new file mode 100644 index 0000000..76888e0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SLOTS.d.ts @@ -0,0 +1,22 @@ +import { RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +type ClusterSlotsRawNode = [ip: string, port: 
number, id: string]; +type ClusterSlotsRawReply = Array<[ + from: number, + to: number, + master: ClusterSlotsRawNode, + ...replicas: Array +]>; +export interface ClusterSlotsNode { + ip: string; + port: number; + id: string; +} +export type ClusterSlotsReply = Array<{ + from: number; + to: number; + master: ClusterSlotsNode; + replicas: Array; +}>; +export declare function transformReply(reply: ClusterSlotsRawReply): ClusterSlotsReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/CLUSTER_SLOTS.js b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SLOTS.js new file mode 100644 index 0000000..1ee9206 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CLUSTER_SLOTS.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments() { + return ['CLUSTER', 'SLOTS']; +} +exports.transformArguments = transformArguments; +; +function transformReply(reply) { + return reply.map(([from, to, master, ...replicas]) => { + return { + from, + to, + master: transformNode(master), + replicas: replicas.map(transformNode) + }; + }); +} +exports.transformReply = transformReply; +function transformNode([ip, port, id]) { + return { + ip, + port, + id + }; +} diff --git a/node_modules/@redis/client/dist/lib/commands/COMMAND.d.ts b/node_modules/@redis/client/dist/lib/commands/COMMAND.d.ts new file mode 100644 index 0000000..f1730e0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/COMMAND.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '.'; +import { CommandRawReply, CommandReply } from './generic-transformers'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(reply: Array): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/COMMAND.js 
b/node_modules/@redis/client/dist/lib/commands/COMMAND.js new file mode 100644 index 0000000..b482a91 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/COMMAND.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.IS_READ_ONLY = true; +function transformArguments() { + return ['COMMAND']; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply.map(generic_transformers_1.transformCommandReply); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/COMMAND_COUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/COMMAND_COUNT.d.ts new file mode 100644 index 0000000..a2461a3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/COMMAND_COUNT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArguments } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/COMMAND_COUNT.js b/node_modules/@redis/client/dist/lib/commands/COMMAND_COUNT.js new file mode 100644 index 0000000..c417a38 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/COMMAND_COUNT.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments() { + return ['COMMAND', 'COUNT']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYS.d.ts b/node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYS.d.ts new file mode 100644 index 0000000..8979786 --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYS.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(args: Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYS.js b/node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYS.js new file mode 100644 index 0000000..8a3e57e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYS.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments(args) { + return ['COMMAND', 'GETKEYS', ...args]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYSANDFLAGS.d.ts b/node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYSANDFLAGS.d.ts new file mode 100644 index 0000000..2dcb14c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYSANDFLAGS.d.ts @@ -0,0 +1,13 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(args: Array): RedisCommandArguments; +type KeysAndFlagsRawReply = Array<[ + RedisCommandArgument, + RedisCommandArguments +]>; +type KeysAndFlagsReply = Array<{ + key: RedisCommandArgument; + flags: RedisCommandArguments; +}>; +export declare function transformReply(reply: KeysAndFlagsRawReply): KeysAndFlagsReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYSANDFLAGS.js b/node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYSANDFLAGS.js new file mode 100644 index 0000000..76c0fcd --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/COMMAND_GETKEYSANDFLAGS.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments(args) { + return ['COMMAND', 'GETKEYSANDFLAGS', ...args]; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply.map(([key, flags]) => ({ + key, + flags + })); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/COMMAND_INFO.d.ts b/node_modules/@redis/client/dist/lib/commands/COMMAND_INFO.d.ts new file mode 100644 index 0000000..80fce32 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/COMMAND_INFO.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '.'; +import { CommandRawReply, CommandReply } from './generic-transformers'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(commands: Array): RedisCommandArguments; +export declare function transformReply(reply: Array): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/COMMAND_INFO.js b/node_modules/@redis/client/dist/lib/commands/COMMAND_INFO.js new file mode 100644 index 0000000..552c991 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/COMMAND_INFO.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.IS_READ_ONLY = true; +function transformArguments(commands) { + return ['COMMAND', 'INFO', ...commands]; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply.map(command => command ? 
(0, generic_transformers_1.transformCommandReply)(command) : null); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/COMMAND_LIST.d.ts b/node_modules/@redis/client/dist/lib/commands/COMMAND_LIST.d.ts new file mode 100644 index 0000000..7f8bdaa --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/COMMAND_LIST.d.ts @@ -0,0 +1,14 @@ +import { RedisCommandArguments } from '.'; +export declare const IS_READ_ONLY = true; +export declare enum FilterBy { + MODULE = "MODULE", + ACLCAT = "ACLCAT", + PATTERN = "PATTERN" +} +interface Filter { + filterBy: FilterBy; + value: string; +} +export declare function transformArguments(filter?: Filter): RedisCommandArguments; +export declare function transformReply(): Array; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/COMMAND_LIST.js b/node_modules/@redis/client/dist/lib/commands/COMMAND_LIST.js new file mode 100644 index 0000000..ffda6c2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/COMMAND_LIST.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FilterBy = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +var FilterBy; +(function (FilterBy) { + FilterBy["MODULE"] = "MODULE"; + FilterBy["ACLCAT"] = "ACLCAT"; + FilterBy["PATTERN"] = "PATTERN"; +})(FilterBy || (exports.FilterBy = FilterBy = {})); +function transformArguments(filter) { + const args = ['COMMAND', 'LIST']; + if (filter) { + args.push('FILTERBY', filter.filterBy, filter.value); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CONFIG_GET.d.ts b/node_modules/@redis/client/dist/lib/commands/CONFIG_GET.d.ts new file mode 100644 index 0000000..063a85a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CONFIG_GET.d.ts @@ -0,0 +1,2 @@ +export declare function 
transformArguments(parameter: string): Array; +export { transformTuplesReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/CONFIG_GET.js b/node_modules/@redis/client/dist/lib/commands/CONFIG_GET.js new file mode 100644 index 0000000..646c774 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CONFIG_GET.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments(parameter) { + return ['CONFIG', 'GET', parameter]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformTuplesReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/CONFIG_RESETSTAT.d.ts b/node_modules/@redis/client/dist/lib/commands/CONFIG_RESETSTAT.d.ts new file mode 100644 index 0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CONFIG_RESETSTAT.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/CONFIG_RESETSTAT.js b/node_modules/@redis/client/dist/lib/commands/CONFIG_RESETSTAT.js new file mode 100644 index 0000000..debd3b2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CONFIG_RESETSTAT.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['CONFIG', 'RESETSTAT']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CONFIG_REWRITE.d.ts b/node_modules/@redis/client/dist/lib/commands/CONFIG_REWRITE.d.ts new file mode 100644 index 
0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CONFIG_REWRITE.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/CONFIG_REWRITE.js b/node_modules/@redis/client/dist/lib/commands/CONFIG_REWRITE.js new file mode 100644 index 0000000..b78ddbc --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CONFIG_REWRITE.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['CONFIG', 'REWRITE']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/CONFIG_SET.d.ts b/node_modules/@redis/client/dist/lib/commands/CONFIG_SET.d.ts new file mode 100644 index 0000000..a6deda1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CONFIG_SET.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +type SingleParameter = [parameter: RedisCommandArgument, value: RedisCommandArgument]; +type MultipleParameters = [config: Record]; +export declare function transformArguments(...[parameterOrConfig, value]: SingleParameter | MultipleParameters): RedisCommandArguments; +export declare function transformReply(): string; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/CONFIG_SET.js b/node_modules/@redis/client/dist/lib/commands/CONFIG_SET.js new file mode 100644 index 0000000..6af6707 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/CONFIG_SET.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(...[parameterOrConfig, value]) { + const args = ['CONFIG', 'SET']; + if (typeof parameterOrConfig === 'string') { + args.push(parameterOrConfig, value); + } + 
else { + for (const [key, value] of Object.entries(parameterOrConfig)) { + args.push(key, value); + } + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/COPY.d.ts b/node_modules/@redis/client/dist/lib/commands/COPY.d.ts new file mode 100644 index 0000000..d266fee --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/COPY.d.ts @@ -0,0 +1,8 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +interface CopyCommandOptions { + destinationDb?: number; + replace?: boolean; +} +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(source: RedisCommandArgument, destination: RedisCommandArgument, options?: CopyCommandOptions): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/COPY.js b/node_modules/@redis/client/dist/lib/commands/COPY.js new file mode 100644 index 0000000..e6090a6 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/COPY.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(source, destination, options) { + const args = ['COPY', source, destination]; + if (options?.destinationDb) { + args.push('DB', options.destinationDb.toString()); + } + if (options?.replace) { + args.push('REPLACE'); + } + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/DBSIZE.d.ts b/node_modules/@redis/client/dist/lib/commands/DBSIZE.d.ts new file mode 
100644 index 0000000..14244d2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/DBSIZE.d.ts @@ -0,0 +1,3 @@ +export declare const IS_READ_ONLY = true; +export declare function transformArguments(): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/DBSIZE.js b/node_modules/@redis/client/dist/lib/commands/DBSIZE.js new file mode 100644 index 0000000..c50332d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/DBSIZE.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments() { + return ['DBSIZE']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/DECR.d.ts b/node_modules/@redis/client/dist/lib/commands/DECR.d.ts new file mode 100644 index 0000000..c35a19c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/DECR.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/DECR.js b/node_modules/@redis/client/dist/lib/commands/DECR.js new file mode 100644 index 0000000..a7fbc3d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/DECR.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['DECR', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/DECRBY.d.ts b/node_modules/@redis/client/dist/lib/commands/DECRBY.d.ts new 
file mode 100644 index 0000000..65d04dc --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/DECRBY.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, decrement: number): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/DECRBY.js b/node_modules/@redis/client/dist/lib/commands/DECRBY.js new file mode 100644 index 0000000..f119275 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/DECRBY.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, decrement) { + return ['DECRBY', key, decrement.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/DEL.d.ts b/node_modules/@redis/client/dist/lib/commands/DEL.d.ts new file mode 100644 index 0000000..8c2d71e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/DEL.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(keys: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/DEL.js b/node_modules/@redis/client/dist/lib/commands/DEL.js new file mode 100644 index 0000000..bc2a5c0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/DEL.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; 
+function transformArguments(keys) { + return (0, generic_transformers_1.pushVerdictArguments)(['DEL'], keys); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/DISCARD.d.ts b/node_modules/@redis/client/dist/lib/commands/DISCARD.d.ts new file mode 100644 index 0000000..3e8dbce --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/DISCARD.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument } from '.'; +export declare function transformArguments(): Array; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/DISCARD.js b/node_modules/@redis/client/dist/lib/commands/DISCARD.js new file mode 100644 index 0000000..eb19c5f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/DISCARD.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['DISCARD']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/DUMP.d.ts b/node_modules/@redis/client/dist/lib/commands/DUMP.d.ts new file mode 100644 index 0000000..287366c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/DUMP.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/DUMP.js b/node_modules/@redis/client/dist/lib/commands/DUMP.js new file mode 100644 index 0000000..9b3d68a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/DUMP.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX 
= void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['DUMP', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ECHO.d.ts b/node_modules/@redis/client/dist/lib/commands/ECHO.d.ts new file mode 100644 index 0000000..144daca --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ECHO.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(message: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/ECHO.js b/node_modules/@redis/client/dist/lib/commands/ECHO.js new file mode 100644 index 0000000..06370d2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ECHO.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments(message) { + return ['ECHO', message]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/EVAL.d.ts b/node_modules/@redis/client/dist/lib/commands/EVAL.d.ts new file mode 100644 index 0000000..f084f6f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EVAL.d.ts @@ -0,0 +1,3 @@ +import { evalFirstKeyIndex, EvalOptions } from './generic-transformers'; +export declare const FIRST_KEY_INDEX: typeof evalFirstKeyIndex; +export declare function transformArguments(script: string, options?: EvalOptions): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/EVAL.js b/node_modules/@redis/client/dist/lib/commands/EVAL.js new file mode 100644 index 0000000..f159be6 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EVAL.js @@ -0,0 +1,9 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = generic_transformers_1.evalFirstKeyIndex; +function transformArguments(script, options) { + return (0, generic_transformers_1.pushEvalArguments)(['EVAL', script], options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/EVALSHA.d.ts b/node_modules/@redis/client/dist/lib/commands/EVALSHA.d.ts new file mode 100644 index 0000000..1e07b96 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EVALSHA.d.ts @@ -0,0 +1,3 @@ +import { evalFirstKeyIndex, EvalOptions } from './generic-transformers'; +export declare const FIRST_KEY_INDEX: typeof evalFirstKeyIndex; +export declare function transformArguments(sha1: string, options?: EvalOptions): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/EVALSHA.js b/node_modules/@redis/client/dist/lib/commands/EVALSHA.js new file mode 100644 index 0000000..6339378 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EVALSHA.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = generic_transformers_1.evalFirstKeyIndex; +function transformArguments(sha1, options) { + return (0, generic_transformers_1.pushEvalArguments)(['EVALSHA', sha1], options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/EVALSHA_RO.d.ts b/node_modules/@redis/client/dist/lib/commands/EVALSHA_RO.d.ts new file mode 100644 index 0000000..ba5e8fd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EVALSHA_RO.d.ts @@ -0,0 +1,4 @@ +import { evalFirstKeyIndex, EvalOptions } from 
'./generic-transformers'; +export declare const FIRST_KEY_INDEX: typeof evalFirstKeyIndex; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(sha1: string, options?: EvalOptions): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/EVALSHA_RO.js b/node_modules/@redis/client/dist/lib/commands/EVALSHA_RO.js new file mode 100644 index 0000000..7eead4d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EVALSHA_RO.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = generic_transformers_1.evalFirstKeyIndex; +exports.IS_READ_ONLY = true; +function transformArguments(sha1, options) { + return (0, generic_transformers_1.pushEvalArguments)(['EVALSHA_RO', sha1], options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/EVAL_RO.d.ts b/node_modules/@redis/client/dist/lib/commands/EVAL_RO.d.ts new file mode 100644 index 0000000..327b45b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EVAL_RO.d.ts @@ -0,0 +1,4 @@ +import { evalFirstKeyIndex, EvalOptions } from './generic-transformers'; +export declare const FIRST_KEY_INDEX: typeof evalFirstKeyIndex; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(script: string, options?: EvalOptions): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/EVAL_RO.js b/node_modules/@redis/client/dist/lib/commands/EVAL_RO.js new file mode 100644 index 0000000..9aae49f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EVAL_RO.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const 
generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = generic_transformers_1.evalFirstKeyIndex; +exports.IS_READ_ONLY = true; +function transformArguments(script, options) { + return (0, generic_transformers_1.pushEvalArguments)(['EVAL_RO', script], options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/EXISTS.d.ts b/node_modules/@redis/client/dist/lib/commands/EXISTS.d.ts new file mode 100644 index 0000000..4401897 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EXISTS.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(keys: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/EXISTS.js b/node_modules/@redis/client/dist/lib/commands/EXISTS.js new file mode 100644 index 0000000..e389339 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EXISTS.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(keys) { + return (0, generic_transformers_1.pushVerdictArguments)(['EXISTS'], keys); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/EXPIRE.d.ts b/node_modules/@redis/client/dist/lib/commands/EXPIRE.d.ts new file mode 100644 index 0000000..c226f18 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EXPIRE.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const 
FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, seconds: number, mode?: 'NX' | 'XX' | 'GT' | 'LT'): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/EXPIRE.js b/node_modules/@redis/client/dist/lib/commands/EXPIRE.js new file mode 100644 index 0000000..143d0b6 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EXPIRE.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, seconds, mode) { + const args = ['EXPIRE', key, seconds.toString()]; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/EXPIREAT.d.ts b/node_modules/@redis/client/dist/lib/commands/EXPIREAT.d.ts new file mode 100644 index 0000000..44d630b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EXPIREAT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, timestamp: number | Date, mode?: 'NX' | 'XX' | 'GT' | 'LT'): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/EXPIREAT.js b/node_modules/@redis/client/dist/lib/commands/EXPIREAT.js new file mode 100644 index 0000000..88c9f54 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EXPIREAT.js 
@@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, timestamp, mode) { + const args = [ + 'EXPIREAT', + key, + (0, generic_transformers_1.transformEXAT)(timestamp) + ]; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_2 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_2.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/EXPIRETIME.d.ts b/node_modules/@redis/client/dist/lib/commands/EXPIRETIME.d.ts new file mode 100644 index 0000000..c35a19c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EXPIRETIME.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/EXPIRETIME.js b/node_modules/@redis/client/dist/lib/commands/EXPIRETIME.js new file mode 100644 index 0000000..3e0c4e2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/EXPIRETIME.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['EXPIRETIME', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/FAILOVER.d.ts b/node_modules/@redis/client/dist/lib/commands/FAILOVER.d.ts new file mode 
100644 index 0000000..8d74bf9 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FAILOVER.d.ts @@ -0,0 +1,12 @@ +interface FailoverOptions { + TO?: { + host: string; + port: number; + FORCE?: true; + }; + ABORT?: true; + TIMEOUT?: number; +} +export declare function transformArguments(options?: FailoverOptions): Array; +export declare function transformReply(): string; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/FAILOVER.js b/node_modules/@redis/client/dist/lib/commands/FAILOVER.js new file mode 100644 index 0000000..e6db685 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FAILOVER.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(options) { + const args = ['FAILOVER']; + if (options?.TO) { + args.push('TO', options.TO.host, options.TO.port.toString()); + if (options.TO.FORCE) { + args.push('FORCE'); + } + } + if (options?.ABORT) { + args.push('ABORT'); + } + if (options?.TIMEOUT) { + args.push('TIMEOUT', options.TIMEOUT.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/FCALL.d.ts b/node_modules/@redis/client/dist/lib/commands/FCALL.d.ts new file mode 100644 index 0000000..79da897 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FCALL.d.ts @@ -0,0 +1,3 @@ +import { evalFirstKeyIndex, EvalOptions } from './generic-transformers'; +export declare const FIRST_KEY_INDEX: typeof evalFirstKeyIndex; +export declare function transformArguments(fn: string, options?: EvalOptions): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/FCALL.js b/node_modules/@redis/client/dist/lib/commands/FCALL.js new file mode 100644 index 0000000..eb75f71 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FCALL.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", 
{ value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = generic_transformers_1.evalFirstKeyIndex; +function transformArguments(fn, options) { + return (0, generic_transformers_1.pushEvalArguments)(['FCALL', fn], options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/FCALL_RO.d.ts b/node_modules/@redis/client/dist/lib/commands/FCALL_RO.d.ts new file mode 100644 index 0000000..b963229 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FCALL_RO.d.ts @@ -0,0 +1,4 @@ +import { evalFirstKeyIndex, EvalOptions } from './generic-transformers'; +export declare const FIRST_KEY_INDEX: typeof evalFirstKeyIndex; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(fn: string, options?: EvalOptions): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/FCALL_RO.js b/node_modules/@redis/client/dist/lib/commands/FCALL_RO.js new file mode 100644 index 0000000..e086497 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FCALL_RO.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = generic_transformers_1.evalFirstKeyIndex; +exports.IS_READ_ONLY = true; +function transformArguments(fn, options) { + return (0, generic_transformers_1.pushEvalArguments)(['FCALL_RO', fn], options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/FLUSHALL.d.ts b/node_modules/@redis/client/dist/lib/commands/FLUSHALL.d.ts new file mode 100644 index 0000000..b0a35d1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FLUSHALL.d.ts @@ -0,0 +1,6 @@ +export 
declare enum RedisFlushModes { + ASYNC = "ASYNC", + SYNC = "SYNC" +} +export declare function transformArguments(mode?: RedisFlushModes): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/FLUSHALL.js b/node_modules/@redis/client/dist/lib/commands/FLUSHALL.js new file mode 100644 index 0000000..38c1ff1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FLUSHALL.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.RedisFlushModes = void 0; +var RedisFlushModes; +(function (RedisFlushModes) { + RedisFlushModes["ASYNC"] = "ASYNC"; + RedisFlushModes["SYNC"] = "SYNC"; +})(RedisFlushModes || (exports.RedisFlushModes = RedisFlushModes = {})); +function transformArguments(mode) { + const args = ['FLUSHALL']; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/FLUSHDB.d.ts b/node_modules/@redis/client/dist/lib/commands/FLUSHDB.d.ts new file mode 100644 index 0000000..34c668d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FLUSHDB.d.ts @@ -0,0 +1,3 @@ +import { RedisFlushModes } from './FLUSHALL'; +export declare function transformArguments(mode?: RedisFlushModes): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/FLUSHDB.js b/node_modules/@redis/client/dist/lib/commands/FLUSHDB.js new file mode 100644 index 0000000..3d52b30 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FLUSHDB.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(mode) { + const args = ['FLUSHDB']; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; diff --git 
a/node_modules/@redis/client/dist/lib/commands/FUNCTION_DELETE.d.ts b/node_modules/@redis/client/dist/lib/commands/FUNCTION_DELETE.d.ts new file mode 100644 index 0000000..bffe6ff --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_DELETE.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArguments } from '.'; +export declare function transformArguments(library: string): RedisCommandArguments; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_DELETE.js b/node_modules/@redis/client/dist/lib/commands/FUNCTION_DELETE.js new file mode 100644 index 0000000..750d230 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_DELETE.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(library) { + return ['FUNCTION', 'DELETE', library]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_DUMP.d.ts b/node_modules/@redis/client/dist/lib/commands/FUNCTION_DUMP.d.ts new file mode 100644 index 0000000..2cb83d8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_DUMP.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_DUMP.js b/node_modules/@redis/client/dist/lib/commands/FUNCTION_DUMP.js new file mode 100644 index 0000000..a372bf0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_DUMP.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['FUNCTION', 'DUMP']; +} +exports.transformArguments = 
transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_FLUSH.d.ts b/node_modules/@redis/client/dist/lib/commands/FUNCTION_FLUSH.d.ts new file mode 100644 index 0000000..782dfae --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_FLUSH.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArguments } from '.'; +export declare function transformArguments(mode?: 'ASYNC' | 'SYNC'): RedisCommandArguments; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_FLUSH.js b/node_modules/@redis/client/dist/lib/commands/FUNCTION_FLUSH.js new file mode 100644 index 0000000..b922cfa --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_FLUSH.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(mode) { + const args = ['FUNCTION', 'FLUSH']; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_KILL.d.ts b/node_modules/@redis/client/dist/lib/commands/FUNCTION_KILL.d.ts new file mode 100644 index 0000000..add4a5f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_KILL.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_KILL.js b/node_modules/@redis/client/dist/lib/commands/FUNCTION_KILL.js new file mode 100644 index 0000000..a19919f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_KILL.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['FUNCTION', 'KILL']; +} 
+exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST.d.ts b/node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST.d.ts new file mode 100644 index 0000000..95fc892 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArguments } from '.'; +import { FunctionListItemReply, FunctionListRawItemReply } from './generic-transformers'; +export declare function transformArguments(pattern?: string): RedisCommandArguments; +export declare function transformReply(reply: Array): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST.js b/node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST.js new file mode 100644 index 0000000..216ee6b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +const generic_transformers_1 = require("./generic-transformers"); +function transformArguments(pattern) { + const args = ['FUNCTION', 'LIST']; + if (pattern) { + args.push(pattern); + } + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply.map(generic_transformers_1.transformFunctionListItemReply); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST_WITHCODE.d.ts b/node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST_WITHCODE.d.ts new file mode 100644 index 0000000..5ae6a77 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST_WITHCODE.d.ts @@ -0,0 +1,13 @@ +import { RedisCommandArguments } from '.'; +import { FunctionListItemReply, FunctionListRawItemReply } from './generic-transformers'; +export declare function transformArguments(pattern?: string): RedisCommandArguments; +type 
FunctionListWithCodeRawItemReply = [ + ...FunctionListRawItemReply, + 'library_code', + string +]; +interface FunctionListWithCodeItemReply extends FunctionListItemReply { + libraryCode: string; +} +export declare function transformReply(reply: Array): Array; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST_WITHCODE.js b/node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST_WITHCODE.js new file mode 100644 index 0000000..3ac65dc --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_LIST_WITHCODE.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +const FUNCTION_LIST_1 = require("./FUNCTION_LIST"); +const generic_transformers_1 = require("./generic-transformers"); +function transformArguments(pattern) { + const args = (0, FUNCTION_LIST_1.transformArguments)(pattern); + args.push('WITHCODE'); + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply.map(library => ({ + ...(0, generic_transformers_1.transformFunctionListItemReply)(library), + libraryCode: library[7] + })); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_LOAD.d.ts b/node_modules/@redis/client/dist/lib/commands/FUNCTION_LOAD.d.ts new file mode 100644 index 0000000..da91865 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_LOAD.d.ts @@ -0,0 +1,7 @@ +import { RedisCommandArguments } from '.'; +interface FunctionLoadOptions { + REPLACE?: boolean; +} +export declare function transformArguments(code: string, options?: FunctionLoadOptions): RedisCommandArguments; +export declare function transformReply(): string; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_LOAD.js b/node_modules/@redis/client/dist/lib/commands/FUNCTION_LOAD.js new file mode 100644 index 
0000000..51892a0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_LOAD.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(code, options) { + const args = ['FUNCTION', 'LOAD']; + if (options?.REPLACE) { + args.push('REPLACE'); + } + args.push(code); + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_RESTORE.d.ts b/node_modules/@redis/client/dist/lib/commands/FUNCTION_RESTORE.d.ts new file mode 100644 index 0000000..6d1188e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_RESTORE.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(dump: RedisCommandArgument, mode?: 'FLUSH' | 'APPEND' | 'REPLACE'): RedisCommandArguments; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_RESTORE.js b/node_modules/@redis/client/dist/lib/commands/FUNCTION_RESTORE.js new file mode 100644 index 0000000..2d368bf --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_RESTORE.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(dump, mode) { + const args = ['FUNCTION', 'RESTORE', dump]; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_STATS.d.ts b/node_modules/@redis/client/dist/lib/commands/FUNCTION_STATS.d.ts new file mode 100644 index 0000000..a170273 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_STATS.d.ts @@ -0,0 +1,28 @@ +import { RedisCommandArguments } from '.'; +export declare function transformArguments(): 
RedisCommandArguments; +type FunctionStatsRawReply = [ + 'running_script', + null | [ + 'name', + string, + 'command', + string, + 'duration_ms', + number + ], + 'engines', + Array +]; +interface FunctionStatsReply { + runningScript: null | { + name: string; + command: string; + durationMs: number; + }; + engines: Record; +} +export declare function transformReply(reply: FunctionStatsRawReply): FunctionStatsReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/FUNCTION_STATS.js b/node_modules/@redis/client/dist/lib/commands/FUNCTION_STATS.js new file mode 100644 index 0000000..27c7de4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/FUNCTION_STATS.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments() { + return ['FUNCTION', 'STATS']; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + const engines = Object.create(null); + for (let i = 0; i < reply[3].length; i++) { + engines[reply[3][i]] = { + librariesCount: reply[3][++i][1], + functionsCount: reply[3][i][3] + }; + } + return { + runningScript: reply[1] === null ? 
null : { + name: reply[1][1], + command: reply[1][3], + durationMs: reply[1][5] + }, + engines + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/GEOADD.d.ts b/node_modules/@redis/client/dist/lib/commands/GEOADD.d.ts new file mode 100644 index 0000000..38967d8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEOADD.d.ts @@ -0,0 +1,20 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoCoordinates } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +interface GeoMember extends GeoCoordinates { + member: RedisCommandArgument; +} +interface NX { + NX?: true; +} +interface XX { + XX?: true; +} +type SetGuards = NX | XX; +interface GeoAddCommonOptions { + CH?: true; +} +type GeoAddOptions = SetGuards & GeoAddCommonOptions; +export declare function transformArguments(key: RedisCommandArgument, toAdd: GeoMember | Array, options?: GeoAddOptions): RedisCommandArguments; +export declare function transformReply(): number; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/GEOADD.js b/node_modules/@redis/client/dist/lib/commands/GEOADD.js new file mode 100644 index 0000000..966758b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEOADD.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, toAdd, options) { + const args = ['GEOADD', key]; + if (options?.NX) { + args.push('NX'); + } + else if (options?.XX) { + args.push('XX'); + } + if (options?.CH) { + args.push('CH'); + } + for (const { longitude, latitude, member } of (Array.isArray(toAdd) ? 
toAdd : [toAdd])) { + args.push(longitude.toString(), latitude.toString(), member); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GEODIST.d.ts b/node_modules/@redis/client/dist/lib/commands/GEODIST.d.ts new file mode 100644 index 0000000..e3064e4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEODIST.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoUnits } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, member1: RedisCommandArgument, member2: RedisCommandArgument, unit?: GeoUnits): RedisCommandArguments; +export declare function transformReply(reply: RedisCommandArgument | null): number | null; diff --git a/node_modules/@redis/client/dist/lib/commands/GEODIST.js b/node_modules/@redis/client/dist/lib/commands/GEODIST.js new file mode 100644 index 0000000..e9f67d2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEODIST.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, member1, member2, unit) { + const args = ['GEODIST', key, member1, member2]; + if (unit) { + args.push(unit); + } + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply === null ? 
null : Number(reply); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/GEOHASH.d.ts b/node_modules/@redis/client/dist/lib/commands/GEOHASH.d.ts new file mode 100644 index 0000000..fab2cad --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEOHASH.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, member: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/GEOHASH.js b/node_modules/@redis/client/dist/lib/commands/GEOHASH.js new file mode 100644 index 0000000..e53d5ea --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEOHASH.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, member) { + return (0, generic_transformers_1.pushVerdictArguments)(['GEOHASH', key], member); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GEOPOS.d.ts b/node_modules/@redis/client/dist/lib/commands/GEOPOS.d.ts new file mode 100644 index 0000000..e209890 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEOPOS.d.ts @@ -0,0 +1,11 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, member: RedisCommandArgument | Array): RedisCommandArguments; +type GeoCoordinatesRawReply = 
Array<[RedisCommandArgument, RedisCommandArgument] | null>; +interface GeoCoordinates { + longitude: RedisCommandArgument; + latitude: RedisCommandArgument; +} +export declare function transformReply(reply: GeoCoordinatesRawReply): Array; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/GEOPOS.js b/node_modules/@redis/client/dist/lib/commands/GEOPOS.js new file mode 100644 index 0000000..c1d90e8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEOPOS.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, member) { + return (0, generic_transformers_1.pushVerdictArguments)(['GEOPOS', key], member); +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply.map(coordinates => coordinates === null ? 
null : { + longitude: coordinates[0], + latitude: coordinates[1] + }); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUS.d.ts b/node_modules/@redis/client/dist/lib/commands/GEORADIUS.d.ts new file mode 100644 index 0000000..7574a1a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUS.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoSearchOptions, GeoCoordinates, GeoUnits } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, coordinates: GeoCoordinates, radius: number, unit: GeoUnits, options?: GeoSearchOptions): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUS.js b/node_modules/@redis/client/dist/lib/commands/GEORADIUS.js new file mode 100644 index 0000000..8329111 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUS.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, coordinates, radius, unit, options) { + return (0, generic_transformers_1.pushGeoRadiusArguments)(['GEORADIUS'], key, coordinates, radius, unit, options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER.d.ts b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER.d.ts new file mode 100644 index 0000000..c801788 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, 
RedisCommandArguments } from '.'; +import { GeoSearchOptions, GeoUnits } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, member: string, radius: number, unit: GeoUnits, options?: GeoSearchOptions): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER.js b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER.js new file mode 100644 index 0000000..18bb292 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, member, radius, unit, options) { + return (0, generic_transformers_1.pushGeoRadiusArguments)(['GEORADIUSBYMEMBER'], key, member, radius, unit, options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBERSTORE.d.ts b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBERSTORE.d.ts new file mode 100644 index 0000000..b602982 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBERSTORE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoUnits, GeoRadiusStoreOptions } from './generic-transformers'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './GEORADIUSBYMEMBER'; +export declare function transformArguments(key: RedisCommandArgument, member: string, radius: number, unit: GeoUnits, destination: RedisCommandArgument, options?: GeoRadiusStoreOptions): RedisCommandArguments; +export declare function 
transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBERSTORE.js b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBERSTORE.js new file mode 100644 index 0000000..8a91761 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBERSTORE.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +var GEORADIUSBYMEMBER_1 = require("./GEORADIUSBYMEMBER"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return GEORADIUSBYMEMBER_1.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return GEORADIUSBYMEMBER_1.IS_READ_ONLY; } }); +function transformArguments(key, member, radius, unit, destination, options) { + return (0, generic_transformers_1.pushGeoRadiusStoreArguments)(['GEORADIUSBYMEMBER'], key, member, radius, unit, destination, options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO.d.ts b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO.d.ts new file mode 100644 index 0000000..c801788 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoSearchOptions, GeoUnits } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, member: string, radius: number, unit: GeoUnits, options?: GeoSearchOptions): RedisCommandArguments; +export declare function transformReply(): Array; diff --git 
a/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO.js b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO.js new file mode 100644 index 0000000..7e0c7e0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, member, radius, unit, options) { + return (0, generic_transformers_1.pushGeoRadiusArguments)(['GEORADIUSBYMEMBER_RO'], key, member, radius, unit, options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO_WITH.d.ts b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO_WITH.d.ts new file mode 100644 index 0000000..65a34ff --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO_WITH.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoReplyWith, GeoSearchOptions, GeoUnits } from './generic-transformers'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './GEORADIUSBYMEMBER_RO'; +export declare function transformArguments(key: RedisCommandArgument, member: string, radius: number, unit: GeoUnits, replyWith: Array, options?: GeoSearchOptions): RedisCommandArguments; +export { transformGeoMembersWithReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO_WITH.js b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO_WITH.js new file mode 100644 index 0000000..dc095b2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_RO_WITH.js @@ -0,0 +1,16 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const GEORADIUSBYMEMBER_RO_1 = require("./GEORADIUSBYMEMBER_RO"); +var GEORADIUSBYMEMBER_RO_2 = require("./GEORADIUSBYMEMBER_RO"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return GEORADIUSBYMEMBER_RO_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return GEORADIUSBYMEMBER_RO_2.IS_READ_ONLY; } }); +function transformArguments(key, member, radius, unit, replyWith, options) { + const args = (0, GEORADIUSBYMEMBER_RO_1.transformArguments)(key, member, radius, unit, options); + args.push(...replyWith); + args.preserve = replyWith; + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformGeoMembersWithReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_WITH.d.ts b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_WITH.d.ts new file mode 100644 index 0000000..d5976c1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_WITH.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoReplyWith, GeoSearchOptions, GeoUnits } from './generic-transformers'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './GEORADIUSBYMEMBER'; +export declare function transformArguments(key: RedisCommandArgument, member: string, radius: number, unit: GeoUnits, replyWith: Array, options?: GeoSearchOptions): RedisCommandArguments; +export { transformGeoMembersWithReply as transformReply } from './generic-transformers'; diff --git 
a/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_WITH.js b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_WITH.js new file mode 100644 index 0000000..c8b18eb --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUSBYMEMBER_WITH.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const GEORADIUSBYMEMBER_1 = require("./GEORADIUSBYMEMBER"); +var GEORADIUSBYMEMBER_2 = require("./GEORADIUSBYMEMBER"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return GEORADIUSBYMEMBER_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return GEORADIUSBYMEMBER_2.IS_READ_ONLY; } }); +function transformArguments(key, member, radius, unit, replyWith, options) { + const args = (0, GEORADIUSBYMEMBER_1.transformArguments)(key, member, radius, unit, options); + args.push(...replyWith); + args.preserve = replyWith; + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformGeoMembersWithReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUSSTORE.d.ts b/node_modules/@redis/client/dist/lib/commands/GEORADIUSSTORE.d.ts new file mode 100644 index 0000000..272ba5d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUSSTORE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoCoordinates, GeoUnits, GeoRadiusStoreOptions } from './generic-transformers'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './GEORADIUS'; +export declare function transformArguments(key: RedisCommandArgument, coordinates: 
GeoCoordinates, radius: number, unit: GeoUnits, destination: RedisCommandArgument, options?: GeoRadiusStoreOptions): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUSSTORE.js b/node_modules/@redis/client/dist/lib/commands/GEORADIUSSTORE.js new file mode 100644 index 0000000..eec23ef --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUSSTORE.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +var GEORADIUS_1 = require("./GEORADIUS"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return GEORADIUS_1.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return GEORADIUS_1.IS_READ_ONLY; } }); +function transformArguments(key, coordinates, radius, unit, destination, options) { + return (0, generic_transformers_1.pushGeoRadiusStoreArguments)(['GEORADIUS'], key, coordinates, radius, unit, destination, options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO.d.ts b/node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO.d.ts new file mode 100644 index 0000000..7574a1a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoSearchOptions, GeoCoordinates, GeoUnits } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, coordinates: GeoCoordinates, radius: number, unit: GeoUnits, options?: GeoSearchOptions): RedisCommandArguments; +export 
declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO.js b/node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO.js new file mode 100644 index 0000000..7f34dad --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, coordinates, radius, unit, options) { + return (0, generic_transformers_1.pushGeoRadiusArguments)(['GEORADIUS_RO'], key, coordinates, radius, unit, options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO_WITH.d.ts b/node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO_WITH.d.ts new file mode 100644 index 0000000..ce99bdd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO_WITH.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoReplyWith, GeoSearchOptions, GeoCoordinates, GeoUnits } from './generic-transformers'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './GEORADIUS_RO'; +export declare function transformArguments(key: RedisCommandArgument, coordinates: GeoCoordinates, radius: number, unit: GeoUnits, replyWith: Array, options?: GeoSearchOptions): RedisCommandArguments; +export { transformGeoMembersWithReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO_WITH.js b/node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO_WITH.js new file mode 100644 index 0000000..a83fbb1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUS_RO_WITH.js @@ -0,0 +1,16 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const GEORADIUS_RO_1 = require("./GEORADIUS_RO"); +var GEORADIUS_RO_2 = require("./GEORADIUS_RO"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return GEORADIUS_RO_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return GEORADIUS_RO_2.IS_READ_ONLY; } }); +function transformArguments(key, coordinates, radius, unit, replyWith, options) { + const args = (0, GEORADIUS_RO_1.transformArguments)(key, coordinates, radius, unit, options); + args.push(...replyWith); + args.preserve = replyWith; + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformGeoMembersWithReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUS_WITH.d.ts b/node_modules/@redis/client/dist/lib/commands/GEORADIUS_WITH.d.ts new file mode 100644 index 0000000..96f887c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUS_WITH.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoReplyWith, GeoSearchOptions, GeoCoordinates, GeoUnits } from './generic-transformers'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './GEORADIUS'; +export declare function transformArguments(key: RedisCommandArgument, coordinates: GeoCoordinates, radius: number, unit: GeoUnits, replyWith: Array, options?: GeoSearchOptions): RedisCommandArguments; +export { transformGeoMembersWithReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/GEORADIUS_WITH.js 
b/node_modules/@redis/client/dist/lib/commands/GEORADIUS_WITH.js new file mode 100644 index 0000000..e146b00 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEORADIUS_WITH.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const GEORADIUS_1 = require("./GEORADIUS"); +var GEORADIUS_2 = require("./GEORADIUS"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return GEORADIUS_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return GEORADIUS_2.IS_READ_ONLY; } }); +function transformArguments(key, coordinates, radius, unit, replyWith, options) { + const args = (0, GEORADIUS_1.transformArguments)(key, coordinates, radius, unit, options); + args.push(...replyWith); + args.preserve = replyWith; + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformGeoMembersWithReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/GEOSEARCH.d.ts b/node_modules/@redis/client/dist/lib/commands/GEOSEARCH.d.ts new file mode 100644 index 0000000..2205100 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEOSEARCH.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoSearchFrom, GeoSearchBy, GeoSearchOptions } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, from: GeoSearchFrom, by: GeoSearchBy, options?: GeoSearchOptions): RedisCommandArguments; +export declare function transformReply(): Array; diff 
--git a/node_modules/@redis/client/dist/lib/commands/GEOSEARCH.js b/node_modules/@redis/client/dist/lib/commands/GEOSEARCH.js new file mode 100644 index 0000000..b2e2aca --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEOSEARCH.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, from, by, options) { + return (0, generic_transformers_1.pushGeoSearchArguments)(['GEOSEARCH'], key, from, by, options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GEOSEARCHSTORE.d.ts b/node_modules/@redis/client/dist/lib/commands/GEOSEARCHSTORE.d.ts new file mode 100644 index 0000000..1ac151e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEOSEARCHSTORE.d.ts @@ -0,0 +1,8 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoSearchFrom, GeoSearchBy, GeoSearchOptions } from './generic-transformers'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './GEOSEARCH'; +interface GeoSearchStoreOptions extends GeoSearchOptions { + STOREDIST?: true; +} +export declare function transformArguments(destination: RedisCommandArgument, source: RedisCommandArgument, from: GeoSearchFrom, by: GeoSearchBy, options?: GeoSearchStoreOptions): RedisCommandArguments; +export declare function transformReply(reply: number): number; diff --git a/node_modules/@redis/client/dist/lib/commands/GEOSEARCHSTORE.js b/node_modules/@redis/client/dist/lib/commands/GEOSEARCHSTORE.js new file mode 100644 index 0000000..da510e6 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEOSEARCHSTORE.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); 
+exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +var GEOSEARCH_1 = require("./GEOSEARCH"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return GEOSEARCH_1.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return GEOSEARCH_1.IS_READ_ONLY; } }); +function transformArguments(destination, source, from, by, options) { + const args = (0, generic_transformers_1.pushGeoSearchArguments)(['GEOSEARCHSTORE', destination], source, from, by, options); + if (options?.STOREDIST) { + args.push('STOREDIST'); + } + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + if (typeof reply !== 'number') { + throw new TypeError(`https://github.com/redis/redis/issues/9261`); + } + return reply; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/GEOSEARCH_WITH.d.ts b/node_modules/@redis/client/dist/lib/commands/GEOSEARCH_WITH.d.ts new file mode 100644 index 0000000..852416d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GEOSEARCH_WITH.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { GeoSearchFrom, GeoSearchBy, GeoReplyWith, GeoSearchOptions } from './generic-transformers'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './GEOSEARCH'; +export declare function transformArguments(key: RedisCommandArgument, from: GeoSearchFrom, by: GeoSearchBy, replyWith: Array, options?: GeoSearchOptions): RedisCommandArguments; +export { transformGeoMembersWithReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/GEOSEARCH_WITH.js b/node_modules/@redis/client/dist/lib/commands/GEOSEARCH_WITH.js new file mode 100644 index 0000000..3fb418f --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/GEOSEARCH_WITH.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const GEOSEARCH_1 = require("./GEOSEARCH"); +var GEOSEARCH_2 = require("./GEOSEARCH"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return GEOSEARCH_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return GEOSEARCH_2.IS_READ_ONLY; } }); +function transformArguments(key, from, by, replyWith, options) { + const args = (0, GEOSEARCH_1.transformArguments)(key, from, by, options); + args.push(...replyWith); + args.preserve = replyWith; + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformGeoMembersWithReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/GET.d.ts b/node_modules/@redis/client/dist/lib/commands/GET.d.ts new file mode 100644 index 0000000..ac0d54c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GET.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/GET.js b/node_modules/@redis/client/dist/lib/commands/GET.js new file mode 100644 index 0000000..8bb9de2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GET.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { 
value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['GET', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GETBIT.d.ts b/node_modules/@redis/client/dist/lib/commands/GETBIT.d.ts new file mode 100644 index 0000000..6eaeaf5 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GETBIT.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { BitValue } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, offset: number): RedisCommandArguments; +export declare function transformReply(): BitValue; diff --git a/node_modules/@redis/client/dist/lib/commands/GETBIT.js b/node_modules/@redis/client/dist/lib/commands/GETBIT.js new file mode 100644 index 0000000..9ad7b33 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GETBIT.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, offset) { + return ['GETBIT', key, offset.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GETDEL.d.ts b/node_modules/@redis/client/dist/lib/commands/GETDEL.d.ts new file mode 100644 index 0000000..15f87fd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GETDEL.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): 
RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/GETDEL.js b/node_modules/@redis/client/dist/lib/commands/GETDEL.js new file mode 100644 index 0000000..cd80218 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GETDEL.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['GETDEL', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GETEX.d.ts b/node_modules/@redis/client/dist/lib/commands/GETEX.d.ts new file mode 100644 index 0000000..7d7f1f4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GETEX.d.ts @@ -0,0 +1,16 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +type GetExModes = { + EX: number; +} | { + PX: number; +} | { + EXAT: number | Date; +} | { + PXAT: number | Date; +} | { + PERSIST: true; +}; +export declare function transformArguments(key: RedisCommandArgument, mode: GetExModes): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/GETEX.js b/node_modules/@redis/client/dist/lib/commands/GETEX.js new file mode 100644 index 0000000..897c824 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GETEX.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, mode) { + const args = ['GETEX', key]; + if ('EX' in mode) { + args.push('EX', mode.EX.toString()); + } + else 
if ('PX' in mode) { + args.push('PX', mode.PX.toString()); + } + else if ('EXAT' in mode) { + args.push('EXAT', (0, generic_transformers_1.transformEXAT)(mode.EXAT)); + } + else if ('PXAT' in mode) { + args.push('PXAT', (0, generic_transformers_1.transformPXAT)(mode.PXAT)); + } + else { // PERSIST + args.push('PERSIST'); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GETRANGE.d.ts b/node_modules/@redis/client/dist/lib/commands/GETRANGE.d.ts new file mode 100644 index 0000000..c28240e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GETRANGE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, start: number, end: number): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/GETRANGE.js b/node_modules/@redis/client/dist/lib/commands/GETRANGE.js new file mode 100644 index 0000000..32e1527 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GETRANGE.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, start, end) { + return ['GETRANGE', key, start.toString(), end.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/GETSET.d.ts b/node_modules/@redis/client/dist/lib/commands/GETSET.d.ts new file mode 100644 index 0000000..c2aad11 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GETSET.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; 
+export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, value: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/GETSET.js b/node_modules/@redis/client/dist/lib/commands/GETSET.js new file mode 100644 index 0000000..972d126 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/GETSET.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, value) { + return ['GETSET', key, value]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HDEL.d.ts b/node_modules/@redis/client/dist/lib/commands/HDEL.d.ts new file mode 100644 index 0000000..b014f91 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HDEL.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, field: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/HDEL.js b/node_modules/@redis/client/dist/lib/commands/HDEL.js new file mode 100644 index 0000000..199d9d0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HDEL.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, field) { + return (0, generic_transformers_1.pushVerdictArguments)(['HDEL', key], field); +} 
+exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HELLO.d.ts b/node_modules/@redis/client/dist/lib/commands/HELLO.d.ts new file mode 100644 index 0000000..39d7058 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HELLO.d.ts @@ -0,0 +1,35 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { AuthOptions } from './AUTH'; +interface HelloOptions { + protover: number; + auth?: Required; + clientName?: string; +} +export declare function transformArguments(options?: HelloOptions): RedisCommandArguments; +type HelloRawReply = [ + _: never, + server: RedisCommandArgument, + _: never, + version: RedisCommandArgument, + _: never, + proto: number, + _: never, + id: number, + _: never, + mode: RedisCommandArgument, + _: never, + role: RedisCommandArgument, + _: never, + modules: Array +]; +interface HelloTransformedReply { + server: RedisCommandArgument; + version: RedisCommandArgument; + proto: number; + id: number; + mode: RedisCommandArgument; + role: RedisCommandArgument; + modules: Array; +} +export declare function transformReply(reply: HelloRawReply): HelloTransformedReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/HELLO.js b/node_modules/@redis/client/dist/lib/commands/HELLO.js new file mode 100644 index 0000000..b16d4ed --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HELLO.js @@ -0,0 +1,29 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments(options) { + const args = ['HELLO']; + if (options) { + args.push(options.protover.toString()); + if (options.auth) { + args.push('AUTH', options.auth.username, options.auth.password); + } + if (options.clientName) { + args.push('SETNAME', options.clientName); + } + } + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + 
return { + server: reply[1], + version: reply[3], + proto: reply[5], + id: reply[7], + mode: reply[9], + role: reply[11], + modules: reply[13] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/HEXISTS.d.ts b/node_modules/@redis/client/dist/lib/commands/HEXISTS.d.ts new file mode 100644 index 0000000..1a9fd57 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HEXISTS.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, field: RedisCommandArgument): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/HEXISTS.js b/node_modules/@redis/client/dist/lib/commands/HEXISTS.js new file mode 100644 index 0000000..f659dab --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HEXISTS.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, field) { + return ['HEXISTS', key, field]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/HEXPIRE.d.ts b/node_modules/@redis/client/dist/lib/commands/HEXPIRE.d.ts new file mode 100644 index 0000000..5e7aab1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HEXPIRE.d.ts @@ -0,0 +1,23 @@ +import { RedisCommandArgument } from '.'; +/** + * @readonly + * @enum {number} + */ +export declare const HASH_EXPIRATION: { + /** @property 
{number} */ + /** The field does not exist */ + readonly FIELD_NOT_EXISTS: -2; + /** @property {number} */ + /** Specified NX | XX | GT | LT condition not met */ + readonly CONDITION_NOT_MET: 0; + /** @property {number} */ + /** Expiration time was set or updated */ + readonly UPDATED: 1; + /** @property {number} */ + /** Field deleted because the specified expiration time is in the past */ + readonly DELETED: 2; +}; +export type HashExpiration = typeof HASH_EXPIRATION[keyof typeof HASH_EXPIRATION]; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, fields: RedisCommandArgument | Array, seconds: number, mode?: 'NX' | 'XX' | 'GT' | 'LT'): import(".").RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/HEXPIRE.js b/node_modules/@redis/client/dist/lib/commands/HEXPIRE.js new file mode 100644 index 0000000..c6657d3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HEXPIRE.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = exports.HASH_EXPIRATION = void 0; +const generic_transformers_1 = require("./generic-transformers"); +/** + * @readonly + * @enum {number} + */ +exports.HASH_EXPIRATION = { + /** @property {number} */ + /** The field does not exist */ + FIELD_NOT_EXISTS: -2, + /** @property {number} */ + /** Specified NX | XX | GT | LT condition not met */ + CONDITION_NOT_MET: 0, + /** @property {number} */ + /** Expiration time was set or updated */ + UPDATED: 1, + /** @property {number} */ + /** Field deleted because the specified expiration time is in the past */ + DELETED: 2 +}; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, fields, seconds, mode) { + const args = ['HEXPIRE', key, seconds.toString()]; + if (mode) { + args.push(mode); + } + args.push('FIELDS'); + return (0, 
generic_transformers_1.pushVerdictArgument)(args, fields); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HEXPIREAT.d.ts b/node_modules/@redis/client/dist/lib/commands/HEXPIREAT.d.ts new file mode 100644 index 0000000..f7e2764 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HEXPIREAT.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument } from '.'; +import { HashExpiration } from './HEXPIRE'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, fields: RedisCommandArgument | Array, timestamp: number | Date, mode?: 'NX' | 'XX' | 'GT' | 'LT'): import(".").RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/HEXPIREAT.js b/node_modules/@redis/client/dist/lib/commands/HEXPIREAT.js new file mode 100644 index 0000000..a124a91 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HEXPIREAT.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, fields, timestamp, mode) { + const args = [ + 'HEXPIREAT', + key, + (0, generic_transformers_1.transformEXAT)(timestamp) + ]; + if (mode) { + args.push(mode); + } + args.push('FIELDS'); + return (0, generic_transformers_1.pushVerdictArgument)(args, fields); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HEXPIRETIME.d.ts b/node_modules/@redis/client/dist/lib/commands/HEXPIRETIME.d.ts new file mode 100644 index 0000000..a129d25 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HEXPIRETIME.d.ts @@ -0,0 +1,13 @@ +import { RedisCommandArgument } from '.'; +export declare const HASH_EXPIRATION_TIME: { 
+ /** @property {number} */ + /** The field does not exist */ + readonly FIELD_NOT_EXISTS: -2; + /** @property {number} */ + /** The field exists but has no associated expire */ + readonly NO_EXPIRATION: -1; +}; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, fields: RedisCommandArgument | Array): import(".").RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/HEXPIRETIME.js b/node_modules/@redis/client/dist/lib/commands/HEXPIRETIME.js new file mode 100644 index 0000000..37cabd1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HEXPIRETIME.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = exports.HASH_EXPIRATION_TIME = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.HASH_EXPIRATION_TIME = { + /** @property {number} */ + /** The field does not exist */ + FIELD_NOT_EXISTS: -2, + /** @property {number} */ + /** The field exists but has no associated expire */ + NO_EXPIRATION: -1, +}; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, fields) { + return (0, generic_transformers_1.pushVerdictArgument)(['HEXPIRETIME', key, 'FIELDS'], fields); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HGET.d.ts b/node_modules/@redis/client/dist/lib/commands/HGET.d.ts new file mode 100644 index 0000000..09f0f49 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HGET.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, 
field: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | undefined; diff --git a/node_modules/@redis/client/dist/lib/commands/HGET.js b/node_modules/@redis/client/dist/lib/commands/HGET.js new file mode 100644 index 0000000..e45af1f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HGET.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, field) { + return ['HGET', key, field]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HGETALL.d.ts b/node_modules/@redis/client/dist/lib/commands/HGETALL.d.ts new file mode 100644 index 0000000..6e37bdd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HGETALL.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare const TRANSFORM_LEGACY_REPLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export { transformTuplesReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/HGETALL.js b/node_modules/@redis/client/dist/lib/commands/HGETALL.js new file mode 100644 index 0000000..871ea00 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HGETALL.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.TRANSFORM_LEGACY_REPLY = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +exports.TRANSFORM_LEGACY_REPLY = true; +function 
transformArguments(key) { + return ['HGETALL', key]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformTuplesReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/HINCRBY.d.ts b/node_modules/@redis/client/dist/lib/commands/HINCRBY.d.ts new file mode 100644 index 0000000..ba2e705 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HINCRBY.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, field: RedisCommandArgument, increment: number): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/HINCRBY.js b/node_modules/@redis/client/dist/lib/commands/HINCRBY.js new file mode 100644 index 0000000..a47a501 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HINCRBY.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, field, increment) { + return ['HINCRBY', key, field, increment.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HINCRBYFLOAT.d.ts b/node_modules/@redis/client/dist/lib/commands/HINCRBYFLOAT.d.ts new file mode 100644 index 0000000..ba2e705 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HINCRBYFLOAT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, field: RedisCommandArgument, increment: 
number): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/HINCRBYFLOAT.js b/node_modules/@redis/client/dist/lib/commands/HINCRBYFLOAT.js new file mode 100644 index 0000000..dca7e8f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HINCRBYFLOAT.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, field, increment) { + return ['HINCRBYFLOAT', key, field, increment.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HKEYS.d.ts b/node_modules/@redis/client/dist/lib/commands/HKEYS.d.ts new file mode 100644 index 0000000..b5c1c09 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HKEYS.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/HKEYS.js b/node_modules/@redis/client/dist/lib/commands/HKEYS.js new file mode 100644 index 0000000..e3da594 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HKEYS.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['HKEYS', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HLEN.d.ts b/node_modules/@redis/client/dist/lib/commands/HLEN.d.ts new file mode 100644 index 0000000..c35a19c --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/HLEN.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/HLEN.js b/node_modules/@redis/client/dist/lib/commands/HLEN.js new file mode 100644 index 0000000..76687ff --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HLEN.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['HLEN', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HMGET.d.ts b/node_modules/@redis/client/dist/lib/commands/HMGET.d.ts new file mode 100644 index 0000000..26b7c7c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HMGET.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, fields: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/HMGET.js b/node_modules/@redis/client/dist/lib/commands/HMGET.js new file mode 100644 index 0000000..2116543 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HMGET.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; 
+exports.IS_READ_ONLY = true; +function transformArguments(key, fields) { + return (0, generic_transformers_1.pushVerdictArguments)(['HMGET', key], fields); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HPERSIST.d.ts b/node_modules/@redis/client/dist/lib/commands/HPERSIST.d.ts new file mode 100644 index 0000000..336a4d2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HPERSIST.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, fields: RedisCommandArgument | Array): import(".").RedisCommandArguments; +export declare function transformReply(): Array | null; diff --git a/node_modules/@redis/client/dist/lib/commands/HPERSIST.js b/node_modules/@redis/client/dist/lib/commands/HPERSIST.js new file mode 100644 index 0000000..f2eb929 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HPERSIST.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, fields) { + return (0, generic_transformers_1.pushVerdictArgument)(['HPERSIST', key, 'FIELDS'], fields); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HPEXPIRE.d.ts b/node_modules/@redis/client/dist/lib/commands/HPEXPIRE.d.ts new file mode 100644 index 0000000..050ee87 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HPEXPIRE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument } from '.'; +import { HashExpiration } from "./HEXPIRE"; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, fields: RedisCommandArgument | Array, ms: number, mode?: 
'NX' | 'XX' | 'GT' | 'LT'): import(".").RedisCommandArguments; +export declare function transformReply(): Array | null; diff --git a/node_modules/@redis/client/dist/lib/commands/HPEXPIRE.js b/node_modules/@redis/client/dist/lib/commands/HPEXPIRE.js new file mode 100644 index 0000000..a83ce3b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HPEXPIRE.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, fields, ms, mode) { + const args = ['HPEXPIRE', key, ms.toString()]; + if (mode) { + args.push(mode); + } + args.push('FIELDS'); + return (0, generic_transformers_1.pushVerdictArgument)(args, fields); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HPEXPIREAT.d.ts b/node_modules/@redis/client/dist/lib/commands/HPEXPIREAT.d.ts new file mode 100644 index 0000000..a839ffc --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HPEXPIREAT.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument } from '.'; +import { HashExpiration } from './HEXPIRE'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, fields: RedisCommandArgument | Array, timestamp: number | Date, mode?: 'NX' | 'XX' | 'GT' | 'LT'): import(".").RedisCommandArguments; +export declare function transformReply(): Array | null; diff --git a/node_modules/@redis/client/dist/lib/commands/HPEXPIREAT.js b/node_modules/@redis/client/dist/lib/commands/HPEXPIREAT.js new file mode 100644 index 0000000..6947263 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HPEXPIREAT.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); 
+exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, fields, timestamp, mode) { + const args = ['HPEXPIREAT', key, (0, generic_transformers_1.transformPXAT)(timestamp)]; + if (mode) { + args.push(mode); + } + args.push('FIELDS'); + return (0, generic_transformers_1.pushVerdictArgument)(args, fields); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HPEXPIRETIME.d.ts b/node_modules/@redis/client/dist/lib/commands/HPEXPIRETIME.d.ts new file mode 100644 index 0000000..f04cffd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HPEXPIRETIME.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, fields: RedisCommandArgument | Array): import(".").RedisCommandArguments; +export declare function transformReply(): Array | null; diff --git a/node_modules/@redis/client/dist/lib/commands/HPEXPIRETIME.js b/node_modules/@redis/client/dist/lib/commands/HPEXPIRETIME.js new file mode 100644 index 0000000..bf13ff1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HPEXPIRETIME.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, fields) { + return (0, generic_transformers_1.pushVerdictArgument)(['HPEXPIRETIME', key, 'FIELDS'], fields); +} +exports.transformArguments = transformArguments; diff --git 
a/node_modules/@redis/client/dist/lib/commands/HPTTL.d.ts b/node_modules/@redis/client/dist/lib/commands/HPTTL.d.ts new file mode 100644 index 0000000..f04cffd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HPTTL.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, fields: RedisCommandArgument | Array): import(".").RedisCommandArguments; +export declare function transformReply(): Array | null; diff --git a/node_modules/@redis/client/dist/lib/commands/HPTTL.js b/node_modules/@redis/client/dist/lib/commands/HPTTL.js new file mode 100644 index 0000000..71906b1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HPTTL.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, fields) { + return (0, generic_transformers_1.pushVerdictArgument)(['HPTTL', key, 'FIELDS'], fields); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HRANDFIELD.d.ts b/node_modules/@redis/client/dist/lib/commands/HRANDFIELD.d.ts new file mode 100644 index 0000000..ac0d54c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HRANDFIELD.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/HRANDFIELD.js 
b/node_modules/@redis/client/dist/lib/commands/HRANDFIELD.js new file mode 100644 index 0000000..d242aef --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HRANDFIELD.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['HRANDFIELD', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT.d.ts new file mode 100644 index 0000000..ddfa26b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './HRANDFIELD'; +export declare function transformArguments(key: RedisCommandArgument, count: number): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT.js b/node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT.js new file mode 100644 index 0000000..dfb13aa --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const HRANDFIELD_1 = require("./HRANDFIELD"); +var HRANDFIELD_2 = require("./HRANDFIELD"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return HRANDFIELD_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return HRANDFIELD_2.IS_READ_ONLY; } }); +function transformArguments(key, count) { + return [ + 
...(0, HRANDFIELD_1.transformArguments)(key), + count.toString() + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT_WITHVALUES.d.ts b/node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT_WITHVALUES.d.ts new file mode 100644 index 0000000..d18f62c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT_WITHVALUES.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './HRANDFIELD_COUNT'; +export declare function transformArguments(key: RedisCommandArgument, count: number): RedisCommandArguments; +export { transformTuplesReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT_WITHVALUES.js b/node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT_WITHVALUES.js new file mode 100644 index 0000000..24556d2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HRANDFIELD_COUNT_WITHVALUES.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const HRANDFIELD_COUNT_1 = require("./HRANDFIELD_COUNT"); +var HRANDFIELD_COUNT_2 = require("./HRANDFIELD_COUNT"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return HRANDFIELD_COUNT_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return HRANDFIELD_COUNT_2.IS_READ_ONLY; } }); +function transformArguments(key, count) { + return [ + ...(0, HRANDFIELD_COUNT_1.transformArguments)(key, count), + 'WITHVALUES' + ]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: 
true, get: function () { return generic_transformers_1.transformTuplesReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/HSCAN.d.ts b/node_modules/@redis/client/dist/lib/commands/HSCAN.d.ts new file mode 100644 index 0000000..4d4447e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HSCAN.d.ts @@ -0,0 +1,16 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { ScanOptions } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, cursor: number, options?: ScanOptions): RedisCommandArguments; +export type HScanRawReply = [RedisCommandArgument, Array]; +export interface HScanTuple { + field: RedisCommandArgument; + value: RedisCommandArgument; +} +interface HScanReply { + cursor: number; + tuples: Array; +} +export declare function transformReply([cursor, rawTuples]: HScanRawReply): HScanReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/HSCAN.js b/node_modules/@redis/client/dist/lib/commands/HSCAN.js new file mode 100644 index 0000000..5930408 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HSCAN.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, cursor, options) { + return (0, generic_transformers_1.pushScanArguments)([ + 'HSCAN', + key + ], cursor, options); +} +exports.transformArguments = transformArguments; +function transformReply([cursor, rawTuples]) { + const parsedTuples = []; + for (let i = 0; i < rawTuples.length; i += 2) { + parsedTuples.push({ + field: rawTuples[i], + value: rawTuples[i + 1] + }); + } + 
return { + cursor: Number(cursor), + tuples: parsedTuples + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/HSCAN_NOVALUES.d.ts b/node_modules/@redis/client/dist/lib/commands/HSCAN_NOVALUES.d.ts new file mode 100644 index 0000000..196df8c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HSCAN_NOVALUES.d.ts @@ -0,0 +1,10 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { ScanOptions } from './generic-transformers'; +import { HScanRawReply } from './HSCAN'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './HSCAN'; +export declare function transformArguments(key: RedisCommandArgument, cursor: number, options?: ScanOptions): RedisCommandArguments; +interface HScanNoValuesReply { + cursor: number; + keys: Array; +} +export declare function transformReply([cursor, rawData]: HScanRawReply): HScanNoValuesReply; diff --git a/node_modules/@redis/client/dist/lib/commands/HSCAN_NOVALUES.js b/node_modules/@redis/client/dist/lib/commands/HSCAN_NOVALUES.js new file mode 100644 index 0000000..68db4ce --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HSCAN_NOVALUES.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const HSCAN_1 = require("./HSCAN"); +var HSCAN_2 = require("./HSCAN"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return HSCAN_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return HSCAN_2.IS_READ_ONLY; } }); +function transformArguments(key, cursor, options) { + const args = (0, HSCAN_1.transformArguments)(key, cursor, options); + args.push('NOVALUES'); + return args; +} +exports.transformArguments = transformArguments; +function transformReply([cursor, rawData]) { + return { + cursor: 
Number(cursor), + keys: rawData + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/HSET.d.ts b/node_modules/@redis/client/dist/lib/commands/HSET.d.ts new file mode 100644 index 0000000..7208ffe --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HSET.d.ts @@ -0,0 +1,12 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +type Types = RedisCommandArgument | number; +type HSETObject = Record; +type HSETMap = Map; +type HSETTuples = Array<[Types, Types]> | Array; +type GenericArguments = [key: RedisCommandArgument]; +type SingleFieldArguments = [...generic: GenericArguments, field: Types, value: Types]; +type MultipleFieldsArguments = [...generic: GenericArguments, value: HSETObject | HSETMap | HSETTuples]; +export declare function transformArguments(...[key, value, fieldValue]: SingleFieldArguments | MultipleFieldsArguments): RedisCommandArguments; +export declare function transformReply(): number; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/HSET.js b/node_modules/@redis/client/dist/lib/commands/HSET.js new file mode 100644 index 0000000..cb2b735 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HSET.js @@ -0,0 +1,45 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(...[key, value, fieldValue]) { + const args = ['HSET', key]; + if (typeof value === 'string' || typeof value === 'number' || Buffer.isBuffer(value)) { + args.push(convertValue(value), convertValue(fieldValue)); + } + else if (value instanceof Map) { + pushMap(args, value); + } + else if (Array.isArray(value)) { + pushTuples(args, value); + } + else { + pushObject(args, value); + } + return args; +} +exports.transformArguments = transformArguments; +function pushMap(args, map) { + for 
(const [key, value] of map.entries()) { + args.push(convertValue(key), convertValue(value)); + } +} +function pushTuples(args, tuples) { + for (const tuple of tuples) { + if (Array.isArray(tuple)) { + pushTuples(args, tuple); + continue; + } + args.push(convertValue(tuple)); + } +} +function pushObject(args, object) { + for (const key of Object.keys(object)) { + args.push(convertValue(key), convertValue(object[key])); + } +} +function convertValue(value) { + return typeof value === 'number' ? + value.toString() : + value; +} diff --git a/node_modules/@redis/client/dist/lib/commands/HSETNX.d.ts b/node_modules/@redis/client/dist/lib/commands/HSETNX.d.ts new file mode 100644 index 0000000..8c115a0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HSETNX.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, field: RedisCommandArgument, value: RedisCommandArgument): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/HSETNX.js b/node_modules/@redis/client/dist/lib/commands/HSETNX.js new file mode 100644 index 0000000..c450fc4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HSETNX.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, field, value) { + return ['HSETNX', key, field, value]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git 
a/node_modules/@redis/client/dist/lib/commands/HSTRLEN.d.ts b/node_modules/@redis/client/dist/lib/commands/HSTRLEN.d.ts new file mode 100644 index 0000000..cdffa17 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HSTRLEN.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, field: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/HSTRLEN.js b/node_modules/@redis/client/dist/lib/commands/HSTRLEN.js new file mode 100644 index 0000000..307213b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HSTRLEN.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, field) { + return ['HSTRLEN', key, field]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HTTL.d.ts b/node_modules/@redis/client/dist/lib/commands/HTTL.d.ts new file mode 100644 index 0000000..f04cffd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HTTL.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, fields: RedisCommandArgument | Array): import(".").RedisCommandArguments; +export declare function transformReply(): Array | null; diff --git a/node_modules/@redis/client/dist/lib/commands/HTTL.js b/node_modules/@redis/client/dist/lib/commands/HTTL.js new file mode 100644 index 0000000..3c93d1c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HTTL.js @@ -0,0 +1,10 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, fields) { + return (0, generic_transformers_1.pushVerdictArgument)(['HTTL', key, 'FIELDS'], fields); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/HVALS.d.ts b/node_modules/@redis/client/dist/lib/commands/HVALS.d.ts new file mode 100644 index 0000000..b5c1c09 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HVALS.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/HVALS.js b/node_modules/@redis/client/dist/lib/commands/HVALS.js new file mode 100644 index 0000000..f279fa1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/HVALS.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['HVALS', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/INCR.d.ts b/node_modules/@redis/client/dist/lib/commands/INCR.d.ts new file mode 100644 index 0000000..c35a19c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/INCR.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export 
declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/INCR.js b/node_modules/@redis/client/dist/lib/commands/INCR.js new file mode 100644 index 0000000..1364aac --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/INCR.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['INCR', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/INCRBY.d.ts b/node_modules/@redis/client/dist/lib/commands/INCRBY.d.ts new file mode 100644 index 0000000..2ae5b5a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/INCRBY.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, increment: number): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/INCRBY.js b/node_modules/@redis/client/dist/lib/commands/INCRBY.js new file mode 100644 index 0000000..4ecbab2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/INCRBY.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, increment) { + return ['INCRBY', key, increment.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/INCRBYFLOAT.d.ts b/node_modules/@redis/client/dist/lib/commands/INCRBYFLOAT.d.ts new file mode 100644 index 0000000..369c0f6 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/INCRBYFLOAT.d.ts @@ -0,0 +1,4 @@ +import { 
RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, increment: number): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/INCRBYFLOAT.js b/node_modules/@redis/client/dist/lib/commands/INCRBYFLOAT.js new file mode 100644 index 0000000..33d6bb2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/INCRBYFLOAT.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, increment) { + return ['INCRBYFLOAT', key, increment.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/INFO.d.ts b/node_modules/@redis/client/dist/lib/commands/INFO.d.ts new file mode 100644 index 0000000..8bec82b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/INFO.d.ts @@ -0,0 +1,3 @@ +export declare const IS_READ_ONLY = true; +export declare function transformArguments(section?: string): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/INFO.js b/node_modules/@redis/client/dist/lib/commands/INFO.js new file mode 100644 index 0000000..0711467 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/INFO.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments(section) { + const args = ['INFO']; + if (section) { + args.push(section); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/KEYS.d.ts 
b/node_modules/@redis/client/dist/lib/commands/KEYS.d.ts new file mode 100644 index 0000000..afb77fe --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/KEYS.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(pattern: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/KEYS.js b/node_modules/@redis/client/dist/lib/commands/KEYS.js new file mode 100644 index 0000000..84a8b33 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/KEYS.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(pattern) { + return ['KEYS', pattern]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LASTSAVE.d.ts b/node_modules/@redis/client/dist/lib/commands/LASTSAVE.d.ts new file mode 100644 index 0000000..6da05a0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LASTSAVE.d.ts @@ -0,0 +1,3 @@ +export declare const IS_READ_ONLY = true; +export declare function transformArguments(): Array; +export declare function transformReply(reply: number): Date; diff --git a/node_modules/@redis/client/dist/lib/commands/LASTSAVE.js b/node_modules/@redis/client/dist/lib/commands/LASTSAVE.js new file mode 100644 index 0000000..c22924f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LASTSAVE.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments() { + return ['LASTSAVE']; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return new Date(reply); +} +exports.transformReply = 
transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/LATENCY_DOCTOR.d.ts b/node_modules/@redis/client/dist/lib/commands/LATENCY_DOCTOR.d.ts new file mode 100644 index 0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LATENCY_DOCTOR.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/LATENCY_DOCTOR.js b/node_modules/@redis/client/dist/lib/commands/LATENCY_DOCTOR.js new file mode 100644 index 0000000..426c33d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LATENCY_DOCTOR.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['LATENCY', 'DOCTOR']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LATENCY_GRAPH.d.ts b/node_modules/@redis/client/dist/lib/commands/LATENCY_GRAPH.d.ts new file mode 100644 index 0000000..60491ba --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LATENCY_GRAPH.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArguments } from '.'; +export type EventType = 'active-defrag-cycle' | 'aof-fsync-always' | 'aof-stat' | 'aof-rewrite-diff-write' | 'aof-rename' | 'aof-write' | 'aof-write-active-child' | 'aof-write-alone' | 'aof-write-pending-fsync' | 'command' | 'expire-cycle' | 'eviction-cycle' | 'eviction-del' | 'fast-command' | 'fork' | 'rdb-unlink-temp-file'; +export declare function transformArguments(event: EventType): RedisCommandArguments; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/LATENCY_GRAPH.js b/node_modules/@redis/client/dist/lib/commands/LATENCY_GRAPH.js new file mode 100644 index 0000000..1786777 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LATENCY_GRAPH.js @@ -0,0 
+1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(event) { + return ['LATENCY', 'GRAPH', event]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LATENCY_HISTORY.d.ts b/node_modules/@redis/client/dist/lib/commands/LATENCY_HISTORY.d.ts new file mode 100644 index 0000000..c9412d4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LATENCY_HISTORY.d.ts @@ -0,0 +1,6 @@ +export type EventType = ('active-defrag-cycle' | 'aof-fsync-always' | 'aof-stat' | 'aof-rewrite-diff-write' | 'aof-rename' | 'aof-write' | 'aof-write-active-child' | 'aof-write-alone' | 'aof-write-pending-fsync' | 'command' | 'expire-cycle' | 'eviction-cycle' | 'eviction-del' | 'fast-command' | 'fork' | 'rdb-unlink-temp-file'); +export declare function transformArguments(event: EventType): string[]; +export declare function transformReply(): Array<[ + timestamp: number, + latency: number +]>; diff --git a/node_modules/@redis/client/dist/lib/commands/LATENCY_HISTORY.js b/node_modules/@redis/client/dist/lib/commands/LATENCY_HISTORY.js new file mode 100644 index 0000000..063da3e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LATENCY_HISTORY.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(event) { + return ['LATENCY', 'HISTORY', event]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LATENCY_LATEST.d.ts b/node_modules/@redis/client/dist/lib/commands/LATENCY_LATEST.d.ts new file mode 100644 index 0000000..f3f12b1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LATENCY_LATEST.d.ts @@ -0,0 +1,8 @@ +import { RedisCommandArguments } from '.'; +export declare function transformArguments(): RedisCommandArguments; +export 
declare function transformReply(): Array<[ + name: string, + timestamp: number, + latestLatency: number, + allTimeLatency: number +]>; diff --git a/node_modules/@redis/client/dist/lib/commands/LATENCY_LATEST.js b/node_modules/@redis/client/dist/lib/commands/LATENCY_LATEST.js new file mode 100644 index 0000000..98f6733 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LATENCY_LATEST.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['LATENCY', 'LATEST']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LCS.d.ts b/node_modules/@redis/client/dist/lib/commands/LCS.d.ts new file mode 100644 index 0000000..3d9a628 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LCS.d.ts @@ -0,0 +1,6 @@ +/// +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key1: RedisCommandArgument, key2: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): string | Buffer; diff --git a/node_modules/@redis/client/dist/lib/commands/LCS.js b/node_modules/@redis/client/dist/lib/commands/LCS.js new file mode 100644 index 0000000..b770199 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LCS.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key1, key2) { + return [ + 'LCS', + key1, + key2 + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LCS_IDX.d.ts b/node_modules/@redis/client/dist/lib/commands/LCS_IDX.d.ts 
new file mode 100644 index 0000000..1604e6e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LCS_IDX.d.ts @@ -0,0 +1,21 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { RangeReply, RawRangeReply } from './generic-transformers'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './LCS'; +export declare function transformArguments(key1: RedisCommandArgument, key2: RedisCommandArgument): RedisCommandArguments; +type RawReply = [ + 'matches', + Array<[ + key1: RawRangeReply, + key2: RawRangeReply + ]>, + 'len', + number +]; +interface Reply { + matches: Array<{ + key1: RangeReply; + key2: RangeReply; + }>; + length: number; +} +export declare function transformReply(reply: RawReply): Reply; diff --git a/node_modules/@redis/client/dist/lib/commands/LCS_IDX.js b/node_modules/@redis/client/dist/lib/commands/LCS_IDX.js new file mode 100644 index 0000000..c38132a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LCS_IDX.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +const LCS_1 = require("./LCS"); +var LCS_2 = require("./LCS"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return LCS_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return LCS_2.IS_READ_ONLY; } }); +function transformArguments(key1, key2) { + const args = (0, LCS_1.transformArguments)(key1, key2); + args.push('IDX'); + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + matches: reply[1].map(([key1, key2]) => ({ + key1: (0, generic_transformers_1.transformRangeReply)(key1), + key2: (0, generic_transformers_1.transformRangeReply)(key2) + })), + length: 
reply[3] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/LCS_IDX_WITHMATCHLEN.d.ts b/node_modules/@redis/client/dist/lib/commands/LCS_IDX_WITHMATCHLEN.d.ts new file mode 100644 index 0000000..89208da --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LCS_IDX_WITHMATCHLEN.d.ts @@ -0,0 +1,23 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { RangeReply, RawRangeReply } from './generic-transformers'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './LCS'; +export declare function transformArguments(key1: RedisCommandArgument, key2: RedisCommandArgument): RedisCommandArguments; +type RawReply = [ + 'matches', + Array<[ + key1: RawRangeReply, + key2: RawRangeReply, + length: number + ]>, + 'len', + number +]; +interface Reply { + matches: Array<{ + key1: RangeReply; + key2: RangeReply; + length: number; + }>; + length: number; +} +export declare function transformReply(reply: RawReply): Reply; diff --git a/node_modules/@redis/client/dist/lib/commands/LCS_IDX_WITHMATCHLEN.js b/node_modules/@redis/client/dist/lib/commands/LCS_IDX_WITHMATCHLEN.js new file mode 100644 index 0000000..e031d5e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LCS_IDX_WITHMATCHLEN.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +const LCS_1 = require("./LCS"); +var LCS_2 = require("./LCS"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return LCS_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return LCS_2.IS_READ_ONLY; } }); +function transformArguments(key1, key2) { + const args = (0, LCS_1.transformArguments)(key1, key2); + args.push('IDX', 
'WITHMATCHLEN'); + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + matches: reply[1].map(([key1, key2, length]) => ({ + key1: (0, generic_transformers_1.transformRangeReply)(key1), + key2: (0, generic_transformers_1.transformRangeReply)(key2), + length + })), + length: reply[3] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/LCS_LEN.d.ts b/node_modules/@redis/client/dist/lib/commands/LCS_LEN.d.ts new file mode 100644 index 0000000..d8e90e1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LCS_LEN.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './LCS'; +export declare function transformArguments(key1: RedisCommandArgument, key2: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/LCS_LEN.js b/node_modules/@redis/client/dist/lib/commands/LCS_LEN.js new file mode 100644 index 0000000..cba853a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LCS_LEN.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const LCS_1 = require("./LCS"); +var LCS_2 = require("./LCS"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return LCS_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return LCS_2.IS_READ_ONLY; } }); +function transformArguments(key1, key2) { + const args = (0, LCS_1.transformArguments)(key1, key2); + args.push('LEN'); + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LINDEX.d.ts 
b/node_modules/@redis/client/dist/lib/commands/LINDEX.d.ts new file mode 100644 index 0000000..aa5a007 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LINDEX.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, index: number): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/LINDEX.js b/node_modules/@redis/client/dist/lib/commands/LINDEX.js new file mode 100644 index 0000000..72b52cc --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LINDEX.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, index) { + return ['LINDEX', key, index.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LINSERT.d.ts b/node_modules/@redis/client/dist/lib/commands/LINSERT.d.ts new file mode 100644 index 0000000..0ff1842 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LINSERT.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +type LInsertPosition = 'BEFORE' | 'AFTER'; +export declare function transformArguments(key: RedisCommandArgument, position: LInsertPosition, pivot: RedisCommandArgument, element: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/LINSERT.js b/node_modules/@redis/client/dist/lib/commands/LINSERT.js new file mode 100644 index 0000000..2e5b523 --- 
/dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LINSERT.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, position, pivot, element) { + return [ + 'LINSERT', + key, + position, + pivot, + element + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LLEN.d.ts b/node_modules/@redis/client/dist/lib/commands/LLEN.d.ts new file mode 100644 index 0000000..367e9b8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LLEN.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/LLEN.js b/node_modules/@redis/client/dist/lib/commands/LLEN.js new file mode 100644 index 0000000..2c96584 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LLEN.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['LLEN', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LMOVE.d.ts b/node_modules/@redis/client/dist/lib/commands/LMOVE.d.ts new file mode 100644 index 0000000..6c43ddc --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LMOVE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { ListSide } from './generic-transformers'; +export declare const 
FIRST_KEY_INDEX = 1; +export declare function transformArguments(source: RedisCommandArgument, destination: RedisCommandArgument, sourceSide: ListSide, destinationSide: ListSide): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/LMOVE.js b/node_modules/@redis/client/dist/lib/commands/LMOVE.js new file mode 100644 index 0000000..3d44d91 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LMOVE.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(source, destination, sourceSide, destinationSide) { + return [ + 'LMOVE', + source, + destination, + sourceSide, + destinationSide, + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LMPOP.d.ts b/node_modules/@redis/client/dist/lib/commands/LMPOP.d.ts new file mode 100644 index 0000000..e411c3e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LMPOP.d.ts @@ -0,0 +1,8 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { LMPopOptions, ListSide } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 2; +export declare function transformArguments(keys: RedisCommandArgument | Array, side: ListSide, options?: LMPopOptions): RedisCommandArguments; +export declare function transformReply(): null | [ + key: string, + elements: Array +]; diff --git a/node_modules/@redis/client/dist/lib/commands/LMPOP.js b/node_modules/@redis/client/dist/lib/commands/LMPOP.js new file mode 100644 index 0000000..bf2ff9b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LMPOP.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; 
+const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 2; +function transformArguments(keys, side, options) { + return (0, generic_transformers_1.transformLMPopArguments)(['LMPOP'], keys, side, options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LOLWUT.d.ts b/node_modules/@redis/client/dist/lib/commands/LOLWUT.d.ts new file mode 100644 index 0000000..78dd275 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LOLWUT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(version?: number, ...optionalArguments: Array): Array; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/LOLWUT.js b/node_modules/@redis/client/dist/lib/commands/LOLWUT.js new file mode 100644 index 0000000..20d6b61 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LOLWUT.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments(version, ...optionalArguments) { + const args = ['LOLWUT']; + if (version) { + args.push('VERSION', version.toString(), ...optionalArguments.map(String)); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LPOP.d.ts b/node_modules/@redis/client/dist/lib/commands/LPOP.d.ts new file mode 100644 index 0000000..15f87fd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LPOP.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function 
transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/LPOP.js b/node_modules/@redis/client/dist/lib/commands/LPOP.js new file mode 100644 index 0000000..84c99a4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LPOP.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['LPOP', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LPOP_COUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/LPOP_COUNT.d.ts new file mode 100644 index 0000000..e48bfcb --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LPOP_COUNT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, count: number): RedisCommandArguments; +export declare function transformReply(): Array | null; diff --git a/node_modules/@redis/client/dist/lib/commands/LPOP_COUNT.js b/node_modules/@redis/client/dist/lib/commands/LPOP_COUNT.js new file mode 100644 index 0000000..c865e8e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LPOP_COUNT.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, count) { + return ['LPOP', key, count.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LPOS.d.ts b/node_modules/@redis/client/dist/lib/commands/LPOS.d.ts new file mode 100644 index 0000000..816ba10 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LPOS.d.ts @@ -0,0 +1,9 @@ +import { 
RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export interface LPosOptions { + RANK?: number; + MAXLEN?: number; +} +export declare function transformArguments(key: RedisCommandArgument, element: RedisCommandArgument, options?: LPosOptions): RedisCommandArguments; +export declare function transformReply(): number | null; diff --git a/node_modules/@redis/client/dist/lib/commands/LPOS.js b/node_modules/@redis/client/dist/lib/commands/LPOS.js new file mode 100644 index 0000000..72a47a0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LPOS.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, element, options) { + const args = ['LPOS', key, element]; + if (typeof options?.RANK === 'number') { + args.push('RANK', options.RANK.toString()); + } + if (typeof options?.MAXLEN === 'number') { + args.push('MAXLEN', options.MAXLEN.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LPOS_COUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/LPOS_COUNT.d.ts new file mode 100644 index 0000000..d009566 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LPOS_COUNT.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { LPosOptions } from './LPOS'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './LPOS'; +export declare function transformArguments(key: RedisCommandArgument, element: RedisCommandArgument, count: number, options?: LPosOptions): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/LPOS_COUNT.js 
b/node_modules/@redis/client/dist/lib/commands/LPOS_COUNT.js new file mode 100644 index 0000000..0616a06 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LPOS_COUNT.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +var LPOS_1 = require("./LPOS"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return LPOS_1.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return LPOS_1.IS_READ_ONLY; } }); +function transformArguments(key, element, count, options) { + const args = ['LPOS', key, element]; + if (typeof options?.RANK === 'number') { + args.push('RANK', options.RANK.toString()); + } + args.push('COUNT', count.toString()); + if (typeof options?.MAXLEN === 'number') { + args.push('MAXLEN', options.MAXLEN.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LPUSH.d.ts b/node_modules/@redis/client/dist/lib/commands/LPUSH.d.ts new file mode 100644 index 0000000..9416327 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LPUSH.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, elements: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/LPUSH.js b/node_modules/@redis/client/dist/lib/commands/LPUSH.js new file mode 100644 index 0000000..1de8108 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LPUSH.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = 
void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, elements) { + return (0, generic_transformers_1.pushVerdictArguments)(['LPUSH', key], elements); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LPUSHX.d.ts b/node_modules/@redis/client/dist/lib/commands/LPUSHX.d.ts new file mode 100644 index 0000000..ce8878e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LPUSHX.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, element: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/LPUSHX.js b/node_modules/@redis/client/dist/lib/commands/LPUSHX.js new file mode 100644 index 0000000..8a45107 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LPUSHX.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, element) { + return (0, generic_transformers_1.pushVerdictArguments)(['LPUSHX', key], element); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LRANGE.d.ts b/node_modules/@redis/client/dist/lib/commands/LRANGE.d.ts new file mode 100644 index 0000000..4210b9d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LRANGE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: 
RedisCommandArgument, start: number, stop: number): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/LRANGE.js b/node_modules/@redis/client/dist/lib/commands/LRANGE.js new file mode 100644 index 0000000..fc6127c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LRANGE.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, start, stop) { + return [ + 'LRANGE', + key, + start.toString(), + stop.toString() + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LREM.d.ts b/node_modules/@redis/client/dist/lib/commands/LREM.d.ts new file mode 100644 index 0000000..1bf4abc --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LREM.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, count: number, element: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/LREM.js b/node_modules/@redis/client/dist/lib/commands/LREM.js new file mode 100644 index 0000000..b1a7257 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LREM.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, count, element) { + return [ + 'LREM', + key, + count.toString(), + element + ]; +} +exports.transformArguments = transformArguments; diff --git 
a/node_modules/@redis/client/dist/lib/commands/LSET.d.ts b/node_modules/@redis/client/dist/lib/commands/LSET.d.ts new file mode 100644 index 0000000..0e9f263 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LSET.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, index: number, element: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/LSET.js b/node_modules/@redis/client/dist/lib/commands/LSET.js new file mode 100644 index 0000000..132494b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LSET.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, index, element) { + return [ + 'LSET', + key, + index.toString(), + element + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/LTRIM.d.ts b/node_modules/@redis/client/dist/lib/commands/LTRIM.d.ts new file mode 100644 index 0000000..de34a67 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LTRIM.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, start: number, stop: number): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/LTRIM.js b/node_modules/@redis/client/dist/lib/commands/LTRIM.js new file mode 100644 index 0000000..8c3e9ee --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/LTRIM.js @@ -0,0 +1,13 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, start, stop) { + return [ + 'LTRIM', + key, + start.toString(), + stop.toString() + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/MEMORY_DOCTOR.d.ts b/node_modules/@redis/client/dist/lib/commands/MEMORY_DOCTOR.d.ts new file mode 100644 index 0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MEMORY_DOCTOR.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/MEMORY_DOCTOR.js b/node_modules/@redis/client/dist/lib/commands/MEMORY_DOCTOR.js new file mode 100644 index 0000000..7efd2ea --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MEMORY_DOCTOR.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['MEMORY', 'DOCTOR']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/MEMORY_MALLOC-STATS.d.ts b/node_modules/@redis/client/dist/lib/commands/MEMORY_MALLOC-STATS.d.ts new file mode 100644 index 0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MEMORY_MALLOC-STATS.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/MEMORY_MALLOC-STATS.js b/node_modules/@redis/client/dist/lib/commands/MEMORY_MALLOC-STATS.js new file mode 100644 index 0000000..e314ba2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MEMORY_MALLOC-STATS.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, 
"__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['MEMORY', 'MALLOC-STATS']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/MEMORY_PURGE.d.ts b/node_modules/@redis/client/dist/lib/commands/MEMORY_PURGE.d.ts new file mode 100644 index 0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MEMORY_PURGE.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/MEMORY_PURGE.js b/node_modules/@redis/client/dist/lib/commands/MEMORY_PURGE.js new file mode 100644 index 0000000..c24f1c5 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MEMORY_PURGE.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['MEMORY', 'PURGE']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/MEMORY_STATS.d.ts b/node_modules/@redis/client/dist/lib/commands/MEMORY_STATS.d.ts new file mode 100644 index 0000000..cfaab6e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MEMORY_STATS.d.ts @@ -0,0 +1,36 @@ +export declare function transformArguments(): Array; +interface MemoryStatsReply { + peakAllocated: number; + totalAllocated: number; + startupAllocated: number; + replicationBacklog: number; + clientsReplicas: number; + clientsNormal: number; + aofBuffer: number; + luaCaches: number; + overheadTotal: number; + keysCount: number; + keysBytesPerKey: number; + datasetBytes: number; + datasetPercentage: number; + peakPercentage: number; + allocatorAllocated?: number; + allocatorActive?: number; + allocatorResident?: number; + allocatorFragmentationRatio?: number; + allocatorFragmentationBytes?: number; + 
allocatorRssRatio?: number; + allocatorRssBytes?: number; + rssOverheadRatio?: number; + rssOverheadBytes?: number; + fragmentation?: number; + fragmentationBytes: number; + db: { + [key: number]: { + overheadHashtableMain: number; + overheadHashtableExpires: number; + }; + }; +} +export declare function transformReply(rawReply: Array>): MemoryStatsReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/MEMORY_STATS.js b/node_modules/@redis/client/dist/lib/commands/MEMORY_STATS.js new file mode 100644 index 0000000..eea30bb --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MEMORY_STATS.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments() { + return ['MEMORY', 'STATS']; +} +exports.transformArguments = transformArguments; +const FIELDS_MAPPING = { + 'peak.allocated': 'peakAllocated', + 'total.allocated': 'totalAllocated', + 'startup.allocated': 'startupAllocated', + 'replication.backlog': 'replicationBacklog', + 'clients.slaves': 'clientsReplicas', + 'clients.normal': 'clientsNormal', + 'aof.buffer': 'aofBuffer', + 'lua.caches': 'luaCaches', + 'overhead.total': 'overheadTotal', + 'keys.count': 'keysCount', + 'keys.bytes-per-key': 'keysBytesPerKey', + 'dataset.bytes': 'datasetBytes', + 'dataset.percentage': 'datasetPercentage', + 'peak.percentage': 'peakPercentage', + 'allocator.allocated': 'allocatorAllocated', + 'allocator.active': 'allocatorActive', + 'allocator.resident': 'allocatorResident', + 'allocator-fragmentation.ratio': 'allocatorFragmentationRatio', + 'allocator-fragmentation.bytes': 'allocatorFragmentationBytes', + 'allocator-rss.ratio': 'allocatorRssRatio', + 'allocator-rss.bytes': 'allocatorRssBytes', + 'rss-overhead.ratio': 'rssOverheadRatio', + 'rss-overhead.bytes': 'rssOverheadBytes', + 'fragmentation': 'fragmentation', + 'fragmentation.bytes': 'fragmentationBytes' +}, 
DB_FIELDS_MAPPING = { + 'overhead.hashtable.main': 'overheadHashtableMain', + 'overhead.hashtable.expires': 'overheadHashtableExpires' +}; +function transformReply(rawReply) { + const reply = { + db: {} + }; + for (let i = 0; i < rawReply.length; i += 2) { + const key = rawReply[i]; + if (key.startsWith('db.')) { + const dbTuples = rawReply[i + 1], db = {}; + for (let j = 0; j < dbTuples.length; j += 2) { + db[DB_FIELDS_MAPPING[dbTuples[j]]] = dbTuples[j + 1]; + } + reply.db[key.substring(3)] = db; + continue; + } + reply[FIELDS_MAPPING[key]] = Number(rawReply[i + 1]); + } + return reply; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/MEMORY_USAGE.d.ts b/node_modules/@redis/client/dist/lib/commands/MEMORY_USAGE.d.ts new file mode 100644 index 0000000..274788a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MEMORY_USAGE.d.ts @@ -0,0 +1,8 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +interface MemoryUsageOptions { + SAMPLES?: number; +} +export declare function transformArguments(key: string, options?: MemoryUsageOptions): Array; +export declare function transformReply(): number | null; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/MEMORY_USAGE.js b/node_modules/@redis/client/dist/lib/commands/MEMORY_USAGE.js new file mode 100644 index 0000000..043d874 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MEMORY_USAGE.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, options) { + const args = ['MEMORY', 'USAGE', key]; + if (options?.SAMPLES) { + args.push('SAMPLES', options.SAMPLES.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git 
a/node_modules/@redis/client/dist/lib/commands/MGET.d.ts b/node_modules/@redis/client/dist/lib/commands/MGET.d.ts new file mode 100644 index 0000000..e657213 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MGET.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(keys: Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/MGET.js b/node_modules/@redis/client/dist/lib/commands/MGET.js new file mode 100644 index 0000000..4ef34d8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MGET.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(keys) { + return ['MGET', ...keys]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/MIGRATE.d.ts b/node_modules/@redis/client/dist/lib/commands/MIGRATE.d.ts new file mode 100644 index 0000000..548ff18 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MIGRATE.d.ts @@ -0,0 +1,10 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { AuthOptions } from './AUTH'; +interface MigrateOptions { + COPY?: true; + REPLACE?: true; + AUTH?: AuthOptions; +} +export declare function transformArguments(host: RedisCommandArgument, port: number, key: RedisCommandArgument | Array, destinationDb: number, timeout: number, options?: MigrateOptions): RedisCommandArguments; +export declare function transformReply(): string; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/MIGRATE.js b/node_modules/@redis/client/dist/lib/commands/MIGRATE.js new 
file mode 100644 index 0000000..2c38fb1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MIGRATE.js @@ -0,0 +1,32 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(host, port, key, destinationDb, timeout, options) { + const args = ['MIGRATE', host, port.toString()], isKeyArray = Array.isArray(key); + if (isKeyArray) { + args.push(''); + } + else { + args.push(key); + } + args.push(destinationDb.toString(), timeout.toString()); + if (options?.COPY) { + args.push('COPY'); + } + if (options?.REPLACE) { + args.push('REPLACE'); + } + if (options?.AUTH) { + if (options.AUTH.username) { + args.push('AUTH2', options.AUTH.username, options.AUTH.password); + } + else { + args.push('AUTH', options.AUTH.password); + } + } + if (isKeyArray) { + args.push('KEYS', ...key); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/MODULE_LIST.d.ts b/node_modules/@redis/client/dist/lib/commands/MODULE_LIST.d.ts new file mode 100644 index 0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MODULE_LIST.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/MODULE_LIST.js b/node_modules/@redis/client/dist/lib/commands/MODULE_LIST.js new file mode 100644 index 0000000..4fe8c5b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MODULE_LIST.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['MODULE', 'LIST']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/MODULE_LOAD.d.ts b/node_modules/@redis/client/dist/lib/commands/MODULE_LOAD.d.ts new file 
mode 100644 index 0000000..f25f608 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MODULE_LOAD.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(path: string, moduleArgs?: Array): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/MODULE_LOAD.js b/node_modules/@redis/client/dist/lib/commands/MODULE_LOAD.js new file mode 100644 index 0000000..34aeb4b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MODULE_LOAD.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(path, moduleArgs) { + const args = ['MODULE', 'LOAD', path]; + if (moduleArgs) { + args.push(...moduleArgs); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/MODULE_UNLOAD.d.ts b/node_modules/@redis/client/dist/lib/commands/MODULE_UNLOAD.d.ts new file mode 100644 index 0000000..ab218f9 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MODULE_UNLOAD.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(name: string): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/MODULE_UNLOAD.js b/node_modules/@redis/client/dist/lib/commands/MODULE_UNLOAD.js new file mode 100644 index 0000000..dbc9ac7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MODULE_UNLOAD.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(name) { + return ['MODULE', 'UNLOAD', name]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/MOVE.d.ts b/node_modules/@redis/client/dist/lib/commands/MOVE.d.ts new file mode 100644 index 0000000..88d4956 --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/MOVE.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, db: number): Array; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/MOVE.js b/node_modules/@redis/client/dist/lib/commands/MOVE.js new file mode 100644 index 0000000..20c9b79 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MOVE.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, db) { + return ['MOVE', key, db.toString()]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/MSET.d.ts b/node_modules/@redis/client/dist/lib/commands/MSET.d.ts new file mode 100644 index 0000000..75d7cd1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MSET.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export type MSetArguments = Array<[RedisCommandArgument, RedisCommandArgument]> | Array | Record; +export declare function transformArguments(toSet: MSetArguments): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/MSET.js b/node_modules/@redis/client/dist/lib/commands/MSET.js new file mode 100644 index 0000000..7662311 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MSET.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, 
"__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(toSet) { + const args = ['MSET']; + if (Array.isArray(toSet)) { + args.push(...toSet.flat()); + } + else { + for (const key of Object.keys(toSet)) { + args.push(key, toSet[key]); + } + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/MSETNX.d.ts b/node_modules/@redis/client/dist/lib/commands/MSETNX.d.ts new file mode 100644 index 0000000..cac4eae --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MSETNX.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '.'; +import { MSetArguments } from './MSET'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(toSet: MSetArguments): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/MSETNX.js b/node_modules/@redis/client/dist/lib/commands/MSETNX.js new file mode 100644 index 0000000..5ce398a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/MSETNX.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(toSet) { + const args = ['MSETNX']; + if (Array.isArray(toSet)) { + args.push(...toSet.flat()); + } + else { + for (const key of Object.keys(toSet)) { + args.push(key, toSet[key]); + } + } + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git 
a/node_modules/@redis/client/dist/lib/commands/OBJECT_ENCODING.d.ts b/node_modules/@redis/client/dist/lib/commands/OBJECT_ENCODING.d.ts new file mode 100644 index 0000000..e9929b7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/OBJECT_ENCODING.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): string | null; diff --git a/node_modules/@redis/client/dist/lib/commands/OBJECT_ENCODING.js b/node_modules/@redis/client/dist/lib/commands/OBJECT_ENCODING.js new file mode 100644 index 0000000..2b493bc --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/OBJECT_ENCODING.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 2; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['OBJECT', 'ENCODING', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/OBJECT_FREQ.d.ts b/node_modules/@redis/client/dist/lib/commands/OBJECT_FREQ.d.ts new file mode 100644 index 0000000..745bcb2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/OBJECT_FREQ.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number | null; diff --git a/node_modules/@redis/client/dist/lib/commands/OBJECT_FREQ.js b/node_modules/@redis/client/dist/lib/commands/OBJECT_FREQ.js new file mode 100644 index 0000000..8b20400 --- 
/dev/null +++ b/node_modules/@redis/client/dist/lib/commands/OBJECT_FREQ.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 2; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['OBJECT', 'FREQ', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/OBJECT_IDLETIME.d.ts b/node_modules/@redis/client/dist/lib/commands/OBJECT_IDLETIME.d.ts new file mode 100644 index 0000000..745bcb2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/OBJECT_IDLETIME.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number | null; diff --git a/node_modules/@redis/client/dist/lib/commands/OBJECT_IDLETIME.js b/node_modules/@redis/client/dist/lib/commands/OBJECT_IDLETIME.js new file mode 100644 index 0000000..d5f1144 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/OBJECT_IDLETIME.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 2; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['OBJECT', 'IDLETIME', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/OBJECT_REFCOUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/OBJECT_REFCOUNT.d.ts new file mode 100644 index 0000000..745bcb2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/OBJECT_REFCOUNT.d.ts @@ -0,0 +1,5 @@ +import { 
RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number | null; diff --git a/node_modules/@redis/client/dist/lib/commands/OBJECT_REFCOUNT.js b/node_modules/@redis/client/dist/lib/commands/OBJECT_REFCOUNT.js new file mode 100644 index 0000000..7ae4d66 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/OBJECT_REFCOUNT.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 2; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['OBJECT', 'REFCOUNT', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/PERSIST.d.ts b/node_modules/@redis/client/dist/lib/commands/PERSIST.d.ts new file mode 100644 index 0000000..6da96dc --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PERSIST.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/PERSIST.js b/node_modules/@redis/client/dist/lib/commands/PERSIST.js new file mode 100644 index 0000000..0430a7f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PERSIST.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return 
['PERSIST', key]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/PEXPIRE.d.ts b/node_modules/@redis/client/dist/lib/commands/PEXPIRE.d.ts new file mode 100644 index 0000000..2285fc3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PEXPIRE.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, milliseconds: number, mode?: 'NX' | 'XX' | 'GT' | 'LT'): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/PEXPIRE.js b/node_modules/@redis/client/dist/lib/commands/PEXPIRE.js new file mode 100644 index 0000000..6203297 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PEXPIRE.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, milliseconds, mode) { + const args = ['PEXPIRE', key, milliseconds.toString()]; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/PEXPIREAT.d.ts b/node_modules/@redis/client/dist/lib/commands/PEXPIREAT.d.ts new file mode 100644 index 0000000..0d3557a --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/PEXPIREAT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, millisecondsTimestamp: number | Date, mode?: 'NX' | 'XX' | 'GT' | 'LT'): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/PEXPIREAT.js b/node_modules/@redis/client/dist/lib/commands/PEXPIREAT.js new file mode 100644 index 0000000..67318b0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PEXPIREAT.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, millisecondsTimestamp, mode) { + const args = [ + 'PEXPIREAT', + key, + (0, generic_transformers_1.transformPXAT)(millisecondsTimestamp) + ]; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_2 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_2.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/PEXPIRETIME.d.ts b/node_modules/@redis/client/dist/lib/commands/PEXPIRETIME.d.ts new file mode 100644 index 0000000..c35a19c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PEXPIRETIME.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): 
number; diff --git a/node_modules/@redis/client/dist/lib/commands/PEXPIRETIME.js b/node_modules/@redis/client/dist/lib/commands/PEXPIRETIME.js new file mode 100644 index 0000000..30ab6d9 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PEXPIRETIME.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['PEXPIRETIME', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/PFADD.d.ts b/node_modules/@redis/client/dist/lib/commands/PFADD.d.ts new file mode 100644 index 0000000..194c7c1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PFADD.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, element: RedisCommandArgument | Array): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/PFADD.js b/node_modules/@redis/client/dist/lib/commands/PFADD.js new file mode 100644 index 0000000..9c474a7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PFADD.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, element) { + return (0, generic_transformers_1.pushVerdictArguments)(['PFADD', key], element); +} +exports.transformArguments = transformArguments; +var generic_transformers_2 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: 
true, get: function () { return generic_transformers_2.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/PFCOUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/PFCOUNT.d.ts new file mode 100644 index 0000000..13e0651 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PFCOUNT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/PFCOUNT.js b/node_modules/@redis/client/dist/lib/commands/PFCOUNT.js new file mode 100644 index 0000000..3a7a1da --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PFCOUNT.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return (0, generic_transformers_1.pushVerdictArguments)(['PFCOUNT'], key); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/PFMERGE.d.ts b/node_modules/@redis/client/dist/lib/commands/PFMERGE.d.ts new file mode 100644 index 0000000..d406d16 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PFMERGE.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(destination: string, source: string | Array): RedisCommandArguments; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/PFMERGE.js b/node_modules/@redis/client/dist/lib/commands/PFMERGE.js new file mode 100644 index 0000000..48305f9 --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/PFMERGE.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(destination, source) { + return (0, generic_transformers_1.pushVerdictArguments)(['PFMERGE', destination], source); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/PING.d.ts b/node_modules/@redis/client/dist/lib/commands/PING.d.ts new file mode 100644 index 0000000..7b7545d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PING.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformArguments(message?: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/PING.js b/node_modules/@redis/client/dist/lib/commands/PING.js new file mode 100644 index 0000000..cfc7297 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PING.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(message) { + const args = ['PING']; + if (message) { + args.push(message); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/PSETEX.d.ts b/node_modules/@redis/client/dist/lib/commands/PSETEX.d.ts new file mode 100644 index 0000000..167d175 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PSETEX.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, 
milliseconds: number, value: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/PSETEX.js b/node_modules/@redis/client/dist/lib/commands/PSETEX.js new file mode 100644 index 0000000..c3e7b8d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PSETEX.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, milliseconds, value) { + return [ + 'PSETEX', + key, + milliseconds.toString(), + value + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/PTTL.d.ts b/node_modules/@redis/client/dist/lib/commands/PTTL.d.ts new file mode 100644 index 0000000..367e9b8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PTTL.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/PTTL.js b/node_modules/@redis/client/dist/lib/commands/PTTL.js new file mode 100644 index 0000000..c7e9cfc --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PTTL.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['PTTL', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/PUBLISH.d.ts 
b/node_modules/@redis/client/dist/lib/commands/PUBLISH.d.ts new file mode 100644 index 0000000..e636aa0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PUBLISH.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(channel: RedisCommandArgument, message: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/PUBLISH.js b/node_modules/@redis/client/dist/lib/commands/PUBLISH.js new file mode 100644 index 0000000..b2ca1b5 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PUBLISH.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments(channel, message) { + return ['PUBLISH', channel, message]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/PUBSUB_CHANNELS.d.ts b/node_modules/@redis/client/dist/lib/commands/PUBSUB_CHANNELS.d.ts new file mode 100644 index 0000000..e892c7b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PUBSUB_CHANNELS.d.ts @@ -0,0 +1,3 @@ +export declare const IS_READ_ONLY = true; +export declare function transformArguments(pattern?: string): Array; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/PUBSUB_CHANNELS.js b/node_modules/@redis/client/dist/lib/commands/PUBSUB_CHANNELS.js new file mode 100644 index 0000000..f717e98 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PUBSUB_CHANNELS.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function 
transformArguments(pattern) { + const args = ['PUBSUB', 'CHANNELS']; + if (pattern) { + args.push(pattern); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMPAT.d.ts b/node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMPAT.d.ts new file mode 100644 index 0000000..c3c75ec --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMPAT.d.ts @@ -0,0 +1,3 @@ +export declare const IS_READ_ONLY = true; +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMPAT.js b/node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMPAT.js new file mode 100644 index 0000000..81102b2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMPAT.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments() { + return ['PUBSUB', 'NUMPAT']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMSUB.d.ts b/node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMSUB.d.ts new file mode 100644 index 0000000..22ba677 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMSUB.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(channels?: Array | RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(rawReply: Array): Record; diff --git a/node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMSUB.js b/node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMSUB.js new file mode 100644 index 0000000..c4671b2 --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/PUBSUB_NUMSUB.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.IS_READ_ONLY = true; +function transformArguments(channels) { + const args = ['PUBSUB', 'NUMSUB']; + if (channels) + return (0, generic_transformers_1.pushVerdictArguments)(args, channels); + return args; +} +exports.transformArguments = transformArguments; +function transformReply(rawReply) { + const transformedReply = Object.create(null); + for (let i = 0; i < rawReply.length; i += 2) { + transformedReply[rawReply[i]] = rawReply[i + 1]; + } + return transformedReply; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDCHANNELS.d.ts b/node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDCHANNELS.d.ts new file mode 100644 index 0000000..548c5fe --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDCHANNELS.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(pattern?: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDCHANNELS.js b/node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDCHANNELS.js new file mode 100644 index 0000000..3b32c6b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDCHANNELS.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments(pattern) { + const args = ['PUBSUB', 'SHARDCHANNELS']; + if (pattern) + 
args.push(pattern); + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDNUMSUB.d.ts b/node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDNUMSUB.d.ts new file mode 100644 index 0000000..22ba677 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDNUMSUB.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(channels?: Array | RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(rawReply: Array): Record; diff --git a/node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDNUMSUB.js b/node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDNUMSUB.js new file mode 100644 index 0000000..0d32b64 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/PUBSUB_SHARDNUMSUB.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.IS_READ_ONLY = true; +function transformArguments(channels) { + const args = ['PUBSUB', 'SHARDNUMSUB']; + if (channels) + return (0, generic_transformers_1.pushVerdictArguments)(args, channels); + return args; +} +exports.transformArguments = transformArguments; +function transformReply(rawReply) { + const transformedReply = Object.create(null); + for (let i = 0; i < rawReply.length; i += 2) { + transformedReply[rawReply[i]] = rawReply[i + 1]; + } + return transformedReply; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/RANDOMKEY.d.ts b/node_modules/@redis/client/dist/lib/commands/RANDOMKEY.d.ts new file mode 100644 index 0000000..5214486 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RANDOMKEY.d.ts @@ 
-0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/RANDOMKEY.js b/node_modules/@redis/client/dist/lib/commands/RANDOMKEY.js new file mode 100644 index 0000000..34f6e1a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RANDOMKEY.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments() { + return ['RANDOMKEY']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/READONLY.d.ts b/node_modules/@redis/client/dist/lib/commands/READONLY.d.ts new file mode 100644 index 0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/READONLY.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/READONLY.js b/node_modules/@redis/client/dist/lib/commands/READONLY.js new file mode 100644 index 0000000..1615646 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/READONLY.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['READONLY']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/READWRITE.d.ts b/node_modules/@redis/client/dist/lib/commands/READWRITE.d.ts new file mode 100644 index 0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/READWRITE.d.ts @@ -0,0 +1,2 @@ +export declare function 
transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/READWRITE.js b/node_modules/@redis/client/dist/lib/commands/READWRITE.js new file mode 100644 index 0000000..10a4f88 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/READWRITE.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['READWRITE']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/RENAME.d.ts b/node_modules/@redis/client/dist/lib/commands/RENAME.d.ts new file mode 100644 index 0000000..05cdf04 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RENAME.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, newKey: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/RENAME.js b/node_modules/@redis/client/dist/lib/commands/RENAME.js new file mode 100644 index 0000000..5148f7a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RENAME.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, newKey) { + return ['RENAME', key, newKey]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/RENAMENX.d.ts b/node_modules/@redis/client/dist/lib/commands/RENAMENX.d.ts new file mode 100644 index 0000000..396dad0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RENAMENX.d.ts @@ -0,0 +1,4 @@ +import { 
RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, newKey: RedisCommandArgument): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/RENAMENX.js b/node_modules/@redis/client/dist/lib/commands/RENAMENX.js new file mode 100644 index 0000000..1c5276a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RENAMENX.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, newKey) { + return ['RENAMENX', key, newKey]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/REPLICAOF.d.ts b/node_modules/@redis/client/dist/lib/commands/REPLICAOF.d.ts new file mode 100644 index 0000000..b40e18c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/REPLICAOF.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(host: string, port: number): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/REPLICAOF.js b/node_modules/@redis/client/dist/lib/commands/REPLICAOF.js new file mode 100644 index 0000000..1194dbf --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/REPLICAOF.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(host, port) { + return ['REPLICAOF', host, port.toString()]; 
+} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/RESTORE-ASKING.d.ts b/node_modules/@redis/client/dist/lib/commands/RESTORE-ASKING.d.ts new file mode 100644 index 0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RESTORE-ASKING.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/RESTORE-ASKING.js b/node_modules/@redis/client/dist/lib/commands/RESTORE-ASKING.js new file mode 100644 index 0000000..6c636b8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RESTORE-ASKING.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['RESTORE-ASKING']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/RESTORE.d.ts b/node_modules/@redis/client/dist/lib/commands/RESTORE.d.ts new file mode 100644 index 0000000..97a8dc3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RESTORE.d.ts @@ -0,0 +1,11 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +interface RestoreOptions { + REPLACE?: true; + ABSTTL?: true; + IDLETIME?: number; + FREQ?: number; +} +export declare function transformArguments(key: RedisCommandArgument, ttl: number, serializedValue: RedisCommandArgument, options?: RestoreOptions): RedisCommandArguments; +export declare function transformReply(): 'OK'; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/RESTORE.js b/node_modules/@redis/client/dist/lib/commands/RESTORE.js new file mode 100644 index 0000000..86c9662 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RESTORE.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, 
"__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, ttl, serializedValue, options) { + const args = ['RESTORE', key, ttl.toString(), serializedValue]; + if (options?.REPLACE) { + args.push('REPLACE'); + } + if (options?.ABSTTL) { + args.push('ABSTTL'); + } + if (options?.IDLETIME) { + args.push('IDLETIME', options.IDLETIME.toString()); + } + if (options?.FREQ) { + args.push('FREQ', options.FREQ.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ROLE.d.ts b/node_modules/@redis/client/dist/lib/commands/ROLE.d.ts new file mode 100644 index 0000000..c6ca68a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ROLE.d.ts @@ -0,0 +1,32 @@ +export declare const IS_READ_ONLY = true; +export declare function transformArguments(): Array; +interface RoleReplyInterface { + role: T; +} +type RoleMasterRawReply = ['master', number, Array<[string, string, string]>]; +interface RoleMasterReply extends RoleReplyInterface<'master'> { + replicationOffest: number; + replicas: Array<{ + ip: string; + port: number; + replicationOffest: number; + }>; +} +type RoleReplicaState = 'connect' | 'connecting' | 'sync' | 'connected'; +type RoleReplicaRawReply = ['slave', string, number, RoleReplicaState, number]; +interface RoleReplicaReply extends RoleReplyInterface<'slave'> { + master: { + ip: string; + port: number; + }; + state: RoleReplicaState; + dataReceived: number; +} +type RoleSentinelRawReply = ['sentinel', Array]; +interface RoleSentinelReply extends RoleReplyInterface<'sentinel'> { + masterNames: Array; +} +type RoleRawReply = RoleMasterRawReply | RoleReplicaRawReply | RoleSentinelRawReply; +type RoleReply = RoleMasterReply | RoleReplicaReply | RoleSentinelReply; +export declare function transformReply(reply: RoleRawReply): RoleReply; +export {}; diff --git 
a/node_modules/@redis/client/dist/lib/commands/ROLE.js b/node_modules/@redis/client/dist/lib/commands/ROLE.js new file mode 100644 index 0000000..30ede94 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ROLE.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments() { + return ['ROLE']; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + switch (reply[0]) { + case 'master': + return { + role: 'master', + replicationOffest: reply[1], + replicas: reply[2].map(([ip, port, replicationOffest]) => ({ + ip, + port: Number(port), + replicationOffest: Number(replicationOffest) + })) + }; + case 'slave': + return { + role: 'slave', + master: { + ip: reply[1], + port: reply[2] + }, + state: reply[3], + dataReceived: reply[4] + }; + case 'sentinel': + return { + role: 'sentinel', + masterNames: reply[1] + }; + } +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/RPOP.d.ts b/node_modules/@redis/client/dist/lib/commands/RPOP.d.ts new file mode 100644 index 0000000..15f87fd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RPOP.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/RPOP.js b/node_modules/@redis/client/dist/lib/commands/RPOP.js new file mode 100644 index 0000000..b5c9543 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RPOP.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = 
exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['RPOP', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/RPOPLPUSH.d.ts b/node_modules/@redis/client/dist/lib/commands/RPOPLPUSH.d.ts new file mode 100644 index 0000000..86c0fad --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RPOPLPUSH.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(source: RedisCommandArgument, destination: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/RPOPLPUSH.js b/node_modules/@redis/client/dist/lib/commands/RPOPLPUSH.js new file mode 100644 index 0000000..d314c3d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RPOPLPUSH.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(source, destination) { + return ['RPOPLPUSH', source, destination]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/RPOP_COUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/RPOP_COUNT.d.ts new file mode 100644 index 0000000..e48bfcb --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RPOP_COUNT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, count: number): RedisCommandArguments; +export declare function transformReply(): Array | null; diff --git a/node_modules/@redis/client/dist/lib/commands/RPOP_COUNT.js 
b/node_modules/@redis/client/dist/lib/commands/RPOP_COUNT.js new file mode 100644 index 0000000..baf43bd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RPOP_COUNT.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, count) { + return ['RPOP', key, count.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/RPUSH.d.ts b/node_modules/@redis/client/dist/lib/commands/RPUSH.d.ts new file mode 100644 index 0000000..ce8878e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RPUSH.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, element: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/RPUSH.js b/node_modules/@redis/client/dist/lib/commands/RPUSH.js new file mode 100644 index 0000000..58b9de5 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RPUSH.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, element) { + return (0, generic_transformers_1.pushVerdictArguments)(['RPUSH', key], element); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/RPUSHX.d.ts b/node_modules/@redis/client/dist/lib/commands/RPUSHX.d.ts new file mode 100644 index 0000000..ce8878e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RPUSHX.d.ts @@ 
-0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, element: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/RPUSHX.js b/node_modules/@redis/client/dist/lib/commands/RPUSHX.js new file mode 100644 index 0000000..1eec61d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/RPUSHX.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, element) { + return (0, generic_transformers_1.pushVerdictArguments)(['RPUSHX', key], element); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SADD.d.ts b/node_modules/@redis/client/dist/lib/commands/SADD.d.ts new file mode 100644 index 0000000..7586a6d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SADD.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, members: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/SADD.js b/node_modules/@redis/client/dist/lib/commands/SADD.js new file mode 100644 index 0000000..f4b2072 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SADD.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = 
require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, members) { + return (0, generic_transformers_1.pushVerdictArguments)(['SADD', key], members); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SAVE.d.ts b/node_modules/@redis/client/dist/lib/commands/SAVE.d.ts new file mode 100644 index 0000000..3e8dbce --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SAVE.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArgument } from '.'; +export declare function transformArguments(): Array; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/SAVE.js b/node_modules/@redis/client/dist/lib/commands/SAVE.js new file mode 100644 index 0000000..051bb9c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SAVE.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['SAVE']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SCAN.d.ts b/node_modules/@redis/client/dist/lib/commands/SCAN.d.ts new file mode 100644 index 0000000..0c95616 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCAN.d.ts @@ -0,0 +1,14 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { ScanOptions } from './generic-transformers'; +export declare const IS_READ_ONLY = true; +export interface ScanCommandOptions extends ScanOptions { + TYPE?: RedisCommandArgument; +} +export declare function transformArguments(cursor: number, options?: ScanCommandOptions): RedisCommandArguments; +type ScanRawReply = [string, Array]; +export interface ScanReply { + cursor: number; + keys: Array; +} +export declare function transformReply([cursor, keys]: ScanRawReply): ScanReply; +export {}; diff --git 
a/node_modules/@redis/client/dist/lib/commands/SCAN.js b/node_modules/@redis/client/dist/lib/commands/SCAN.js new file mode 100644 index 0000000..584b4b0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCAN.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.IS_READ_ONLY = true; +function transformArguments(cursor, options) { + const args = (0, generic_transformers_1.pushScanArguments)(['SCAN'], cursor, options); + if (options?.TYPE) { + args.push('TYPE', options.TYPE); + } + return args; +} +exports.transformArguments = transformArguments; +function transformReply([cursor, keys]) { + return { + cursor: Number(cursor), + keys + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/SCARD.d.ts b/node_modules/@redis/client/dist/lib/commands/SCARD.d.ts new file mode 100644 index 0000000..c1eb736 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCARD.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/SCARD.js b/node_modules/@redis/client/dist/lib/commands/SCARD.js new file mode 100644 index 0000000..4a1bdab --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCARD.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['SCARD', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SCRIPT_DEBUG.d.ts 
b/node_modules/@redis/client/dist/lib/commands/SCRIPT_DEBUG.d.ts new file mode 100644 index 0000000..56af8af --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCRIPT_DEBUG.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(mode: 'YES' | 'SYNC' | 'NO'): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/SCRIPT_DEBUG.js b/node_modules/@redis/client/dist/lib/commands/SCRIPT_DEBUG.js new file mode 100644 index 0000000..15b9a0b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCRIPT_DEBUG.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(mode) { + return ['SCRIPT', 'DEBUG', mode]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SCRIPT_EXISTS.d.ts b/node_modules/@redis/client/dist/lib/commands/SCRIPT_EXISTS.d.ts new file mode 100644 index 0000000..12fc6da --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCRIPT_EXISTS.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArguments } from '.'; +export declare function transformArguments(sha1: string | Array): RedisCommandArguments; +export { transformBooleanArrayReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/SCRIPT_EXISTS.js b/node_modules/@redis/client/dist/lib/commands/SCRIPT_EXISTS.js new file mode 100644 index 0000000..475c3b5 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCRIPT_EXISTS.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +const generic_transformers_1 = require("./generic-transformers"); +function transformArguments(sha1) { + return (0, generic_transformers_1.pushVerdictArguments)(['SCRIPT', 'EXISTS'], sha1); +} 
+exports.transformArguments = transformArguments; +var generic_transformers_2 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_2.transformBooleanArrayReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/SCRIPT_FLUSH.d.ts b/node_modules/@redis/client/dist/lib/commands/SCRIPT_FLUSH.d.ts new file mode 100644 index 0000000..8675eff --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCRIPT_FLUSH.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(mode?: 'ASYNC' | 'SYNC'): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/SCRIPT_FLUSH.js b/node_modules/@redis/client/dist/lib/commands/SCRIPT_FLUSH.js new file mode 100644 index 0000000..207885f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCRIPT_FLUSH.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(mode) { + const args = ['SCRIPT', 'FLUSH']; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SCRIPT_KILL.d.ts b/node_modules/@redis/client/dist/lib/commands/SCRIPT_KILL.d.ts new file mode 100644 index 0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCRIPT_KILL.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/SCRIPT_KILL.js b/node_modules/@redis/client/dist/lib/commands/SCRIPT_KILL.js new file mode 100644 index 0000000..33b5ea0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCRIPT_KILL.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); 
+exports.transformArguments = void 0; +function transformArguments() { + return ['SCRIPT', 'KILL']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SCRIPT_LOAD.d.ts b/node_modules/@redis/client/dist/lib/commands/SCRIPT_LOAD.d.ts new file mode 100644 index 0000000..8995e3e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCRIPT_LOAD.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(script: string): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/SCRIPT_LOAD.js b/node_modules/@redis/client/dist/lib/commands/SCRIPT_LOAD.js new file mode 100644 index 0000000..41c0836 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SCRIPT_LOAD.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(script) { + return ['SCRIPT', 'LOAD', script]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SDIFF.d.ts b/node_modules/@redis/client/dist/lib/commands/SDIFF.d.ts new file mode 100644 index 0000000..354b335 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SDIFF.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(keys: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/SDIFF.js b/node_modules/@redis/client/dist/lib/commands/SDIFF.js new file mode 100644 index 0000000..10b3d18 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SDIFF.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); 
+exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(keys) { + return (0, generic_transformers_1.pushVerdictArguments)(['SDIFF'], keys); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SDIFFSTORE.d.ts b/node_modules/@redis/client/dist/lib/commands/SDIFFSTORE.d.ts new file mode 100644 index 0000000..cc3d9ec --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SDIFFSTORE.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(destination: RedisCommandArgument, keys: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/SDIFFSTORE.js b/node_modules/@redis/client/dist/lib/commands/SDIFFSTORE.js new file mode 100644 index 0000000..c5dba76 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SDIFFSTORE.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(destination, keys) { + return (0, generic_transformers_1.pushVerdictArguments)(['SDIFFSTORE', destination], keys); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SET.d.ts b/node_modules/@redis/client/dist/lib/commands/SET.d.ts new file mode 100644 index 0000000..f7a2a2b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SET.d.ts @@ -0,0 +1,23 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export 
declare const FIRST_KEY_INDEX = 1; +type MaximumOneOf = K extends keyof T ? { + [P in K]?: T[K]; +} & Partial, never>> : never; +type SetTTL = MaximumOneOf<{ + EX: number; + PX: number; + EXAT: number; + PXAT: number; + KEEPTTL: true; +}>; +type SetGuards = MaximumOneOf<{ + NX: true; + XX: true; +}>; +interface SetCommonOptions { + GET?: true; +} +export type SetOptions = SetTTL & SetGuards & SetCommonOptions; +export declare function transformArguments(key: RedisCommandArgument, value: RedisCommandArgument | number, options?: SetOptions): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/SET.js b/node_modules/@redis/client/dist/lib/commands/SET.js new file mode 100644 index 0000000..5470edb --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SET.js @@ -0,0 +1,37 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, value, options) { + const args = [ + 'SET', + key, + typeof value === 'number' ? 
value.toString() : value + ]; + if (options?.EX !== undefined) { + args.push('EX', options.EX.toString()); + } + else if (options?.PX !== undefined) { + args.push('PX', options.PX.toString()); + } + else if (options?.EXAT !== undefined) { + args.push('EXAT', options.EXAT.toString()); + } + else if (options?.PXAT !== undefined) { + args.push('PXAT', options.PXAT.toString()); + } + else if (options?.KEEPTTL) { + args.push('KEEPTTL'); + } + if (options?.NX) { + args.push('NX'); + } + else if (options?.XX) { + args.push('XX'); + } + if (options?.GET) { + args.push('GET'); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SETBIT.d.ts b/node_modules/@redis/client/dist/lib/commands/SETBIT.d.ts new file mode 100644 index 0000000..e1bb713 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SETBIT.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { BitValue } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, offset: number, value: BitValue): RedisCommandArguments; +export declare function transformReply(): BitValue; diff --git a/node_modules/@redis/client/dist/lib/commands/SETBIT.js b/node_modules/@redis/client/dist/lib/commands/SETBIT.js new file mode 100644 index 0000000..87882a2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SETBIT.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, offset, value) { + return ['SETBIT', key, offset.toString(), value.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SETEX.d.ts b/node_modules/@redis/client/dist/lib/commands/SETEX.d.ts new file mode 
100644 index 0000000..a89b023 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SETEX.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, seconds: number, value: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/SETEX.js b/node_modules/@redis/client/dist/lib/commands/SETEX.js new file mode 100644 index 0000000..4c61175 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SETEX.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, seconds, value) { + return [ + 'SETEX', + key, + seconds.toString(), + value + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SETNX.d.ts b/node_modules/@redis/client/dist/lib/commands/SETNX.d.ts new file mode 100644 index 0000000..fd42e1d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SETNX.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, value: RedisCommandArgument): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/SETNX.js b/node_modules/@redis/client/dist/lib/commands/SETNX.js new file mode 100644 index 0000000..6745e4b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SETNX.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = 
exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, value) { + return ['SETNX', key, value]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/SETRANGE.d.ts b/node_modules/@redis/client/dist/lib/commands/SETRANGE.d.ts new file mode 100644 index 0000000..b8239a6 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SETRANGE.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, offset: number, value: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/SETRANGE.js b/node_modules/@redis/client/dist/lib/commands/SETRANGE.js new file mode 100644 index 0000000..2d8502e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SETRANGE.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, offset, value) { + return ['SETRANGE', key, offset.toString(), value]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SHUTDOWN.d.ts b/node_modules/@redis/client/dist/lib/commands/SHUTDOWN.d.ts new file mode 100644 index 0000000..88018cd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SHUTDOWN.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(mode?: 'NOSAVE' | 'SAVE'): Array; +export declare function transformReply(): 
void; diff --git a/node_modules/@redis/client/dist/lib/commands/SHUTDOWN.js b/node_modules/@redis/client/dist/lib/commands/SHUTDOWN.js new file mode 100644 index 0000000..05ab37d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SHUTDOWN.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(mode) { + const args = ['SHUTDOWN']; + if (mode) { + args.push(mode); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SINTER.d.ts b/node_modules/@redis/client/dist/lib/commands/SINTER.d.ts new file mode 100644 index 0000000..354b335 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SINTER.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(keys: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/SINTER.js b/node_modules/@redis/client/dist/lib/commands/SINTER.js new file mode 100644 index 0000000..5af0ecd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SINTER.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(keys) { + return (0, generic_transformers_1.pushVerdictArguments)(['SINTER'], keys); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SINTERCARD.d.ts b/node_modules/@redis/client/dist/lib/commands/SINTERCARD.d.ts new file 
mode 100644 index 0000000..1606dd4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SINTERCARD.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(keys: Array | RedisCommandArgument, limit?: number): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/SINTERCARD.js b/node_modules/@redis/client/dist/lib/commands/SINTERCARD.js new file mode 100644 index 0000000..449b2b2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SINTERCARD.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 2; +exports.IS_READ_ONLY = true; +function transformArguments(keys, limit) { + const args = (0, generic_transformers_1.pushVerdictArgument)(['SINTERCARD'], keys); + if (limit) { + args.push('LIMIT', limit.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SINTERSTORE.d.ts b/node_modules/@redis/client/dist/lib/commands/SINTERSTORE.d.ts new file mode 100644 index 0000000..044a3a2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SINTERSTORE.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(destination: RedisCommandArgument, keys: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/SINTERSTORE.js b/node_modules/@redis/client/dist/lib/commands/SINTERSTORE.js new 
file mode 100644 index 0000000..c0b5e60 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SINTERSTORE.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(destination, keys) { + return (0, generic_transformers_1.pushVerdictArguments)(['SINTERSTORE', destination], keys); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SISMEMBER.d.ts b/node_modules/@redis/client/dist/lib/commands/SISMEMBER.d.ts new file mode 100644 index 0000000..c9f4031 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SISMEMBER.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, member: RedisCommandArgument): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/SISMEMBER.js b/node_modules/@redis/client/dist/lib/commands/SISMEMBER.js new file mode 100644 index 0000000..0baffee --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SISMEMBER.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, member) { + return ['SISMEMBER', key, member]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git 
a/node_modules/@redis/client/dist/lib/commands/SMEMBERS.d.ts b/node_modules/@redis/client/dist/lib/commands/SMEMBERS.d.ts new file mode 100644 index 0000000..b5c1c09 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SMEMBERS.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/SMEMBERS.js b/node_modules/@redis/client/dist/lib/commands/SMEMBERS.js new file mode 100644 index 0000000..ac54aba --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SMEMBERS.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['SMEMBERS', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SMISMEMBER.d.ts b/node_modules/@redis/client/dist/lib/commands/SMISMEMBER.d.ts new file mode 100644 index 0000000..a17d5f6 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SMISMEMBER.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, members: Array): RedisCommandArguments; +export { transformBooleanArrayReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/SMISMEMBER.js b/node_modules/@redis/client/dist/lib/commands/SMISMEMBER.js new file mode 100644 index 0000000..dfa5ae3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SMISMEMBER.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: 
true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, members) { + return ['SMISMEMBER', key, ...members]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanArrayReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/SMOVE.d.ts b/node_modules/@redis/client/dist/lib/commands/SMOVE.d.ts new file mode 100644 index 0000000..7eee513 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SMOVE.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(source: RedisCommandArgument, destination: RedisCommandArgument, member: RedisCommandArgument): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/SMOVE.js b/node_modules/@redis/client/dist/lib/commands/SMOVE.js new file mode 100644 index 0000000..be14ce7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SMOVE.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(source, destination, member) { + return ['SMOVE', source, destination, member]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git 
a/node_modules/@redis/client/dist/lib/commands/SORT.d.ts b/node_modules/@redis/client/dist/lib/commands/SORT.d.ts new file mode 100644 index 0000000..f63a21d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SORT.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '.'; +import { SortOptions } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, options?: SortOptions): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/SORT.js b/node_modules/@redis/client/dist/lib/commands/SORT.js new file mode 100644 index 0000000..706a57d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SORT.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, options) { + return (0, generic_transformers_1.pushSortArguments)(['SORT', key], options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SORT_RO.d.ts b/node_modules/@redis/client/dist/lib/commands/SORT_RO.d.ts new file mode 100644 index 0000000..d00bd97 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SORT_RO.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArguments } from '.'; +import { SortOptions } from "./generic-transformers"; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, options?: SortOptions): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/SORT_RO.js b/node_modules/@redis/client/dist/lib/commands/SORT_RO.js new file mode 100644 index 0000000..d8fa37d --- 
/dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SORT_RO.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, options) { + return (0, generic_transformers_1.pushSortArguments)(['SORT_RO', key], options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SORT_STORE.d.ts b/node_modules/@redis/client/dist/lib/commands/SORT_STORE.d.ts new file mode 100644 index 0000000..8d6b555 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SORT_STORE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '.'; +import { SortOptions } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(source: string, destination: string, options?: SortOptions): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/SORT_STORE.js b/node_modules/@redis/client/dist/lib/commands/SORT_STORE.js new file mode 100644 index 0000000..b5bc7ad --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SORT_STORE.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const SORT_1 = require("./SORT"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(source, destination, options) { + const args = (0, SORT_1.transformArguments)(source, options); + args.push('STORE', destination); + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SPOP.d.ts b/node_modules/@redis/client/dist/lib/commands/SPOP.d.ts new 
file mode 100644 index 0000000..72cf152 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SPOP.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, count?: number): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/SPOP.js b/node_modules/@redis/client/dist/lib/commands/SPOP.js new file mode 100644 index 0000000..51d8244 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SPOP.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, count) { + const args = ['SPOP', key]; + if (typeof count === 'number') { + args.push(count.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SPUBLISH.d.ts b/node_modules/@redis/client/dist/lib/commands/SPUBLISH.d.ts new file mode 100644 index 0000000..d3d20d3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SPUBLISH.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const IS_READ_ONLY = true; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(channel: RedisCommandArgument, message: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/SPUBLISH.js b/node_modules/@redis/client/dist/lib/commands/SPUBLISH.js new file mode 100644 index 0000000..8a18640 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SPUBLISH.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); 
+exports.transformArguments = exports.FIRST_KEY_INDEX = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(channel, message) { + return ['SPUBLISH', channel, message]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SRANDMEMBER.d.ts b/node_modules/@redis/client/dist/lib/commands/SRANDMEMBER.d.ts new file mode 100644 index 0000000..15f87fd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SRANDMEMBER.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/SRANDMEMBER.js b/node_modules/@redis/client/dist/lib/commands/SRANDMEMBER.js new file mode 100644 index 0000000..cfd1cb3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SRANDMEMBER.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['SRANDMEMBER', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SRANDMEMBER_COUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/SRANDMEMBER_COUNT.d.ts new file mode 100644 index 0000000..b149d68 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SRANDMEMBER_COUNT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export { FIRST_KEY_INDEX } from './SRANDMEMBER'; +export declare function transformArguments(key: RedisCommandArgument, count: number): RedisCommandArguments; +export declare function transformReply(): Array; diff 
--git a/node_modules/@redis/client/dist/lib/commands/SRANDMEMBER_COUNT.js b/node_modules/@redis/client/dist/lib/commands/SRANDMEMBER_COUNT.js new file mode 100644 index 0000000..6e0c4a3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SRANDMEMBER_COUNT.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const SRANDMEMBER_1 = require("./SRANDMEMBER"); +var SRANDMEMBER_2 = require("./SRANDMEMBER"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return SRANDMEMBER_2.FIRST_KEY_INDEX; } }); +function transformArguments(key, count) { + return [ + ...(0, SRANDMEMBER_1.transformArguments)(key), + count.toString() + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SREM.d.ts b/node_modules/@redis/client/dist/lib/commands/SREM.d.ts new file mode 100644 index 0000000..7586a6d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SREM.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, members: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/SREM.js b/node_modules/@redis/client/dist/lib/commands/SREM.js new file mode 100644 index 0000000..121b1ef --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SREM.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, members) { + return (0, 
generic_transformers_1.pushVerdictArguments)(['SREM', key], members); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SSCAN.d.ts b/node_modules/@redis/client/dist/lib/commands/SSCAN.d.ts new file mode 100644 index 0000000..b8bea7b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SSCAN.d.ts @@ -0,0 +1,12 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { ScanOptions } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, cursor: number, options?: ScanOptions): RedisCommandArguments; +type SScanRawReply = [string, Array]; +interface SScanReply { + cursor: number; + members: Array; +} +export declare function transformReply([cursor, members]: SScanRawReply): SScanReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/SSCAN.js b/node_modules/@redis/client/dist/lib/commands/SSCAN.js new file mode 100644 index 0000000..e14f086 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SSCAN.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, cursor, options) { + return (0, generic_transformers_1.pushScanArguments)([ + 'SSCAN', + key, + ], cursor, options); +} +exports.transformArguments = transformArguments; +function transformReply([cursor, members]) { + return { + cursor: Number(cursor), + members + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/STRLEN.d.ts b/node_modules/@redis/client/dist/lib/commands/STRLEN.d.ts new file mode 
100644 index 0000000..367e9b8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/STRLEN.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/STRLEN.js b/node_modules/@redis/client/dist/lib/commands/STRLEN.js new file mode 100644 index 0000000..4786064 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/STRLEN.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['STRLEN', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SUNION.d.ts b/node_modules/@redis/client/dist/lib/commands/SUNION.d.ts new file mode 100644 index 0000000..354b335 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SUNION.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(keys: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/SUNION.js b/node_modules/@redis/client/dist/lib/commands/SUNION.js new file mode 100644 index 0000000..d3d7521 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SUNION.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = 
exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(keys) { + return (0, generic_transformers_1.pushVerdictArguments)(['SUNION'], keys); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SUNIONSTORE.d.ts b/node_modules/@redis/client/dist/lib/commands/SUNIONSTORE.d.ts new file mode 100644 index 0000000..cc3d9ec --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SUNIONSTORE.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(destination: RedisCommandArgument, keys: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/SUNIONSTORE.js b/node_modules/@redis/client/dist/lib/commands/SUNIONSTORE.js new file mode 100644 index 0000000..93d85fd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SUNIONSTORE.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(destination, keys) { + return (0, generic_transformers_1.pushVerdictArguments)(['SUNIONSTORE', destination], keys); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/SWAPDB.d.ts b/node_modules/@redis/client/dist/lib/commands/SWAPDB.d.ts new file mode 100644 index 0000000..21e7bbd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SWAPDB.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(index1: number, index2: number): Array; +export declare function 
transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/SWAPDB.js b/node_modules/@redis/client/dist/lib/commands/SWAPDB.js new file mode 100644 index 0000000..4967529 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/SWAPDB.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(index1, index2) { + return ['SWAPDB', index1.toString(), index2.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/TIME.d.ts b/node_modules/@redis/client/dist/lib/commands/TIME.d.ts new file mode 100644 index 0000000..806bc25 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/TIME.d.ts @@ -0,0 +1,6 @@ +export declare function transformArguments(): Array; +interface TimeReply extends Date { + microseconds: number; +} +export declare function transformReply(reply: [string, string]): TimeReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/TIME.js b/node_modules/@redis/client/dist/lib/commands/TIME.js new file mode 100644 index 0000000..087bc18 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/TIME.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments() { + return ['TIME']; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + const seconds = Number(reply[0]), microseconds = Number(reply[1]), d = new Date(seconds * 1000 + microseconds / 1000); + d.microseconds = microseconds; + return d; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/TOUCH.d.ts b/node_modules/@redis/client/dist/lib/commands/TOUCH.d.ts new file mode 100644 index 0000000..13e0651 --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/TOUCH.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/TOUCH.js b/node_modules/@redis/client/dist/lib/commands/TOUCH.js new file mode 100644 index 0000000..96fe9fe --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/TOUCH.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return (0, generic_transformers_1.pushVerdictArguments)(['TOUCH'], key); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/TTL.d.ts b/node_modules/@redis/client/dist/lib/commands/TTL.d.ts new file mode 100644 index 0000000..367e9b8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/TTL.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/TTL.js b/node_modules/@redis/client/dist/lib/commands/TTL.js new file mode 100644 index 0000000..7a3949f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/TTL.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; 
+exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['TTL', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/TYPE.d.ts b/node_modules/@redis/client/dist/lib/commands/TYPE.d.ts new file mode 100644 index 0000000..021d48e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/TYPE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/TYPE.js b/node_modules/@redis/client/dist/lib/commands/TYPE.js new file mode 100644 index 0000000..53b79e7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/TYPE.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['TYPE', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/UNLINK.d.ts b/node_modules/@redis/client/dist/lib/commands/UNLINK.d.ts new file mode 100644 index 0000000..13e0651 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/UNLINK.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/UNLINK.js b/node_modules/@redis/client/dist/lib/commands/UNLINK.js new file mode 100644 index 
0000000..7e8591c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/UNLINK.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return (0, generic_transformers_1.pushVerdictArguments)(['UNLINK'], key); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/UNWATCH.d.ts b/node_modules/@redis/client/dist/lib/commands/UNWATCH.d.ts new file mode 100644 index 0000000..f7ef9c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/UNWATCH.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/UNWATCH.js b/node_modules/@redis/client/dist/lib/commands/UNWATCH.js new file mode 100644 index 0000000..085fec8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/UNWATCH.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['UNWATCH']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/WAIT.d.ts b/node_modules/@redis/client/dist/lib/commands/WAIT.d.ts new file mode 100644 index 0000000..3b9e386 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/WAIT.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(numberOfReplicas: number, timeout: number): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/WAIT.js b/node_modules/@redis/client/dist/lib/commands/WAIT.js new file mode 100644 index 0000000..ad980ee --- 
/dev/null +++ b/node_modules/@redis/client/dist/lib/commands/WAIT.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(numberOfReplicas, timeout) { + return ['WAIT', numberOfReplicas.toString(), timeout.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/WATCH.d.ts b/node_modules/@redis/client/dist/lib/commands/WATCH.d.ts new file mode 100644 index 0000000..2408532 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/WATCH.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string | Array): RedisCommandArguments; +export declare function transformReply(): string; diff --git a/node_modules/@redis/client/dist/lib/commands/WATCH.js b/node_modules/@redis/client/dist/lib/commands/WATCH.js new file mode 100644 index 0000000..2e4048a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/WATCH.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return (0, generic_transformers_1.pushVerdictArguments)(['WATCH'], key); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/XACK.d.ts b/node_modules/@redis/client/dist/lib/commands/XACK.d.ts new file mode 100644 index 0000000..706f075 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XACK.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function 
transformArguments(key: RedisCommandArgument, group: RedisCommandArgument, id: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/XACK.js b/node_modules/@redis/client/dist/lib/commands/XACK.js new file mode 100644 index 0000000..07bd8db --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XACK.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, group, id) { + return (0, generic_transformers_1.pushVerdictArguments)(['XACK', key, group], id); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/XADD.d.ts b/node_modules/@redis/client/dist/lib/commands/XADD.d.ts new file mode 100644 index 0000000..a5fee51 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XADD.d.ts @@ -0,0 +1,14 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +interface XAddOptions { + NOMKSTREAM?: true; + TRIM?: { + strategy?: 'MAXLEN' | 'MINID'; + strategyModifier?: '=' | '~'; + threshold: number; + limit?: number; + }; +} +export declare function transformArguments(key: RedisCommandArgument, id: RedisCommandArgument, message: Record, options?: XAddOptions): RedisCommandArguments; +export declare function transformReply(): string; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/XADD.js b/node_modules/@redis/client/dist/lib/commands/XADD.js new file mode 100644 index 0000000..3eee1be --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XADD.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = 
exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, id, message, options) { + const args = ['XADD', key]; + if (options?.NOMKSTREAM) { + args.push('NOMKSTREAM'); + } + if (options?.TRIM) { + if (options.TRIM.strategy) { + args.push(options.TRIM.strategy); + } + if (options.TRIM.strategyModifier) { + args.push(options.TRIM.strategyModifier); + } + args.push(options.TRIM.threshold.toString()); + if (options.TRIM.limit) { + args.push('LIMIT', options.TRIM.limit.toString()); + } + } + args.push(id); + for (const [key, value] of Object.entries(message)) { + args.push(key, value); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM.d.ts b/node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM.d.ts new file mode 100644 index 0000000..c3ddaac --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM.d.ts @@ -0,0 +1,14 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { StreamMessagesNullReply } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export interface XAutoClaimOptions { + COUNT?: number; +} +export declare function transformArguments(key: RedisCommandArgument, group: RedisCommandArgument, consumer: RedisCommandArgument, minIdleTime: number, start: string, options?: XAutoClaimOptions): RedisCommandArguments; +type XAutoClaimRawReply = [RedisCommandArgument, Array]; +interface XAutoClaimReply { + nextId: RedisCommandArgument; + messages: StreamMessagesNullReply; +} +export declare function transformReply(reply: XAutoClaimRawReply): XAutoClaimReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM.js b/node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM.js new file mode 100644 index 0000000..6b93781 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM.js @@ -0,0 +1,20 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, group, consumer, minIdleTime, start, options) { + const args = ['XAUTOCLAIM', key, group, consumer, minIdleTime.toString(), start]; + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + nextId: reply[0], + messages: (0, generic_transformers_1.transformStreamMessagesNullReply)(reply[1]) + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM_JUSTID.d.ts b/node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM_JUSTID.d.ts new file mode 100644 index 0000000..373775f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM_JUSTID.d.ts @@ -0,0 +1,10 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { transformArguments as transformXAutoClaimArguments } from './XAUTOCLAIM'; +export { FIRST_KEY_INDEX } from './XAUTOCLAIM'; +export declare function transformArguments(...args: Parameters): RedisCommandArguments; +type XAutoClaimJustIdRawReply = [RedisCommandArgument, Array]; +interface XAutoClaimJustIdReply { + nextId: RedisCommandArgument; + messages: Array; +} +export declare function transformReply(reply: XAutoClaimJustIdRawReply): XAutoClaimJustIdReply; diff --git a/node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM_JUSTID.js b/node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM_JUSTID.js new file mode 100644 index 0000000..8835b47 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XAUTOCLAIM_JUSTID.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = 
exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const XAUTOCLAIM_1 = require("./XAUTOCLAIM"); +var XAUTOCLAIM_2 = require("./XAUTOCLAIM"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return XAUTOCLAIM_2.FIRST_KEY_INDEX; } }); +function transformArguments(...args) { + return [ + ...(0, XAUTOCLAIM_1.transformArguments)(...args), + 'JUSTID' + ]; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + nextId: reply[0], + messages: reply[1] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/XCLAIM.d.ts b/node_modules/@redis/client/dist/lib/commands/XCLAIM.d.ts new file mode 100644 index 0000000..62d5fb7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XCLAIM.d.ts @@ -0,0 +1,10 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export interface XClaimOptions { + IDLE?: number; + TIME?: number | Date; + RETRYCOUNT?: number; + FORCE?: true; +} +export declare function transformArguments(key: RedisCommandArgument, group: RedisCommandArgument, consumer: RedisCommandArgument, minIdleTime: number, id: RedisCommandArgument | Array, options?: XClaimOptions): RedisCommandArguments; +export { transformStreamMessagesNullReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/XCLAIM.js b/node_modules/@redis/client/dist/lib/commands/XCLAIM.js new file mode 100644 index 0000000..b30c854 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XCLAIM.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, group, consumer, 
minIdleTime, id, options) { + const args = (0, generic_transformers_1.pushVerdictArguments)(['XCLAIM', key, group, consumer, minIdleTime.toString()], id); + if (options?.IDLE) { + args.push('IDLE', options.IDLE.toString()); + } + if (options?.TIME) { + args.push('TIME', (typeof options.TIME === 'number' ? options.TIME : options.TIME.getTime()).toString()); + } + if (options?.RETRYCOUNT) { + args.push('RETRYCOUNT', options.RETRYCOUNT.toString()); + } + if (options?.FORCE) { + args.push('FORCE'); + } + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_2 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_2.transformStreamMessagesNullReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/XCLAIM_JUSTID.d.ts b/node_modules/@redis/client/dist/lib/commands/XCLAIM_JUSTID.d.ts new file mode 100644 index 0000000..c0a206d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XCLAIM_JUSTID.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { transformArguments as transformXClaimArguments } from './XCLAIM'; +export { FIRST_KEY_INDEX } from './XCLAIM'; +export declare function transformArguments(...args: Parameters): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/XCLAIM_JUSTID.js b/node_modules/@redis/client/dist/lib/commands/XCLAIM_JUSTID.js new file mode 100644 index 0000000..ef7093b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XCLAIM_JUSTID.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const XCLAIM_1 = require("./XCLAIM"); +var XCLAIM_2 = require("./XCLAIM"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { 
return XCLAIM_2.FIRST_KEY_INDEX; } }); +function transformArguments(...args) { + return [ + ...(0, XCLAIM_1.transformArguments)(...args), + 'JUSTID' + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/XDEL.d.ts b/node_modules/@redis/client/dist/lib/commands/XDEL.d.ts new file mode 100644 index 0000000..5b4e2d3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XDEL.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, id: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/XDEL.js b/node_modules/@redis/client/dist/lib/commands/XDEL.js new file mode 100644 index 0000000..2713cea --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XDEL.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, id) { + return (0, generic_transformers_1.pushVerdictArguments)(['XDEL', key], id); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/XGROUP_CREATE.d.ts b/node_modules/@redis/client/dist/lib/commands/XGROUP_CREATE.d.ts new file mode 100644 index 0000000..eb83a0c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XGROUP_CREATE.d.ts @@ -0,0 +1,8 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +interface XGroupCreateOptions { + MKSTREAM?: true; +} +export declare function transformArguments(key: RedisCommandArgument, group: RedisCommandArgument, id: RedisCommandArgument, 
options?: XGroupCreateOptions): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/XGROUP_CREATE.js b/node_modules/@redis/client/dist/lib/commands/XGROUP_CREATE.js new file mode 100644 index 0000000..4aa4976 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XGROUP_CREATE.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 2; +function transformArguments(key, group, id, options) { + const args = ['XGROUP', 'CREATE', key, group, id]; + if (options?.MKSTREAM) { + args.push('MKSTREAM'); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/XGROUP_CREATECONSUMER.d.ts b/node_modules/@redis/client/dist/lib/commands/XGROUP_CREATECONSUMER.d.ts new file mode 100644 index 0000000..2c88f0b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XGROUP_CREATECONSUMER.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare function transformArguments(key: RedisCommandArgument, group: RedisCommandArgument, consumer: RedisCommandArgument): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/XGROUP_CREATECONSUMER.js b/node_modules/@redis/client/dist/lib/commands/XGROUP_CREATECONSUMER.js new file mode 100644 index 0000000..2e533de --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XGROUP_CREATECONSUMER.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 2; +function 
transformArguments(key, group, consumer) { + return ['XGROUP', 'CREATECONSUMER', key, group, consumer]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/XGROUP_DELCONSUMER.d.ts b/node_modules/@redis/client/dist/lib/commands/XGROUP_DELCONSUMER.d.ts new file mode 100644 index 0000000..0e433e0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XGROUP_DELCONSUMER.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare function transformArguments(key: RedisCommandArgument, group: RedisCommandArgument, consumer: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/XGROUP_DELCONSUMER.js b/node_modules/@redis/client/dist/lib/commands/XGROUP_DELCONSUMER.js new file mode 100644 index 0000000..32b1bd3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XGROUP_DELCONSUMER.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 2; +function transformArguments(key, group, consumer) { + return ['XGROUP', 'DELCONSUMER', key, group, consumer]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/XGROUP_DESTROY.d.ts b/node_modules/@redis/client/dist/lib/commands/XGROUP_DESTROY.d.ts new file mode 100644 index 0000000..222c00b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XGROUP_DESTROY.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const 
FIRST_KEY_INDEX = 2; +export declare function transformArguments(key: RedisCommandArgument, group: RedisCommandArgument): RedisCommandArguments; +export { transformBooleanReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/XGROUP_DESTROY.js b/node_modules/@redis/client/dist/lib/commands/XGROUP_DESTROY.js new file mode 100644 index 0000000..043a5f4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XGROUP_DESTROY.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 2; +function transformArguments(key, group) { + return ['XGROUP', 'DESTROY', key, group]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/XGROUP_SETID.d.ts b/node_modules/@redis/client/dist/lib/commands/XGROUP_SETID.d.ts new file mode 100644 index 0000000..3e0f98d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XGROUP_SETID.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare function transformArguments(key: RedisCommandArgument, group: RedisCommandArgument, id: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument; diff --git a/node_modules/@redis/client/dist/lib/commands/XGROUP_SETID.js b/node_modules/@redis/client/dist/lib/commands/XGROUP_SETID.js new file mode 100644 index 0000000..0accac4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XGROUP_SETID.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, 
"__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 2; +function transformArguments(key, group, id) { + return ['XGROUP', 'SETID', key, group, id]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/XINFO_CONSUMERS.d.ts b/node_modules/@redis/client/dist/lib/commands/XINFO_CONSUMERS.d.ts new file mode 100644 index 0000000..23c4ec2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XINFO_CONSUMERS.d.ts @@ -0,0 +1,12 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, group: RedisCommandArgument): RedisCommandArguments; +type XInfoConsumersReply = Array<{ + name: RedisCommandArgument; + pending: number; + idle: number; + inactive: number; +}>; +export declare function transformReply(rawReply: Array): XInfoConsumersReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/XINFO_CONSUMERS.js b/node_modules/@redis/client/dist/lib/commands/XINFO_CONSUMERS.js new file mode 100644 index 0000000..ad0ebad --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XINFO_CONSUMERS.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 2; +exports.IS_READ_ONLY = true; +function transformArguments(key, group) { + return ['XINFO', 'CONSUMERS', key, group]; +} +exports.transformArguments = transformArguments; +function transformReply(rawReply) { + return rawReply.map(consumer => ({ + name: consumer[1], + pending: consumer[3], + idle: consumer[5], + inactive: consumer[7] + })); +} +exports.transformReply = transformReply; diff --git 
a/node_modules/@redis/client/dist/lib/commands/XINFO_GROUPS.d.ts b/node_modules/@redis/client/dist/lib/commands/XINFO_GROUPS.d.ts new file mode 100644 index 0000000..35f5216 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XINFO_GROUPS.d.ts @@ -0,0 +1,12 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +type XInfoGroupsReply = Array<{ + name: RedisCommandArgument; + consumers: number; + pending: number; + lastDeliveredId: RedisCommandArgument; +}>; +export declare function transformReply(rawReply: Array): XInfoGroupsReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/XINFO_GROUPS.js b/node_modules/@redis/client/dist/lib/commands/XINFO_GROUPS.js new file mode 100644 index 0000000..45ede76 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XINFO_GROUPS.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 2; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['XINFO', 'GROUPS', key]; +} +exports.transformArguments = transformArguments; +function transformReply(rawReply) { + return rawReply.map(group => ({ + name: group[1], + consumers: group[3], + pending: group[5], + lastDeliveredId: group[7] + })); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/XINFO_STREAM.d.ts b/node_modules/@redis/client/dist/lib/commands/XINFO_STREAM.d.ts new file mode 100644 index 0000000..18fc057 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XINFO_STREAM.d.ts @@ -0,0 +1,16 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { 
StreamMessageReply } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 2; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +interface XInfoStreamReply { + length: number; + radixTreeKeys: number; + radixTreeNodes: number; + groups: number; + lastGeneratedId: RedisCommandArgument; + firstEntry: StreamMessageReply | null; + lastEntry: StreamMessageReply | null; +} +export declare function transformReply(rawReply: Array): XInfoStreamReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/XINFO_STREAM.js b/node_modules/@redis/client/dist/lib/commands/XINFO_STREAM.js new file mode 100644 index 0000000..f636807 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XINFO_STREAM.js @@ -0,0 +1,46 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 2; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['XINFO', 'STREAM', key]; +} +exports.transformArguments = transformArguments; +function transformReply(rawReply) { + const parsedReply = {}; + for (let i = 0; i < rawReply.length; i += 2) { + switch (rawReply[i]) { + case 'length': + parsedReply.length = rawReply[i + 1]; + break; + case 'radix-tree-keys': + parsedReply.radixTreeKeys = rawReply[i + 1]; + break; + case 'radix-tree-nodes': + parsedReply.radixTreeNodes = rawReply[i + 1]; + break; + case 'groups': + parsedReply.groups = rawReply[i + 1]; + break; + case 'last-generated-id': + parsedReply.lastGeneratedId = rawReply[i + 1]; + break; + case 'first-entry': + parsedReply.firstEntry = rawReply[i + 1] ? 
{ + id: rawReply[i + 1][0], + message: (0, generic_transformers_1.transformTuplesReply)(rawReply[i + 1][1]) + } : null; + break; + case 'last-entry': + parsedReply.lastEntry = rawReply[i + 1] ? { + id: rawReply[i + 1][0], + message: (0, generic_transformers_1.transformTuplesReply)(rawReply[i + 1][1]) + } : null; + break; + } + } + return parsedReply; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/XLEN.d.ts b/node_modules/@redis/client/dist/lib/commands/XLEN.d.ts new file mode 100644 index 0000000..367e9b8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XLEN.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/XLEN.js b/node_modules/@redis/client/dist/lib/commands/XLEN.js new file mode 100644 index 0000000..cbac3ec --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XLEN.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['XLEN', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/XPENDING.d.ts b/node_modules/@redis/client/dist/lib/commands/XPENDING.d.ts new file mode 100644 index 0000000..bd166d5 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XPENDING.d.ts @@ -0,0 +1,24 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function 
transformArguments(key: RedisCommandArgument, group: RedisCommandArgument): RedisCommandArguments; +type XPendingRawReply = [ + pending: number, + firstId: RedisCommandArgument | null, + lastId: RedisCommandArgument | null, + consumers: Array<[ + name: RedisCommandArgument, + deliveriesCounter: RedisCommandArgument + ]> | null +]; +interface XPendingReply { + pending: number; + firstId: RedisCommandArgument | null; + lastId: RedisCommandArgument | null; + consumers: Array<{ + name: RedisCommandArgument; + deliveriesCounter: number; + }> | null; +} +export declare function transformReply(reply: XPendingRawReply): XPendingReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/XPENDING.js b/node_modules/@redis/client/dist/lib/commands/XPENDING.js new file mode 100644 index 0000000..019c493 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XPENDING.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, group) { + return ['XPENDING', key, group]; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + pending: reply[0], + firstId: reply[1], + lastId: reply[2], + consumers: reply[3] === null ? 
null : reply[3].map(([name, deliveriesCounter]) => ({ + name, + deliveriesCounter: Number(deliveriesCounter) + })) + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/XPENDING_RANGE.d.ts b/node_modules/@redis/client/dist/lib/commands/XPENDING_RANGE.d.ts new file mode 100644 index 0000000..ca8e022 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XPENDING_RANGE.d.ts @@ -0,0 +1,22 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +interface XPendingRangeOptions { + IDLE?: number; + consumer?: RedisCommandArgument; +} +export declare function transformArguments(key: RedisCommandArgument, group: RedisCommandArgument, start: string, end: string, count: number, options?: XPendingRangeOptions): RedisCommandArguments; +type XPendingRangeRawReply = Array<[ + id: RedisCommandArgument, + consumer: RedisCommandArgument, + millisecondsSinceLastDelivery: number, + deliveriesCounter: number +]>; +type XPendingRangeReply = Array<{ + id: RedisCommandArgument; + owner: RedisCommandArgument; + millisecondsSinceLastDelivery: number; + deliveriesCounter: number; +}>; +export declare function transformReply(reply: XPendingRangeRawReply): XPendingRangeReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/XPENDING_RANGE.js b/node_modules/@redis/client/dist/lib/commands/XPENDING_RANGE.js new file mode 100644 index 0000000..2f051fb --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XPENDING_RANGE.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, group, start, end, count, options) { + const args = ['XPENDING', key, group]; + if 
(options?.IDLE) { + args.push('IDLE', options.IDLE.toString()); + } + args.push(start, end, count.toString()); + if (options?.consumer) { + args.push(options.consumer); + } + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply.map(([id, owner, millisecondsSinceLastDelivery, deliveriesCounter]) => ({ + id, + owner, + millisecondsSinceLastDelivery, + deliveriesCounter + })); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/XRANGE.d.ts b/node_modules/@redis/client/dist/lib/commands/XRANGE.d.ts new file mode 100644 index 0000000..a01cf54 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XRANGE.d.ts @@ -0,0 +1,8 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +interface XRangeOptions { + COUNT?: number; +} +export declare function transformArguments(key: RedisCommandArgument, start: RedisCommandArgument, end: RedisCommandArgument, options?: XRangeOptions): RedisCommandArguments; +export { transformStreamMessagesReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/XRANGE.js b/node_modules/@redis/client/dist/lib/commands/XRANGE.js new file mode 100644 index 0000000..6817372 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XRANGE.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, start, end, options) { + const args = ['XRANGE', key, start, end]; + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + return args; +} +exports.transformArguments = transformArguments; +var 
generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformStreamMessagesReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/XREAD.d.ts b/node_modules/@redis/client/dist/lib/commands/XREAD.d.ts new file mode 100644 index 0000000..a054dfe --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XREAD.d.ts @@ -0,0 +1,13 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX: (streams: Array | XReadStream) => RedisCommandArgument; +export declare const IS_READ_ONLY = true; +interface XReadStream { + key: RedisCommandArgument; + id: RedisCommandArgument; +} +interface XReadOptions { + COUNT?: number; + BLOCK?: number; +} +export declare function transformArguments(streams: Array | XReadStream, options?: XReadOptions): RedisCommandArguments; +export { transformStreamsMessagesReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/XREAD.js b/node_modules/@redis/client/dist/lib/commands/XREAD.js new file mode 100644 index 0000000..e23b13d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XREAD.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const FIRST_KEY_INDEX = (streams) => { + return Array.isArray(streams) ? streams[0].key : streams.key; +}; +exports.FIRST_KEY_INDEX = FIRST_KEY_INDEX; +exports.IS_READ_ONLY = true; +function transformArguments(streams, options) { + const args = ['XREAD']; + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + if (typeof options?.BLOCK === 'number') { + args.push('BLOCK', options.BLOCK.toString()); + } + args.push('STREAMS'); + const streamsArray = Array.isArray(streams) ? 
streams : [streams], argsLength = args.length; + for (let i = 0; i < streamsArray.length; i++) { + const stream = streamsArray[i]; + args[argsLength + i] = stream.key; + args[argsLength + streamsArray.length + i] = stream.id; + } + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformStreamsMessagesReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/XREADGROUP.d.ts b/node_modules/@redis/client/dist/lib/commands/XREADGROUP.d.ts new file mode 100644 index 0000000..11f648d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XREADGROUP.d.ts @@ -0,0 +1,14 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export interface XReadGroupStream { + key: RedisCommandArgument; + id: RedisCommandArgument; +} +export interface XReadGroupOptions { + COUNT?: number; + BLOCK?: number; + NOACK?: true; +} +export declare const FIRST_KEY_INDEX: (_group: RedisCommandArgument, _consumer: RedisCommandArgument, streams: Array | XReadGroupStream) => RedisCommandArgument; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(group: RedisCommandArgument, consumer: RedisCommandArgument, streams: Array | XReadGroupStream, options?: XReadGroupOptions): RedisCommandArguments; +export { transformStreamsMessagesReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/XREADGROUP.js b/node_modules/@redis/client/dist/lib/commands/XREADGROUP.js new file mode 100644 index 0000000..73da656 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XREADGROUP.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = 
void 0; +const FIRST_KEY_INDEX = (_group, _consumer, streams) => { + return Array.isArray(streams) ? streams[0].key : streams.key; +}; +exports.FIRST_KEY_INDEX = FIRST_KEY_INDEX; +exports.IS_READ_ONLY = true; +function transformArguments(group, consumer, streams, options) { + const args = ['XREADGROUP', 'GROUP', group, consumer]; + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + if (typeof options?.BLOCK === 'number') { + args.push('BLOCK', options.BLOCK.toString()); + } + if (options?.NOACK) { + args.push('NOACK'); + } + args.push('STREAMS'); + const streamsArray = Array.isArray(streams) ? streams : [streams], argsLength = args.length; + for (let i = 0; i < streamsArray.length; i++) { + const stream = streamsArray[i]; + args[argsLength + i] = stream.key; + args[argsLength + streamsArray.length + i] = stream.id; + } + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformStreamsMessagesReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/XREVRANGE.d.ts b/node_modules/@redis/client/dist/lib/commands/XREVRANGE.d.ts new file mode 100644 index 0000000..b5f4c05 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XREVRANGE.d.ts @@ -0,0 +1,8 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +interface XRangeRevOptions { + COUNT?: number; +} +export declare function transformArguments(key: RedisCommandArgument, start: RedisCommandArgument, end: RedisCommandArgument, options?: XRangeRevOptions): RedisCommandArguments; +export { transformStreamMessagesReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/XREVRANGE.js 
b/node_modules/@redis/client/dist/lib/commands/XREVRANGE.js new file mode 100644 index 0000000..242ac79 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XREVRANGE.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, start, end, options) { + const args = ['XREVRANGE', key, start, end]; + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformStreamMessagesReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/XSETID.d.ts b/node_modules/@redis/client/dist/lib/commands/XSETID.d.ts new file mode 100644 index 0000000..ac55f65 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XSETID.d.ts @@ -0,0 +1,9 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +interface XSetIdOptions { + ENTRIESADDED?: number; + MAXDELETEDID?: RedisCommandArgument; +} +export declare function transformArguments(key: RedisCommandArgument, lastId: RedisCommandArgument, options?: XSetIdOptions): RedisCommandArguments; +export declare function transformReply(): 'OK'; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/XSETID.js b/node_modules/@redis/client/dist/lib/commands/XSETID.js new file mode 100644 index 0000000..2212bbc --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XSETID.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 
0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, lastId, options) { + const args = ['XSETID', key, lastId]; + if (options?.ENTRIESADDED) { + args.push('ENTRIESADDED', options.ENTRIESADDED.toString()); + } + if (options?.MAXDELETEDID) { + args.push('MAXDELETEDID', options.MAXDELETEDID); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/XTRIM.d.ts b/node_modules/@redis/client/dist/lib/commands/XTRIM.d.ts new file mode 100644 index 0000000..f90c4c7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XTRIM.d.ts @@ -0,0 +1,9 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +interface XTrimOptions { + strategyModifier?: '=' | '~'; + LIMIT?: number; +} +export declare function transformArguments(key: RedisCommandArgument, strategy: 'MAXLEN' | 'MINID', threshold: number, options?: XTrimOptions): RedisCommandArguments; +export declare function transformReply(): number; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/XTRIM.js b/node_modules/@redis/client/dist/lib/commands/XTRIM.js new file mode 100644 index 0000000..c2b7530 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/XTRIM.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, strategy, threshold, options) { + const args = ['XTRIM', key, strategy]; + if (options?.strategyModifier) { + args.push(options.strategyModifier); + } + args.push(threshold.toString()); + if (options?.LIMIT) { + args.push('LIMIT', options.LIMIT.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZADD.d.ts b/node_modules/@redis/client/dist/lib/commands/ZADD.d.ts new file mode 100644 
index 0000000..a269c33 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZADD.d.ts @@ -0,0 +1,24 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { ZMember } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +interface NX { + NX?: true; +} +interface XX { + XX?: true; +} +interface LT { + LT?: true; +} +interface GT { + GT?: true; +} +interface CH { + CH?: true; +} +interface INCR { + INCR?: true; +} +type ZAddOptions = (NX | (XX & LT & GT)) & CH & INCR; +export declare function transformArguments(key: RedisCommandArgument, members: ZMember | Array, options?: ZAddOptions): RedisCommandArguments; +export { transformNumberInfinityReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZADD.js b/node_modules/@redis/client/dist/lib/commands/ZADD.js new file mode 100644 index 0000000..643d425 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZADD.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, members, options) { + const args = ['ZADD', key]; + if (options?.NX) { + args.push('NX'); + } + else { + if (options?.XX) { + args.push('XX'); + } + if (options?.GT) { + args.push('GT'); + } + else if (options?.LT) { + args.push('LT'); + } + } + if (options?.CH) { + args.push('CH'); + } + if (options?.INCR) { + args.push('INCR'); + } + for (const { score, value } of (Array.isArray(members) ? 
members : [members])) { + args.push((0, generic_transformers_1.transformNumberInfinityArgument)(score), value); + } + return args; +} +exports.transformArguments = transformArguments; +var generic_transformers_2 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_2.transformNumberInfinityReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZCARD.d.ts b/node_modules/@redis/client/dist/lib/commands/ZCARD.d.ts new file mode 100644 index 0000000..367e9b8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZCARD.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/ZCARD.js b/node_modules/@redis/client/dist/lib/commands/ZCARD.js new file mode 100644 index 0000000..9ac6f61 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZCARD.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['ZCARD', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZCOUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/ZCOUNT.d.ts new file mode 100644 index 0000000..c80e931 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZCOUNT.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare 
function transformArguments(key: RedisCommandArgument, min: RedisCommandArgument | number, max: RedisCommandArgument | number): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/ZCOUNT.js b/node_modules/@redis/client/dist/lib/commands/ZCOUNT.js new file mode 100644 index 0000000..04ab60e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZCOUNT.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, min, max) { + return [ + 'ZCOUNT', + key, + (0, generic_transformers_1.transformStringNumberInfinityArgument)(min), + (0, generic_transformers_1.transformStringNumberInfinityArgument)(max) + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZDIFF.d.ts b/node_modules/@redis/client/dist/lib/commands/ZDIFF.d.ts new file mode 100644 index 0000000..b6b465b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZDIFF.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(keys: Array | RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/ZDIFF.js b/node_modules/@redis/client/dist/lib/commands/ZDIFF.js new file mode 100644 index 0000000..94386d0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZDIFF.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = 
exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 2; +exports.IS_READ_ONLY = true; +function transformArguments(keys) { + return (0, generic_transformers_1.pushVerdictArgument)(['ZDIFF'], keys); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZDIFFSTORE.d.ts b/node_modules/@redis/client/dist/lib/commands/ZDIFFSTORE.d.ts new file mode 100644 index 0000000..556dbef --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZDIFFSTORE.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(destination: RedisCommandArgument, keys: Array | RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/ZDIFFSTORE.js b/node_modules/@redis/client/dist/lib/commands/ZDIFFSTORE.js new file mode 100644 index 0000000..da3ef86 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZDIFFSTORE.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(destination, keys) { + return (0, generic_transformers_1.pushVerdictArgument)(['ZDIFFSTORE', destination], keys); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZDIFF_WITHSCORES.d.ts b/node_modules/@redis/client/dist/lib/commands/ZDIFF_WITHSCORES.d.ts new file mode 100644 index 0000000..ef74c30 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZDIFF_WITHSCORES.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '.'; +import { transformArguments as 
transformZDiffArguments } from './ZDIFF'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './ZDIFF'; +export declare function transformArguments(...args: Parameters): RedisCommandArguments; +export { transformSortedSetWithScoresReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZDIFF_WITHSCORES.js b/node_modules/@redis/client/dist/lib/commands/ZDIFF_WITHSCORES.js new file mode 100644 index 0000000..2d1c953 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZDIFF_WITHSCORES.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const ZDIFF_1 = require("./ZDIFF"); +var ZDIFF_2 = require("./ZDIFF"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return ZDIFF_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return ZDIFF_2.IS_READ_ONLY; } }); +function transformArguments(...args) { + return [ + ...(0, ZDIFF_1.transformArguments)(...args), + 'WITHSCORES' + ]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformSortedSetWithScoresReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZINCRBY.d.ts b/node_modules/@redis/client/dist/lib/commands/ZINCRBY.d.ts new file mode 100644 index 0000000..92378f7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZINCRBY.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, increment: number, member: RedisCommandArgument): 
RedisCommandArguments; +export { transformNumberInfinityReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZINCRBY.js b/node_modules/@redis/client/dist/lib/commands/ZINCRBY.js new file mode 100644 index 0000000..3ca7e2f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZINCRBY.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, increment, member) { + return [ + 'ZINCRBY', + key, + (0, generic_transformers_1.transformNumberInfinityArgument)(increment), + member + ]; +} +exports.transformArguments = transformArguments; +var generic_transformers_2 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_2.transformNumberInfinityReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZINTER.d.ts b/node_modules/@redis/client/dist/lib/commands/ZINTER.d.ts new file mode 100644 index 0000000..9938be3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZINTER.d.ts @@ -0,0 +1,10 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare const IS_READ_ONLY = true; +interface ZInterOptions { + WEIGHTS?: Array; + AGGREGATE?: 'SUM' | 'MIN' | 'MAX'; +} +export declare function transformArguments(keys: Array | RedisCommandArgument, options?: ZInterOptions): RedisCommandArguments; +export declare function transformReply(): Array; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/ZINTER.js b/node_modules/@redis/client/dist/lib/commands/ZINTER.js new file mode 100644 index 0000000..15fd2d4 --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/ZINTER.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 2; +exports.IS_READ_ONLY = true; +function transformArguments(keys, options) { + const args = (0, generic_transformers_1.pushVerdictArgument)(['ZINTER'], keys); + if (options?.WEIGHTS) { + args.push('WEIGHTS', ...options.WEIGHTS.map(weight => weight.toString())); + } + if (options?.AGGREGATE) { + args.push('AGGREGATE', options.AGGREGATE); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZINTERCARD.d.ts b/node_modules/@redis/client/dist/lib/commands/ZINTERCARD.d.ts new file mode 100644 index 0000000..1606dd4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZINTERCARD.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(keys: Array | RedisCommandArgument, limit?: number): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/ZINTERCARD.js b/node_modules/@redis/client/dist/lib/commands/ZINTERCARD.js new file mode 100644 index 0000000..9f8cd3c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZINTERCARD.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 2; +exports.IS_READ_ONLY = true; +function transformArguments(keys, limit) { + const args = (0, 
generic_transformers_1.pushVerdictArgument)(['ZINTERCARD'], keys); + if (limit) { + args.push('LIMIT', limit.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZINTERSTORE.d.ts b/node_modules/@redis/client/dist/lib/commands/ZINTERSTORE.d.ts new file mode 100644 index 0000000..3bda1a0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZINTERSTORE.d.ts @@ -0,0 +1,9 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +interface ZInterStoreOptions { + WEIGHTS?: Array; + AGGREGATE?: 'SUM' | 'MIN' | 'MAX'; +} +export declare function transformArguments(destination: RedisCommandArgument, keys: Array | RedisCommandArgument, options?: ZInterStoreOptions): RedisCommandArguments; +export declare function transformReply(): number; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/ZINTERSTORE.js b/node_modules/@redis/client/dist/lib/commands/ZINTERSTORE.js new file mode 100644 index 0000000..4005555 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZINTERSTORE.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(destination, keys, options) { + const args = (0, generic_transformers_1.pushVerdictArgument)(['ZINTERSTORE', destination], keys); + if (options?.WEIGHTS) { + args.push('WEIGHTS', ...options.WEIGHTS.map(weight => weight.toString())); + } + if (options?.AGGREGATE) { + args.push('AGGREGATE', options.AGGREGATE); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZINTER_WITHSCORES.d.ts b/node_modules/@redis/client/dist/lib/commands/ZINTER_WITHSCORES.d.ts new file mode 
100644 index 0000000..0899841 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZINTER_WITHSCORES.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '.'; +import { transformArguments as transformZInterArguments } from './ZINTER'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './ZINTER'; +export declare function transformArguments(...args: Parameters): RedisCommandArguments; +export { transformSortedSetWithScoresReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZINTER_WITHSCORES.js b/node_modules/@redis/client/dist/lib/commands/ZINTER_WITHSCORES.js new file mode 100644 index 0000000..c908e64 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZINTER_WITHSCORES.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const ZINTER_1 = require("./ZINTER"); +var ZINTER_2 = require("./ZINTER"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return ZINTER_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return ZINTER_2.IS_READ_ONLY; } }); +function transformArguments(...args) { + return [ + ...(0, ZINTER_1.transformArguments)(...args), + 'WITHSCORES' + ]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformSortedSetWithScoresReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZLEXCOUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/ZLEXCOUNT.d.ts new file mode 100644 index 0000000..b17344b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZLEXCOUNT.d.ts @@ -0,0 +1,5 @@ +import { 
RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, min: RedisCommandArgument, max: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/ZLEXCOUNT.js b/node_modules/@redis/client/dist/lib/commands/ZLEXCOUNT.js new file mode 100644 index 0000000..31a02b9 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZLEXCOUNT.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, min, max) { + return [ + 'ZLEXCOUNT', + key, + min, + max + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZMPOP.d.ts b/node_modules/@redis/client/dist/lib/commands/ZMPOP.d.ts new file mode 100644 index 0000000..07a23c4 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZMPOP.d.ts @@ -0,0 +1,14 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { SortedSetSide, ZMember, ZMPopOptions } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 2; +export declare function transformArguments(keys: RedisCommandArgument | Array, side: SortedSetSide, options?: ZMPopOptions): RedisCommandArguments; +type ZMPopRawReply = null | [ + key: string, + elements: Array<[RedisCommandArgument, RedisCommandArgument]> +]; +type ZMPopReply = null | { + key: string; + elements: Array; +}; +export declare function transformReply(reply: ZMPopRawReply): ZMPopReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/ZMPOP.js b/node_modules/@redis/client/dist/lib/commands/ZMPOP.js new file mode 
100644 index 0000000..e2a5787 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZMPOP.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 2; +function transformArguments(keys, side, options) { + return (0, generic_transformers_1.transformZMPopArguments)(['ZMPOP'], keys, side, options); +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply === null ? null : { + key: reply[0], + elements: reply[1].map(generic_transformers_1.transformSortedSetMemberReply) + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/ZMSCORE.d.ts b/node_modules/@redis/client/dist/lib/commands/ZMSCORE.d.ts new file mode 100644 index 0000000..37b9d1c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZMSCORE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, member: RedisCommandArgument | Array): RedisCommandArguments; +export { transformNumberInfinityNullArrayReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZMSCORE.js b/node_modules/@redis/client/dist/lib/commands/ZMSCORE.js new file mode 100644 index 0000000..8f06a03 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZMSCORE.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX 
= 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, member) { + return (0, generic_transformers_1.pushVerdictArguments)(['ZMSCORE', key], member); +} +exports.transformArguments = transformArguments; +var generic_transformers_2 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_2.transformNumberInfinityNullArrayReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZPOPMAX.d.ts b/node_modules/@redis/client/dist/lib/commands/ZPOPMAX.d.ts new file mode 100644 index 0000000..af0daa2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZPOPMAX.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export { transformSortedSetMemberNullReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZPOPMAX.js b/node_modules/@redis/client/dist/lib/commands/ZPOPMAX.js new file mode 100644 index 0000000..d5cfb11 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZPOPMAX.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return [ + 'ZPOPMAX', + key + ]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformSortedSetMemberNullReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZPOPMAX_COUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/ZPOPMAX_COUNT.d.ts new file mode 100644 index 
0000000..b46939e --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZPOPMAX_COUNT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export { FIRST_KEY_INDEX } from './ZPOPMAX'; +export declare function transformArguments(key: RedisCommandArgument, count: number): RedisCommandArguments; +export { transformSortedSetWithScoresReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZPOPMAX_COUNT.js b/node_modules/@redis/client/dist/lib/commands/ZPOPMAX_COUNT.js new file mode 100644 index 0000000..2bbfef7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZPOPMAX_COUNT.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const ZPOPMAX_1 = require("./ZPOPMAX"); +var ZPOPMAX_2 = require("./ZPOPMAX"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return ZPOPMAX_2.FIRST_KEY_INDEX; } }); +function transformArguments(key, count) { + return [ + ...(0, ZPOPMAX_1.transformArguments)(key), + count.toString() + ]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformSortedSetWithScoresReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZPOPMIN.d.ts b/node_modules/@redis/client/dist/lib/commands/ZPOPMIN.d.ts new file mode 100644 index 0000000..af0daa2 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZPOPMIN.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export { 
transformSortedSetMemberNullReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZPOPMIN.js b/node_modules/@redis/client/dist/lib/commands/ZPOPMIN.js new file mode 100644 index 0000000..c460010 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZPOPMIN.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return [ + 'ZPOPMIN', + key + ]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformSortedSetMemberNullReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZPOPMIN_COUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/ZPOPMIN_COUNT.d.ts new file mode 100644 index 0000000..9856992 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZPOPMIN_COUNT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export { FIRST_KEY_INDEX } from './ZPOPMIN'; +export declare function transformArguments(key: RedisCommandArgument, count: number): RedisCommandArguments; +export { transformSortedSetWithScoresReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZPOPMIN_COUNT.js b/node_modules/@redis/client/dist/lib/commands/ZPOPMIN_COUNT.js new file mode 100644 index 0000000..e1328fc --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZPOPMIN_COUNT.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const ZPOPMIN_1 = require("./ZPOPMIN"); +var 
ZPOPMIN_2 = require("./ZPOPMIN"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return ZPOPMIN_2.FIRST_KEY_INDEX; } }); +function transformArguments(key, count) { + return [ + ...(0, ZPOPMIN_1.transformArguments)(key), + count.toString() + ]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformSortedSetWithScoresReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER.d.ts b/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER.d.ts new file mode 100644 index 0000000..ac0d54c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): RedisCommandArgument | null; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER.js b/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER.js new file mode 100644 index 0000000..2792f2a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['ZRANDMEMBER', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT.d.ts b/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT.d.ts new file mode 100644 index 0000000..3a985d0 --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './ZRANDMEMBER'; +export declare function transformArguments(key: RedisCommandArgument, count: number): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT.js b/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT.js new file mode 100644 index 0000000..7a0d4cd --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const ZRANDMEMBER_1 = require("./ZRANDMEMBER"); +var ZRANDMEMBER_2 = require("./ZRANDMEMBER"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return ZRANDMEMBER_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return ZRANDMEMBER_2.IS_READ_ONLY; } }); +function transformArguments(key, count) { + return [ + ...(0, ZRANDMEMBER_1.transformArguments)(key), + count.toString() + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.d.ts b/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.d.ts new file mode 100644 index 0000000..aeb80a8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '.'; +import { transformArguments as transformZRandMemberCountArguments } from './ZRANDMEMBER_COUNT'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './ZRANDMEMBER_COUNT'; +export declare function transformArguments(...args: Parameters): 
RedisCommandArguments; +export { transformSortedSetWithScoresReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.js b/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.js new file mode 100644 index 0000000..3280888 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANDMEMBER_COUNT_WITHSCORES.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const ZRANDMEMBER_COUNT_1 = require("./ZRANDMEMBER_COUNT"); +var ZRANDMEMBER_COUNT_2 = require("./ZRANDMEMBER_COUNT"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return ZRANDMEMBER_COUNT_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return ZRANDMEMBER_COUNT_2.IS_READ_ONLY; } }); +function transformArguments(...args) { + return [ + ...(0, ZRANDMEMBER_COUNT_1.transformArguments)(...args), + 'WITHSCORES' + ]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformSortedSetWithScoresReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANGE.d.ts b/node_modules/@redis/client/dist/lib/commands/ZRANGE.d.ts new file mode 100644 index 0000000..85c18b8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANGE.d.ts @@ -0,0 +1,14 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +interface ZRangeOptions { + BY?: 'SCORE' | 'LEX'; + REV?: true; + LIMIT?: { + offset: number; + count: number; + }; +} +export declare 
function transformArguments(key: RedisCommandArgument, min: RedisCommandArgument | number, max: RedisCommandArgument | number, options?: ZRangeOptions): RedisCommandArguments; +export declare function transformReply(): Array; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANGE.js b/node_modules/@redis/client/dist/lib/commands/ZRANGE.js new file mode 100644 index 0000000..3e43721 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANGE.js @@ -0,0 +1,30 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, min, max, options) { + const args = [ + 'ZRANGE', + key, + (0, generic_transformers_1.transformStringNumberInfinityArgument)(min), + (0, generic_transformers_1.transformStringNumberInfinityArgument)(max) + ]; + switch (options?.BY) { + case 'SCORE': + args.push('BYSCORE'); + break; + case 'LEX': + args.push('BYLEX'); + break; + } + if (options?.REV) { + args.push('REV'); + } + if (options?.LIMIT) { + args.push('LIMIT', options.LIMIT.offset.toString(), options.LIMIT.count.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANGEBYLEX.d.ts b/node_modules/@redis/client/dist/lib/commands/ZRANGEBYLEX.d.ts new file mode 100644 index 0000000..45eddb8 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANGEBYLEX.d.ts @@ -0,0 +1,11 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export interface ZRangeByLexOptions { + LIMIT?: { + offset: number; + count: number; + }; +} +export declare function transformArguments(key: RedisCommandArgument, min: 
RedisCommandArgument, max: RedisCommandArgument, options?: ZRangeByLexOptions): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANGEBYLEX.js b/node_modules/@redis/client/dist/lib/commands/ZRANGEBYLEX.js new file mode 100644 index 0000000..771a3ba --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANGEBYLEX.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, min, max, options) { + const args = [ + 'ZRANGEBYLEX', + key, + (0, generic_transformers_1.transformStringNumberInfinityArgument)(min), + (0, generic_transformers_1.transformStringNumberInfinityArgument)(max) + ]; + if (options?.LIMIT) { + args.push('LIMIT', options.LIMIT.offset.toString(), options.LIMIT.count.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE.d.ts b/node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE.d.ts new file mode 100644 index 0000000..511c92b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE.d.ts @@ -0,0 +1,11 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export interface ZRangeByScoreOptions { + LIMIT?: { + offset: number; + count: number; + }; +} +export declare function transformArguments(key: RedisCommandArgument, min: string | number, max: string | number, options?: ZRangeByScoreOptions): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE.js 
b/node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE.js new file mode 100644 index 0000000..011d222 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, min, max, options) { + const args = [ + 'ZRANGEBYSCORE', + key, + (0, generic_transformers_1.transformStringNumberInfinityArgument)(min), + (0, generic_transformers_1.transformStringNumberInfinityArgument)(max) + ]; + if (options?.LIMIT) { + args.push('LIMIT', options.LIMIT.offset.toString(), options.LIMIT.count.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE_WITHSCORES.d.ts b/node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE_WITHSCORES.d.ts new file mode 100644 index 0000000..522af76 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE_WITHSCORES.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { ZRangeByScoreOptions } from './ZRANGEBYSCORE'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './ZRANGEBYSCORE'; +export declare function transformArguments(key: RedisCommandArgument, min: string | number, max: string | number, options?: ZRangeByScoreOptions): RedisCommandArguments; +export { transformSortedSetWithScoresReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE_WITHSCORES.js b/node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE_WITHSCORES.js new file mode 100644 index 0000000..5f57e2e --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/ZRANGEBYSCORE_WITHSCORES.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const ZRANGEBYSCORE_1 = require("./ZRANGEBYSCORE"); +var ZRANGEBYSCORE_2 = require("./ZRANGEBYSCORE"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return ZRANGEBYSCORE_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return ZRANGEBYSCORE_2.IS_READ_ONLY; } }); +function transformArguments(key, min, max, options) { + return [ + ...(0, ZRANGEBYSCORE_1.transformArguments)(key, min, max, options), + 'WITHSCORES' + ]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformSortedSetWithScoresReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANGESTORE.d.ts b/node_modules/@redis/client/dist/lib/commands/ZRANGESTORE.d.ts new file mode 100644 index 0000000..0facbc7 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANGESTORE.d.ts @@ -0,0 +1,14 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +interface ZRangeStoreOptions { + BY?: 'SCORE' | 'LEX'; + REV?: true; + LIMIT?: { + offset: number; + count: number; + }; + WITHSCORES?: true; +} +export declare function transformArguments(dst: RedisCommandArgument, src: RedisCommandArgument, min: RedisCommandArgument | number, max: RedisCommandArgument | number, options?: ZRangeStoreOptions): RedisCommandArguments; +export declare function transformReply(reply: number): number; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANGESTORE.js 
b/node_modules/@redis/client/dist/lib/commands/ZRANGESTORE.js new file mode 100644 index 0000000..69b8046 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANGESTORE.js @@ -0,0 +1,40 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(dst, src, min, max, options) { + const args = [ + 'ZRANGESTORE', + dst, + src, + (0, generic_transformers_1.transformStringNumberInfinityArgument)(min), + (0, generic_transformers_1.transformStringNumberInfinityArgument)(max) + ]; + switch (options?.BY) { + case 'SCORE': + args.push('BYSCORE'); + break; + case 'LEX': + args.push('BYLEX'); + break; + } + if (options?.REV) { + args.push('REV'); + } + if (options?.LIMIT) { + args.push('LIMIT', options.LIMIT.offset.toString(), options.LIMIT.count.toString()); + } + if (options?.WITHSCORES) { + args.push('WITHSCORES'); + } + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + if (typeof reply !== 'number') { + throw new TypeError(`Upgrade to Redis 6.2.5 and up (https://github.com/redis/redis/pull/9089)`); + } + return reply; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANGE_WITHSCORES.d.ts b/node_modules/@redis/client/dist/lib/commands/ZRANGE_WITHSCORES.d.ts new file mode 100644 index 0000000..fc78d2b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANGE_WITHSCORES.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '.'; +import { transformArguments as transformZRangeArguments } from './ZRANGE'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './ZRANGE'; +export declare function transformArguments(...args: Parameters): RedisCommandArguments; +export { transformSortedSetWithScoresReply as 
transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANGE_WITHSCORES.js b/node_modules/@redis/client/dist/lib/commands/ZRANGE_WITHSCORES.js new file mode 100644 index 0000000..23084c9 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANGE_WITHSCORES.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const ZRANGE_1 = require("./ZRANGE"); +var ZRANGE_2 = require("./ZRANGE"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return ZRANGE_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return ZRANGE_2.IS_READ_ONLY; } }); +function transformArguments(...args) { + return [ + ...(0, ZRANGE_1.transformArguments)(...args), + 'WITHSCORES' + ]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformSortedSetWithScoresReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANK.d.ts b/node_modules/@redis/client/dist/lib/commands/ZRANK.d.ts new file mode 100644 index 0000000..7527828 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANK.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, member: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number | null; diff --git a/node_modules/@redis/client/dist/lib/commands/ZRANK.js b/node_modules/@redis/client/dist/lib/commands/ZRANK.js new file mode 100644 
index 0000000..b8b7f0a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZRANK.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, member) { + return ['ZRANK', key, member]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZREM.d.ts b/node_modules/@redis/client/dist/lib/commands/ZREM.d.ts new file mode 100644 index 0000000..c18fefb --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZREM.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, member: RedisCommandArgument | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/ZREM.js b/node_modules/@redis/client/dist/lib/commands/ZREM.js new file mode 100644 index 0000000..fc00be0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZREM.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, member) { + return (0, generic_transformers_1.pushVerdictArguments)(['ZREM', key], member); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYLEX.d.ts b/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYLEX.d.ts new file mode 100644 index 0000000..11ed7fa --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYLEX.d.ts @@ -0,0 +1,4 @@ +import { 
RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, min: RedisCommandArgument | number, max: RedisCommandArgument | number): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYLEX.js b/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYLEX.js new file mode 100644 index 0000000..4992781 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYLEX.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, min, max) { + return [ + 'ZREMRANGEBYLEX', + key, + (0, generic_transformers_1.transformStringNumberInfinityArgument)(min), + (0, generic_transformers_1.transformStringNumberInfinityArgument)(max) + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYRANK.d.ts b/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYRANK.d.ts new file mode 100644 index 0000000..ec3e9c0 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYRANK.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, start: number, stop: number): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYRANK.js b/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYRANK.js new file mode 100644 index 0000000..a139d01 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYRANK.js @@ -0,0 +1,8 @@ 
+"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, start, stop) { + return ['ZREMRANGEBYRANK', key, start.toString(), stop.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYSCORE.d.ts b/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYSCORE.d.ts new file mode 100644 index 0000000..11ed7fa --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYSCORE.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: RedisCommandArgument, min: RedisCommandArgument | number, max: RedisCommandArgument | number): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYSCORE.js b/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYSCORE.js new file mode 100644 index 0000000..79e486c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZREMRANGEBYSCORE.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, min, max) { + return [ + 'ZREMRANGEBYSCORE', + key, + (0, generic_transformers_1.transformStringNumberInfinityArgument)(min), + (0, generic_transformers_1.transformStringNumberInfinityArgument)(max) + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZREVRANK.d.ts b/node_modules/@redis/client/dist/lib/commands/ZREVRANK.d.ts new file mode 100644 index 0000000..7527828 --- /dev/null +++ 
b/node_modules/@redis/client/dist/lib/commands/ZREVRANK.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, member: RedisCommandArgument): RedisCommandArguments; +export declare function transformReply(): number | null; diff --git a/node_modules/@redis/client/dist/lib/commands/ZREVRANK.js b/node_modules/@redis/client/dist/lib/commands/ZREVRANK.js new file mode 100644 index 0000000..e111248 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZREVRANK.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, member) { + return ['ZREVRANK', key, member]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZSCAN.d.ts b/node_modules/@redis/client/dist/lib/commands/ZSCAN.d.ts new file mode 100644 index 0000000..b86d12b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZSCAN.d.ts @@ -0,0 +1,12 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +import { ScanOptions, ZMember } from './generic-transformers'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, cursor: number, options?: ScanOptions): RedisCommandArguments; +type ZScanRawReply = [RedisCommandArgument, Array]; +interface ZScanReply { + cursor: number; + members: Array; +} +export declare function transformReply([cursor, rawMembers]: ZScanRawReply): ZScanReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/ZSCAN.js b/node_modules/@redis/client/dist/lib/commands/ZSCAN.js new 
file mode 100644 index 0000000..ce72c15 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZSCAN.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, cursor, options) { + return (0, generic_transformers_1.pushScanArguments)([ + 'ZSCAN', + key + ], cursor, options); +} +exports.transformArguments = transformArguments; +function transformReply([cursor, rawMembers]) { + const parsedMembers = []; + for (let i = 0; i < rawMembers.length; i += 2) { + parsedMembers.push({ + value: rawMembers[i], + score: (0, generic_transformers_1.transformNumberInfinityReply)(rawMembers[i + 1]) + }); + } + return { + cursor: Number(cursor), + members: parsedMembers + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/client/dist/lib/commands/ZSCORE.d.ts b/node_modules/@redis/client/dist/lib/commands/ZSCORE.d.ts new file mode 100644 index 0000000..309294a --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZSCORE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: RedisCommandArgument, member: RedisCommandArgument): RedisCommandArguments; +export { transformNumberInfinityNullReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZSCORE.js b/node_modules/@redis/client/dist/lib/commands/ZSCORE.js new file mode 100644 index 0000000..f50a641 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZSCORE.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { 
value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, member) { + return ['ZSCORE', key, member]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformNumberInfinityNullReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/ZUNION.d.ts b/node_modules/@redis/client/dist/lib/commands/ZUNION.d.ts new file mode 100644 index 0000000..7a06f00 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZUNION.d.ts @@ -0,0 +1,10 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 2; +export declare const IS_READ_ONLY = true; +interface ZUnionOptions { + WEIGHTS?: Array; + AGGREGATE?: 'SUM' | 'MIN' | 'MAX'; +} +export declare function transformArguments(keys: Array | RedisCommandArgument, options?: ZUnionOptions): RedisCommandArguments; +export declare function transformReply(): Array; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/ZUNION.js b/node_modules/@redis/client/dist/lib/commands/ZUNION.js new file mode 100644 index 0000000..f06d8f9 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZUNION.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 2; +exports.IS_READ_ONLY = true; +function transformArguments(keys, options) { + const args = (0, generic_transformers_1.pushVerdictArgument)(['ZUNION'], keys); + if (options?.WEIGHTS) { + args.push('WEIGHTS', ...options.WEIGHTS.map(weight 
=> weight.toString())); + } + if (options?.AGGREGATE) { + args.push('AGGREGATE', options.AGGREGATE); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZUNIONSTORE.d.ts b/node_modules/@redis/client/dist/lib/commands/ZUNIONSTORE.d.ts new file mode 100644 index 0000000..7b3a64b --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZUNIONSTORE.d.ts @@ -0,0 +1,9 @@ +import { RedisCommandArgument, RedisCommandArguments } from '.'; +export declare const FIRST_KEY_INDEX = 1; +interface ZUnionOptions { + WEIGHTS?: Array; + AGGREGATE?: 'SUM' | 'MIN' | 'MAX'; +} +export declare function transformArguments(destination: RedisCommandArgument, keys: Array | RedisCommandArgument, options?: ZUnionOptions): RedisCommandArguments; +export declare function transformReply(): number; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/ZUNIONSTORE.js b/node_modules/@redis/client/dist/lib/commands/ZUNIONSTORE.js new file mode 100644 index 0000000..3e5f0ff --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZUNIONSTORE.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("./generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(destination, keys, options) { + const args = (0, generic_transformers_1.pushVerdictArgument)(['ZUNIONSTORE', destination], keys); + if (options?.WEIGHTS) { + args.push('WEIGHTS', ...options.WEIGHTS.map(weight => weight.toString())); + } + if (options?.AGGREGATE) { + args.push('AGGREGATE', options.AGGREGATE); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/client/dist/lib/commands/ZUNION_WITHSCORES.d.ts b/node_modules/@redis/client/dist/lib/commands/ZUNION_WITHSCORES.d.ts new file mode 100644 index 0000000..07fe4e3 
--- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZUNION_WITHSCORES.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '.'; +import { transformArguments as transformZUnionArguments } from './ZUNION'; +export { FIRST_KEY_INDEX, IS_READ_ONLY } from './ZUNION'; +export declare function transformArguments(...args: Parameters): RedisCommandArguments; +export { transformSortedSetWithScoresReply as transformReply } from './generic-transformers'; diff --git a/node_modules/@redis/client/dist/lib/commands/ZUNION_WITHSCORES.js b/node_modules/@redis/client/dist/lib/commands/ZUNION_WITHSCORES.js new file mode 100644 index 0000000..23c2b86 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/ZUNION_WITHSCORES.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const ZUNION_1 = require("./ZUNION"); +var ZUNION_2 = require("./ZUNION"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return ZUNION_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return ZUNION_2.IS_READ_ONLY; } }); +function transformArguments(...args) { + return [ + ...(0, ZUNION_1.transformArguments)(...args), + 'WITHSCORES' + ]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("./generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformSortedSetWithScoresReply; } }); diff --git a/node_modules/@redis/client/dist/lib/commands/generic-transformers.d.ts b/node_modules/@redis/client/dist/lib/commands/generic-transformers.d.ts new file mode 100644 index 0000000..1162ac1 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/generic-transformers.d.ts @@ -0,0 +1,221 @@ +import { 
RedisCommandArgument, RedisCommandArguments } from '.'; +export declare function transformBooleanReply(reply: number): boolean; +export declare function transformBooleanArrayReply(reply: Array): Array; +export type BitValue = 0 | 1; +export interface ScanOptions { + MATCH?: string; + COUNT?: number; +} +export declare function pushScanArguments(args: RedisCommandArguments, cursor: number, options?: ScanOptions): RedisCommandArguments; +export declare function transformNumberInfinityReply(reply: RedisCommandArgument): number; +export declare function transformNumberInfinityNullReply(reply: RedisCommandArgument | null): number | null; +export declare function transformNumberInfinityNullArrayReply(reply: Array): Array; +export declare function transformNumberInfinityArgument(num: number): string; +export declare function transformStringNumberInfinityArgument(num: RedisCommandArgument | number): RedisCommandArgument; +export declare function transformTuplesReply(reply: Array): Record; +export interface StreamMessageReply { + id: RedisCommandArgument; + message: Record; +} +export declare function transformStreamMessageReply([id, message]: Array): StreamMessageReply; +export declare function transformStreamMessageNullReply(reply: Array): StreamMessageReply | null; +export type StreamMessagesReply = Array; +export declare function transformStreamMessagesReply(reply: Array): StreamMessagesReply; +export type StreamMessagesNullReply = Array; +export declare function transformStreamMessagesNullReply(reply: Array): StreamMessagesNullReply; +export type StreamsMessagesReply = Array<{ + name: RedisCommandArgument; + messages: StreamMessagesReply; +}> | null; +export declare function transformStreamsMessagesReply(reply: Array | null): StreamsMessagesReply | null; +export interface ZMember { + score: number; + value: RedisCommandArgument; +} +export declare function transformSortedSetMemberNullReply(reply: [RedisCommandArgument, RedisCommandArgument] | []): ZMember | null; 
+export declare function transformSortedSetMemberReply(reply: [RedisCommandArgument, RedisCommandArgument]): ZMember; +export declare function transformSortedSetWithScoresReply(reply: Array): Array; +export type SortedSetSide = 'MIN' | 'MAX'; +export interface ZMPopOptions { + COUNT?: number; +} +export declare function transformZMPopArguments(args: RedisCommandArguments, keys: RedisCommandArgument | Array, side: SortedSetSide, options?: ZMPopOptions): RedisCommandArguments; +export type ListSide = 'LEFT' | 'RIGHT'; +export interface LMPopOptions { + COUNT?: number; +} +export declare function transformLMPopArguments(args: RedisCommandArguments, keys: RedisCommandArgument | Array, side: ListSide, options?: LMPopOptions): RedisCommandArguments; +type GeoCountArgument = number | { + value: number; + ANY?: true; +}; +export declare function pushGeoCountArgument(args: RedisCommandArguments, count: GeoCountArgument | undefined): RedisCommandArguments; +export type GeoUnits = 'm' | 'km' | 'mi' | 'ft'; +export interface GeoCoordinates { + longitude: string | number; + latitude: string | number; +} +type GeoSearchFromMember = string; +export type GeoSearchFrom = GeoSearchFromMember | GeoCoordinates; +interface GeoSearchByRadius { + radius: number; + unit: GeoUnits; +} +interface GeoSearchByBox { + width: number; + height: number; + unit: GeoUnits; +} +export type GeoSearchBy = GeoSearchByRadius | GeoSearchByBox; +export interface GeoSearchOptions { + SORT?: 'ASC' | 'DESC'; + COUNT?: GeoCountArgument; +} +export declare function pushGeoSearchArguments(args: RedisCommandArguments, key: RedisCommandArgument, from: GeoSearchFrom, by: GeoSearchBy, options?: GeoSearchOptions): RedisCommandArguments; +export declare function pushGeoRadiusArguments(args: RedisCommandArguments, key: RedisCommandArgument, from: GeoSearchFrom, radius: number, unit: GeoUnits, options?: GeoSearchOptions): RedisCommandArguments; +export interface GeoRadiusStoreOptions extends GeoSearchOptions { + 
STOREDIST?: boolean; +} +export declare function pushGeoRadiusStoreArguments(args: RedisCommandArguments, key: RedisCommandArgument, from: GeoSearchFrom, radius: number, unit: GeoUnits, destination: RedisCommandArgument, options?: GeoRadiusStoreOptions): RedisCommandArguments; +export declare enum GeoReplyWith { + DISTANCE = "WITHDIST", + HASH = "WITHHASH", + COORDINATES = "WITHCOORD" +} +export interface GeoReplyWithMember { + member: string; + distance?: number; + hash?: string; + coordinates?: { + longitude: string; + latitude: string; + }; +} +export declare function transformGeoMembersWithReply(reply: Array>, replyWith: Array): Array; +export declare function transformEXAT(EXAT: number | Date): string; +export declare function transformPXAT(PXAT: number | Date): string; +export interface EvalOptions { + keys?: Array; + arguments?: Array; +} +export declare function evalFirstKeyIndex(options?: EvalOptions): string | undefined; +export declare function pushEvalArguments(args: Array, options?: EvalOptions): Array; +export declare function pushVerdictArguments(args: RedisCommandArguments, value: RedisCommandArgument | Array): RedisCommandArguments; +export declare function pushVerdictNumberArguments(args: RedisCommandArguments, value: number | Array): RedisCommandArguments; +export declare function pushVerdictArgument(args: RedisCommandArguments, value: RedisCommandArgument | Array): RedisCommandArguments; +export declare function pushOptionalVerdictArgument(args: RedisCommandArguments, name: RedisCommandArgument, value: undefined | RedisCommandArgument | Array): RedisCommandArguments; +export declare enum CommandFlags { + WRITE = "write", + READONLY = "readonly", + DENYOOM = "denyoom", + ADMIN = "admin", + PUBSUB = "pubsub", + NOSCRIPT = "noscript", + RANDOM = "random", + SORT_FOR_SCRIPT = "sort_for_script", + LOADING = "loading", + STALE = "stale", + SKIP_MONITOR = "skip_monitor", + ASKING = "asking", + FAST = "fast", + MOVABLEKEYS = "movablekeys" +} +export 
declare enum CommandCategories { + KEYSPACE = "@keyspace", + READ = "@read", + WRITE = "@write", + SET = "@set", + SORTEDSET = "@sortedset", + LIST = "@list", + HASH = "@hash", + STRING = "@string", + BITMAP = "@bitmap", + HYPERLOGLOG = "@hyperloglog", + GEO = "@geo", + STREAM = "@stream", + PUBSUB = "@pubsub", + ADMIN = "@admin", + FAST = "@fast", + SLOW = "@slow", + BLOCKING = "@blocking", + DANGEROUS = "@dangerous", + CONNECTION = "@connection", + TRANSACTION = "@transaction", + SCRIPTING = "@scripting" +} +export type CommandRawReply = [ + name: string, + arity: number, + flags: Array, + firstKeyIndex: number, + lastKeyIndex: number, + step: number, + categories: Array +]; +export type CommandReply = { + name: string; + arity: number; + flags: Set; + firstKeyIndex: number; + lastKeyIndex: number; + step: number; + categories: Set; +}; +export declare function transformCommandReply(this: void, [name, arity, flags, firstKeyIndex, lastKeyIndex, step, categories]: CommandRawReply): CommandReply; +export declare enum RedisFunctionFlags { + NO_WRITES = "no-writes", + ALLOW_OOM = "allow-oom", + ALLOW_STALE = "allow-stale", + NO_CLUSTER = "no-cluster" +} +export type FunctionListRawItemReply = [ + 'library_name', + string, + 'engine', + string, + 'functions', + Array<[ + 'name', + string, + 'description', + string | null, + 'flags', + Array + ]> +]; +export interface FunctionListItemReply { + libraryName: string; + engine: string; + functions: Array<{ + name: string; + description: string | null; + flags: Array; + }>; +} +export declare function transformFunctionListItemReply(reply: FunctionListRawItemReply): FunctionListItemReply; +export interface SortOptions { + BY?: string; + LIMIT?: { + offset: number; + count: number; + }; + GET?: string | Array; + DIRECTION?: 'ASC' | 'DESC'; + ALPHA?: true; +} +export declare function pushSortArguments(args: RedisCommandArguments, options?: SortOptions): RedisCommandArguments; +export interface SlotRange { + start: number; + 
end: number; +} +export declare function pushSlotRangesArguments(args: RedisCommandArguments, ranges: SlotRange | Array): RedisCommandArguments; +export type RawRangeReply = [ + start: number, + end: number +]; +export interface RangeReply { + start: number; + end: number; +} +export declare function transformRangeReply([start, end]: RawRangeReply): RangeReply; +export {}; diff --git a/node_modules/@redis/client/dist/lib/commands/generic-transformers.js b/node_modules/@redis/client/dist/lib/commands/generic-transformers.js new file mode 100644 index 0000000..e4a90e3 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/generic-transformers.js @@ -0,0 +1,413 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformRangeReply = exports.pushSlotRangesArguments = exports.pushSortArguments = exports.transformFunctionListItemReply = exports.RedisFunctionFlags = exports.transformCommandReply = exports.CommandCategories = exports.CommandFlags = exports.pushOptionalVerdictArgument = exports.pushVerdictArgument = exports.pushVerdictNumberArguments = exports.pushVerdictArguments = exports.pushEvalArguments = exports.evalFirstKeyIndex = exports.transformPXAT = exports.transformEXAT = exports.transformGeoMembersWithReply = exports.GeoReplyWith = exports.pushGeoRadiusStoreArguments = exports.pushGeoRadiusArguments = exports.pushGeoSearchArguments = exports.pushGeoCountArgument = exports.transformLMPopArguments = exports.transformZMPopArguments = exports.transformSortedSetWithScoresReply = exports.transformSortedSetMemberReply = exports.transformSortedSetMemberNullReply = exports.transformStreamsMessagesReply = exports.transformStreamMessagesNullReply = exports.transformStreamMessagesReply = exports.transformStreamMessageNullReply = exports.transformStreamMessageReply = exports.transformTuplesReply = exports.transformStringNumberInfinityArgument = exports.transformNumberInfinityArgument = 
exports.transformNumberInfinityNullArrayReply = exports.transformNumberInfinityNullReply = exports.transformNumberInfinityReply = exports.pushScanArguments = exports.transformBooleanArrayReply = exports.transformBooleanReply = void 0; +function transformBooleanReply(reply) { + return reply === 1; +} +exports.transformBooleanReply = transformBooleanReply; +function transformBooleanArrayReply(reply) { + return reply.map(transformBooleanReply); +} +exports.transformBooleanArrayReply = transformBooleanArrayReply; +function pushScanArguments(args, cursor, options) { + args.push(cursor.toString()); + if (options?.MATCH) { + args.push('MATCH', options.MATCH); + } + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + return args; +} +exports.pushScanArguments = pushScanArguments; +function transformNumberInfinityReply(reply) { + switch (reply.toString()) { + case '+inf': + return Infinity; + case '-inf': + return -Infinity; + default: + return Number(reply); + } +} +exports.transformNumberInfinityReply = transformNumberInfinityReply; +function transformNumberInfinityNullReply(reply) { + if (reply === null) + return null; + return transformNumberInfinityReply(reply); +} +exports.transformNumberInfinityNullReply = transformNumberInfinityNullReply; +function transformNumberInfinityNullArrayReply(reply) { + return reply.map(transformNumberInfinityNullReply); +} +exports.transformNumberInfinityNullArrayReply = transformNumberInfinityNullArrayReply; +function transformNumberInfinityArgument(num) { + switch (num) { + case Infinity: + return '+inf'; + case -Infinity: + return '-inf'; + default: + return num.toString(); + } +} +exports.transformNumberInfinityArgument = transformNumberInfinityArgument; +function transformStringNumberInfinityArgument(num) { + if (typeof num !== 'number') + return num; + return transformNumberInfinityArgument(num); +} +exports.transformStringNumberInfinityArgument = transformStringNumberInfinityArgument; +function 
transformTuplesReply(reply) { + const message = Object.create(null); + for (let i = 0; i < reply.length; i += 2) { + message[reply[i].toString()] = reply[i + 1]; + } + return message; +} +exports.transformTuplesReply = transformTuplesReply; +function transformStreamMessageReply([id, message]) { + return { + id, + message: transformTuplesReply(message) + }; +} +exports.transformStreamMessageReply = transformStreamMessageReply; +function transformStreamMessageNullReply(reply) { + if (reply === null) + return null; + return transformStreamMessageReply(reply); +} +exports.transformStreamMessageNullReply = transformStreamMessageNullReply; +function transformStreamMessagesReply(reply) { + return reply.map(transformStreamMessageReply); +} +exports.transformStreamMessagesReply = transformStreamMessagesReply; +function transformStreamMessagesNullReply(reply) { + return reply.map(transformStreamMessageNullReply); +} +exports.transformStreamMessagesNullReply = transformStreamMessagesNullReply; +function transformStreamsMessagesReply(reply) { + if (reply === null) + return null; + return reply.map(([name, rawMessages]) => ({ + name, + messages: transformStreamMessagesReply(rawMessages) + })); +} +exports.transformStreamsMessagesReply = transformStreamsMessagesReply; +function transformSortedSetMemberNullReply(reply) { + if (!reply.length) + return null; + return transformSortedSetMemberReply(reply); +} +exports.transformSortedSetMemberNullReply = transformSortedSetMemberNullReply; +function transformSortedSetMemberReply(reply) { + return { + value: reply[0], + score: transformNumberInfinityReply(reply[1]) + }; +} +exports.transformSortedSetMemberReply = transformSortedSetMemberReply; +function transformSortedSetWithScoresReply(reply) { + const members = []; + for (let i = 0; i < reply.length; i += 2) { + members.push({ + value: reply[i], + score: transformNumberInfinityReply(reply[i + 1]) + }); + } + return members; +} +exports.transformSortedSetWithScoresReply = 
transformSortedSetWithScoresReply; +function transformZMPopArguments(args, keys, side, options) { + pushVerdictArgument(args, keys); + args.push(side); + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + return args; +} +exports.transformZMPopArguments = transformZMPopArguments; +function transformLMPopArguments(args, keys, side, options) { + pushVerdictArgument(args, keys); + args.push(side); + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + return args; +} +exports.transformLMPopArguments = transformLMPopArguments; +function pushGeoCountArgument(args, count) { + if (typeof count === 'number') { + args.push('COUNT', count.toString()); + } + else if (count) { + args.push('COUNT', count.value.toString()); + if (count.ANY) { + args.push('ANY'); + } + } + return args; +} +exports.pushGeoCountArgument = pushGeoCountArgument; +function pushGeoSearchArguments(args, key, from, by, options) { + args.push(key); + if (typeof from === 'string') { + args.push('FROMMEMBER', from); + } + else { + args.push('FROMLONLAT', from.longitude.toString(), from.latitude.toString()); + } + if ('radius' in by) { + args.push('BYRADIUS', by.radius.toString()); + } + else { + args.push('BYBOX', by.width.toString(), by.height.toString()); + } + args.push(by.unit); + if (options?.SORT) { + args.push(options.SORT); + } + pushGeoCountArgument(args, options?.COUNT); + return args; +} +exports.pushGeoSearchArguments = pushGeoSearchArguments; +function pushGeoRadiusArguments(args, key, from, radius, unit, options) { + args.push(key); + if (typeof from === 'string') { + args.push(from); + } + else { + args.push(from.longitude.toString(), from.latitude.toString()); + } + args.push(radius.toString(), unit); + if (options?.SORT) { + args.push(options.SORT); + } + pushGeoCountArgument(args, options?.COUNT); + return args; +} +exports.pushGeoRadiusArguments = pushGeoRadiusArguments; +function pushGeoRadiusStoreArguments(args, key, from, radius, unit, 
destination, options) { + pushGeoRadiusArguments(args, key, from, radius, unit, options); + if (options?.STOREDIST) { + args.push('STOREDIST', destination); + } + else { + args.push('STORE', destination); + } + return args; +} +exports.pushGeoRadiusStoreArguments = pushGeoRadiusStoreArguments; +var GeoReplyWith; +(function (GeoReplyWith) { + GeoReplyWith["DISTANCE"] = "WITHDIST"; + GeoReplyWith["HASH"] = "WITHHASH"; + GeoReplyWith["COORDINATES"] = "WITHCOORD"; +})(GeoReplyWith || (exports.GeoReplyWith = GeoReplyWith = {})); +function transformGeoMembersWithReply(reply, replyWith) { + const replyWithSet = new Set(replyWith); + let index = 0; + const distanceIndex = replyWithSet.has(GeoReplyWith.DISTANCE) && ++index, hashIndex = replyWithSet.has(GeoReplyWith.HASH) && ++index, coordinatesIndex = replyWithSet.has(GeoReplyWith.COORDINATES) && ++index; + return reply.map(member => { + const transformedMember = { + member: member[0] + }; + if (distanceIndex) { + transformedMember.distance = member[distanceIndex]; + } + if (hashIndex) { + transformedMember.hash = member[hashIndex]; + } + if (coordinatesIndex) { + const [longitude, latitude] = member[coordinatesIndex]; + transformedMember.coordinates = { + longitude, + latitude + }; + } + return transformedMember; + }); +} +exports.transformGeoMembersWithReply = transformGeoMembersWithReply; +function transformEXAT(EXAT) { + return (typeof EXAT === 'number' ? EXAT : Math.floor(EXAT.getTime() / 1000)).toString(); +} +exports.transformEXAT = transformEXAT; +function transformPXAT(PXAT) { + return (typeof PXAT === 'number' ? 
PXAT : PXAT.getTime()).toString(); +} +exports.transformPXAT = transformPXAT; +function evalFirstKeyIndex(options) { + return options?.keys?.[0]; +} +exports.evalFirstKeyIndex = evalFirstKeyIndex; +function pushEvalArguments(args, options) { + if (options?.keys) { + args.push(options.keys.length.toString(), ...options.keys); + } + else { + args.push('0'); + } + if (options?.arguments) { + args.push(...options.arguments); + } + return args; +} +exports.pushEvalArguments = pushEvalArguments; +function pushVerdictArguments(args, value) { + if (Array.isArray(value)) { + // https://github.com/redis/node-redis/pull/2160 + args = args.concat(value); + } + else { + args.push(value); + } + return args; +} +exports.pushVerdictArguments = pushVerdictArguments; +function pushVerdictNumberArguments(args, value) { + if (Array.isArray(value)) { + for (const item of value) { + args.push(item.toString()); + } + } + else { + args.push(value.toString()); + } + return args; +} +exports.pushVerdictNumberArguments = pushVerdictNumberArguments; +function pushVerdictArgument(args, value) { + if (Array.isArray(value)) { + args.push(value.length.toString(), ...value); + } + else { + args.push('1', value); + } + return args; +} +exports.pushVerdictArgument = pushVerdictArgument; +function pushOptionalVerdictArgument(args, name, value) { + if (value === undefined) + return args; + args.push(name); + return pushVerdictArgument(args, value); +} +exports.pushOptionalVerdictArgument = pushOptionalVerdictArgument; +var CommandFlags; +(function (CommandFlags) { + CommandFlags["WRITE"] = "write"; + CommandFlags["READONLY"] = "readonly"; + CommandFlags["DENYOOM"] = "denyoom"; + CommandFlags["ADMIN"] = "admin"; + CommandFlags["PUBSUB"] = "pubsub"; + CommandFlags["NOSCRIPT"] = "noscript"; + CommandFlags["RANDOM"] = "random"; + CommandFlags["SORT_FOR_SCRIPT"] = "sort_for_script"; + CommandFlags["LOADING"] = "loading"; + CommandFlags["STALE"] = "stale"; + CommandFlags["SKIP_MONITOR"] = "skip_monitor"; + 
CommandFlags["ASKING"] = "asking"; + CommandFlags["FAST"] = "fast"; + CommandFlags["MOVABLEKEYS"] = "movablekeys"; // keys have no pre-determined position. You must discover keys yourself. +})(CommandFlags || (exports.CommandFlags = CommandFlags = {})); +var CommandCategories; +(function (CommandCategories) { + CommandCategories["KEYSPACE"] = "@keyspace"; + CommandCategories["READ"] = "@read"; + CommandCategories["WRITE"] = "@write"; + CommandCategories["SET"] = "@set"; + CommandCategories["SORTEDSET"] = "@sortedset"; + CommandCategories["LIST"] = "@list"; + CommandCategories["HASH"] = "@hash"; + CommandCategories["STRING"] = "@string"; + CommandCategories["BITMAP"] = "@bitmap"; + CommandCategories["HYPERLOGLOG"] = "@hyperloglog"; + CommandCategories["GEO"] = "@geo"; + CommandCategories["STREAM"] = "@stream"; + CommandCategories["PUBSUB"] = "@pubsub"; + CommandCategories["ADMIN"] = "@admin"; + CommandCategories["FAST"] = "@fast"; + CommandCategories["SLOW"] = "@slow"; + CommandCategories["BLOCKING"] = "@blocking"; + CommandCategories["DANGEROUS"] = "@dangerous"; + CommandCategories["CONNECTION"] = "@connection"; + CommandCategories["TRANSACTION"] = "@transaction"; + CommandCategories["SCRIPTING"] = "@scripting"; +})(CommandCategories || (exports.CommandCategories = CommandCategories = {})); +function transformCommandReply([name, arity, flags, firstKeyIndex, lastKeyIndex, step, categories]) { + return { + name, + arity, + flags: new Set(flags), + firstKeyIndex, + lastKeyIndex, + step, + categories: new Set(categories) + }; +} +exports.transformCommandReply = transformCommandReply; +var RedisFunctionFlags; +(function (RedisFunctionFlags) { + RedisFunctionFlags["NO_WRITES"] = "no-writes"; + RedisFunctionFlags["ALLOW_OOM"] = "allow-oom"; + RedisFunctionFlags["ALLOW_STALE"] = "allow-stale"; + RedisFunctionFlags["NO_CLUSTER"] = "no-cluster"; +})(RedisFunctionFlags || (exports.RedisFunctionFlags = RedisFunctionFlags = {})); +function transformFunctionListItemReply(reply) 
{ + return { + libraryName: reply[1], + engine: reply[3], + functions: reply[5].map(fn => ({ + name: fn[1], + description: fn[3], + flags: fn[5] + })) + }; +} +exports.transformFunctionListItemReply = transformFunctionListItemReply; +function pushSortArguments(args, options) { + if (options?.BY) { + args.push('BY', options.BY); + } + if (options?.LIMIT) { + args.push('LIMIT', options.LIMIT.offset.toString(), options.LIMIT.count.toString()); + } + if (options?.GET) { + for (const pattern of (typeof options.GET === 'string' ? [options.GET] : options.GET)) { + args.push('GET', pattern); + } + } + if (options?.DIRECTION) { + args.push(options.DIRECTION); + } + if (options?.ALPHA) { + args.push('ALPHA'); + } + return args; +} +exports.pushSortArguments = pushSortArguments; +function pushSlotRangeArguments(args, range) { + args.push(range.start.toString(), range.end.toString()); +} +function pushSlotRangesArguments(args, ranges) { + if (Array.isArray(ranges)) { + for (const range of ranges) { + pushSlotRangeArguments(args, range); + } + } + else { + pushSlotRangeArguments(args, ranges); + } + return args; +} +exports.pushSlotRangesArguments = pushSlotRangesArguments; +function transformRangeReply([start, end]) { + return { + start, + end + }; +} +exports.transformRangeReply = transformRangeReply; diff --git a/node_modules/@redis/client/dist/lib/commands/index.d.ts b/node_modules/@redis/client/dist/lib/commands/index.d.ts new file mode 100644 index 0000000..d956111 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/index.d.ts @@ -0,0 +1,49 @@ +/// +import { ClientCommandOptions } from '../client'; +import { CommandOptions } from '../command-options'; +import { RedisScriptConfig, SHA1 } from '../lua-script'; +export type RedisCommandRawReply = string | number | Buffer | null | undefined | Array; +export type RedisCommandArgument = string | Buffer; +export type RedisCommandArguments = Array & { + preserve?: unknown; +}; +export interface RedisCommand { + 
FIRST_KEY_INDEX?: number | ((...args: Array) => RedisCommandArgument | undefined); + IS_READ_ONLY?: boolean; + TRANSFORM_LEGACY_REPLY?: boolean; + transformArguments(this: void, ...args: Array): RedisCommandArguments; + transformReply?(this: void, reply: any, preserved?: any): any; +} +export type RedisCommandReply = C['transformReply'] extends (...args: any) => infer T ? T : RedisCommandRawReply; +export type ConvertArgumentType = Type extends RedisCommandArgument ? (Type extends (string & ToType) ? Type : ToType) : (Type extends Set ? Set> : (Type extends Map ? Map> : (Type extends Array ? Array> : (Type extends Date ? Type : (Type extends Record ? { + [Property in keyof Type]: ConvertArgumentType; +} : Type))))); +export type RedisCommandSignature = Parameters> = >(...args: Params | [options: Options, ...rest: Params]) => Promise, Options['returnBuffers'] extends true ? Buffer : string>>; +export interface RedisCommands { + [command: string]: RedisCommand; +} +export interface RedisModule { + [command: string]: RedisCommand; +} +export interface RedisModules { + [module: string]: RedisModule; +} +export interface RedisFunction extends RedisCommand { + NUMBER_OF_KEYS?: number; +} +export interface RedisFunctionLibrary { + [fn: string]: RedisFunction; +} +export interface RedisFunctions { + [library: string]: RedisFunctionLibrary; +} +export type RedisScript = RedisScriptConfig & SHA1; +export interface RedisScripts { + [script: string]: RedisScript; +} +export interface RedisExtensions { + modules?: M; + functions?: F; + scripts?: S; +} +export type ExcludeMappedString = string extends S ? 
never : S; diff --git a/node_modules/@redis/client/dist/lib/commands/index.js b/node_modules/@redis/client/dist/lib/commands/index.js new file mode 100644 index 0000000..c8ad2e5 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/commands/index.js @@ -0,0 +1,2 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); diff --git a/node_modules/@redis/client/dist/lib/errors.d.ts b/node_modules/@redis/client/dist/lib/errors.d.ts new file mode 100644 index 0000000..f0ccef9 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/errors.d.ts @@ -0,0 +1,39 @@ +import { RedisCommandRawReply } from './commands'; +export declare class AbortError extends Error { + constructor(); +} +export declare class WatchError extends Error { + constructor(); +} +export declare class ConnectionTimeoutError extends Error { + constructor(); +} +export declare class ClientClosedError extends Error { + constructor(); +} +export declare class ClientOfflineError extends Error { + constructor(); +} +export declare class DisconnectsClientError extends Error { + constructor(); +} +export declare class SocketClosedUnexpectedlyError extends Error { + constructor(); +} +export declare class RootNodesUnavailableError extends Error { + constructor(); +} +export declare class ReconnectStrategyError extends Error { + originalError: Error; + socketError: unknown; + constructor(originalError: Error, socketError: unknown); +} +export declare class ErrorReply extends Error { + constructor(message: string); +} +export declare class MultiErrorReply extends ErrorReply { + replies: (RedisCommandRawReply | ErrorReply)[]; + errorIndexes: number[]; + constructor(replies: Array, errorIndexes: Array); + errors(): Generator; +} diff --git a/node_modules/@redis/client/dist/lib/errors.js b/node_modules/@redis/client/dist/lib/errors.js new file mode 100644 index 0000000..7919d27 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/errors.js @@ -0,0 +1,103 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.MultiErrorReply = exports.ErrorReply = exports.ReconnectStrategyError = exports.RootNodesUnavailableError = exports.SocketClosedUnexpectedlyError = exports.DisconnectsClientError = exports.ClientOfflineError = exports.ClientClosedError = exports.ConnectionTimeoutError = exports.WatchError = exports.AbortError = void 0; +class AbortError extends Error { + constructor() { + super('The command was aborted'); + } +} +exports.AbortError = AbortError; +class WatchError extends Error { + constructor() { + super('One (or more) of the watched keys has been changed'); + } +} +exports.WatchError = WatchError; +class ConnectionTimeoutError extends Error { + constructor() { + super('Connection timeout'); + } +} +exports.ConnectionTimeoutError = ConnectionTimeoutError; +class ClientClosedError extends Error { + constructor() { + super('The client is closed'); + } +} +exports.ClientClosedError = ClientClosedError; +class ClientOfflineError extends Error { + constructor() { + super('The client is offline'); + } +} +exports.ClientOfflineError = ClientOfflineError; +class DisconnectsClientError extends Error { + constructor() { + super('Disconnects client'); + } +} +exports.DisconnectsClientError = DisconnectsClientError; +class SocketClosedUnexpectedlyError extends Error { + constructor() { + super('Socket closed unexpectedly'); + } +} +exports.SocketClosedUnexpectedlyError = SocketClosedUnexpectedlyError; +class RootNodesUnavailableError extends Error { + constructor() { + super('All the root nodes are unavailable'); + } +} +exports.RootNodesUnavailableError = RootNodesUnavailableError; +class ReconnectStrategyError extends Error { + constructor(originalError, socketError) { + super(originalError.message); + Object.defineProperty(this, "originalError", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "socketError", { + enumerable: true, + 
configurable: true, + writable: true, + value: void 0 + }); + this.originalError = originalError; + this.socketError = socketError; + } +} +exports.ReconnectStrategyError = ReconnectStrategyError; +class ErrorReply extends Error { + constructor(message) { + super(message); + this.stack = undefined; + } +} +exports.ErrorReply = ErrorReply; +class MultiErrorReply extends ErrorReply { + constructor(replies, errorIndexes) { + super(`${errorIndexes.length} commands failed, see .replies and .errorIndexes for more information`); + Object.defineProperty(this, "replies", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + Object.defineProperty(this, "errorIndexes", { + enumerable: true, + configurable: true, + writable: true, + value: void 0 + }); + this.replies = replies; + this.errorIndexes = errorIndexes; + } + *errors() { + for (const index of this.errorIndexes) { + yield this.replies[index]; + } + } +} +exports.MultiErrorReply = MultiErrorReply; diff --git a/node_modules/@redis/client/dist/lib/lua-script.d.ts b/node_modules/@redis/client/dist/lib/lua-script.d.ts new file mode 100644 index 0000000..29a7116 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/lua-script.d.ts @@ -0,0 +1,10 @@ +import { RedisCommand } from './commands'; +export interface RedisScriptConfig extends RedisCommand { + SCRIPT: string; + NUMBER_OF_KEYS?: number; +} +export interface SHA1 { + SHA1: string; +} +export declare function defineScript(script: S): S & SHA1; +export declare function scriptSha1(script: string): string; diff --git a/node_modules/@redis/client/dist/lib/lua-script.js b/node_modules/@redis/client/dist/lib/lua-script.js new file mode 100644 index 0000000..3a5d94f --- /dev/null +++ b/node_modules/@redis/client/dist/lib/lua-script.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.scriptSha1 = exports.defineScript = void 0; +const crypto_1 = require("crypto"); +function 
defineScript(script) { + return { + ...script, + SHA1: scriptSha1(script.SCRIPT) + }; +} +exports.defineScript = defineScript; +function scriptSha1(script) { + return (0, crypto_1.createHash)('sha1').update(script).digest('hex'); +} +exports.scriptSha1 = scriptSha1; diff --git a/node_modules/@redis/client/dist/lib/multi-command.d.ts b/node_modules/@redis/client/dist/lib/multi-command.d.ts new file mode 100644 index 0000000..9e47534 --- /dev/null +++ b/node_modules/@redis/client/dist/lib/multi-command.d.ts @@ -0,0 +1,16 @@ +import { RedisCommand, RedisCommandArguments, RedisCommandRawReply, RedisFunction, RedisScript } from './commands'; +import { ErrorReply } from './errors'; +export interface RedisMultiQueuedCommand { + args: RedisCommandArguments; + transformReply?: RedisCommand['transformReply']; +} +export default class RedisMultiCommand { + static generateChainId(): symbol; + readonly queue: Array; + readonly scriptsInUse: Set; + addCommand(args: RedisCommandArguments, transformReply?: RedisCommand['transformReply']): void; + addFunction(name: string, fn: RedisFunction, args: Array): RedisCommandArguments; + addScript(script: RedisScript, args: Array): RedisCommandArguments; + handleExecReplies(rawReplies: Array): Array; + transformReplies(rawReplies: Array): Array; +} diff --git a/node_modules/@redis/client/dist/lib/multi-command.js b/node_modules/@redis/client/dist/lib/multi-command.js new file mode 100644 index 0000000..91da60c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/multi-command.js @@ -0,0 +1,78 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const commander_1 = require("./commander"); +const errors_1 = require("./errors"); +class RedisMultiCommand { + constructor() { + Object.defineProperty(this, "queue", { + enumerable: true, + configurable: true, + writable: true, + value: [] + }); + Object.defineProperty(this, "scriptsInUse", { + enumerable: true, + configurable: true, + writable: true, + value: new 
Set() + }); + } + static generateChainId() { + return Symbol('RedisMultiCommand Chain Id'); + } + addCommand(args, transformReply) { + this.queue.push({ + args, + transformReply + }); + } + addFunction(name, fn, args) { + const transformedArguments = (0, commander_1.fCallArguments)(name, fn, fn.transformArguments(...args)); + this.queue.push({ + args: transformedArguments, + transformReply: fn.transformReply + }); + return transformedArguments; + } + addScript(script, args) { + const transformedArguments = []; + if (this.scriptsInUse.has(script.SHA1)) { + transformedArguments.push('EVALSHA', script.SHA1); + } + else { + this.scriptsInUse.add(script.SHA1); + transformedArguments.push('EVAL', script.SCRIPT); + } + if (script.NUMBER_OF_KEYS !== undefined) { + transformedArguments.push(script.NUMBER_OF_KEYS.toString()); + } + const scriptArguments = script.transformArguments(...args); + transformedArguments.push(...scriptArguments); + if (scriptArguments.preserve) { + transformedArguments.preserve = scriptArguments.preserve; + } + this.addCommand(transformedArguments, script.transformReply); + return transformedArguments; + } + handleExecReplies(rawReplies) { + const execReply = rawReplies[rawReplies.length - 1]; + if (execReply === null) { + throw new errors_1.WatchError(); + } + return this.transformReplies(execReply); + } + transformReplies(rawReplies) { + const errorIndexes = [], replies = rawReplies.map((reply, i) => { + if (reply instanceof errors_1.ErrorReply) { + errorIndexes.push(i); + return reply; + } + const { transformReply, args } = this.queue[i]; + return transformReply ? 
transformReply(reply, args.preserve) : reply; + }); + if (errorIndexes.length) + throw new errors_1.MultiErrorReply(replies, errorIndexes); + return replies; + } +} +exports.default = RedisMultiCommand; diff --git a/node_modules/@redis/client/dist/lib/utils.d.ts b/node_modules/@redis/client/dist/lib/utils.d.ts new file mode 100644 index 0000000..c930f9d --- /dev/null +++ b/node_modules/@redis/client/dist/lib/utils.d.ts @@ -0,0 +1 @@ +export declare function promiseTimeout(ms: number): Promise; diff --git a/node_modules/@redis/client/dist/lib/utils.js b/node_modules/@redis/client/dist/lib/utils.js new file mode 100644 index 0000000..013763c --- /dev/null +++ b/node_modules/@redis/client/dist/lib/utils.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.promiseTimeout = void 0; +function promiseTimeout(ms) { + return new Promise(resolve => setTimeout(resolve, ms)); +} +exports.promiseTimeout = promiseTimeout; diff --git a/node_modules/@redis/client/dist/package.json b/node_modules/@redis/client/dist/package.json new file mode 100644 index 0000000..e63961d --- /dev/null +++ b/node_modules/@redis/client/dist/package.json @@ -0,0 +1,52 @@ +{ + "name": "@redis/client", + "version": "1.6.0", + "license": "MIT", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/" + ], + "scripts": { + "test": "nyc -r text-summary -r lcov mocha -r source-map-support/register -r ts-node/register './lib/**/*.spec.ts'", + "build": "tsc", + "lint": "eslint ./*.ts ./lib/**/*.ts", + "documentation": "typedoc" + }, + "dependencies": { + "cluster-key-slot": "1.1.2", + "generic-pool": "3.9.0", + "yallist": "4.0.0" + }, + "devDependencies": { + "@istanbuljs/nyc-config-typescript": "^1.0.2", + "@redis/test-utils": "*", + "@types/node": "^20.6.2", + "@types/sinon": "^10.0.16", + "@types/yallist": "^4.0.1", + "@typescript-eslint/eslint-plugin": "^6.7.2", + "@typescript-eslint/parser": "^6.7.2", + "eslint": "^8.49.0", 
+ "nyc": "^15.1.0", + "release-it": "^16.1.5", + "sinon": "^16.0.0", + "source-map-support": "^0.5.21", + "ts-node": "^10.9.1", + "typedoc": "^0.25.1", + "typescript": "^5.2.2" + }, + "engines": { + "node": ">=14" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis/tree/master/packages/client", + "keywords": [ + "redis" + ] +} diff --git a/node_modules/@redis/client/package.json b/node_modules/@redis/client/package.json new file mode 100644 index 0000000..da3105b --- /dev/null +++ b/node_modules/@redis/client/package.json @@ -0,0 +1,52 @@ +{ + "name": "@redis/client", + "version": "1.6.1", + "license": "MIT", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/" + ], + "scripts": { + "test": "nyc -r text-summary -r lcov mocha -r source-map-support/register -r ts-node/register './lib/**/*.spec.ts'", + "build": "tsc", + "lint": "eslint ./*.ts ./lib/**/*.ts", + "documentation": "typedoc" + }, + "dependencies": { + "cluster-key-slot": "1.1.2", + "generic-pool": "3.9.0", + "yallist": "4.0.0" + }, + "devDependencies": { + "@istanbuljs/nyc-config-typescript": "^1.0.2", + "@redis/test-utils": "*", + "@types/node": "^20.6.2", + "@types/sinon": "^10.0.16", + "@types/yallist": "^4.0.1", + "@typescript-eslint/eslint-plugin": "^6.7.2", + "@typescript-eslint/parser": "^6.7.2", + "eslint": "^8.49.0", + "nyc": "^15.1.0", + "release-it": "^16.1.5", + "sinon": "^16.0.0", + "source-map-support": "^0.5.21", + "ts-node": "^10.9.1", + "typedoc": "^0.25.1", + "typescript": "^5.2.2" + }, + "engines": { + "node": ">=14" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis/tree/master/packages/client", + "keywords": [ + "redis" + ] +} diff 
--git a/node_modules/@redis/graph/README.md b/node_modules/@redis/graph/README.md new file mode 100644 index 0000000..4c712bf --- /dev/null +++ b/node_modules/@redis/graph/README.md @@ -0,0 +1,34 @@ +# @redis/graph + +Example usage: +```javascript +import { createClient, Graph } from 'redis'; + +const client = createClient(); +client.on('error', (err) => console.log('Redis Client Error', err)); + +await client.connect(); + +const graph = new Graph(client, 'graph'); + +await graph.query( + 'CREATE (:Rider { name: $riderName })-[:rides]->(:Team { name: $teamName })', + { + params: { + riderName: 'Buzz Aldrin', + teamName: 'Apollo' + } + } +); + +const result = await graph.roQuery( + 'MATCH (r:Rider)-[:rides]->(t:Team { name: $name }) RETURN r.name AS name', + { + params: { + name: 'Apollo' + } + } +); + +console.log(result.data); // [{ name: 'Buzz Aldrin' }] +``` diff --git a/node_modules/@redis/graph/dist/commands/CONFIG_GET.d.ts b/node_modules/@redis/graph/dist/commands/CONFIG_GET.d.ts new file mode 100644 index 0000000..3a89f87 --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/CONFIG_GET.d.ts @@ -0,0 +1,8 @@ +export declare const IS_READ_ONLY = true; +export declare function transformArguments(configKey: string): Array; +type ConfigItem = [ + configKey: string, + value: number +]; +export declare function transformReply(): ConfigItem | Array; +export {}; diff --git a/node_modules/@redis/graph/dist/commands/CONFIG_GET.js b/node_modules/@redis/graph/dist/commands/CONFIG_GET.js new file mode 100644 index 0000000..169c40f --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/CONFIG_GET.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments(configKey) { + return ['GRAPH.CONFIG', 'GET', configKey]; +} +exports.transformArguments = transformArguments; diff --git 
a/node_modules/@redis/graph/dist/commands/CONFIG_SET.d.ts b/node_modules/@redis/graph/dist/commands/CONFIG_SET.d.ts new file mode 100644 index 0000000..6209229 --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/CONFIG_SET.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(configKey: string, value: number): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/graph/dist/commands/CONFIG_SET.js b/node_modules/@redis/graph/dist/commands/CONFIG_SET.js new file mode 100644 index 0000000..7336c4b --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/CONFIG_SET.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(configKey, value) { + return [ + 'GRAPH.CONFIG', + 'SET', + configKey, + value.toString() + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/graph/dist/commands/DELETE.d.ts b/node_modules/@redis/graph/dist/commands/DELETE.d.ts new file mode 100644 index 0000000..8b5c99b --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/DELETE.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string): Array; +export declare function transformReply(): string; diff --git a/node_modules/@redis/graph/dist/commands/DELETE.js b/node_modules/@redis/graph/dist/commands/DELETE.js new file mode 100644 index 0000000..6123ffc --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/DELETE.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key) { + return ['GRAPH.DELETE', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/graph/dist/commands/EXPLAIN.d.ts 
b/node_modules/@redis/graph/dist/commands/EXPLAIN.d.ts new file mode 100644 index 0000000..883405b --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/EXPLAIN.d.ts @@ -0,0 +1,4 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, query: string): Array; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/graph/dist/commands/EXPLAIN.js b/node_modules/@redis/graph/dist/commands/EXPLAIN.js new file mode 100644 index 0000000..d63c05c --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/EXPLAIN.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, query) { + return ['GRAPH.EXPLAIN', key, query]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/graph/dist/commands/LIST.d.ts b/node_modules/@redis/graph/dist/commands/LIST.d.ts new file mode 100644 index 0000000..53667fd --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/LIST.d.ts @@ -0,0 +1,3 @@ +export declare const IS_READ_ONLY = true; +export declare function transformArguments(): Array; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/graph/dist/commands/LIST.js b/node_modules/@redis/graph/dist/commands/LIST.js new file mode 100644 index 0000000..b9c6f06 --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/LIST.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments() { + return ['GRAPH.LIST']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/graph/dist/commands/PROFILE.d.ts 
b/node_modules/@redis/graph/dist/commands/PROFILE.d.ts new file mode 100644 index 0000000..883405b --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/PROFILE.d.ts @@ -0,0 +1,4 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, query: string): Array; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/graph/dist/commands/PROFILE.js b/node_modules/@redis/graph/dist/commands/PROFILE.js new file mode 100644 index 0000000..f5749e6 --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/PROFILE.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, query) { + return ['GRAPH.PROFILE', key, query]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/graph/dist/commands/QUERY.d.ts b/node_modules/@redis/graph/dist/commands/QUERY.d.ts new file mode 100644 index 0000000..a14264b --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/QUERY.d.ts @@ -0,0 +1,25 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands/index'; +import { QueryOptionsBackwardCompatible } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(graph: RedisCommandArgument, query: RedisCommandArgument, options?: QueryOptionsBackwardCompatible, compact?: boolean): RedisCommandArguments; +type Headers = Array; +type Data = Array; +type Metadata = Array; +type QueryRawReply = [ + headers: Headers, + data: Data, + metadata: Metadata +] | [ + metadata: Metadata +]; +export type QueryReply = { + headers: undefined; + data: undefined; + metadata: Metadata; +} | { + headers: Headers; + data: Data; + metadata: Metadata; +}; +export declare 
function transformReply(reply: QueryRawReply): QueryReply; +export {}; diff --git a/node_modules/@redis/graph/dist/commands/QUERY.js b/node_modules/@redis/graph/dist/commands/QUERY.js new file mode 100644 index 0000000..cb81373 --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/QUERY.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(graph, query, options, compact) { + return (0, _1.pushQueryArguments)(['GRAPH.QUERY'], graph, query, options, compact); +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply.length === 1 ? { + headers: undefined, + data: undefined, + metadata: reply[0] + } : { + headers: reply[0], + data: reply[1], + metadata: reply[2] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/graph/dist/commands/RO_QUERY.d.ts b/node_modules/@redis/graph/dist/commands/RO_QUERY.d.ts new file mode 100644 index 0000000..7ffad65 --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/RO_QUERY.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { QueryOptionsBackwardCompatible } from '.'; +export { FIRST_KEY_INDEX } from './QUERY'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(graph: RedisCommandArgument, query: RedisCommandArgument, options?: QueryOptionsBackwardCompatible, compact?: boolean): RedisCommandArguments; +export { transformReply } from './QUERY'; diff --git a/node_modules/@redis/graph/dist/commands/RO_QUERY.js b/node_modules/@redis/graph/dist/commands/RO_QUERY.js new file mode 100644 index 0000000..eb9ed57 --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/RO_QUERY.js @@ -0,0 +1,13 @@ +"use strict"; 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +var QUERY_1 = require("./QUERY"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return QUERY_1.FIRST_KEY_INDEX; } }); +exports.IS_READ_ONLY = true; +function transformArguments(graph, query, options, compact) { + return (0, _1.pushQueryArguments)(['GRAPH.RO_QUERY'], graph, query, options, compact); +} +exports.transformArguments = transformArguments; +var QUERY_2 = require("./QUERY"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return QUERY_2.transformReply; } }); diff --git a/node_modules/@redis/graph/dist/commands/SLOWLOG.d.ts b/node_modules/@redis/graph/dist/commands/SLOWLOG.d.ts new file mode 100644 index 0000000..53155f2 --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/SLOWLOG.d.ts @@ -0,0 +1,17 @@ +export declare const IS_READ_ONLY = true; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string): string[]; +type SlowLogRawReply = Array<[ + timestamp: string, + command: string, + query: string, + took: string +]>; +type SlowLogReply = Array<{ + timestamp: Date; + command: string; + query: string; + took: number; +}>; +export declare function transformReply(logs: SlowLogRawReply): SlowLogReply; +export {}; diff --git a/node_modules/@redis/graph/dist/commands/SLOWLOG.js b/node_modules/@redis/graph/dist/commands/SLOWLOG.js new file mode 100644 index 0000000..bbadeb3 --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/SLOWLOG.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +exports.FIRST_KEY_INDEX = 1; +function 
transformArguments(key) { + return ['GRAPH.SLOWLOG', key]; +} +exports.transformArguments = transformArguments; +function transformReply(logs) { + return logs.map(([timestamp, command, query, took]) => ({ + timestamp: new Date(Number(timestamp) * 1000), + command, + query, + took: Number(took) + })); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/graph/dist/commands/index.d.ts b/node_modules/@redis/graph/dist/commands/index.d.ts new file mode 100644 index 0000000..d39c646 --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/index.d.ts @@ -0,0 +1,41 @@ +import * as CONFIG_GET from './CONFIG_GET'; +import * as CONFIG_SET from './CONFIG_SET'; +import * as DELETE from './DELETE'; +import * as EXPLAIN from './EXPLAIN'; +import * as LIST from './LIST'; +import * as PROFILE from './PROFILE'; +import * as QUERY from './QUERY'; +import * as RO_QUERY from './RO_QUERY'; +import * as SLOWLOG from './SLOWLOG'; +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +declare const _default: { + CONFIG_GET: typeof CONFIG_GET; + configGet: typeof CONFIG_GET; + CONFIG_SET: typeof CONFIG_SET; + configSet: typeof CONFIG_SET; + DELETE: typeof DELETE; + delete: typeof DELETE; + EXPLAIN: typeof EXPLAIN; + explain: typeof EXPLAIN; + LIST: typeof LIST; + list: typeof LIST; + PROFILE: typeof PROFILE; + profile: typeof PROFILE; + QUERY: typeof QUERY; + query: typeof QUERY; + RO_QUERY: typeof RO_QUERY; + roQuery: typeof RO_QUERY; + SLOWLOG: typeof SLOWLOG; + slowLog: typeof SLOWLOG; +}; +export default _default; +type QueryParam = null | string | number | boolean | QueryParams | Array; +type QueryParams = { + [key: string]: QueryParam; +}; +export interface QueryOptions { + params?: QueryParams; + TIMEOUT?: number; +} +export type QueryOptionsBackwardCompatible = QueryOptions | number; +export declare function pushQueryArguments(args: RedisCommandArguments, graph: RedisCommandArgument, query: RedisCommandArgument, 
options?: QueryOptionsBackwardCompatible, compact?: boolean): RedisCommandArguments; diff --git a/node_modules/@redis/graph/dist/commands/index.js b/node_modules/@redis/graph/dist/commands/index.js new file mode 100644 index 0000000..1f71400 --- /dev/null +++ b/node_modules/@redis/graph/dist/commands/index.js @@ -0,0 +1,88 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.pushQueryArguments = void 0; +const CONFIG_GET = require("./CONFIG_GET"); +const CONFIG_SET = require("./CONFIG_SET"); +; +const DELETE = require("./DELETE"); +const EXPLAIN = require("./EXPLAIN"); +const LIST = require("./LIST"); +const PROFILE = require("./PROFILE"); +const QUERY = require("./QUERY"); +const RO_QUERY = require("./RO_QUERY"); +const SLOWLOG = require("./SLOWLOG"); +exports.default = { + CONFIG_GET, + configGet: CONFIG_GET, + CONFIG_SET, + configSet: CONFIG_SET, + DELETE, + delete: DELETE, + EXPLAIN, + explain: EXPLAIN, + LIST, + list: LIST, + PROFILE, + profile: PROFILE, + QUERY, + query: QUERY, + RO_QUERY, + roQuery: RO_QUERY, + SLOWLOG, + slowLog: SLOWLOG +}; +function pushQueryArguments(args, graph, query, options, compact) { + args.push(graph); + if (typeof options === 'number') { + args.push(query); + pushTimeout(args, options); + } + else { + args.push(options?.params ? 
+ `CYPHER ${queryParamsToString(options.params)} ${query}` : + query); + if (options?.TIMEOUT !== undefined) { + pushTimeout(args, options.TIMEOUT); + } + } + if (compact) { + args.push('--compact'); + } + return args; +} +exports.pushQueryArguments = pushQueryArguments; +function pushTimeout(args, timeout) { + args.push('TIMEOUT', timeout.toString()); +} +function queryParamsToString(params) { + const parts = []; + for (const [key, value] of Object.entries(params)) { + parts.push(`${key}=${queryParamToString(value)}`); + } + return parts.join(' '); +} +function queryParamToString(param) { + if (param === null) { + return 'null'; + } + switch (typeof param) { + case 'string': + return `"${param.replace(/["\\]/g, '\\$&')}"`; + case 'number': + case 'boolean': + return param.toString(); + } + if (Array.isArray(param)) { + return `[${param.map(queryParamToString).join(',')}]`; + } + else if (typeof param === 'object') { + const body = []; + for (const [key, value] of Object.entries(param)) { + body.push(`${key}:${queryParamToString(value)}`); + } + return `{${body.join(',')}}`; + } + else { + throw new TypeError(`Unexpected param type ${typeof param} ${param}`); + } +} diff --git a/node_modules/@redis/graph/dist/graph.d.ts b/node_modules/@redis/graph/dist/graph.d.ts new file mode 100644 index 0000000..0764e69 --- /dev/null +++ b/node_modules/@redis/graph/dist/graph.d.ts @@ -0,0 +1,19 @@ +import { RedisClientType } from '@redis/client/dist/lib/client/index'; +import { RedisCommandArgument, RedisFunctions, RedisScripts } from '@redis/client/dist/lib/commands'; +import { QueryOptions } from './commands'; +import { QueryReply } from './commands/QUERY'; +export type GraphReply = Omit & { + data?: Array; +}; +export type GraphClientType = RedisClientType<{ + graph: { + query: typeof import('./commands/QUERY'); + roQuery: typeof import('./commands/RO_QUERY'); + }; +}, RedisFunctions, RedisScripts>; +export default class Graph { + #private; + constructor(client: 
GraphClientType, name: string); + query(query: RedisCommandArgument, options?: QueryOptions): Promise>; + roQuery(query: RedisCommandArgument, options?: QueryOptions): Promise>; +} diff --git a/node_modules/@redis/graph/dist/graph.js b/node_modules/@redis/graph/dist/graph.js new file mode 100644 index 0000000..230de6f --- /dev/null +++ b/node_modules/@redis/graph/dist/graph.js @@ -0,0 +1,173 @@ +"use strict"; +var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { + if (kind === "m") throw new TypeError("Private method is not writable"); + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); + return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; +}; +var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { + if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); + if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); + return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); +}; +var _Graph_instances, _Graph_client, _Graph_name, _Graph_metadata, _Graph_setMetadataPromise, _Graph_updateMetadata, _Graph_setMetadata, _Graph_cleanMetadataArray, _Graph_getMetadata, _Graph_getMetadataAsync, _Graph_parseReply, _Graph_parseValue, _Graph_parseEdge, _Graph_parseNode, _Graph_parseProperties; +Object.defineProperty(exports, "__esModule", { value: true }); +// https://github.com/RedisGraph/RedisGraph/blob/master/src/resultset/formatters/resultset_formatter.h#L20 +var GraphValueTypes; +(function (GraphValueTypes) { + GraphValueTypes[GraphValueTypes["UNKNOWN"] = 0] = "UNKNOWN"; + GraphValueTypes[GraphValueTypes["NULL"] = 1] = "NULL"; + GraphValueTypes[GraphValueTypes["STRING"] = 2] = "STRING"; + GraphValueTypes[GraphValueTypes["INTEGER"] = 3] = "INTEGER"; + GraphValueTypes[GraphValueTypes["BOOLEAN"] = 4] = "BOOLEAN"; + GraphValueTypes[GraphValueTypes["DOUBLE"] = 5] = "DOUBLE"; + GraphValueTypes[GraphValueTypes["ARRAY"] = 6] = "ARRAY"; + GraphValueTypes[GraphValueTypes["EDGE"] = 7] = "EDGE"; + GraphValueTypes[GraphValueTypes["NODE"] = 8] = "NODE"; + GraphValueTypes[GraphValueTypes["PATH"] = 9] = "PATH"; + GraphValueTypes[GraphValueTypes["MAP"] = 10] = "MAP"; + GraphValueTypes[GraphValueTypes["POINT"] = 11] = "POINT"; +})(GraphValueTypes || (GraphValueTypes = {})); +class Graph { + constructor(client, name) { + _Graph_instances.add(this); + _Graph_client.set(this, void 0); + _Graph_name.set(this, void 0); + _Graph_metadata.set(this, void 0); + _Graph_setMetadataPromise.set(this, void 0); + __classPrivateFieldSet(this, _Graph_client, client, "f"); + __classPrivateFieldSet(this, _Graph_name, name, "f"); + } + async query(query, options) { + return __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_parseReply).call(this, await __classPrivateFieldGet(this, _Graph_client, "f").graph.query(__classPrivateFieldGet(this, _Graph_name, "f"), query, options, true)); + } + async roQuery(query, options) { + return 
__classPrivateFieldGet(this, _Graph_instances, "m", _Graph_parseReply).call(this, await __classPrivateFieldGet(this, _Graph_client, "f").graph.roQuery(__classPrivateFieldGet(this, _Graph_name, "f"), query, options, true)); + } +} +_Graph_client = new WeakMap(), _Graph_name = new WeakMap(), _Graph_metadata = new WeakMap(), _Graph_setMetadataPromise = new WeakMap(), _Graph_instances = new WeakSet(), _Graph_updateMetadata = function _Graph_updateMetadata() { + __classPrivateFieldSet(this, _Graph_setMetadataPromise, __classPrivateFieldGet(this, _Graph_setMetadataPromise, "f") ?? __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_setMetadata).call(this) + .finally(() => __classPrivateFieldSet(this, _Graph_setMetadataPromise, undefined, "f")), "f"); + return __classPrivateFieldGet(this, _Graph_setMetadataPromise, "f"); +}, _Graph_setMetadata = +// DO NOT use directly, use #updateMetadata instead +async function _Graph_setMetadata() { + const [labels, relationshipTypes, propertyKeys] = await Promise.all([ + __classPrivateFieldGet(this, _Graph_client, "f").graph.roQuery(__classPrivateFieldGet(this, _Graph_name, "f"), 'CALL db.labels()'), + __classPrivateFieldGet(this, _Graph_client, "f").graph.roQuery(__classPrivateFieldGet(this, _Graph_name, "f"), 'CALL db.relationshipTypes()'), + __classPrivateFieldGet(this, _Graph_client, "f").graph.roQuery(__classPrivateFieldGet(this, _Graph_name, "f"), 'CALL db.propertyKeys()') + ]); + __classPrivateFieldSet(this, _Graph_metadata, { + labels: __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_cleanMetadataArray).call(this, labels.data), + relationshipTypes: __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_cleanMetadataArray).call(this, relationshipTypes.data), + propertyKeys: __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_cleanMetadataArray).call(this, propertyKeys.data) + }, "f"); + return __classPrivateFieldGet(this, _Graph_metadata, "f"); +}, _Graph_cleanMetadataArray = function 
_Graph_cleanMetadataArray(arr) { + return arr.map(([value]) => value); +}, _Graph_getMetadata = function _Graph_getMetadata(key, id) { + return __classPrivateFieldGet(this, _Graph_metadata, "f")?.[key][id] ?? __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_getMetadataAsync).call(this, key, id); +}, _Graph_getMetadataAsync = +// DO NOT use directly, use #getMetadata instead +async function _Graph_getMetadataAsync(key, id) { + const value = (await __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_updateMetadata).call(this))[key][id]; + if (value === undefined) + throw new Error(`Cannot find value from ${key}[${id}]`); + return value; +}, _Graph_parseReply = async function _Graph_parseReply(reply) { + if (!reply.data) + return reply; + const promises = [], parsed = { + metadata: reply.metadata, + data: reply.data.map((row) => { + const data = {}; + for (let i = 0; i < row.length; i++) { + data[reply.headers[i][1]] = __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_parseValue).call(this, row[i], promises); + } + return data; + }) + }; + if (promises.length) + await Promise.all(promises); + return parsed; +}, _Graph_parseValue = function _Graph_parseValue([valueType, value], promises) { + switch (valueType) { + case GraphValueTypes.NULL: + return null; + case GraphValueTypes.STRING: + case GraphValueTypes.INTEGER: + return value; + case GraphValueTypes.BOOLEAN: + return value === 'true'; + case GraphValueTypes.DOUBLE: + return parseFloat(value); + case GraphValueTypes.ARRAY: + return value.map(x => __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_parseValue).call(this, x, promises)); + case GraphValueTypes.EDGE: + return __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_parseEdge).call(this, value, promises); + case GraphValueTypes.NODE: + return __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_parseNode).call(this, value, promises); + case GraphValueTypes.PATH: + return { + nodes: value[0][1].map(([, node]) => 
__classPrivateFieldGet(this, _Graph_instances, "m", _Graph_parseNode).call(this, node, promises)), + edges: value[1][1].map(([, edge]) => __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_parseEdge).call(this, edge, promises)) + }; + case GraphValueTypes.MAP: + const map = {}; + for (let i = 0; i < value.length; i++) { + map[value[i++]] = __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_parseValue).call(this, value[i], promises); + } + return map; + case GraphValueTypes.POINT: + return { + latitude: parseFloat(value[0]), + longitude: parseFloat(value[1]) + }; + default: + throw new Error(`unknown scalar type: ${valueType}`); + } +}, _Graph_parseEdge = function _Graph_parseEdge([id, relationshipTypeId, sourceId, destinationId, properties], promises) { + const edge = { + id, + sourceId, + destinationId, + properties: __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_parseProperties).call(this, properties, promises) + }; + const relationshipType = __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_getMetadata).call(this, 'relationshipTypes', relationshipTypeId); + if (relationshipType instanceof Promise) { + promises.push(relationshipType.then(value => edge.relationshipType = value)); + } + else { + edge.relationshipType = relationshipType; + } + return edge; +}, _Graph_parseNode = function _Graph_parseNode([id, labelIds, properties], promises) { + const labels = new Array(labelIds.length); + for (let i = 0; i < labelIds.length; i++) { + const value = __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_getMetadata).call(this, 'labels', labelIds[i]); + if (value instanceof Promise) { + promises.push(value.then(value => labels[i] = value)); + } + else { + labels[i] = value; + } + } + return { + id, + labels, + properties: __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_parseProperties).call(this, properties, promises) + }; +}, _Graph_parseProperties = function _Graph_parseProperties(raw, promises) { + const parsed = 
{}; + for (const [id, type, value] of raw) { + const parsedValue = __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_parseValue).call(this, [type, value], promises), key = __classPrivateFieldGet(this, _Graph_instances, "m", _Graph_getMetadata).call(this, 'propertyKeys', id); + if (key instanceof Promise) { + promises.push(key.then(key => parsed[key] = parsedValue)); + } + else { + parsed[key] = parsedValue; + } + } + return parsed; +}; +exports.default = Graph; diff --git a/node_modules/@redis/graph/dist/index.d.ts b/node_modules/@redis/graph/dist/index.d.ts new file mode 100644 index 0000000..e9f15ab --- /dev/null +++ b/node_modules/@redis/graph/dist/index.d.ts @@ -0,0 +1,2 @@ +export { default } from './commands'; +export { default as Graph } from './graph'; diff --git a/node_modules/@redis/graph/dist/index.js b/node_modules/@redis/graph/dist/index.js new file mode 100644 index 0000000..3552eff --- /dev/null +++ b/node_modules/@redis/graph/dist/index.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Graph = exports.default = void 0; +var commands_1 = require("./commands"); +Object.defineProperty(exports, "default", { enumerable: true, get: function () { return commands_1.default; } }); +var graph_1 = require("./graph"); +Object.defineProperty(exports, "Graph", { enumerable: true, get: function () { return graph_1.default; } }); diff --git a/node_modules/@redis/graph/package.json b/node_modules/@redis/graph/package.json new file mode 100644 index 0000000..95cce6b --- /dev/null +++ b/node_modules/@redis/graph/package.json @@ -0,0 +1,41 @@ +{ + "name": "@redis/graph", + "version": "1.1.1", + "license": "MIT", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/" + ], + "scripts": { + "test": "nyc -r text-summary -r lcov mocha -r source-map-support/register -r ts-node/register './lib/**/*.spec.ts'", + "build": "tsc", + "documentation": "typedoc" + }, + "peerDependencies": 
{ + "@redis/client": "^1.0.0" + }, + "devDependencies": { + "@istanbuljs/nyc-config-typescript": "^1.0.2", + "@redis/test-utils": "*", + "@types/node": "^20.6.2", + "nyc": "^15.1.0", + "release-it": "^16.1.5", + "source-map-support": "^0.5.21", + "ts-node": "^10.9.1", + "typedoc": "^0.25.1", + "typescript": "^5.2.2" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis/tree/master/packages/graph", + "keywords": [ + "redis", + "RedisGraph" + ] +} diff --git a/node_modules/@redis/json/README.md b/node_modules/@redis/json/README.md new file mode 100644 index 0000000..e7f7017 --- /dev/null +++ b/node_modules/@redis/json/README.md @@ -0,0 +1,80 @@ +# @redis/json + +This package provides support for the [RedisJSON](https://redis.io/docs/stack/json/) module, which adds JSON as a native data type to Redis. It extends the [Node Redis client](https://github.com/redis/node-redis) to include functions for each of the RedisJSON commands. + +To use these extra commands, your Redis server must have the RedisJSON module installed. + +## Usage + +For a complete example, see [`managing-json.js`](https://github.com/redis/node-redis/blob/master/examples/managing-json.js) in the Node Redis examples folder. + +### Storing JSON Documents in Redis + +The [`JSON.SET`](https://redis.io/commands/json.set/) command stores a JSON value at a given JSON Path in a Redis key. + +Here, we'll store a JSON document in the root of the Redis key "`mydoc`": + +```javascript +import { createClient } from 'redis'; + +... 
+await client.json.set('noderedis:jsondata', '$', { + name: 'Roberta McDonald', + pets: [ + { + name: 'Rex', + species: 'dog', + age: 3, + isMammal: true + }, + { + name: 'Goldie', + species: 'fish', + age: 2, + isMammal: false + } + ] +}); +``` + +For more information about RedisJSON's path syntax, [check out the documentation](https://redis.io/docs/stack/json/path/). + +### Retrieving JSON Documents from Redis + +With RedisJSON, we can retrieve all or part(s) of a JSON document using the [`JSON.GET`](https://redis.io/commands/json.get/) command and one or more JSON Paths. Let's get the name and age of one of the pets: + +```javascript +const results = await client.json.get('noderedis:jsondata', { + path: [ + '.pets[1].name', + '.pets[1].age' + ] +}); +``` + +`results` will contain the following: + +```javascript + { '.pets[1].name': 'Goldie', '.pets[1].age': 2 } +``` + +### Performing Atomic Updates on JSON Documents Stored in Redis + +RedisJSON includes commands that can atomically update values in a JSON document, in place in Redis without having to first retrieve the entire document. 
+ +Using the [`JSON.NUMINCRBY`](https://redis.io/commands/json.numincrby/) command, we can update the age of one of the pets like this: + +```javascript +await client.json.numIncrBy('noderedis:jsondata', '.pets[1].age', 1); +``` + +And we can add a new object to the pets array with the [`JSON.ARRAPPEND`](https://redis.io/commands/json.arrappend/) command: + +```javascript +await client.json.arrAppend('noderedis:jsondata', '.pets', { + name: 'Robin', + species: 'bird', + age: 1, + isMammal: false +}); +``` diff --git a/node_modules/@redis/json/dist/commands/ARRAPPEND.d.ts b/node_modules/@redis/json/dist/commands/ARRAPPEND.d.ts new file mode 100644 index 0000000..c13272a --- /dev/null +++ b/node_modules/@redis/json/dist/commands/ARRAPPEND.d.ts @@ -0,0 +1,4 @@ +import { RedisJSON } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path: string, ...jsons: Array): Array; +export declare function transformReply(): number | Array; diff --git a/node_modules/@redis/json/dist/commands/ARRAPPEND.js b/node_modules/@redis/json/dist/commands/ARRAPPEND.js new file mode 100644 index 0000000..cc178b6 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/ARRAPPEND.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path, ...jsons) { + const args = ['JSON.ARRAPPEND', key, path]; + for (const json of jsons) { + args.push((0, _1.transformRedisJsonArgument)(json)); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/ARRINDEX.d.ts b/node_modules/@redis/json/dist/commands/ARRINDEX.d.ts new file mode 100644 index 0000000..bc13f45 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/ARRINDEX.d.ts @@ -0,0 +1,5 @@ +import { RedisJSON } from '.'; +export 
declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, path: string, json: RedisJSON, start?: number, stop?: number): Array; +export declare function transformReply(): number | Array; diff --git a/node_modules/@redis/json/dist/commands/ARRINDEX.js b/node_modules/@redis/json/dist/commands/ARRINDEX.js new file mode 100644 index 0000000..95c9cf1 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/ARRINDEX.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, path, json, start, stop) { + const args = ['JSON.ARRINDEX', key, path, (0, _1.transformRedisJsonArgument)(json)]; + if (start !== undefined && start !== null) { + args.push(start.toString()); + if (stop !== undefined && stop !== null) { + args.push(stop.toString()); + } + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/ARRINSERT.d.ts b/node_modules/@redis/json/dist/commands/ARRINSERT.d.ts new file mode 100644 index 0000000..19f2adb --- /dev/null +++ b/node_modules/@redis/json/dist/commands/ARRINSERT.d.ts @@ -0,0 +1,4 @@ +import { RedisJSON } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path: string, index: number, ...jsons: Array): Array; +export declare function transformReply(): number | Array; diff --git a/node_modules/@redis/json/dist/commands/ARRINSERT.js b/node_modules/@redis/json/dist/commands/ARRINSERT.js new file mode 100644 index 0000000..540c481 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/ARRINSERT.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); 
+exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path, index, ...jsons) { + const args = ['JSON.ARRINSERT', key, path, index.toString()]; + for (const json of jsons) { + args.push((0, _1.transformRedisJsonArgument)(json)); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/ARRLEN.d.ts b/node_modules/@redis/json/dist/commands/ARRLEN.d.ts new file mode 100644 index 0000000..f78020a --- /dev/null +++ b/node_modules/@redis/json/dist/commands/ARRLEN.d.ts @@ -0,0 +1,4 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, path?: string): Array; +export declare function transformReply(): number | Array; diff --git a/node_modules/@redis/json/dist/commands/ARRLEN.js b/node_modules/@redis/json/dist/commands/ARRLEN.js new file mode 100644 index 0000000..30a50cf --- /dev/null +++ b/node_modules/@redis/json/dist/commands/ARRLEN.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, path) { + const args = ['JSON.ARRLEN', key]; + if (path) { + args.push(path); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/ARRPOP.d.ts b/node_modules/@redis/json/dist/commands/ARRPOP.d.ts new file mode 100644 index 0000000..ce9bca5 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/ARRPOP.d.ts @@ -0,0 +1,4 @@ +import { RedisJSON } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path?: string, index?: number): Array; +export declare function transformReply(reply: null | 
string | Array): null | RedisJSON | Array; diff --git a/node_modules/@redis/json/dist/commands/ARRPOP.js b/node_modules/@redis/json/dist/commands/ARRPOP.js new file mode 100644 index 0000000..2be6472 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/ARRPOP.js @@ -0,0 +1,25 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path, index) { + const args = ['JSON.ARRPOP', key]; + if (path) { + args.push(path); + if (index !== undefined && index !== null) { + args.push(index.toString()); + } + } + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + if (reply === null) + return null; + if (Array.isArray(reply)) { + return reply.map(_1.transformRedisJsonNullReply); + } + return (0, _1.transformRedisJsonNullReply)(reply); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/json/dist/commands/ARRTRIM.d.ts b/node_modules/@redis/json/dist/commands/ARRTRIM.d.ts new file mode 100644 index 0000000..ac7d469 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/ARRTRIM.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path: string, start: number, stop: number): Array; +export declare function transformReply(): number | Array; diff --git a/node_modules/@redis/json/dist/commands/ARRTRIM.js b/node_modules/@redis/json/dist/commands/ARRTRIM.js new file mode 100644 index 0000000..287a02c --- /dev/null +++ b/node_modules/@redis/json/dist/commands/ARRTRIM.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path, start, stop) { + return ['JSON.ARRTRIM', 
key, path, start.toString(), stop.toString()]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/DEBUG_MEMORY.d.ts b/node_modules/@redis/json/dist/commands/DEBUG_MEMORY.d.ts new file mode 100644 index 0000000..d9fa8ce --- /dev/null +++ b/node_modules/@redis/json/dist/commands/DEBUG_MEMORY.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 2; +export declare function transformArguments(key: string, path?: string): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/json/dist/commands/DEBUG_MEMORY.js b/node_modules/@redis/json/dist/commands/DEBUG_MEMORY.js new file mode 100644 index 0000000..3382bde --- /dev/null +++ b/node_modules/@redis/json/dist/commands/DEBUG_MEMORY.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 2; +function transformArguments(key, path) { + const args = ['JSON.DEBUG', 'MEMORY', key]; + if (path) { + args.push(path); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/DEL.d.ts b/node_modules/@redis/json/dist/commands/DEL.d.ts new file mode 100644 index 0000000..6526825 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/DEL.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path?: string): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/json/dist/commands/DEL.js b/node_modules/@redis/json/dist/commands/DEL.js new file mode 100644 index 0000000..4866aa8 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/DEL.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function 
transformArguments(key, path) { + const args = ['JSON.DEL', key]; + if (path) { + args.push(path); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/FORGET.d.ts b/node_modules/@redis/json/dist/commands/FORGET.d.ts new file mode 100644 index 0000000..6526825 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/FORGET.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path?: string): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/json/dist/commands/FORGET.js b/node_modules/@redis/json/dist/commands/FORGET.js new file mode 100644 index 0000000..e48d301 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/FORGET.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path) { + const args = ['JSON.FORGET', key]; + if (path) { + args.push(path); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/GET.d.ts b/node_modules/@redis/json/dist/commands/GET.d.ts new file mode 100644 index 0000000..8dcce9a --- /dev/null +++ b/node_modules/@redis/json/dist/commands/GET.d.ts @@ -0,0 +1,12 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +interface GetOptions { + path?: string | Array; + INDENT?: string; + NEWLINE?: string; + SPACE?: string; + NOESCAPE?: true; +} +export declare function transformArguments(key: string, options?: GetOptions): RedisCommandArguments; +export { transformRedisJsonNullReply as transformReply } from '.'; diff --git a/node_modules/@redis/json/dist/commands/GET.js b/node_modules/@redis/json/dist/commands/GET.js 
new file mode 100644 index 0000000..2915ad1 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/GET.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, options) { + let args = ['JSON.GET', key]; + if (options?.path) { + args = (0, generic_transformers_1.pushVerdictArguments)(args, options.path); + } + if (options?.INDENT) { + args.push('INDENT', options.INDENT); + } + if (options?.NEWLINE) { + args.push('NEWLINE', options.NEWLINE); + } + if (options?.SPACE) { + args.push('SPACE', options.SPACE); + } + if (options?.NOESCAPE) { + args.push('NOESCAPE'); + } + return args; +} +exports.transformArguments = transformArguments; +var _1 = require("."); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _1.transformRedisJsonNullReply; } }); diff --git a/node_modules/@redis/json/dist/commands/MERGE.d.ts b/node_modules/@redis/json/dist/commands/MERGE.d.ts new file mode 100644 index 0000000..b4974da --- /dev/null +++ b/node_modules/@redis/json/dist/commands/MERGE.d.ts @@ -0,0 +1,4 @@ +import { RedisJSON } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path: string, json: RedisJSON): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/json/dist/commands/MERGE.js b/node_modules/@redis/json/dist/commands/MERGE.js new file mode 100644 index 0000000..ccfbfa7 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/MERGE.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const 
_1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path, json) { + return ['JSON.MERGE', key, path, (0, _1.transformRedisJsonArgument)(json)]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/MGET.d.ts b/node_modules/@redis/json/dist/commands/MGET.d.ts new file mode 100644 index 0000000..431d38c --- /dev/null +++ b/node_modules/@redis/json/dist/commands/MGET.d.ts @@ -0,0 +1,5 @@ +import { RedisJSON } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(keys: Array, path: string): Array; +export declare function transformReply(reply: Array): Array; diff --git a/node_modules/@redis/json/dist/commands/MGET.js b/node_modules/@redis/json/dist/commands/MGET.js new file mode 100644 index 0000000..ff5b53f --- /dev/null +++ b/node_modules/@redis/json/dist/commands/MGET.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(keys, path) { + return [ + 'JSON.MGET', + ...keys, + path + ]; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply.map(_1.transformRedisJsonNullReply); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/json/dist/commands/MSET.d.ts b/node_modules/@redis/json/dist/commands/MSET.d.ts new file mode 100644 index 0000000..8ae7758 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/MSET.d.ts @@ -0,0 +1,11 @@ +import { RedisJSON } from '.'; +import { RedisCommandArgument } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +interface JsonMSetItem { + key: RedisCommandArgument; + path: RedisCommandArgument; + 
value: RedisJSON; +} +export declare function transformArguments(items: Array): Array; +export declare function transformReply(): 'OK'; +export {}; diff --git a/node_modules/@redis/json/dist/commands/MSET.js b/node_modules/@redis/json/dist/commands/MSET.js new file mode 100644 index 0000000..1af9afa --- /dev/null +++ b/node_modules/@redis/json/dist/commands/MSET.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(items) { + const args = new Array(1 + items.length * 3); + args[0] = 'JSON.MSET'; + let argsIndex = 1; + for (let i = 0; i < items.length; i++) { + const item = items[i]; + args[argsIndex++] = item.key; + args[argsIndex++] = item.path; + args[argsIndex++] = (0, _1.transformRedisJsonArgument)(item.value); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/NUMINCRBY.d.ts b/node_modules/@redis/json/dist/commands/NUMINCRBY.d.ts new file mode 100644 index 0000000..3cac586 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/NUMINCRBY.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path: string, by: number): Array; +export { transformNumbersReply as transformReply } from '.'; diff --git a/node_modules/@redis/json/dist/commands/NUMINCRBY.js b/node_modules/@redis/json/dist/commands/NUMINCRBY.js new file mode 100644 index 0000000..a0f9100 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/NUMINCRBY.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path, by) { + return ['JSON.NUMINCRBY', key, path, by.toString()]; +} 
+exports.transformArguments = transformArguments; +var _1 = require("."); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _1.transformNumbersReply; } }); diff --git a/node_modules/@redis/json/dist/commands/NUMMULTBY.d.ts b/node_modules/@redis/json/dist/commands/NUMMULTBY.d.ts new file mode 100644 index 0000000..3cac586 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/NUMMULTBY.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path: string, by: number): Array; +export { transformNumbersReply as transformReply } from '.'; diff --git a/node_modules/@redis/json/dist/commands/NUMMULTBY.js b/node_modules/@redis/json/dist/commands/NUMMULTBY.js new file mode 100644 index 0000000..64de23f --- /dev/null +++ b/node_modules/@redis/json/dist/commands/NUMMULTBY.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path, by) { + return ['JSON.NUMMULTBY', key, path, by.toString()]; +} +exports.transformArguments = transformArguments; +var _1 = require("."); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _1.transformNumbersReply; } }); diff --git a/node_modules/@redis/json/dist/commands/OBJKEYS.d.ts b/node_modules/@redis/json/dist/commands/OBJKEYS.d.ts new file mode 100644 index 0000000..b66d5de --- /dev/null +++ b/node_modules/@redis/json/dist/commands/OBJKEYS.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path?: string): Array; +export declare function transformReply(): Array | null | Array | null>; diff --git a/node_modules/@redis/json/dist/commands/OBJKEYS.js b/node_modules/@redis/json/dist/commands/OBJKEYS.js new file mode 100644 index 
0000000..fa936f2 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/OBJKEYS.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path) { + const args = ['JSON.OBJKEYS', key]; + if (path) { + args.push(path); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/OBJLEN.d.ts b/node_modules/@redis/json/dist/commands/OBJLEN.d.ts new file mode 100644 index 0000000..915b6e1 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/OBJLEN.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path?: string): Array; +export declare function transformReply(): number | null | Array; diff --git a/node_modules/@redis/json/dist/commands/OBJLEN.js b/node_modules/@redis/json/dist/commands/OBJLEN.js new file mode 100644 index 0000000..07c0962 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/OBJLEN.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path) { + const args = ['JSON.OBJLEN', key]; + if (path) { + args.push(path); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/RESP.d.ts b/node_modules/@redis/json/dist/commands/RESP.d.ts new file mode 100644 index 0000000..d1ba697 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/RESP.d.ts @@ -0,0 +1,5 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path?: string): Array; +type RESPReply = Array; +export declare function transformReply(): RESPReply; +export {}; diff --git 
a/node_modules/@redis/json/dist/commands/RESP.js b/node_modules/@redis/json/dist/commands/RESP.js new file mode 100644 index 0000000..76fc64b --- /dev/null +++ b/node_modules/@redis/json/dist/commands/RESP.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path) { + const args = ['JSON.RESP', key]; + if (path) { + args.push(path); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/SET.d.ts b/node_modules/@redis/json/dist/commands/SET.d.ts new file mode 100644 index 0000000..9acba66 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/SET.d.ts @@ -0,0 +1,11 @@ +import { RedisJSON } from '.'; +export declare const FIRST_KEY_INDEX = 1; +interface NX { + NX: true; +} +interface XX { + XX: true; +} +export declare function transformArguments(key: string, path: string, json: RedisJSON, options?: NX | XX): Array; +export declare function transformReply(): 'OK' | null; +export {}; diff --git a/node_modules/@redis/json/dist/commands/SET.js b/node_modules/@redis/json/dist/commands/SET.js new file mode 100644 index 0000000..d7a6988 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/SET.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path, json, options) { + const args = ['JSON.SET', key, path, (0, _1.transformRedisJsonArgument)(json)]; + if (options?.NX) { + args.push('NX'); + } + else if (options?.XX) { + args.push('XX'); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/STRAPPEND.d.ts b/node_modules/@redis/json/dist/commands/STRAPPEND.d.ts 
new file mode 100644 index 0000000..0983914 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/STRAPPEND.d.ts @@ -0,0 +1,6 @@ +export declare const FIRST_KEY_INDEX = 1; +type AppendArguments = [key: string, append: string]; +type AppendWithPathArguments = [key: string, path: string, append: string]; +export declare function transformArguments(...[key, pathOrAppend, append]: AppendArguments | AppendWithPathArguments): Array; +export declare function transformReply(): number | Array; +export {}; diff --git a/node_modules/@redis/json/dist/commands/STRAPPEND.js b/node_modules/@redis/json/dist/commands/STRAPPEND.js new file mode 100644 index 0000000..b5c6736 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/STRAPPEND.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(...[key, pathOrAppend, append]) { + const args = ['JSON.STRAPPEND', key]; + if (append !== undefined && append !== null) { + args.push(pathOrAppend, (0, _1.transformRedisJsonArgument)(append)); + } + else { + args.push((0, _1.transformRedisJsonArgument)(pathOrAppend)); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/STRLEN.d.ts b/node_modules/@redis/json/dist/commands/STRLEN.d.ts new file mode 100644 index 0000000..a27960d --- /dev/null +++ b/node_modules/@redis/json/dist/commands/STRLEN.d.ts @@ -0,0 +1,4 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, path?: string): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/json/dist/commands/STRLEN.js b/node_modules/@redis/json/dist/commands/STRLEN.js new file mode 100644 index 0000000..599a793 --- /dev/null +++ 
b/node_modules/@redis/json/dist/commands/STRLEN.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, path) { + const args = ['JSON.STRLEN', key]; + if (path) { + args.push(path); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/TYPE.d.ts b/node_modules/@redis/json/dist/commands/TYPE.d.ts new file mode 100644 index 0000000..f9bc3f4 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/TYPE.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, path?: string): Array; +export declare function transformReply(): string | null | Array; diff --git a/node_modules/@redis/json/dist/commands/TYPE.js b/node_modules/@redis/json/dist/commands/TYPE.js new file mode 100644 index 0000000..7a4bc8a --- /dev/null +++ b/node_modules/@redis/json/dist/commands/TYPE.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, path) { + const args = ['JSON.TYPE', key]; + if (path) { + args.push(path); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/json/dist/commands/index.d.ts b/node_modules/@redis/json/dist/commands/index.d.ts new file mode 100644 index 0000000..ac894b9 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/index.d.ts @@ -0,0 +1,80 @@ +import * as ARRAPPEND from './ARRAPPEND'; +import * as ARRINDEX from './ARRINDEX'; +import * as ARRINSERT from './ARRINSERT'; +import * as ARRLEN from './ARRLEN'; +import * as ARRPOP from './ARRPOP'; +import * as ARRTRIM from './ARRTRIM'; +import * as 
DEBUG_MEMORY from './DEBUG_MEMORY'; +import * as DEL from './DEL'; +import * as FORGET from './FORGET'; +import * as GET from './GET'; +import * as MERGE from './MERGE'; +import * as MGET from './MGET'; +import * as MSET from './MSET'; +import * as NUMINCRBY from './NUMINCRBY'; +import * as NUMMULTBY from './NUMMULTBY'; +import * as OBJKEYS from './OBJKEYS'; +import * as OBJLEN from './OBJLEN'; +import * as RESP from './RESP'; +import * as SET from './SET'; +import * as STRAPPEND from './STRAPPEND'; +import * as STRLEN from './STRLEN'; +import * as TYPE from './TYPE'; +declare const _default: { + ARRAPPEND: typeof ARRAPPEND; + arrAppend: typeof ARRAPPEND; + ARRINDEX: typeof ARRINDEX; + arrIndex: typeof ARRINDEX; + ARRINSERT: typeof ARRINSERT; + arrInsert: typeof ARRINSERT; + ARRLEN: typeof ARRLEN; + arrLen: typeof ARRLEN; + ARRPOP: typeof ARRPOP; + arrPop: typeof ARRPOP; + ARRTRIM: typeof ARRTRIM; + arrTrim: typeof ARRTRIM; + DEBUG_MEMORY: typeof DEBUG_MEMORY; + debugMemory: typeof DEBUG_MEMORY; + DEL: typeof DEL; + del: typeof DEL; + FORGET: typeof FORGET; + forget: typeof FORGET; + GET: typeof GET; + get: typeof GET; + MERGE: typeof MERGE; + merge: typeof MERGE; + MGET: typeof MGET; + mGet: typeof MGET; + MSET: typeof MSET; + mSet: typeof MSET; + NUMINCRBY: typeof NUMINCRBY; + numIncrBy: typeof NUMINCRBY; + NUMMULTBY: typeof NUMMULTBY; + numMultBy: typeof NUMMULTBY; + OBJKEYS: typeof OBJKEYS; + objKeys: typeof OBJKEYS; + OBJLEN: typeof OBJLEN; + objLen: typeof OBJLEN; + RESP: typeof RESP; + resp: typeof RESP; + SET: typeof SET; + set: typeof SET; + STRAPPEND: typeof STRAPPEND; + strAppend: typeof STRAPPEND; + STRLEN: typeof STRLEN; + strLen: typeof STRLEN; + TYPE: typeof TYPE; + type: typeof TYPE; +}; +export default _default; +interface RedisJSONArray extends Array { +} +interface RedisJSONObject { + [key: string]: RedisJSON; + [key: number]: RedisJSON; +} +export type RedisJSON = null | boolean | number | string | Date | RedisJSONArray | RedisJSONObject; 
+export declare function transformRedisJsonArgument(json: RedisJSON): string; +export declare function transformRedisJsonReply(json: string): RedisJSON; +export declare function transformRedisJsonNullReply(json: string | null): RedisJSON | null; +export declare function transformNumbersReply(reply: string): number | Array; diff --git a/node_modules/@redis/json/dist/commands/index.js b/node_modules/@redis/json/dist/commands/index.js new file mode 100644 index 0000000..94c4e17 --- /dev/null +++ b/node_modules/@redis/json/dist/commands/index.js @@ -0,0 +1,89 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformNumbersReply = exports.transformRedisJsonNullReply = exports.transformRedisJsonReply = exports.transformRedisJsonArgument = void 0; +const ARRAPPEND = require("./ARRAPPEND"); +const ARRINDEX = require("./ARRINDEX"); +const ARRINSERT = require("./ARRINSERT"); +const ARRLEN = require("./ARRLEN"); +const ARRPOP = require("./ARRPOP"); +const ARRTRIM = require("./ARRTRIM"); +const DEBUG_MEMORY = require("./DEBUG_MEMORY"); +const DEL = require("./DEL"); +const FORGET = require("./FORGET"); +const GET = require("./GET"); +const MERGE = require("./MERGE"); +const MGET = require("./MGET"); +const MSET = require("./MSET"); +const NUMINCRBY = require("./NUMINCRBY"); +const NUMMULTBY = require("./NUMMULTBY"); +const OBJKEYS = require("./OBJKEYS"); +const OBJLEN = require("./OBJLEN"); +const RESP = require("./RESP"); +const SET = require("./SET"); +const STRAPPEND = require("./STRAPPEND"); +const STRLEN = require("./STRLEN"); +const TYPE = require("./TYPE"); +exports.default = { + ARRAPPEND, + arrAppend: ARRAPPEND, + ARRINDEX, + arrIndex: ARRINDEX, + ARRINSERT, + arrInsert: ARRINSERT, + ARRLEN, + arrLen: ARRLEN, + ARRPOP, + arrPop: ARRPOP, + ARRTRIM, + arrTrim: ARRTRIM, + DEBUG_MEMORY, + debugMemory: DEBUG_MEMORY, + DEL, + del: DEL, + FORGET, + forget: FORGET, + GET, + get: GET, + MERGE, + merge: MERGE, + MGET, + mGet: MGET, + 
MSET, + mSet: MSET, + NUMINCRBY, + numIncrBy: NUMINCRBY, + NUMMULTBY, + numMultBy: NUMMULTBY, + OBJKEYS, + objKeys: OBJKEYS, + OBJLEN, + objLen: OBJLEN, + RESP, + resp: RESP, + SET, + set: SET, + STRAPPEND, + strAppend: STRAPPEND, + STRLEN, + strLen: STRLEN, + TYPE, + type: TYPE +}; +function transformRedisJsonArgument(json) { + return JSON.stringify(json); +} +exports.transformRedisJsonArgument = transformRedisJsonArgument; +function transformRedisJsonReply(json) { + return JSON.parse(json); +} +exports.transformRedisJsonReply = transformRedisJsonReply; +function transformRedisJsonNullReply(json) { + if (json === null) + return null; + return transformRedisJsonReply(json); +} +exports.transformRedisJsonNullReply = transformRedisJsonNullReply; +function transformNumbersReply(reply) { + return JSON.parse(reply); +} +exports.transformNumbersReply = transformNumbersReply; diff --git a/node_modules/@redis/json/dist/index.d.ts b/node_modules/@redis/json/dist/index.d.ts new file mode 100644 index 0000000..bc0e103 --- /dev/null +++ b/node_modules/@redis/json/dist/index.d.ts @@ -0,0 +1 @@ +export { default } from './commands'; diff --git a/node_modules/@redis/json/dist/index.js b/node_modules/@redis/json/dist/index.js new file mode 100644 index 0000000..992c58d --- /dev/null +++ b/node_modules/@redis/json/dist/index.js @@ -0,0 +1,5 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = void 0; +var commands_1 = require("./commands"); +Object.defineProperty(exports, "default", { enumerable: true, get: function () { return commands_1.default; } }); diff --git a/node_modules/@redis/json/package.json b/node_modules/@redis/json/package.json new file mode 100644 index 0000000..ad60cc1 --- /dev/null +++ b/node_modules/@redis/json/package.json @@ -0,0 +1,41 @@ +{ + "name": "@redis/json", + "version": "1.0.7", + "license": "MIT", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/" + ], + "scripts": { + 
"test": "nyc -r text-summary -r lcov mocha -r source-map-support/register -r ts-node/register './lib/**/*.spec.ts'", + "build": "tsc", + "documentation": "typedoc" + }, + "peerDependencies": { + "@redis/client": "^1.0.0" + }, + "devDependencies": { + "@istanbuljs/nyc-config-typescript": "^1.0.2", + "@redis/test-utils": "*", + "@types/node": "^20.6.2", + "nyc": "^15.1.0", + "release-it": "^16.1.5", + "source-map-support": "^0.5.21", + "ts-node": "^10.9.1", + "typedoc": "^0.25.1", + "typescript": "^5.2.2" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis/tree/master/packages/json", + "keywords": [ + "redis", + "RedisJSON" + ] +} diff --git a/node_modules/@redis/search/README.md b/node_modules/@redis/search/README.md new file mode 100644 index 0000000..60186ba --- /dev/null +++ b/node_modules/@redis/search/README.md @@ -0,0 +1,119 @@ +# @redis/search + +This package provides support for the [RediSearch](https://redisearch.io) module, which adds indexing and querying support for data stored in Redis Hashes or as JSON documents with the RedisJSON module. It extends the [Node Redis client](https://github.com/redis/node-redis) to include functions for each of the RediSearch commands. + +To use these extra commands, your Redis server must have the RediSearch module installed. To index and query JSON documents, you'll also need to add the RedisJSON module. + +## Usage + +For complete examples, see [`search-hashes.js`](https://github.com/redis/node-redis/blob/master/examples/search-hashes.js) and [`search-json.js`](https://github.com/redis/node-redis/blob/master/examples/search-json.js) in the Node Redis examples folder. 
+ +### Indexing and Querying Data in Redis Hashes + +#### Creating an Index + +Before we can perform any searches, we need to tell RediSearch how to index our data, and which Redis keys to find that data in. The [FT.CREATE](https://redis.io/commands/ft.create) command creates a RediSearch index. Here's how to use it to create an index we'll call `idx:animals` where we want to index hashes containing `name`, `species` and `age` fields, and whose key names in Redis begin with the prefix `noderedis:animals`: + +```javascript +await client.ft.create('idx:animals', { + name: { + type: SchemaFieldTypes.TEXT, + SORTABLE: true + }, + species: SchemaFieldTypes.TAG, + age: SchemaFieldTypes.NUMERIC +}, { + ON: 'HASH', + PREFIX: 'noderedis:animals' +}); +``` + +See the [`FT.CREATE` documentation](https://redis.io/commands/ft.create/#description) for information about the different field types and additional options. + +#### Querying the Index + +Once we've created an index, and added some data to Redis hashes whose keys begin with the prefix `noderedis:animals`, we can start writing some search queries. RediSearch supports a rich query syntax for full-text search, faceted search, aggregation and more. Check out the [`FT.SEARCH` documentation](https://redis.io/commands/ft.search) and the [query syntax reference](https://redis.io/docs/interact/search-and-query/query) for more information. 
+ +Let's write a query to find all the animals where the `species` field has the value `dog`: + +```javascript +const results = await client.ft.search('idx:animals', '@species:{dog}'); +``` + +`results` looks like this: + +```javascript +{ + total: 2, + documents: [ + { + id: 'noderedis:animals:4', + value: { + name: 'Fido', + species: 'dog', + age: '7' + } + }, + { + id: 'noderedis:animals:3', + value: { + name: 'Rover', + species: 'dog', + age: '9' + } + } + ] +} +``` + +### Indexing and Querying Data with RedisJSON + +RediSearch can also index and query JSON documents stored in Redis using the RedisJSON module. The approach is similar to that for indexing and searching data in hashes, but we can now use JSON Path like syntax and the data no longer has to be flat name/value pairs - it can contain nested objects and arrays. + +#### Creating an Index + +As before, we create an index with the `FT.CREATE` command, this time specifying we want to index JSON documents that look like this: + +```javascript +{ + name: 'Alice', + age: 32, + coins: 100 +} +``` + +Each document represents a user in some system, and users have name, age and coins properties. + +One way we might choose to index these documents is as follows: + +```javascript +await client.ft.create('idx:users', { + '$.name': { + type: SchemaFieldTypes.TEXT, + SORTABLE: 'UNF' + }, + '$.age': { + type: SchemaFieldTypes.NUMERIC, + AS: 'age' + }, + '$.coins': { + type: SchemaFieldTypes.NUMERIC, + AS: 'coins' + } +}, { + ON: 'JSON', + PREFIX: 'noderedis:users' +}); +``` + +Note that we're using JSON Path to specify where the fields to index are in our JSON documents, and the `AS` clause to define a name/alias for each field. We'll use these when writing queries. 
+ +#### Querying the Index + +Now we have an index and some data stored as JSON documents in Redis (see the [JSON package documentation](https://github.com/redis/node-redis/tree/master/packages/json) for examples of how to store JSON), we can write some queries... + +We'll use the [RediSearch query language](https://redis.io/docs/interact/search-and-query/query) and [`FT.SEARCH`](https://redis.io/commands/ft.search) command. Here's a query to find users under the age of 30: + +```javascript +await client.ft.search('idx:users', '@age:[0 30]'); +``` diff --git a/node_modules/@redis/search/dist/commands/AGGREGATE.d.ts b/node_modules/@redis/search/dist/commands/AGGREGATE.d.ts new file mode 100644 index 0000000..a390ad9 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/AGGREGATE.d.ts @@ -0,0 +1,118 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { Params, PropertyName, SortByProperty } from '.'; +export declare enum AggregateSteps { + GROUPBY = "GROUPBY", + SORTBY = "SORTBY", + APPLY = "APPLY", + LIMIT = "LIMIT", + FILTER = "FILTER" +} +interface AggregateStep { + type: T; +} +export declare enum AggregateGroupByReducers { + COUNT = "COUNT", + COUNT_DISTINCT = "COUNT_DISTINCT", + COUNT_DISTINCTISH = "COUNT_DISTINCTISH", + SUM = "SUM", + MIN = "MIN", + MAX = "MAX", + AVG = "AVG", + STDDEV = "STDDEV", + QUANTILE = "QUANTILE", + TOLIST = "TOLIST", + TO_LIST = "TOLIST", + FIRST_VALUE = "FIRST_VALUE", + RANDOM_SAMPLE = "RANDOM_SAMPLE" +} +interface GroupByReducer { + type: T; + AS?: string; +} +type CountReducer = GroupByReducer; +interface CountDistinctReducer extends GroupByReducer { + property: PropertyName; +} +interface CountDistinctishReducer extends GroupByReducer { + property: PropertyName; +} +interface SumReducer extends GroupByReducer { + property: PropertyName; +} +interface MinReducer extends GroupByReducer { + property: PropertyName; +} +interface MaxReducer extends GroupByReducer { + 
property: PropertyName; +} +interface AvgReducer extends GroupByReducer { + property: PropertyName; +} +interface StdDevReducer extends GroupByReducer { + property: PropertyName; +} +interface QuantileReducer extends GroupByReducer { + property: PropertyName; + quantile: number; +} +interface ToListReducer extends GroupByReducer { + property: PropertyName; +} +interface FirstValueReducer extends GroupByReducer { + property: PropertyName; + BY?: PropertyName | { + property: PropertyName; + direction?: 'ASC' | 'DESC'; + }; +} +interface RandomSampleReducer extends GroupByReducer { + property: PropertyName; + sampleSize: number; +} +type GroupByReducers = CountReducer | CountDistinctReducer | CountDistinctishReducer | SumReducer | MinReducer | MaxReducer | AvgReducer | StdDevReducer | QuantileReducer | ToListReducer | FirstValueReducer | RandomSampleReducer; +interface GroupByStep extends AggregateStep { + properties?: PropertyName | Array; + REDUCE: GroupByReducers | Array; +} +interface SortStep extends AggregateStep { + BY: SortByProperty | Array; + MAX?: number; +} +interface ApplyStep extends AggregateStep { + expression: string; + AS: string; +} +interface LimitStep extends AggregateStep { + from: number; + size: number; +} +interface FilterStep extends AggregateStep { + expression: string; +} +type LoadField = PropertyName | { + identifier: PropertyName; + AS?: string; +}; +export interface AggregateOptions { + VERBATIM?: boolean; + ADDSCORES?: boolean; + LOAD?: LoadField | Array; + STEPS?: Array; + PARAMS?: Params; + DIALECT?: number; + TIMEOUT?: number; +} +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(index: string, query: string, options?: AggregateOptions): RedisCommandArguments; +export declare function pushAggregatehOptions(args: RedisCommandArguments, options?: AggregateOptions): RedisCommandArguments; +export type AggregateRawReply = [ + total: number, + ...results: 
Array> +]; +export interface AggregateReply { + total: number; + results: Array>; +} +export declare function transformReply(rawReply: AggregateRawReply): AggregateReply; +export {}; diff --git a/node_modules/@redis/search/dist/commands/AGGREGATE.js b/node_modules/@redis/search/dist/commands/AGGREGATE.js new file mode 100644 index 0000000..58d42b4 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/AGGREGATE.js @@ -0,0 +1,170 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.pushAggregatehOptions = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = exports.AggregateGroupByReducers = exports.AggregateSteps = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +const _1 = require("."); +var AggregateSteps; +(function (AggregateSteps) { + AggregateSteps["GROUPBY"] = "GROUPBY"; + AggregateSteps["SORTBY"] = "SORTBY"; + AggregateSteps["APPLY"] = "APPLY"; + AggregateSteps["LIMIT"] = "LIMIT"; + AggregateSteps["FILTER"] = "FILTER"; +})(AggregateSteps || (exports.AggregateSteps = AggregateSteps = {})); +var AggregateGroupByReducers; +(function (AggregateGroupByReducers) { + AggregateGroupByReducers["COUNT"] = "COUNT"; + AggregateGroupByReducers["COUNT_DISTINCT"] = "COUNT_DISTINCT"; + AggregateGroupByReducers["COUNT_DISTINCTISH"] = "COUNT_DISTINCTISH"; + AggregateGroupByReducers["SUM"] = "SUM"; + AggregateGroupByReducers["MIN"] = "MIN"; + AggregateGroupByReducers["MAX"] = "MAX"; + AggregateGroupByReducers["AVG"] = "AVG"; + AggregateGroupByReducers["STDDEV"] = "STDDEV"; + AggregateGroupByReducers["QUANTILE"] = "QUANTILE"; + AggregateGroupByReducers["TOLIST"] = "TOLIST"; + AggregateGroupByReducers["TO_LIST"] = "TOLIST"; + AggregateGroupByReducers["FIRST_VALUE"] = "FIRST_VALUE"; + AggregateGroupByReducers["RANDOM_SAMPLE"] = "RANDOM_SAMPLE"; +})(AggregateGroupByReducers || (exports.AggregateGroupByReducers = 
AggregateGroupByReducers = {})); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(index, query, options) { + return pushAggregatehOptions(['FT.AGGREGATE', index, query], options); +} +exports.transformArguments = transformArguments; +function pushAggregatehOptions(args, options) { + if (options?.VERBATIM) { + args.push('VERBATIM'); + } + if (options?.ADDSCORES) { + args.push('ADDSCORES'); + } + if (options?.LOAD) { + args.push('LOAD'); + (0, _1.pushArgumentsWithLength)(args, () => { + if (Array.isArray(options.LOAD)) { + for (const load of options.LOAD) { + pushLoadField(args, load); + } + } + else { + pushLoadField(args, options.LOAD); + } + }); + } + if (options?.STEPS) { + for (const step of options.STEPS) { + switch (step.type) { + case AggregateSteps.GROUPBY: + args.push('GROUPBY'); + if (!step.properties) { + args.push('0'); + } + else { + (0, generic_transformers_1.pushVerdictArgument)(args, step.properties); + } + if (Array.isArray(step.REDUCE)) { + for (const reducer of step.REDUCE) { + pushGroupByReducer(args, reducer); + } + } + else { + pushGroupByReducer(args, step.REDUCE); + } + break; + case AggregateSteps.SORTBY: + (0, _1.pushSortByArguments)(args, 'SORTBY', step.BY); + if (step.MAX) { + args.push('MAX', step.MAX.toString()); + } + break; + case AggregateSteps.APPLY: + args.push('APPLY', step.expression, 'AS', step.AS); + break; + case AggregateSteps.LIMIT: + args.push('LIMIT', step.from.toString(), step.size.toString()); + break; + case AggregateSteps.FILTER: + args.push('FILTER', step.expression); + break; + } + } + } + (0, _1.pushParamsArgs)(args, options?.PARAMS); + if (options?.DIALECT) { + args.push('DIALECT', options.DIALECT.toString()); + } + if (options?.TIMEOUT !== undefined) { + args.push('TIMEOUT', options.TIMEOUT.toString()); + } + return args; +} +exports.pushAggregatehOptions = pushAggregatehOptions; +function pushLoadField(args, toLoad) { + if (typeof toLoad === 'string') { + args.push(toLoad); 
+ } + else { + args.push(toLoad.identifier); + if (toLoad.AS) { + args.push('AS', toLoad.AS); + } + } +} +function pushGroupByReducer(args, reducer) { + args.push('REDUCE', reducer.type); + switch (reducer.type) { + case AggregateGroupByReducers.COUNT: + args.push('0'); + break; + case AggregateGroupByReducers.COUNT_DISTINCT: + case AggregateGroupByReducers.COUNT_DISTINCTISH: + case AggregateGroupByReducers.SUM: + case AggregateGroupByReducers.MIN: + case AggregateGroupByReducers.MAX: + case AggregateGroupByReducers.AVG: + case AggregateGroupByReducers.STDDEV: + case AggregateGroupByReducers.TOLIST: + args.push('1', reducer.property); + break; + case AggregateGroupByReducers.QUANTILE: + args.push('2', reducer.property, reducer.quantile.toString()); + break; + case AggregateGroupByReducers.FIRST_VALUE: { + (0, _1.pushArgumentsWithLength)(args, () => { + args.push(reducer.property); + if (reducer.BY) { + args.push('BY'); + if (typeof reducer.BY === 'string') { + args.push(reducer.BY); + } + else { + args.push(reducer.BY.property); + if (reducer.BY.direction) { + args.push(reducer.BY.direction); + } + } + } + }); + break; + } + case AggregateGroupByReducers.RANDOM_SAMPLE: + args.push('2', reducer.property, reducer.sampleSize.toString()); + break; + } + if (reducer.AS) { + args.push('AS', reducer.AS); + } +} +function transformReply(rawReply) { + const results = []; + for (let i = 1; i < rawReply.length; i++) { + results.push((0, generic_transformers_1.transformTuplesReply)(rawReply[i])); + } + return { + total: rawReply[0], + results + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/search/dist/commands/AGGREGATE_WITHCURSOR.d.ts b/node_modules/@redis/search/dist/commands/AGGREGATE_WITHCURSOR.d.ts new file mode 100644 index 0000000..5cd8461 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/AGGREGATE_WITHCURSOR.d.ts @@ -0,0 +1,14 @@ +import { AggregateOptions, AggregateRawReply, AggregateReply } from './AGGREGATE'; +export { 
FIRST_KEY_INDEX, IS_READ_ONLY } from './AGGREGATE'; +interface AggregateWithCursorOptions extends AggregateOptions { + COUNT?: number; +} +export declare function transformArguments(index: string, query: string, options?: AggregateWithCursorOptions): import("@redis/client/dist/lib/commands").RedisCommandArguments; +type AggregateWithCursorRawReply = [ + result: AggregateRawReply, + cursor: number +]; +interface AggregateWithCursorReply extends AggregateReply { + cursor: number; +} +export declare function transformReply(reply: AggregateWithCursorRawReply): AggregateWithCursorReply; diff --git a/node_modules/@redis/search/dist/commands/AGGREGATE_WITHCURSOR.js b/node_modules/@redis/search/dist/commands/AGGREGATE_WITHCURSOR.js new file mode 100644 index 0000000..7c40c69 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/AGGREGATE_WITHCURSOR.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const AGGREGATE_1 = require("./AGGREGATE"); +var AGGREGATE_2 = require("./AGGREGATE"); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return AGGREGATE_2.FIRST_KEY_INDEX; } }); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return AGGREGATE_2.IS_READ_ONLY; } }); +function transformArguments(index, query, options) { + const args = (0, AGGREGATE_1.transformArguments)(index, query, options); + args.push('WITHCURSOR'); + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + ...(0, AGGREGATE_1.transformReply)(reply[0]), + cursor: reply[1] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/search/dist/commands/ALIASADD.d.ts 
b/node_modules/@redis/search/dist/commands/ALIASADD.d.ts new file mode 100644 index 0000000..e3e7ab4 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/ALIASADD.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(name: string, index: string): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/search/dist/commands/ALIASADD.js b/node_modules/@redis/search/dist/commands/ALIASADD.js new file mode 100644 index 0000000..f71c91e --- /dev/null +++ b/node_modules/@redis/search/dist/commands/ALIASADD.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(name, index) { + return ['FT.ALIASADD', name, index]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/ALIASDEL.d.ts b/node_modules/@redis/search/dist/commands/ALIASDEL.d.ts new file mode 100644 index 0000000..e3e7ab4 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/ALIASDEL.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(name: string, index: string): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/search/dist/commands/ALIASDEL.js b/node_modules/@redis/search/dist/commands/ALIASDEL.js new file mode 100644 index 0000000..f80ce4e --- /dev/null +++ b/node_modules/@redis/search/dist/commands/ALIASDEL.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(name, index) { + return ['FT.ALIASDEL', name, index]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/ALIASUPDATE.d.ts b/node_modules/@redis/search/dist/commands/ALIASUPDATE.d.ts new file mode 100644 index 0000000..e3e7ab4 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/ALIASUPDATE.d.ts @@ -0,0 +1,2 @@ 
+export declare function transformArguments(name: string, index: string): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/search/dist/commands/ALIASUPDATE.js b/node_modules/@redis/search/dist/commands/ALIASUPDATE.js new file mode 100644 index 0000000..718d8f5 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/ALIASUPDATE.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(name, index) { + return ['FT.ALIASUPDATE', name, index]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/ALTER.d.ts b/node_modules/@redis/search/dist/commands/ALTER.d.ts new file mode 100644 index 0000000..1a6413c --- /dev/null +++ b/node_modules/@redis/search/dist/commands/ALTER.d.ts @@ -0,0 +1,3 @@ +import { RediSearchSchema } from '.'; +export declare function transformArguments(index: string, schema: RediSearchSchema): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/search/dist/commands/ALTER.js b/node_modules/@redis/search/dist/commands/ALTER.js new file mode 100644 index 0000000..1f1971f --- /dev/null +++ b/node_modules/@redis/search/dist/commands/ALTER.js @@ -0,0 +1,10 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +const _1 = require("."); +function transformArguments(index, schema) { + const args = ['FT.ALTER', index, 'SCHEMA', 'ADD']; + (0, _1.pushSchema)(args, schema); + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/CONFIG_GET.d.ts b/node_modules/@redis/search/dist/commands/CONFIG_GET.d.ts new file mode 100644 index 0000000..de47f89 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/CONFIG_GET.d.ts @@ -0,0 +1,6 @@ +export declare function transformArguments(option: string): 
string[]; +interface ConfigGetReply { + [option: string]: string | null; +} +export declare function transformReply(rawReply: Array<[string, string | null]>): ConfigGetReply; +export {}; diff --git a/node_modules/@redis/search/dist/commands/CONFIG_GET.js b/node_modules/@redis/search/dist/commands/CONFIG_GET.js new file mode 100644 index 0000000..58fb360 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/CONFIG_GET.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments(option) { + return ['FT.CONFIG', 'GET', option]; +} +exports.transformArguments = transformArguments; +function transformReply(rawReply) { + const transformedReply = Object.create(null); + for (const [key, value] of rawReply) { + transformedReply[key] = value; + } + return transformedReply; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/search/dist/commands/CONFIG_SET.d.ts b/node_modules/@redis/search/dist/commands/CONFIG_SET.d.ts new file mode 100644 index 0000000..c354779 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/CONFIG_SET.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(option: string, value: string): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/search/dist/commands/CONFIG_SET.js b/node_modules/@redis/search/dist/commands/CONFIG_SET.js new file mode 100644 index 0000000..c1b095f --- /dev/null +++ b/node_modules/@redis/search/dist/commands/CONFIG_SET.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(option, value) { + return ['FT.CONFIG', 'SET', option, value]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/CREATE.d.ts b/node_modules/@redis/search/dist/commands/CREATE.d.ts 
new file mode 100644 index 0000000..a0da3a3 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/CREATE.d.ts @@ -0,0 +1,21 @@ +import { RedisSearchLanguages, PropertyName, RediSearchSchema } from '.'; +interface CreateOptions { + ON?: 'HASH' | 'JSON'; + PREFIX?: string | Array; + FILTER?: string; + LANGUAGE?: RedisSearchLanguages; + LANGUAGE_FIELD?: PropertyName; + SCORE?: number; + SCORE_FIELD?: PropertyName; + MAXTEXTFIELDS?: true; + TEMPORARY?: number; + NOOFFSETS?: true; + NOHL?: true; + NOFIELDS?: true; + NOFREQS?: true; + SKIPINITIALSCAN?: true; + STOPWORDS?: string | Array; +} +export declare function transformArguments(index: string, schema: RediSearchSchema, options?: CreateOptions): Array; +export declare function transformReply(): 'OK'; +export {}; diff --git a/node_modules/@redis/search/dist/commands/CREATE.js b/node_modules/@redis/search/dist/commands/CREATE.js new file mode 100644 index 0000000..a8641d2 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/CREATE.js @@ -0,0 +1,56 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +const _1 = require("."); +function transformArguments(index, schema, options) { + const args = ['FT.CREATE', index]; + if (options?.ON) { + args.push('ON', options.ON); + } + (0, generic_transformers_1.pushOptionalVerdictArgument)(args, 'PREFIX', options?.PREFIX); + if (options?.FILTER) { + args.push('FILTER', options.FILTER); + } + if (options?.LANGUAGE) { + args.push('LANGUAGE', options.LANGUAGE); + } + if (options?.LANGUAGE_FIELD) { + args.push('LANGUAGE_FIELD', options.LANGUAGE_FIELD); + } + if (options?.SCORE) { + args.push('SCORE', options.SCORE.toString()); + } + if (options?.SCORE_FIELD) { + args.push('SCORE_FIELD', options.SCORE_FIELD); + } + // if (options?.PAYLOAD_FIELD) { + // args.push('PAYLOAD_FIELD', options.PAYLOAD_FIELD); + // } + 
if (options?.MAXTEXTFIELDS) { + args.push('MAXTEXTFIELDS'); + } + if (options?.TEMPORARY) { + args.push('TEMPORARY', options.TEMPORARY.toString()); + } + if (options?.NOOFFSETS) { + args.push('NOOFFSETS'); + } + if (options?.NOHL) { + args.push('NOHL'); + } + if (options?.NOFIELDS) { + args.push('NOFIELDS'); + } + if (options?.NOFREQS) { + args.push('NOFREQS'); + } + if (options?.SKIPINITIALSCAN) { + args.push('SKIPINITIALSCAN'); + } + (0, generic_transformers_1.pushOptionalVerdictArgument)(args, 'STOPWORDS', options?.STOPWORDS); + args.push('SCHEMA'); + (0, _1.pushSchema)(args, schema); + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/CURSOR_DEL.d.ts b/node_modules/@redis/search/dist/commands/CURSOR_DEL.d.ts new file mode 100644 index 0000000..1f12c3f --- /dev/null +++ b/node_modules/@redis/search/dist/commands/CURSOR_DEL.d.ts @@ -0,0 +1,4 @@ +import { RedisCommandArgument } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(index: RedisCommandArgument, cursorId: number): RedisCommandArgument[]; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/search/dist/commands/CURSOR_DEL.js b/node_modules/@redis/search/dist/commands/CURSOR_DEL.js new file mode 100644 index 0000000..cbba23a --- /dev/null +++ b/node_modules/@redis/search/dist/commands/CURSOR_DEL.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(index, cursorId) { + return [ + 'FT.CURSOR', + 'DEL', + index, + cursorId.toString() + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/CURSOR_READ.d.ts b/node_modules/@redis/search/dist/commands/CURSOR_READ.d.ts new file mode 100644 index 0000000..7a186c0 --- 
/dev/null +++ b/node_modules/@redis/search/dist/commands/CURSOR_READ.d.ts @@ -0,0 +1,8 @@ +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +interface CursorReadOptions { + COUNT?: number; +} +export declare function transformArguments(index: RedisCommandArgument, cursor: number, options?: CursorReadOptions): RedisCommandArguments; +export { transformReply } from './AGGREGATE_WITHCURSOR'; diff --git a/node_modules/@redis/search/dist/commands/CURSOR_READ.js b/node_modules/@redis/search/dist/commands/CURSOR_READ.js new file mode 100644 index 0000000..edcc14d --- /dev/null +++ b/node_modules/@redis/search/dist/commands/CURSOR_READ.js @@ -0,0 +1,20 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(index, cursor, options) { + const args = [ + 'FT.CURSOR', + 'READ', + index, + cursor.toString() + ]; + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + return args; +} +exports.transformArguments = transformArguments; +var AGGREGATE_WITHCURSOR_1 = require("./AGGREGATE_WITHCURSOR"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return AGGREGATE_WITHCURSOR_1.transformReply; } }); diff --git a/node_modules/@redis/search/dist/commands/DICTADD.d.ts b/node_modules/@redis/search/dist/commands/DICTADD.d.ts new file mode 100644 index 0000000..c7f064d --- /dev/null +++ b/node_modules/@redis/search/dist/commands/DICTADD.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare function transformArguments(dictionary: string, term: string | Array): RedisCommandArguments; +export declare function 
transformReply(): number; diff --git a/node_modules/@redis/search/dist/commands/DICTADD.js b/node_modules/@redis/search/dist/commands/DICTADD.js new file mode 100644 index 0000000..189a93f --- /dev/null +++ b/node_modules/@redis/search/dist/commands/DICTADD.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +function transformArguments(dictionary, term) { + return (0, generic_transformers_1.pushVerdictArguments)(['FT.DICTADD', dictionary], term); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/DICTDEL.d.ts b/node_modules/@redis/search/dist/commands/DICTDEL.d.ts new file mode 100644 index 0000000..c7f064d --- /dev/null +++ b/node_modules/@redis/search/dist/commands/DICTDEL.d.ts @@ -0,0 +1,3 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare function transformArguments(dictionary: string, term: string | Array): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/search/dist/commands/DICTDEL.js b/node_modules/@redis/search/dist/commands/DICTDEL.js new file mode 100644 index 0000000..f4fbb26 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/DICTDEL.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +function transformArguments(dictionary, term) { + return (0, generic_transformers_1.pushVerdictArguments)(['FT.DICTDEL', dictionary], term); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/DICTDUMP.d.ts b/node_modules/@redis/search/dist/commands/DICTDUMP.d.ts new file mode 100644 index 
0000000..3bf7808 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/DICTDUMP.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(dictionary: string): Array; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/search/dist/commands/DICTDUMP.js b/node_modules/@redis/search/dist/commands/DICTDUMP.js new file mode 100644 index 0000000..e736b16 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/DICTDUMP.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(dictionary) { + return ['FT.DICTDUMP', dictionary]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/DROPINDEX.d.ts b/node_modules/@redis/search/dist/commands/DROPINDEX.d.ts new file mode 100644 index 0000000..e8ed7f3 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/DROPINDEX.d.ts @@ -0,0 +1,6 @@ +interface DropIndexOptions { + DD?: true; +} +export declare function transformArguments(index: string, options?: DropIndexOptions): Array; +export declare function transformReply(): 'OK'; +export {}; diff --git a/node_modules/@redis/search/dist/commands/DROPINDEX.js b/node_modules/@redis/search/dist/commands/DROPINDEX.js new file mode 100644 index 0000000..f9fb4de --- /dev/null +++ b/node_modules/@redis/search/dist/commands/DROPINDEX.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(index, options) { + const args = ['FT.DROPINDEX', index]; + if (options?.DD) { + args.push('DD'); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/EXPLAIN.d.ts b/node_modules/@redis/search/dist/commands/EXPLAIN.d.ts new file mode 100644 index 0000000..8bb7a3f --- /dev/null +++ 
b/node_modules/@redis/search/dist/commands/EXPLAIN.d.ts @@ -0,0 +1,9 @@ +import { Params } from "."; +export declare const IS_READ_ONLY = true; +interface ExplainOptions { + PARAMS?: Params; + DIALECT?: number; +} +export declare function transformArguments(index: string, query: string, options?: ExplainOptions): Array; +export declare function transformReply(): string; +export {}; diff --git a/node_modules/@redis/search/dist/commands/EXPLAIN.js b/node_modules/@redis/search/dist/commands/EXPLAIN.js new file mode 100644 index 0000000..a05b77e --- /dev/null +++ b/node_modules/@redis/search/dist/commands/EXPLAIN.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +const _1 = require("."); +exports.IS_READ_ONLY = true; +function transformArguments(index, query, options) { + const args = ['FT.EXPLAIN', index, query]; + (0, _1.pushParamsArgs)(args, options?.PARAMS); + if (options?.DIALECT) { + args.push('DIALECT', options.DIALECT.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/EXPLAINCLI.d.ts b/node_modules/@redis/search/dist/commands/EXPLAINCLI.d.ts new file mode 100644 index 0000000..1eaddc9 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/EXPLAINCLI.d.ts @@ -0,0 +1,3 @@ +export declare const IS_READ_ONLY = true; +export declare function transformArguments(index: string, query: string): Array; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/search/dist/commands/EXPLAINCLI.js b/node_modules/@redis/search/dist/commands/EXPLAINCLI.js new file mode 100644 index 0000000..f802560 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/EXPLAINCLI.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = 
true; +function transformArguments(index, query) { + return ['FT.EXPLAINCLI', index, query]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/INFO.d.ts b/node_modules/@redis/search/dist/commands/INFO.d.ts new file mode 100644 index 0000000..7d5e8ef --- /dev/null +++ b/node_modules/@redis/search/dist/commands/INFO.d.ts @@ -0,0 +1,120 @@ +import { RedisCommandArgument } from '@redis/client/dist/lib/commands'; +export declare function transformArguments(index: string): Array; +type InfoRawReply = [ + 'index_name', + RedisCommandArgument, + 'index_options', + Array, + 'index_definition', + Array, + 'attributes', + Array>, + 'num_docs', + RedisCommandArgument, + 'max_doc_id', + RedisCommandArgument, + 'num_terms', + RedisCommandArgument, + 'num_records', + RedisCommandArgument, + 'inverted_sz_mb', + RedisCommandArgument, + 'vector_index_sz_mb', + RedisCommandArgument, + 'total_inverted_index_blocks', + RedisCommandArgument, + 'offset_vectors_sz_mb', + RedisCommandArgument, + 'doc_table_size_mb', + RedisCommandArgument, + 'sortable_values_size_mb', + RedisCommandArgument, + 'key_table_size_mb', + RedisCommandArgument, + 'records_per_doc_avg', + RedisCommandArgument, + 'bytes_per_record_avg', + RedisCommandArgument, + 'offsets_per_term_avg', + RedisCommandArgument, + 'offset_bits_per_record_avg', + RedisCommandArgument, + 'hash_indexing_failures', + RedisCommandArgument, + 'indexing', + RedisCommandArgument, + 'percent_indexed', + RedisCommandArgument, + 'gc_stats', + [ + 'bytes_collected', + RedisCommandArgument, + 'total_ms_run', + RedisCommandArgument, + 'total_cycles', + RedisCommandArgument, + 'average_cycle_time_ms', + RedisCommandArgument, + 'last_run_time_ms', + RedisCommandArgument, + 'gc_numeric_trees_missed', + RedisCommandArgument, + 'gc_blocks_denied', + RedisCommandArgument + ], + 'cursor_stats', + [ + 'global_idle', + number, + 'global_total', + number, + 'index_capacity', + number, + 'index_total', 
+ number + ], + 'stopwords_list'?, + Array? +]; +interface InfoReply { + indexName: RedisCommandArgument; + indexOptions: Array; + indexDefinition: Record; + attributes: Array>; + numDocs: RedisCommandArgument; + maxDocId: RedisCommandArgument; + numTerms: RedisCommandArgument; + numRecords: RedisCommandArgument; + invertedSzMb: RedisCommandArgument; + vectorIndexSzMb: RedisCommandArgument; + totalInvertedIndexBlocks: RedisCommandArgument; + offsetVectorsSzMb: RedisCommandArgument; + docTableSizeMb: RedisCommandArgument; + sortableValuesSizeMb: RedisCommandArgument; + keyTableSizeMb: RedisCommandArgument; + recordsPerDocAvg: RedisCommandArgument; + bytesPerRecordAvg: RedisCommandArgument; + offsetsPerTermAvg: RedisCommandArgument; + offsetBitsPerRecordAvg: RedisCommandArgument; + hashIndexingFailures: RedisCommandArgument; + indexing: RedisCommandArgument; + percentIndexed: RedisCommandArgument; + gcStats: { + bytesCollected: RedisCommandArgument; + totalMsRun: RedisCommandArgument; + totalCycles: RedisCommandArgument; + averageCycleTimeMs: RedisCommandArgument; + lastRunTimeMs: RedisCommandArgument; + gcNumericTreesMissed: RedisCommandArgument; + gcBlocksDenied: RedisCommandArgument; + }; + cursorStats: { + globalIdle: number; + globalTotal: number; + indexCapacity: number; + idnexTotal: number; + }; + stopWords: Array | undefined; +} +export declare function transformReply(rawReply: InfoRawReply): InfoReply; +export {}; diff --git a/node_modules/@redis/search/dist/commands/INFO.js b/node_modules/@redis/search/dist/commands/INFO.js new file mode 100644 index 0000000..c09c9e3 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/INFO.js @@ -0,0 +1,51 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +function transformArguments(index) { + return ['FT.INFO', index]; +} 
+exports.transformArguments = transformArguments; +function transformReply(rawReply) { + return { + indexName: rawReply[1], + indexOptions: rawReply[3], + indexDefinition: (0, generic_transformers_1.transformTuplesReply)(rawReply[5]), + attributes: rawReply[7].map(attribute => (0, generic_transformers_1.transformTuplesReply)(attribute)), + numDocs: rawReply[9], + maxDocId: rawReply[11], + numTerms: rawReply[13], + numRecords: rawReply[15], + invertedSzMb: rawReply[17], + vectorIndexSzMb: rawReply[19], + totalInvertedIndexBlocks: rawReply[21], + offsetVectorsSzMb: rawReply[23], + docTableSizeMb: rawReply[25], + sortableValuesSizeMb: rawReply[27], + keyTableSizeMb: rawReply[29], + recordsPerDocAvg: rawReply[31], + bytesPerRecordAvg: rawReply[33], + offsetsPerTermAvg: rawReply[35], + offsetBitsPerRecordAvg: rawReply[37], + hashIndexingFailures: rawReply[39], + indexing: rawReply[41], + percentIndexed: rawReply[43], + gcStats: { + bytesCollected: rawReply[45][1], + totalMsRun: rawReply[45][3], + totalCycles: rawReply[45][5], + averageCycleTimeMs: rawReply[45][7], + lastRunTimeMs: rawReply[45][9], + gcNumericTreesMissed: rawReply[45][11], + gcBlocksDenied: rawReply[45][13] + }, + cursorStats: { + globalIdle: rawReply[47][1], + globalTotal: rawReply[47][3], + indexCapacity: rawReply[47][5], + idnexTotal: rawReply[47][7] + }, + stopWords: rawReply[49] + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/search/dist/commands/PROFILE_AGGREGATE.d.ts b/node_modules/@redis/search/dist/commands/PROFILE_AGGREGATE.d.ts new file mode 100644 index 0000000..f44d2aa --- /dev/null +++ b/node_modules/@redis/search/dist/commands/PROFILE_AGGREGATE.d.ts @@ -0,0 +1,7 @@ +import { AggregateOptions, AggregateRawReply } from './AGGREGATE'; +import { ProfileOptions, ProfileRawReply, ProfileReply } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(index: string, query: string, options?: ProfileOptions & 
AggregateOptions): Array; +type ProfileAggeregateRawReply = ProfileRawReply; +export declare function transformReply(reply: ProfileAggeregateRawReply): ProfileReply; +export {}; diff --git a/node_modules/@redis/search/dist/commands/PROFILE_AGGREGATE.js b/node_modules/@redis/search/dist/commands/PROFILE_AGGREGATE.js new file mode 100644 index 0000000..0e16492 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/PROFILE_AGGREGATE.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const AGGREGATE_1 = require("./AGGREGATE"); +const _1 = require("."); +exports.IS_READ_ONLY = true; +function transformArguments(index, query, options) { + const args = ['FT.PROFILE', index, 'AGGREGATE']; + if (options?.LIMITED) { + args.push('LIMITED'); + } + args.push('QUERY', query); + (0, AGGREGATE_1.pushAggregatehOptions)(args, options); + return args; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + results: (0, AGGREGATE_1.transformReply)(reply[0]), + profile: (0, _1.transformProfile)(reply[1]) + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/search/dist/commands/PROFILE_SEARCH.d.ts b/node_modules/@redis/search/dist/commands/PROFILE_SEARCH.d.ts new file mode 100644 index 0000000..bc160e3 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/PROFILE_SEARCH.d.ts @@ -0,0 +1,8 @@ +import { SearchOptions, SearchRawReply } from './SEARCH'; +import { ProfileOptions, ProfileRawReply, ProfileReply } from '.'; +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(index: string, query: string, options?: ProfileOptions & SearchOptions): RedisCommandArguments; +type ProfileSearchRawReply = ProfileRawReply; +export declare function transformReply(reply: 
ProfileSearchRawReply, withoutDocuments: boolean): ProfileReply; +export {}; diff --git a/node_modules/@redis/search/dist/commands/PROFILE_SEARCH.js b/node_modules/@redis/search/dist/commands/PROFILE_SEARCH.js new file mode 100644 index 0000000..126d14c --- /dev/null +++ b/node_modules/@redis/search/dist/commands/PROFILE_SEARCH.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const SEARCH_1 = require("./SEARCH"); +const _1 = require("."); +exports.IS_READ_ONLY = true; +function transformArguments(index, query, options) { + let args = ['FT.PROFILE', index, 'SEARCH']; + if (options?.LIMITED) { + args.push('LIMITED'); + } + args.push('QUERY', query); + return (0, _1.pushSearchOptions)(args, options); +} +exports.transformArguments = transformArguments; +function transformReply(reply, withoutDocuments) { + return { + results: (0, SEARCH_1.transformReply)(reply[0], withoutDocuments), + profile: (0, _1.transformProfile)(reply[1]) + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/search/dist/commands/SEARCH.d.ts b/node_modules/@redis/search/dist/commands/SEARCH.d.ts new file mode 100644 index 0000000..495695a --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SEARCH.d.ts @@ -0,0 +1,41 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { RedisSearchLanguages, Params, PropertyName, SortByProperty, SearchReply } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export interface SearchOptions { + VERBATIM?: true; + NOSTOPWORDS?: true; + WITHSORTKEYS?: true; + INKEYS?: string | Array; + INFIELDS?: string | Array; + RETURN?: string | Array; + SUMMARIZE?: true | { + FIELDS?: PropertyName | Array; + FRAGS?: number; + LEN?: number; + SEPARATOR?: string; + }; + HIGHLIGHT?: true | { + FIELDS?: PropertyName | Array; + TAGS?: { 
+ open: string; + close: string; + }; + }; + SLOP?: number; + INORDER?: true; + LANGUAGE?: RedisSearchLanguages; + EXPANDER?: string; + SCORER?: string; + SORTBY?: SortByProperty; + LIMIT?: { + from: number | string; + size: number | string; + }; + PARAMS?: Params; + DIALECT?: number; + TIMEOUT?: number; +} +export declare function transformArguments(index: string, query: string, options?: SearchOptions): RedisCommandArguments; +export type SearchRawReply = Array; +export declare function transformReply(reply: SearchRawReply, withoutDocuments: boolean): SearchReply; diff --git a/node_modules/@redis/search/dist/commands/SEARCH.js b/node_modules/@redis/search/dist/commands/SEARCH.js new file mode 100644 index 0000000..a8ebf43 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SEARCH.js @@ -0,0 +1,43 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(index, query, options) { + return (0, _1.pushSearchOptions)(['FT.SEARCH', index, query], options); +} +exports.transformArguments = transformArguments; +function transformReply(reply, withoutDocuments) { + const documents = []; + let i = 1; + while (i < reply.length) { + documents.push({ + id: reply[i++], + value: withoutDocuments ? 
Object.create(null) : documentValue(reply[i++]) + }); + } + return { + total: reply[0], + documents + }; +} +exports.transformReply = transformReply; +function documentValue(tuples) { + const message = Object.create(null); + let i = 0; + while (i < tuples.length) { + const key = tuples[i++], value = tuples[i++]; + if (key === '$') { // might be a JSON reply + try { + Object.assign(message, JSON.parse(value)); + continue; + } + catch { + // set as a regular property if not a valid JSON + } + } + message[key] = value; + } + return message; +} diff --git a/node_modules/@redis/search/dist/commands/SEARCH_NOCONTENT.d.ts b/node_modules/@redis/search/dist/commands/SEARCH_NOCONTENT.d.ts new file mode 100644 index 0000000..4018a80 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SEARCH_NOCONTENT.d.ts @@ -0,0 +1,10 @@ +import { RedisCommandArguments } from "@redis/client/dist/lib/commands"; +import { SearchOptions, SearchRawReply } from "./SEARCH"; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(index: string, query: string, options?: SearchOptions): RedisCommandArguments; +export interface SearchNoContentReply { + total: number; + documents: Array; +} +export declare function transformReply(reply: SearchRawReply): SearchNoContentReply; diff --git a/node_modules/@redis/search/dist/commands/SEARCH_NOCONTENT.js b/node_modules/@redis/search/dist/commands/SEARCH_NOCONTENT.js new file mode 100644 index 0000000..5d94cdd --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SEARCH_NOCONTENT.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(index, query, options) { + return (0, _1.pushSearchOptions)(['FT.SEARCH', 
index, query, 'NOCONTENT'], options); +} +exports.transformArguments = transformArguments; +; +function transformReply(reply) { + return { + total: reply[0], + documents: reply.slice(1) + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/search/dist/commands/SPELLCHECK.d.ts b/node_modules/@redis/search/dist/commands/SPELLCHECK.d.ts new file mode 100644 index 0000000..f5bf946 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SPELLCHECK.d.ts @@ -0,0 +1,24 @@ +interface SpellCheckTerms { + mode: 'INCLUDE' | 'EXCLUDE'; + dictionary: string; +} +interface SpellCheckOptions { + DISTANCE?: number; + TERMS?: SpellCheckTerms | Array; + DIALECT?: number; +} +export declare function transformArguments(index: string, query: string, options?: SpellCheckOptions): Array; +type SpellCheckRawReply = Array<[ + _: string, + term: string, + suggestions: Array<[score: string, suggestion: string]> +]>; +type SpellCheckReply = Array<{ + term: string; + suggestions: Array<{ + score: number; + suggestion: string; + }>; +}>; +export declare function transformReply(rawReply: SpellCheckRawReply): SpellCheckReply; +export {}; diff --git a/node_modules/@redis/search/dist/commands/SPELLCHECK.js b/node_modules/@redis/search/dist/commands/SPELLCHECK.js new file mode 100644 index 0000000..96f71b8 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SPELLCHECK.js @@ -0,0 +1,37 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments(index, query, options) { + const args = ['FT.SPELLCHECK', index, query]; + if (options?.DISTANCE) { + args.push('DISTANCE', options.DISTANCE.toString()); + } + if (options?.TERMS) { + if (Array.isArray(options.TERMS)) { + for (const term of options.TERMS) { + pushTerms(args, term); + } + } + else { + pushTerms(args, options.TERMS); + } + } + if (options?.DIALECT) { + args.push('DIALECT', 
options.DIALECT.toString()); + } + return args; +} +exports.transformArguments = transformArguments; +function pushTerms(args, { mode, dictionary }) { + args.push('TERMS', mode, dictionary); +} +function transformReply(rawReply) { + return rawReply.map(([, term, suggestions]) => ({ + term, + suggestions: suggestions.map(([score, suggestion]) => ({ + score: Number(score), + suggestion + })) + })); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/search/dist/commands/SUGADD.d.ts b/node_modules/@redis/search/dist/commands/SUGADD.d.ts new file mode 100644 index 0000000..1bbae32 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGADD.d.ts @@ -0,0 +1,7 @@ +interface SugAddOptions { + INCR?: true; + PAYLOAD?: string; +} +export declare function transformArguments(key: string, string: string, score: number, options?: SugAddOptions): Array; +export declare function transformReply(): number; +export {}; diff --git a/node_modules/@redis/search/dist/commands/SUGADD.js b/node_modules/@redis/search/dist/commands/SUGADD.js new file mode 100644 index 0000000..d9bb8ed --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGADD.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(key, string, score, options) { + const args = ['FT.SUGADD', key, string, score.toString()]; + if (options?.INCR) { + args.push('INCR'); + } + if (options?.PAYLOAD) { + args.push('PAYLOAD', options.PAYLOAD); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/SUGDEL.d.ts b/node_modules/@redis/search/dist/commands/SUGDEL.d.ts new file mode 100644 index 0000000..8476ac2 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGDEL.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(key: string, string: string): Array; +export { transformBooleanReply as 
transformReply } from '@redis/client/dist/lib/commands/generic-transformers'; diff --git a/node_modules/@redis/search/dist/commands/SUGDEL.js b/node_modules/@redis/search/dist/commands/SUGDEL.js new file mode 100644 index 0000000..d23e9a1 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGDEL.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = void 0; +function transformArguments(key, string) { + return ['FT.SUGDEL', key, string]; +} +exports.transformArguments = transformArguments; +var generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return generic_transformers_1.transformBooleanReply; } }); diff --git a/node_modules/@redis/search/dist/commands/SUGGET.d.ts b/node_modules/@redis/search/dist/commands/SUGGET.d.ts new file mode 100644 index 0000000..079cfc7 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGGET.d.ts @@ -0,0 +1,7 @@ +export declare const IS_READ_ONLY = true; +export interface SugGetOptions { + FUZZY?: true; + MAX?: number; +} +export declare function transformArguments(key: string, prefix: string, options?: SugGetOptions): Array; +export declare function transformReply(): null | Array; diff --git a/node_modules/@redis/search/dist/commands/SUGGET.js b/node_modules/@redis/search/dist/commands/SUGGET.js new file mode 100644 index 0000000..2190e4b --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGGET.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments(key, prefix, options) { + const args = ['FT.SUGGET', key, prefix]; + if (options?.FUZZY) { + args.push('FUZZY'); + } + if (options?.MAX) { + args.push('MAX', 
options.MAX.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/SUGGET_WITHPAYLOADS.d.ts b/node_modules/@redis/search/dist/commands/SUGGET_WITHPAYLOADS.d.ts new file mode 100644 index 0000000..a8ff955 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGGET_WITHPAYLOADS.d.ts @@ -0,0 +1,8 @@ +import { SugGetOptions } from './SUGGET'; +export { IS_READ_ONLY } from './SUGGET'; +export declare function transformArguments(key: string, prefix: string, options?: SugGetOptions): Array; +export interface SuggestionWithPayload { + suggestion: string; + payload: string | null; +} +export declare function transformReply(rawReply: Array | null): Array | null; diff --git a/node_modules/@redis/search/dist/commands/SUGGET_WITHPAYLOADS.js b/node_modules/@redis/search/dist/commands/SUGGET_WITHPAYLOADS.js new file mode 100644 index 0000000..c091b30 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGGET_WITHPAYLOADS.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const SUGGET_1 = require("./SUGGET"); +var SUGGET_2 = require("./SUGGET"); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return SUGGET_2.IS_READ_ONLY; } }); +function transformArguments(key, prefix, options) { + return [ + ...(0, SUGGET_1.transformArguments)(key, prefix, options), + 'WITHPAYLOADS' + ]; +} +exports.transformArguments = transformArguments; +function transformReply(rawReply) { + if (rawReply === null) + return null; + const transformedReply = []; + for (let i = 0; i < rawReply.length; i += 2) { + transformedReply.push({ + suggestion: rawReply[i], + payload: rawReply[i + 1] + }); + } + return transformedReply; +} +exports.transformReply = transformReply; diff --git 
a/node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES.d.ts b/node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES.d.ts new file mode 100644 index 0000000..2d42b68 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES.d.ts @@ -0,0 +1,8 @@ +import { SugGetOptions } from './SUGGET'; +export { IS_READ_ONLY } from './SUGGET'; +export declare function transformArguments(key: string, prefix: string, options?: SugGetOptions): Array; +export interface SuggestionWithScores { + suggestion: string; + score: number; +} +export declare function transformReply(rawReply: Array | null): Array | null; diff --git a/node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES.js b/node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES.js new file mode 100644 index 0000000..5147cb1 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const SUGGET_1 = require("./SUGGET"); +var SUGGET_2 = require("./SUGGET"); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return SUGGET_2.IS_READ_ONLY; } }); +function transformArguments(key, prefix, options) { + return [ + ...(0, SUGGET_1.transformArguments)(key, prefix, options), + 'WITHSCORES' + ]; +} +exports.transformArguments = transformArguments; +function transformReply(rawReply) { + if (rawReply === null) + return null; + const transformedReply = []; + for (let i = 0; i < rawReply.length; i += 2) { + transformedReply.push({ + suggestion: rawReply[i], + score: Number(rawReply[i + 1]) + }); + } + return transformedReply; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES_WITHPAYLOADS.d.ts b/node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES_WITHPAYLOADS.d.ts new file mode 100644 index 
0000000..0c53891 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES_WITHPAYLOADS.d.ts @@ -0,0 +1,7 @@ +import { SugGetOptions } from './SUGGET'; +import { SuggestionWithPayload } from './SUGGET_WITHPAYLOADS'; +import { SuggestionWithScores } from './SUGGET_WITHSCORES'; +export { IS_READ_ONLY } from './SUGGET'; +export declare function transformArguments(key: string, prefix: string, options?: SugGetOptions): Array; +type SuggestionWithScoresAndPayloads = SuggestionWithScores & SuggestionWithPayload; +export declare function transformReply(rawReply: Array | null): Array | null; diff --git a/node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES_WITHPAYLOADS.js b/node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES_WITHPAYLOADS.js new file mode 100644 index 0000000..fa5d83a --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGGET_WITHSCORES_WITHPAYLOADS.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const SUGGET_1 = require("./SUGGET"); +var SUGGET_2 = require("./SUGGET"); +Object.defineProperty(exports, "IS_READ_ONLY", { enumerable: true, get: function () { return SUGGET_2.IS_READ_ONLY; } }); +function transformArguments(key, prefix, options) { + return [ + ...(0, SUGGET_1.transformArguments)(key, prefix, options), + 'WITHSCORES', + 'WITHPAYLOADS' + ]; +} +exports.transformArguments = transformArguments; +function transformReply(rawReply) { + if (rawReply === null) + return null; + const transformedReply = []; + for (let i = 0; i < rawReply.length; i += 3) { + transformedReply.push({ + suggestion: rawReply[i], + score: Number(rawReply[i + 1]), + payload: rawReply[i + 2] + }); + } + return transformedReply; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/search/dist/commands/SUGLEN.d.ts b/node_modules/@redis/search/dist/commands/SUGLEN.d.ts new file mode 
100644 index 0000000..b0c7168 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGLEN.d.ts @@ -0,0 +1,3 @@ +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string): Array; +export declare function transformReply(): number; diff --git a/node_modules/@redis/search/dist/commands/SUGLEN.js b/node_modules/@redis/search/dist/commands/SUGLEN.js new file mode 100644 index 0000000..3681839 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SUGLEN.js @@ -0,0 +1,8 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['FT.SUGLEN', key]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/SYNDUMP.d.ts b/node_modules/@redis/search/dist/commands/SYNDUMP.d.ts new file mode 100644 index 0000000..3bade6c --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SYNDUMP.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(index: string): Array; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/search/dist/commands/SYNDUMP.js b/node_modules/@redis/search/dist/commands/SYNDUMP.js new file mode 100644 index 0000000..b7c153e --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SYNDUMP.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(index) { + return ['FT.SYNDUMP', index]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/SYNUPDATE.d.ts b/node_modules/@redis/search/dist/commands/SYNUPDATE.d.ts new file mode 100644 index 0000000..2a1c21f --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SYNUPDATE.d.ts @@ -0,0 +1,7 @@ +import { RedisCommandArguments } from 
'@redis/client/dist/lib/commands'; +interface SynUpdateOptions { + SKIPINITIALSCAN?: true; +} +export declare function transformArguments(index: string, groupId: string, terms: string | Array, options?: SynUpdateOptions): RedisCommandArguments; +export declare function transformReply(): 'OK'; +export {}; diff --git a/node_modules/@redis/search/dist/commands/SYNUPDATE.js b/node_modules/@redis/search/dist/commands/SYNUPDATE.js new file mode 100644 index 0000000..b6498cc --- /dev/null +++ b/node_modules/@redis/search/dist/commands/SYNUPDATE.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +function transformArguments(index, groupId, terms, options) { + const args = ['FT.SYNUPDATE', index, groupId]; + if (options?.SKIPINITIALSCAN) { + args.push('SKIPINITIALSCAN'); + } + return (0, generic_transformers_1.pushVerdictArguments)(args, terms); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/TAGVALS.d.ts b/node_modules/@redis/search/dist/commands/TAGVALS.d.ts new file mode 100644 index 0000000..dff3e30 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/TAGVALS.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(index: string, fieldName: string): Array; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/search/dist/commands/TAGVALS.js b/node_modules/@redis/search/dist/commands/TAGVALS.js new file mode 100644 index 0000000..84fc416 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/TAGVALS.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments(index, fieldName) { + return ['FT.TAGVALS', index, fieldName]; +} +exports.transformArguments = transformArguments; diff 
--git a/node_modules/@redis/search/dist/commands/_LIST.d.ts b/node_modules/@redis/search/dist/commands/_LIST.d.ts new file mode 100644 index 0000000..db92074 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/_LIST.d.ts @@ -0,0 +1,2 @@ +export declare function transformArguments(): Array; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/search/dist/commands/_LIST.js b/node_modules/@redis/search/dist/commands/_LIST.js new file mode 100644 index 0000000..c255557 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/_LIST.js @@ -0,0 +1,7 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = void 0; +function transformArguments() { + return ['FT._LIST']; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/search/dist/commands/index.d.ts b/node_modules/@redis/search/dist/commands/index.d.ts new file mode 100644 index 0000000..ce41a97 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/index.d.ts @@ -0,0 +1,267 @@ +import * as _LIST from './_LIST'; +import * as ALTER from './ALTER'; +import * as AGGREGATE_WITHCURSOR from './AGGREGATE_WITHCURSOR'; +import * as AGGREGATE from './AGGREGATE'; +import * as ALIASADD from './ALIASADD'; +import * as ALIASDEL from './ALIASDEL'; +import * as ALIASUPDATE from './ALIASUPDATE'; +import * as CONFIG_GET from './CONFIG_GET'; +import * as CONFIG_SET from './CONFIG_SET'; +import * as CREATE from './CREATE'; +import * as CURSOR_DEL from './CURSOR_DEL'; +import * as CURSOR_READ from './CURSOR_READ'; +import * as DICTADD from './DICTADD'; +import * as DICTDEL from './DICTDEL'; +import * as DICTDUMP from './DICTDUMP'; +import * as DROPINDEX from './DROPINDEX'; +import * as EXPLAIN from './EXPLAIN'; +import * as EXPLAINCLI from './EXPLAINCLI'; +import * as INFO from './INFO'; +import * as PROFILESEARCH from './PROFILE_SEARCH'; +import * as PROFILEAGGREGATE from './PROFILE_AGGREGATE'; +import * 
as SEARCH from './SEARCH'; +import * as SEARCH_NOCONTENT from './SEARCH_NOCONTENT'; +import * as SPELLCHECK from './SPELLCHECK'; +import * as SUGADD from './SUGADD'; +import * as SUGDEL from './SUGDEL'; +import * as SUGGET_WITHPAYLOADS from './SUGGET_WITHPAYLOADS'; +import * as SUGGET_WITHSCORES_WITHPAYLOADS from './SUGGET_WITHSCORES_WITHPAYLOADS'; +import * as SUGGET_WITHSCORES from './SUGGET_WITHSCORES'; +import * as SUGGET from './SUGGET'; +import * as SUGLEN from './SUGLEN'; +import * as SYNDUMP from './SYNDUMP'; +import * as SYNUPDATE from './SYNUPDATE'; +import * as TAGVALS from './TAGVALS'; +import { RedisCommandArgument, RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { SearchOptions } from './SEARCH'; +declare const _default: { + _LIST: typeof _LIST; + _list: typeof _LIST; + ALTER: typeof ALTER; + alter: typeof ALTER; + AGGREGATE_WITHCURSOR: typeof AGGREGATE_WITHCURSOR; + aggregateWithCursor: typeof AGGREGATE_WITHCURSOR; + AGGREGATE: typeof AGGREGATE; + aggregate: typeof AGGREGATE; + ALIASADD: typeof ALIASADD; + aliasAdd: typeof ALIASADD; + ALIASDEL: typeof ALIASDEL; + aliasDel: typeof ALIASDEL; + ALIASUPDATE: typeof ALIASUPDATE; + aliasUpdate: typeof ALIASUPDATE; + CONFIG_GET: typeof CONFIG_GET; + configGet: typeof CONFIG_GET; + CONFIG_SET: typeof CONFIG_SET; + configSet: typeof CONFIG_SET; + CREATE: typeof CREATE; + create: typeof CREATE; + CURSOR_DEL: typeof CURSOR_DEL; + cursorDel: typeof CURSOR_DEL; + CURSOR_READ: typeof CURSOR_READ; + cursorRead: typeof CURSOR_READ; + DICTADD: typeof DICTADD; + dictAdd: typeof DICTADD; + DICTDEL: typeof DICTDEL; + dictDel: typeof DICTDEL; + DICTDUMP: typeof DICTDUMP; + dictDump: typeof DICTDUMP; + DROPINDEX: typeof DROPINDEX; + dropIndex: typeof DROPINDEX; + EXPLAIN: typeof EXPLAIN; + explain: typeof EXPLAIN; + EXPLAINCLI: typeof EXPLAINCLI; + explainCli: typeof EXPLAINCLI; + INFO: typeof INFO; + info: typeof INFO; + PROFILESEARCH: typeof PROFILESEARCH; + profileSearch: typeof PROFILESEARCH; + 
PROFILEAGGREGATE: typeof PROFILEAGGREGATE; + profileAggregate: typeof PROFILEAGGREGATE; + SEARCH: typeof SEARCH; + search: typeof SEARCH; + SEARCH_NOCONTENT: typeof SEARCH_NOCONTENT; + searchNoContent: typeof SEARCH_NOCONTENT; + SPELLCHECK: typeof SPELLCHECK; + spellCheck: typeof SPELLCHECK; + SUGADD: typeof SUGADD; + sugAdd: typeof SUGADD; + SUGDEL: typeof SUGDEL; + sugDel: typeof SUGDEL; + SUGGET_WITHPAYLOADS: typeof SUGGET_WITHPAYLOADS; + sugGetWithPayloads: typeof SUGGET_WITHPAYLOADS; + SUGGET_WITHSCORES_WITHPAYLOADS: typeof SUGGET_WITHSCORES_WITHPAYLOADS; + sugGetWithScoresWithPayloads: typeof SUGGET_WITHSCORES_WITHPAYLOADS; + SUGGET_WITHSCORES: typeof SUGGET_WITHSCORES; + sugGetWithScores: typeof SUGGET_WITHSCORES; + SUGGET: typeof SUGGET; + sugGet: typeof SUGGET; + SUGLEN: typeof SUGLEN; + sugLen: typeof SUGLEN; + SYNDUMP: typeof SYNDUMP; + synDump: typeof SYNDUMP; + SYNUPDATE: typeof SYNUPDATE; + synUpdate: typeof SYNUPDATE; + TAGVALS: typeof TAGVALS; + tagVals: typeof TAGVALS; +}; +export default _default; +export declare enum RedisSearchLanguages { + ARABIC = "Arabic", + BASQUE = "Basque", + CATALANA = "Catalan", + DANISH = "Danish", + DUTCH = "Dutch", + ENGLISH = "English", + FINNISH = "Finnish", + FRENCH = "French", + GERMAN = "German", + GREEK = "Greek", + HUNGARIAN = "Hungarian", + INDONESAIN = "Indonesian", + IRISH = "Irish", + ITALIAN = "Italian", + LITHUANIAN = "Lithuanian", + NEPALI = "Nepali", + NORWEIGAN = "Norwegian", + PORTUGUESE = "Portuguese", + ROMANIAN = "Romanian", + RUSSIAN = "Russian", + SPANISH = "Spanish", + SWEDISH = "Swedish", + TAMIL = "Tamil", + TURKISH = "Turkish", + CHINESE = "Chinese" +} +export type PropertyName = `${'@' | '$.'}${string}`; +export type SortByProperty = string | { + BY: string; + DIRECTION?: 'ASC' | 'DESC'; +}; +export declare function pushSortByProperty(args: RedisCommandArguments, sortBy: SortByProperty): void; +export declare function pushSortByArguments(args: RedisCommandArguments, name: string, sortBy: 
SortByProperty | Array): RedisCommandArguments; +export declare function pushArgumentsWithLength(args: RedisCommandArguments, fn: (args: RedisCommandArguments) => void): RedisCommandArguments; +export declare enum SchemaFieldTypes { + TEXT = "TEXT", + NUMERIC = "NUMERIC", + GEO = "GEO", + TAG = "TAG", + VECTOR = "VECTOR", + GEOSHAPE = "GEOSHAPE" +} +type CreateSchemaField> = T | ({ + type: T; + AS?: string; + INDEXMISSING?: boolean; +} & E); +type CommonFieldArguments = { + SORTABLE?: boolean | 'UNF'; + NOINDEX?: boolean; +}; +type CreateSchemaCommonField> = CreateSchemaField; +export declare enum SchemaTextFieldPhonetics { + DM_EN = "dm:en", + DM_FR = "dm:fr", + FM_PT = "dm:pt", + DM_ES = "dm:es" +} +type CreateSchemaTextField = CreateSchemaCommonField; +type CreateSchemaNumericField = CreateSchemaCommonField; +type CreateSchemaGeoField = CreateSchemaCommonField; +type CreateSchemaTagField = CreateSchemaCommonField; +export declare enum VectorAlgorithms { + FLAT = "FLAT", + HNSW = "HNSW" +} +type CreateSchemaVectorField> = CreateSchemaField; +type CreateSchemaFlatVectorField = CreateSchemaVectorField; +type CreateSchemaHNSWVectorField = CreateSchemaVectorField; +export declare const SCHEMA_GEO_SHAPE_COORD_SYSTEM: { + readonly SPHERICAL: "SPHERICAL"; + readonly FLAT: "FLAT"; +}; +export type SchemaGeoShapeFieldCoordSystem = typeof SCHEMA_GEO_SHAPE_COORD_SYSTEM[keyof typeof SCHEMA_GEO_SHAPE_COORD_SYSTEM]; +type CreateSchemaGeoShapeField = CreateSchemaCommonField; +export interface RediSearchSchema { + [field: string]: CreateSchemaTextField | CreateSchemaNumericField | CreateSchemaGeoField | CreateSchemaTagField | CreateSchemaFlatVectorField | CreateSchemaHNSWVectorField | CreateSchemaGeoShapeField; +} +export declare function pushSchema(args: RedisCommandArguments, schema: RediSearchSchema): void; +export type Params = Record; +export declare function pushParamsArgs(args: RedisCommandArguments, params?: Params): RedisCommandArguments; +export declare function 
pushSearchOptions(args: RedisCommandArguments, options?: SearchOptions): RedisCommandArguments; +interface SearchDocumentValue { + [key: string]: string | number | null | Array | SearchDocumentValue; +} +export interface SearchReply { + total: number; + documents: Array<{ + id: string; + value: SearchDocumentValue; + }>; +} +export interface ProfileOptions { + LIMITED?: true; +} +export type ProfileRawReply = [ + results: T, + profile: [ + _: string, + TotalProfileTime: string, + _: string, + ParsingTime: string, + _: string, + PipelineCreationTime: string, + _: string, + IteratorsProfile: Array + ] +]; +export interface ProfileReply { + results: SearchReply | AGGREGATE.AggregateReply; + profile: ProfileData; +} +interface ChildIterator { + type?: string; + counter?: number; + term?: string; + size?: number; + time?: string; + childIterators?: Array; +} +interface IteratorsProfile { + type?: string; + counter?: number; + queryType?: string; + time?: string; + childIterators?: Array; +} +interface ProfileData { + totalProfileTime: string; + parsingTime: string; + pipelineCreationTime: string; + iteratorsProfile: IteratorsProfile; +} +export declare function transformProfile(reply: Array): ProfileData; diff --git a/node_modules/@redis/search/dist/commands/index.js b/node_modules/@redis/search/dist/commands/index.js new file mode 100644 index 0000000..f2467e0 --- /dev/null +++ b/node_modules/@redis/search/dist/commands/index.js @@ -0,0 +1,458 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformProfile = exports.pushSearchOptions = exports.pushParamsArgs = exports.pushSchema = exports.SCHEMA_GEO_SHAPE_COORD_SYSTEM = exports.VectorAlgorithms = exports.SchemaTextFieldPhonetics = exports.SchemaFieldTypes = exports.pushArgumentsWithLength = exports.pushSortByArguments = exports.pushSortByProperty = exports.RedisSearchLanguages = void 0; +const _LIST = require("./_LIST"); +const ALTER = require("./ALTER"); +const 
AGGREGATE_WITHCURSOR = require("./AGGREGATE_WITHCURSOR"); +const AGGREGATE = require("./AGGREGATE"); +const ALIASADD = require("./ALIASADD"); +const ALIASDEL = require("./ALIASDEL"); +const ALIASUPDATE = require("./ALIASUPDATE"); +const CONFIG_GET = require("./CONFIG_GET"); +const CONFIG_SET = require("./CONFIG_SET"); +const CREATE = require("./CREATE"); +const CURSOR_DEL = require("./CURSOR_DEL"); +const CURSOR_READ = require("./CURSOR_READ"); +const DICTADD = require("./DICTADD"); +const DICTDEL = require("./DICTDEL"); +const DICTDUMP = require("./DICTDUMP"); +const DROPINDEX = require("./DROPINDEX"); +const EXPLAIN = require("./EXPLAIN"); +const EXPLAINCLI = require("./EXPLAINCLI"); +const INFO = require("./INFO"); +const PROFILESEARCH = require("./PROFILE_SEARCH"); +const PROFILEAGGREGATE = require("./PROFILE_AGGREGATE"); +const SEARCH = require("./SEARCH"); +const SEARCH_NOCONTENT = require("./SEARCH_NOCONTENT"); +const SPELLCHECK = require("./SPELLCHECK"); +const SUGADD = require("./SUGADD"); +const SUGDEL = require("./SUGDEL"); +const SUGGET_WITHPAYLOADS = require("./SUGGET_WITHPAYLOADS"); +const SUGGET_WITHSCORES_WITHPAYLOADS = require("./SUGGET_WITHSCORES_WITHPAYLOADS"); +const SUGGET_WITHSCORES = require("./SUGGET_WITHSCORES"); +const SUGGET = require("./SUGGET"); +const SUGLEN = require("./SUGLEN"); +const SYNDUMP = require("./SYNDUMP"); +const SYNUPDATE = require("./SYNUPDATE"); +const TAGVALS = require("./TAGVALS"); +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +exports.default = { + _LIST, + _list: _LIST, + ALTER, + alter: ALTER, + AGGREGATE_WITHCURSOR, + aggregateWithCursor: AGGREGATE_WITHCURSOR, + AGGREGATE, + aggregate: AGGREGATE, + ALIASADD, + aliasAdd: ALIASADD, + ALIASDEL, + aliasDel: ALIASDEL, + ALIASUPDATE, + aliasUpdate: ALIASUPDATE, + CONFIG_GET, + configGet: CONFIG_GET, + CONFIG_SET, + configSet: CONFIG_SET, + CREATE, + create: CREATE, + CURSOR_DEL, + cursorDel: CURSOR_DEL, + CURSOR_READ, + 
cursorRead: CURSOR_READ, + DICTADD, + dictAdd: DICTADD, + DICTDEL, + dictDel: DICTDEL, + DICTDUMP, + dictDump: DICTDUMP, + DROPINDEX, + dropIndex: DROPINDEX, + EXPLAIN, + explain: EXPLAIN, + EXPLAINCLI, + explainCli: EXPLAINCLI, + INFO, + info: INFO, + PROFILESEARCH, + profileSearch: PROFILESEARCH, + PROFILEAGGREGATE, + profileAggregate: PROFILEAGGREGATE, + SEARCH, + search: SEARCH, + SEARCH_NOCONTENT, + searchNoContent: SEARCH_NOCONTENT, + SPELLCHECK, + spellCheck: SPELLCHECK, + SUGADD, + sugAdd: SUGADD, + SUGDEL, + sugDel: SUGDEL, + SUGGET_WITHPAYLOADS, + sugGetWithPayloads: SUGGET_WITHPAYLOADS, + SUGGET_WITHSCORES_WITHPAYLOADS, + sugGetWithScoresWithPayloads: SUGGET_WITHSCORES_WITHPAYLOADS, + SUGGET_WITHSCORES, + sugGetWithScores: SUGGET_WITHSCORES, + SUGGET, + sugGet: SUGGET, + SUGLEN, + sugLen: SUGLEN, + SYNDUMP, + synDump: SYNDUMP, + SYNUPDATE, + synUpdate: SYNUPDATE, + TAGVALS, + tagVals: TAGVALS +}; +var RedisSearchLanguages; +(function (RedisSearchLanguages) { + RedisSearchLanguages["ARABIC"] = "Arabic"; + RedisSearchLanguages["BASQUE"] = "Basque"; + RedisSearchLanguages["CATALANA"] = "Catalan"; + RedisSearchLanguages["DANISH"] = "Danish"; + RedisSearchLanguages["DUTCH"] = "Dutch"; + RedisSearchLanguages["ENGLISH"] = "English"; + RedisSearchLanguages["FINNISH"] = "Finnish"; + RedisSearchLanguages["FRENCH"] = "French"; + RedisSearchLanguages["GERMAN"] = "German"; + RedisSearchLanguages["GREEK"] = "Greek"; + RedisSearchLanguages["HUNGARIAN"] = "Hungarian"; + RedisSearchLanguages["INDONESAIN"] = "Indonesian"; + RedisSearchLanguages["IRISH"] = "Irish"; + RedisSearchLanguages["ITALIAN"] = "Italian"; + RedisSearchLanguages["LITHUANIAN"] = "Lithuanian"; + RedisSearchLanguages["NEPALI"] = "Nepali"; + RedisSearchLanguages["NORWEIGAN"] = "Norwegian"; + RedisSearchLanguages["PORTUGUESE"] = "Portuguese"; + RedisSearchLanguages["ROMANIAN"] = "Romanian"; + RedisSearchLanguages["RUSSIAN"] = "Russian"; + RedisSearchLanguages["SPANISH"] = "Spanish"; + 
RedisSearchLanguages["SWEDISH"] = "Swedish"; + RedisSearchLanguages["TAMIL"] = "Tamil"; + RedisSearchLanguages["TURKISH"] = "Turkish"; + RedisSearchLanguages["CHINESE"] = "Chinese"; +})(RedisSearchLanguages || (exports.RedisSearchLanguages = RedisSearchLanguages = {})); +function pushSortByProperty(args, sortBy) { + if (typeof sortBy === 'string') { + args.push(sortBy); + } + else { + args.push(sortBy.BY); + if (sortBy.DIRECTION) { + args.push(sortBy.DIRECTION); + } + } +} +exports.pushSortByProperty = pushSortByProperty; +function pushSortByArguments(args, name, sortBy) { + const lengthBefore = args.push(name, '' // will be overwritten + ); + if (Array.isArray(sortBy)) { + for (const field of sortBy) { + pushSortByProperty(args, field); + } + } + else { + pushSortByProperty(args, sortBy); + } + args[lengthBefore - 1] = (args.length - lengthBefore).toString(); + return args; +} +exports.pushSortByArguments = pushSortByArguments; +function pushArgumentsWithLength(args, fn) { + const lengthIndex = args.push('') - 1; + fn(args); + args[lengthIndex] = (args.length - lengthIndex - 1).toString(); + return args; +} +exports.pushArgumentsWithLength = pushArgumentsWithLength; +var SchemaFieldTypes; +(function (SchemaFieldTypes) { + SchemaFieldTypes["TEXT"] = "TEXT"; + SchemaFieldTypes["NUMERIC"] = "NUMERIC"; + SchemaFieldTypes["GEO"] = "GEO"; + SchemaFieldTypes["TAG"] = "TAG"; + SchemaFieldTypes["VECTOR"] = "VECTOR"; + SchemaFieldTypes["GEOSHAPE"] = "GEOSHAPE"; +})(SchemaFieldTypes || (exports.SchemaFieldTypes = SchemaFieldTypes = {})); +function pushCommonFieldArguments(args, fieldOptions) { + if (fieldOptions.SORTABLE) { + args.push('SORTABLE'); + if (fieldOptions.SORTABLE === 'UNF') { + args.push('UNF'); + } + } + if (fieldOptions.NOINDEX) { + args.push('NOINDEX'); + } +} +var SchemaTextFieldPhonetics; +(function (SchemaTextFieldPhonetics) { + SchemaTextFieldPhonetics["DM_EN"] = "dm:en"; + SchemaTextFieldPhonetics["DM_FR"] = "dm:fr"; + SchemaTextFieldPhonetics["FM_PT"] = 
"dm:pt"; + SchemaTextFieldPhonetics["DM_ES"] = "dm:es"; +})(SchemaTextFieldPhonetics || (exports.SchemaTextFieldPhonetics = SchemaTextFieldPhonetics = {})); +var VectorAlgorithms; +(function (VectorAlgorithms) { + VectorAlgorithms["FLAT"] = "FLAT"; + VectorAlgorithms["HNSW"] = "HNSW"; +})(VectorAlgorithms || (exports.VectorAlgorithms = VectorAlgorithms = {})); +exports.SCHEMA_GEO_SHAPE_COORD_SYSTEM = { + SPHERICAL: 'SPHERICAL', + FLAT: 'FLAT' +}; +function pushSchema(args, schema) { + for (const [field, fieldOptions] of Object.entries(schema)) { + args.push(field); + if (typeof fieldOptions === 'string') { + args.push(fieldOptions); + continue; + } + if (fieldOptions.AS) { + args.push('AS', fieldOptions.AS); + } + args.push(fieldOptions.type); + switch (fieldOptions.type) { + case SchemaFieldTypes.TEXT: + if (fieldOptions.NOSTEM) { + args.push('NOSTEM'); + } + if (fieldOptions.WEIGHT) { + args.push('WEIGHT', fieldOptions.WEIGHT.toString()); + } + if (fieldOptions.PHONETIC) { + args.push('PHONETIC', fieldOptions.PHONETIC); + } + if (fieldOptions.WITHSUFFIXTRIE) { + args.push('WITHSUFFIXTRIE'); + } + pushCommonFieldArguments(args, fieldOptions); + if (fieldOptions.INDEXEMPTY) { + args.push('INDEXEMPTY'); + } + break; + case SchemaFieldTypes.NUMERIC: + case SchemaFieldTypes.GEO: + pushCommonFieldArguments(args, fieldOptions); + break; + case SchemaFieldTypes.TAG: + if (fieldOptions.SEPARATOR) { + args.push('SEPARATOR', fieldOptions.SEPARATOR); + } + if (fieldOptions.CASESENSITIVE) { + args.push('CASESENSITIVE'); + } + if (fieldOptions.WITHSUFFIXTRIE) { + args.push('WITHSUFFIXTRIE'); + } + pushCommonFieldArguments(args, fieldOptions); + if (fieldOptions.INDEXEMPTY) { + args.push('INDEXEMPTY'); + } + break; + case SchemaFieldTypes.VECTOR: + args.push(fieldOptions.ALGORITHM); + pushArgumentsWithLength(args, () => { + args.push('TYPE', fieldOptions.TYPE, 'DIM', fieldOptions.DIM.toString(), 'DISTANCE_METRIC', fieldOptions.DISTANCE_METRIC); + if (fieldOptions.INITIAL_CAP) { 
+ args.push('INITIAL_CAP', fieldOptions.INITIAL_CAP.toString()); + } + switch (fieldOptions.ALGORITHM) { + case VectorAlgorithms.FLAT: + if (fieldOptions.BLOCK_SIZE) { + args.push('BLOCK_SIZE', fieldOptions.BLOCK_SIZE.toString()); + } + break; + case VectorAlgorithms.HNSW: + if (fieldOptions.M) { + args.push('M', fieldOptions.M.toString()); + } + if (fieldOptions.EF_CONSTRUCTION) { + args.push('EF_CONSTRUCTION', fieldOptions.EF_CONSTRUCTION.toString()); + } + if (fieldOptions.EF_RUNTIME) { + args.push('EF_RUNTIME', fieldOptions.EF_RUNTIME.toString()); + } + break; + } + }); + break; + case SchemaFieldTypes.GEOSHAPE: + if (fieldOptions.COORD_SYSTEM !== undefined) { + args.push('COORD_SYSTEM', fieldOptions.COORD_SYSTEM); + } + pushCommonFieldArguments(args, fieldOptions); + break; + } + if (fieldOptions.INDEXMISSING) { + args.push('INDEXMISSING'); + } + } +} +exports.pushSchema = pushSchema; +function pushParamsArgs(args, params) { + if (params) { + const enrties = Object.entries(params); + args.push('PARAMS', (enrties.length * 2).toString()); + for (const [key, value] of enrties) { + args.push(key, typeof value === 'number' ? 
value.toString() : value); + } + } + return args; +} +exports.pushParamsArgs = pushParamsArgs; +function pushSearchOptions(args, options) { + if (options?.VERBATIM) { + args.push('VERBATIM'); + } + if (options?.NOSTOPWORDS) { + args.push('NOSTOPWORDS'); + } + // if (options?.WITHSCORES) { + // args.push('WITHSCORES'); + // } + // if (options?.WITHPAYLOADS) { + // args.push('WITHPAYLOADS'); + // } + (0, generic_transformers_1.pushOptionalVerdictArgument)(args, 'INKEYS', options?.INKEYS); + (0, generic_transformers_1.pushOptionalVerdictArgument)(args, 'INFIELDS', options?.INFIELDS); + (0, generic_transformers_1.pushOptionalVerdictArgument)(args, 'RETURN', options?.RETURN); + if (options?.SUMMARIZE) { + args.push('SUMMARIZE'); + if (typeof options.SUMMARIZE === 'object') { + if (options.SUMMARIZE.FIELDS) { + args.push('FIELDS'); + (0, generic_transformers_1.pushVerdictArgument)(args, options.SUMMARIZE.FIELDS); + } + if (options.SUMMARIZE.FRAGS) { + args.push('FRAGS', options.SUMMARIZE.FRAGS.toString()); + } + if (options.SUMMARIZE.LEN) { + args.push('LEN', options.SUMMARIZE.LEN.toString()); + } + if (options.SUMMARIZE.SEPARATOR) { + args.push('SEPARATOR', options.SUMMARIZE.SEPARATOR); + } + } + } + if (options?.HIGHLIGHT) { + args.push('HIGHLIGHT'); + if (typeof options.HIGHLIGHT === 'object') { + if (options.HIGHLIGHT.FIELDS) { + args.push('FIELDS'); + (0, generic_transformers_1.pushVerdictArgument)(args, options.HIGHLIGHT.FIELDS); + } + if (options.HIGHLIGHT.TAGS) { + args.push('TAGS', options.HIGHLIGHT.TAGS.open, options.HIGHLIGHT.TAGS.close); + } + } + } + if (options?.SLOP) { + args.push('SLOP', options.SLOP.toString()); + } + if (options?.INORDER) { + args.push('INORDER'); + } + if (options?.LANGUAGE) { + args.push('LANGUAGE', options.LANGUAGE); + } + if (options?.EXPANDER) { + args.push('EXPANDER', options.EXPANDER); + } + if (options?.SCORER) { + args.push('SCORER', options.SCORER); + } + // if (options?.EXPLAINSCORE) { + // args.push('EXPLAINSCORE'); + // } + 
// if (options?.PAYLOAD) { + // args.push('PAYLOAD', options.PAYLOAD); + // } + if (options?.SORTBY) { + args.push('SORTBY'); + pushSortByProperty(args, options.SORTBY); + } + // if (options?.MSORTBY) { + // pushSortByArguments(args, 'MSORTBY', options.MSORTBY); + // } + if (options?.LIMIT) { + args.push('LIMIT', options.LIMIT.from.toString(), options.LIMIT.size.toString()); + } + if (options?.PARAMS) { + pushParamsArgs(args, options.PARAMS); + } + if (options?.DIALECT) { + args.push('DIALECT', options.DIALECT.toString()); + } + if (options?.RETURN?.length === 0) { + args.preserve = true; + } + if (options?.TIMEOUT !== undefined) { + args.push('TIMEOUT', options.TIMEOUT.toString()); + } + return args; +} +exports.pushSearchOptions = pushSearchOptions; +function transformProfile(reply) { + return { + totalProfileTime: reply[0][1], + parsingTime: reply[1][1], + pipelineCreationTime: reply[2][1], + iteratorsProfile: transformIterators(reply[3][1]) + }; +} +exports.transformProfile = transformProfile; +function transformIterators(IteratorsProfile) { + var res = {}; + for (let i = 0; i < IteratorsProfile.length; i += 2) { + const value = IteratorsProfile[i + 1]; + switch (IteratorsProfile[i]) { + case 'Type': + res.type = value; + break; + case 'Counter': + res.counter = value; + break; + case 'Time': + res.time = value; + break; + case 'Query type': + res.queryType = value; + break; + case 'Child iterators': + res.childIterators = value.map(transformChildIterators); + break; + } + } + return res; +} +function transformChildIterators(IteratorsProfile) { + var res = {}; + for (let i = 1; i < IteratorsProfile.length; i += 2) { + const value = IteratorsProfile[i + 1]; + switch (IteratorsProfile[i]) { + case 'Type': + res.type = value; + break; + case 'Counter': + res.counter = value; + break; + case 'Time': + res.time = value; + break; + case 'Size': + res.size = value; + break; + case 'Term': + res.term = value; + break; + case 'Child iterators': + res.childIterators = 
value.map(transformChildIterators); + break; + } + } + return res; +} diff --git a/node_modules/@redis/search/dist/index.d.ts b/node_modules/@redis/search/dist/index.d.ts new file mode 100644 index 0000000..745ad29 --- /dev/null +++ b/node_modules/@redis/search/dist/index.d.ts @@ -0,0 +1,4 @@ +export { default } from './commands'; +export { RediSearchSchema, RedisSearchLanguages, SchemaFieldTypes, SchemaTextFieldPhonetics, SearchReply, VectorAlgorithms } from './commands'; +export { AggregateGroupByReducers, AggregateSteps } from './commands/AGGREGATE'; +export { SearchOptions } from './commands/SEARCH'; diff --git a/node_modules/@redis/search/dist/index.js b/node_modules/@redis/search/dist/index.js new file mode 100644 index 0000000..36d7c1d --- /dev/null +++ b/node_modules/@redis/search/dist/index.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AggregateSteps = exports.AggregateGroupByReducers = exports.VectorAlgorithms = exports.SchemaTextFieldPhonetics = exports.SchemaFieldTypes = exports.RedisSearchLanguages = exports.default = void 0; +var commands_1 = require("./commands"); +Object.defineProperty(exports, "default", { enumerable: true, get: function () { return commands_1.default; } }); +var commands_2 = require("./commands"); +Object.defineProperty(exports, "RedisSearchLanguages", { enumerable: true, get: function () { return commands_2.RedisSearchLanguages; } }); +Object.defineProperty(exports, "SchemaFieldTypes", { enumerable: true, get: function () { return commands_2.SchemaFieldTypes; } }); +Object.defineProperty(exports, "SchemaTextFieldPhonetics", { enumerable: true, get: function () { return commands_2.SchemaTextFieldPhonetics; } }); +Object.defineProperty(exports, "VectorAlgorithms", { enumerable: true, get: function () { return commands_2.VectorAlgorithms; } }); +var AGGREGATE_1 = require("./commands/AGGREGATE"); +Object.defineProperty(exports, "AggregateGroupByReducers", { enumerable: 
true, get: function () { return AGGREGATE_1.AggregateGroupByReducers; } }); +Object.defineProperty(exports, "AggregateSteps", { enumerable: true, get: function () { return AGGREGATE_1.AggregateSteps; } }); diff --git a/node_modules/@redis/search/package.json b/node_modules/@redis/search/package.json new file mode 100644 index 0000000..aaf9bc5 --- /dev/null +++ b/node_modules/@redis/search/package.json @@ -0,0 +1,41 @@ +{ + "name": "@redis/search", + "version": "1.2.0", + "license": "MIT", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/" + ], + "scripts": { + "test": "nyc -r text-summary -r lcov mocha -r source-map-support/register -r ts-node/register './lib/**/*.spec.ts'", + "build": "tsc", + "documentation": "typedoc" + }, + "peerDependencies": { + "@redis/client": "^1.0.0" + }, + "devDependencies": { + "@istanbuljs/nyc-config-typescript": "^1.0.2", + "@redis/test-utils": "*", + "@types/node": "^20.6.2", + "nyc": "^15.1.0", + "release-it": "^16.1.5", + "source-map-support": "^0.5.21", + "ts-node": "^10.9.1", + "typedoc": "^0.25.1", + "typescript": "^5.2.2" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis/tree/master/packages/search", + "keywords": [ + "redis", + "RediSearch" + ] +} diff --git a/node_modules/@redis/time-series/README.md b/node_modules/@redis/time-series/README.md new file mode 100644 index 0000000..5923979 --- /dev/null +++ b/node_modules/@redis/time-series/README.md @@ -0,0 +1,151 @@ +# @redis/time-series + +This package provides support for the [RedisTimeSeries](https://redistimeseries.io) module, which adds a time series data structure to Redis. It extends the [Node Redis client](https://github.com/redis/node-redis) to include functions for each of the RedisTimeSeries commands. 
+ +To use these extra commands, your Redis server must have the RedisTimeSeries module installed. + +## Usage + +For a complete example, see [`time-series.js`](https://github.com/redis/node-redis/blob/master/examples/time-series.js) in the Node Redis examples folder. + +### Creating Time Series data structure in Redis + +The [`TS.CREATE`](https://oss.redis.com/redistimeseries/commands/#tscreate) command creates a new time series. + +Here, we'll create a new time series "`temperature`": + +```javascript + +import { createClient } from 'redis'; +import { TimeSeriesDuplicatePolicies, TimeSeriesEncoding, TimeSeriesAggregationType } from '@redis/time-series'; + +... + + const created = await client.ts.create('temperature', { + RETENTION: 86400000, // 1 day in milliseconds + ENCODING: TimeSeriesEncoding.UNCOMPRESSED, // No compression - When not specified, the option is set to COMPRESSED + DUPLICATE_POLICY: TimeSeriesDuplicatePolicies.BLOCK, // No duplicates - When not specified: set to the global DUPLICATE_POLICY configuration of the database (which by default, is BLOCK). + }); + + if (created === 'OK') { + console.log('Created timeseries.'); + } else { + console.log('Error creating timeseries :('); + process.exit(1); + } + +``` + +### Adding new value to a Time Series data structure in Redis + +With RedisTimeSeries, we can add a single value to time series data structure using the [`TS.ADD`](https://redis.io/commands/ts.add/) command and if we would like to add multiple values we can use the [`TS.MADD`](https://redis.io/commands/ts.madd/) command. 
+ +```javascript + +let value = Math.floor(Math.random() * 1000) + 1; // Random data point value + let currentTimestamp = 1640995200000; // Jan 1 2022 00:00:00 + let num = 0; + + while (num < 10000) { + // Add a new value to the timeseries, providing our own timestamp: + // https://redis.io/commands/ts.add/ + await client.ts.add('temperature', currentTimestamp, value); + console.log(`Added timestamp ${currentTimestamp}, value ${value}.`); + + num += 1; + value = Math.floor(Math.random() * 1000) + 1; // Get another random value + currentTimestamp += 1000; // Move on one second. + } + + // Add multiple values to the timeseries in round trip to the server: + // https://redis.io/commands/ts.madd/ + const response = await client.ts.mAdd([{ + key: 'temperature', + timestamp: currentTimestamp + 60000, + value: Math.floor(Math.random() * 1000) + 1 + }, { + key: 'temperature', + timestamp: currentTimestamp + 120000, + value: Math.floor(Math.random() * 1000) + 1 + }]); + + +``` + +### Retrieving Time Series data from Redis + +With RedisTimeSeries, we can retrieve the time series data using the [`TS.RANGE`](https://redis.io/commands/ts.range/) command by passing the criteria as follows: + +```javascript + +// Query the timeseries with TS.RANGE: + // https://redis.io/commands/ts.range/ + const fromTimestamp = 1640995200000; // Jan 1 2022 00:00:00 + const toTimestamp = 1640995260000; // Jan 1 2022 00:01:00 + const rangeResponse = await client.ts.range('temperature', fromTimestamp, toTimestamp, { + // Group into 10 second averages. 
+ AGGREGATION: { + type: TimeSeriesAggregationType.AVERAGE, + timeBucket: 10000 + } + }); + + console.log('RANGE RESPONSE:'); + // rangeResponse looks like: + // [ + // { timestamp: 1640995200000, value: 356.8 }, + // { timestamp: 1640995210000, value: 534.8 }, + // { timestamp: 1640995220000, value: 481.3 }, + // { timestamp: 1640995230000, value: 437 }, + // { timestamp: 1640995240000, value: 507.3 }, + // { timestamp: 1640995250000, value: 581.2 }, + // { timestamp: 1640995260000, value: 600 } + // ] + +``` + +### Altering Time Series data Stored in Redis + +RedisTimeSeries includes commands that can update values in a time series data structure. + +Using the [`TS.ALTER`](https://redis.io/commands/ts.alter/) command, we can update time series retention like this: + +```javascript + + // https://redis.io/commands/ts.alter/ + const alterResponse = await client.ts.alter('temperature', { + RETENTION: 0 // Keep the entries forever + }); + +``` + +### Retrieving Information about the timeseries Stored in Redis + +RedisTimeSeries also includes commands that can help to view the information on the state of a time series. + +Using the [`TS.INFO`](https://redis.io/commands/ts.info/) command, we can view timeseries information like this: + +```javascript + + // Get some information about the state of the timeseries. 
+ // https://redis.io/commands/ts.info/ + const tsInfo = await client.ts.info('temperature'); + + // tsInfo looks like this: + // { + // totalSamples: 1440, + // memoryUsage: 28904, + // firstTimestamp: 1641508920000, + // lastTimestamp: 1641595320000, + // retentionTime: 86400000, + // chunkCount: 7, + // chunkSize: 4096, + // chunkType: 'uncompressed', + // duplicatePolicy: 'block', + // labels: [], + // sourceKey: null, + // rules: [] + // } + +``` + diff --git a/node_modules/@redis/time-series/dist/commands/ADD.d.ts b/node_modules/@redis/time-series/dist/commands/ADD.d.ts new file mode 100644 index 0000000..58b0489 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/ADD.d.ts @@ -0,0 +1,17 @@ +import { TimeSeriesEncoding, TimeSeriesDuplicatePolicies, Labels, Timestamp } from '.'; +export interface TsIgnoreOptions { + MAX_TIME_DIFF: number; + MAX_VAL_DIFF: number; +} +interface AddOptions { + RETENTION?: number; + ENCODING?: TimeSeriesEncoding; + CHUNK_SIZE?: number; + ON_DUPLICATE?: TimeSeriesDuplicatePolicies; + LABELS?: Labels; + IGNORE?: TsIgnoreOptions; +} +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, timestamp: Timestamp, value: number, options?: AddOptions): Array; +export declare function transformReply(): number; +export {}; diff --git a/node_modules/@redis/time-series/dist/commands/ADD.js b/node_modules/@redis/time-series/dist/commands/ADD.js new file mode 100644 index 0000000..a799b62 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/ADD.js @@ -0,0 +1,23 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, timestamp, value, options) { + const args = [ + 'TS.ADD', + key, + (0, _1.transformTimestampArgument)(timestamp), + value.toString() + ]; + (0, _1.pushRetentionArgument)(args, 
options?.RETENTION); + (0, _1.pushEncodingArgument)(args, options?.ENCODING); + (0, _1.pushChunkSizeArgument)(args, options?.CHUNK_SIZE); + if (options?.ON_DUPLICATE) { + args.push('ON_DUPLICATE', options.ON_DUPLICATE); + } + (0, _1.pushLabelsArgument)(args, options?.LABELS); + (0, _1.pushIgnoreArgument)(args, options?.IGNORE); + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/time-series/dist/commands/ALTER.d.ts b/node_modules/@redis/time-series/dist/commands/ALTER.d.ts new file mode 100644 index 0000000..8f30a2a --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/ALTER.d.ts @@ -0,0 +1,13 @@ +import { Labels, TimeSeriesDuplicatePolicies } from '.'; +import { TsIgnoreOptions } from './ADD'; +export declare const FIRST_KEY_INDEX = 1; +interface AlterOptions { + RETENTION?: number; + CHUNK_SIZE?: number; + DUPLICATE_POLICY?: TimeSeriesDuplicatePolicies; + LABELS?: Labels; + IGNORE?: TsIgnoreOptions; +} +export declare function transformArguments(key: string, options?: AlterOptions): Array; +export declare function transformReply(): 'OK'; +export {}; diff --git a/node_modules/@redis/time-series/dist/commands/ALTER.js b/node_modules/@redis/time-series/dist/commands/ALTER.js new file mode 100644 index 0000000..3041885 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/ALTER.js @@ -0,0 +1,15 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, options) { + const args = ['TS.ALTER', key]; + (0, _1.pushRetentionArgument)(args, options?.RETENTION); + (0, _1.pushChunkSizeArgument)(args, options?.CHUNK_SIZE); + (0, _1.pushDuplicatePolicy)(args, options?.DUPLICATE_POLICY); + (0, _1.pushLabelsArgument)(args, options?.LABELS); + (0, _1.pushIgnoreArgument)(args, options?.IGNORE); + return args; +} 
+exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/time-series/dist/commands/CREATE.d.ts b/node_modules/@redis/time-series/dist/commands/CREATE.d.ts new file mode 100644 index 0000000..386ecfb --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/CREATE.d.ts @@ -0,0 +1,14 @@ +import { TimeSeriesEncoding, TimeSeriesDuplicatePolicies, Labels } from '.'; +import { TsIgnoreOptions } from './ADD'; +export declare const FIRST_KEY_INDEX = 1; +interface CreateOptions { + RETENTION?: number; + ENCODING?: TimeSeriesEncoding; + CHUNK_SIZE?: number; + DUPLICATE_POLICY?: TimeSeriesDuplicatePolicies; + LABELS?: Labels; + IGNORE?: TsIgnoreOptions; +} +export declare function transformArguments(key: string, options?: CreateOptions): Array; +export declare function transformReply(): 'OK'; +export {}; diff --git a/node_modules/@redis/time-series/dist/commands/CREATE.js b/node_modules/@redis/time-series/dist/commands/CREATE.js new file mode 100644 index 0000000..0c3ae17 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/CREATE.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, options) { + const args = ['TS.CREATE', key]; + (0, _1.pushRetentionArgument)(args, options?.RETENTION); + (0, _1.pushEncodingArgument)(args, options?.ENCODING); + (0, _1.pushChunkSizeArgument)(args, options?.CHUNK_SIZE); + (0, _1.pushDuplicatePolicy)(args, options?.DUPLICATE_POLICY); + (0, _1.pushLabelsArgument)(args, options?.LABELS); + (0, _1.pushIgnoreArgument)(args, options?.IGNORE); + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/time-series/dist/commands/CREATERULE.d.ts b/node_modules/@redis/time-series/dist/commands/CREATERULE.d.ts new file mode 100644 index 0000000..c2544f4 --- /dev/null +++ 
b/node_modules/@redis/time-series/dist/commands/CREATERULE.d.ts @@ -0,0 +1,4 @@ +import { TimeSeriesAggregationType } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(sourceKey: string, destinationKey: string, aggregationType: TimeSeriesAggregationType, bucketDuration: number, alignTimestamp?: number): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/time-series/dist/commands/CREATERULE.js b/node_modules/@redis/time-series/dist/commands/CREATERULE.js new file mode 100644 index 0000000..a77fcb4 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/CREATERULE.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(sourceKey, destinationKey, aggregationType, bucketDuration, alignTimestamp) { + const args = [ + 'TS.CREATERULE', + sourceKey, + destinationKey, + 'AGGREGATION', + aggregationType, + bucketDuration.toString() + ]; + if (alignTimestamp) { + args.push(alignTimestamp.toString()); + } + return args; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/time-series/dist/commands/DECRBY.d.ts b/node_modules/@redis/time-series/dist/commands/DECRBY.d.ts new file mode 100644 index 0000000..8c04451 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/DECRBY.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { IncrDecrOptions } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, value: number, options?: IncrDecrOptions): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/time-series/dist/commands/DECRBY.js b/node_modules/@redis/time-series/dist/commands/DECRBY.js new file mode 100644 index 
0000000..adb19a9 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/DECRBY.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, value, options) { + return (0, _1.transformIncrDecrArguments)('TS.DECRBY', key, value, options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/time-series/dist/commands/DEL.d.ts b/node_modules/@redis/time-series/dist/commands/DEL.d.ts new file mode 100644 index 0000000..c33a8ab --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/DEL.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { Timestamp } from '.'; +export declare const FIRTS_KEY_INDEX = 1; +export declare function transformArguments(key: string, fromTimestamp: Timestamp, toTimestamp: Timestamp): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/time-series/dist/commands/DEL.js b/node_modules/@redis/time-series/dist/commands/DEL.js new file mode 100644 index 0000000..39a8236 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/DEL.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRTS_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRTS_KEY_INDEX = 1; +function transformArguments(key, fromTimestamp, toTimestamp) { + return [ + 'TS.DEL', + key, + (0, _1.transformTimestampArgument)(fromTimestamp), + (0, _1.transformTimestampArgument)(toTimestamp) + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/time-series/dist/commands/DELETERULE.d.ts b/node_modules/@redis/time-series/dist/commands/DELETERULE.d.ts new file mode 100644 index 0000000..a13a756 --- /dev/null +++ 
b/node_modules/@redis/time-series/dist/commands/DELETERULE.d.ts @@ -0,0 +1,3 @@ +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(sourceKey: string, destinationKey: string): Array; +export declare function transformReply(): 'OK'; diff --git a/node_modules/@redis/time-series/dist/commands/DELETERULE.js b/node_modules/@redis/time-series/dist/commands/DELETERULE.js new file mode 100644 index 0000000..aece0d6 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/DELETERULE.js @@ -0,0 +1,12 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +function transformArguments(sourceKey, destinationKey) { + return [ + 'TS.DELETERULE', + sourceKey, + destinationKey + ]; +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/time-series/dist/commands/GET.d.ts b/node_modules/@redis/time-series/dist/commands/GET.d.ts new file mode 100644 index 0000000..16f1ff8 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/GET.d.ts @@ -0,0 +1,10 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { SampleRawReply, SampleReply } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +interface GetOptions { + LATEST?: boolean; +} +export declare function transformArguments(key: string, options?: GetOptions): RedisCommandArguments; +export declare function transformReply(reply: [] | SampleRawReply): null | SampleReply; +export {}; diff --git a/node_modules/@redis/time-series/dist/commands/GET.js b/node_modules/@redis/time-series/dist/commands/GET.js new file mode 100644 index 0000000..9f7b9a9 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/GET.js @@ -0,0 +1,16 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = 
exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, options) { + return (0, _1.pushLatestArgument)(['TS.GET', key], options?.LATEST); +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + if (reply.length === 0) + return null; + return (0, _1.transformSampleReply)(reply); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/time-series/dist/commands/INCRBY.d.ts b/node_modules/@redis/time-series/dist/commands/INCRBY.d.ts new file mode 100644 index 0000000..8c04451 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/INCRBY.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { IncrDecrOptions } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare function transformArguments(key: string, value: number, options?: IncrDecrOptions): RedisCommandArguments; +export declare function transformReply(): number; diff --git a/node_modules/@redis/time-series/dist/commands/INCRBY.js b/node_modules/@redis/time-series/dist/commands/INCRBY.js new file mode 100644 index 0000000..18037d0 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/INCRBY.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(key, value, options) { + return (0, _1.transformIncrDecrArguments)('TS.INCRBY', key, value, options); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/time-series/dist/commands/INFO.d.ts b/node_modules/@redis/time-series/dist/commands/INFO.d.ts new file mode 100644 index 0000000..6eb9482 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/INFO.d.ts 
@@ -0,0 +1,52 @@ +import { TimeSeriesAggregationType, TimeSeriesDuplicatePolicies } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string): Array; +export type InfoRawReply = [ + 'totalSamples', + number, + 'memoryUsage', + number, + 'firstTimestamp', + number, + 'lastTimestamp', + number, + 'retentionTime', + number, + 'chunkCount', + number, + 'chunkSize', + number, + 'chunkType', + string, + 'duplicatePolicy', + TimeSeriesDuplicatePolicies | null, + 'labels', + Array<[name: string, value: string]>, + 'sourceKey', + string | null, + 'rules', + Array<[key: string, timeBucket: number, aggregationType: TimeSeriesAggregationType]> +]; +export interface InfoReply { + totalSamples: number; + memoryUsage: number; + firstTimestamp: number; + lastTimestamp: number; + retentionTime: number; + chunkCount: number; + chunkSize: number; + chunkType: string; + duplicatePolicy: TimeSeriesDuplicatePolicies | null; + labels: Array<{ + name: string; + value: string; + }>; + sourceKey: string | null; + rules: Array<{ + key: string; + timeBucket: number; + aggregationType: TimeSeriesAggregationType; + }>; +} +export declare function transformReply(reply: InfoRawReply): InfoReply; diff --git a/node_modules/@redis/time-series/dist/commands/INFO.js b/node_modules/@redis/time-series/dist/commands/INFO.js new file mode 100644 index 0000000..007582a --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/INFO.js @@ -0,0 +1,33 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key) { + return ['TS.INFO', key]; +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return { + totalSamples: reply[1], + memoryUsage: reply[3], 
+ firstTimestamp: reply[5], + lastTimestamp: reply[7], + retentionTime: reply[9], + chunkCount: reply[11], + chunkSize: reply[13], + chunkType: reply[15], + duplicatePolicy: reply[17], + labels: reply[19].map(([name, value]) => ({ + name, + value + })), + sourceKey: reply[21], + rules: reply[23].map(([key, timeBucket, aggregationType]) => ({ + key, + timeBucket, + aggregationType + })) + }; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/time-series/dist/commands/INFO_DEBUG.d.ts b/node_modules/@redis/time-series/dist/commands/INFO_DEBUG.d.ts new file mode 100644 index 0000000..4b71b47 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/INFO_DEBUG.d.ts @@ -0,0 +1,32 @@ +import { InfoRawReply, InfoReply } from './INFO'; +export { IS_READ_ONLY, FIRST_KEY_INDEX } from './INFO'; +export declare function transformArguments(key: string): Array; +type InfoDebugRawReply = [ + ...InfoRawReply, + 'keySelfName', + string, + 'chunks', + Array<[ + 'startTimestamp', + number, + 'endTimestamp', + number, + 'samples', + number, + 'size', + number, + 'bytesPerSample', + string + ]> +]; +interface InfoDebugReply extends InfoReply { + keySelfName: string; + chunks: Array<{ + startTimestamp: number; + endTimestamp: number; + samples: number; + size: number; + bytesPerSample: string; + }>; +} +export declare function transformReply(rawReply: InfoDebugRawReply): InfoDebugReply; diff --git a/node_modules/@redis/time-series/dist/commands/INFO_DEBUG.js b/node_modules/@redis/time-series/dist/commands/INFO_DEBUG.js new file mode 100644 index 0000000..c289e07 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/INFO_DEBUG.js @@ -0,0 +1,26 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.FIRST_KEY_INDEX = exports.IS_READ_ONLY = void 0; +const INFO_1 = require("./INFO"); +var INFO_2 = require("./INFO"); +Object.defineProperty(exports, 
"IS_READ_ONLY", { enumerable: true, get: function () { return INFO_2.IS_READ_ONLY; } }); +Object.defineProperty(exports, "FIRST_KEY_INDEX", { enumerable: true, get: function () { return INFO_2.FIRST_KEY_INDEX; } }); +function transformArguments(key) { + const args = (0, INFO_1.transformArguments)(key); + args.push('DEBUG'); + return args; +} +exports.transformArguments = transformArguments; +function transformReply(rawReply) { + const reply = (0, INFO_1.transformReply)(rawReply); + reply.keySelfName = rawReply[25]; + reply.chunks = rawReply[27].map(chunk => ({ + startTimestamp: chunk[1], + endTimestamp: chunk[3], + samples: chunk[5], + size: chunk[7], + bytesPerSample: chunk[9] + })); + return reply; +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/time-series/dist/commands/MADD.d.ts b/node_modules/@redis/time-series/dist/commands/MADD.d.ts new file mode 100644 index 0000000..f9c5e98 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MADD.d.ts @@ -0,0 +1,10 @@ +import { Timestamp } from '.'; +export declare const FIRST_KEY_INDEX = 1; +interface MAddSample { + key: string; + timestamp: Timestamp; + value: number; +} +export declare function transformArguments(toAdd: Array): Array; +export declare function transformReply(): Array; +export {}; diff --git a/node_modules/@redis/time-series/dist/commands/MADD.js b/node_modules/@redis/time-series/dist/commands/MADD.js new file mode 100644 index 0000000..54b74c2 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MADD.js @@ -0,0 +1,13 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +function transformArguments(toAdd) { + const args = ['TS.MADD']; + for (const { key, timestamp, value } of toAdd) { + args.push(key, (0, _1.transformTimestampArgument)(timestamp), value.toString()); + } + return args; +} 
+exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/time-series/dist/commands/MGET.d.ts b/node_modules/@redis/time-series/dist/commands/MGET.d.ts new file mode 100644 index 0000000..7b1f74b --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MGET.d.ts @@ -0,0 +1,17 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { Filter, RawLabels, SampleRawReply, SampleReply } from '.'; +export declare const IS_READ_ONLY = true; +export interface MGetOptions { + LATEST?: boolean; +} +export declare function transformArguments(filter: Filter, options?: MGetOptions): RedisCommandArguments; +export type MGetRawReply = Array<[ + key: string, + labels: RawLabels, + sample: SampleRawReply +]>; +export interface MGetReply { + key: string; + sample: SampleReply; +} +export declare function transformReply(reply: MGetRawReply): Array; diff --git a/node_modules/@redis/time-series/dist/commands/MGET.js b/node_modules/@redis/time-series/dist/commands/MGET.js new file mode 100644 index 0000000..c0e4012 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MGET.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const _1 = require("."); +exports.IS_READ_ONLY = true; +function transformArguments(filter, options) { + const args = (0, _1.pushLatestArgument)(['TS.MGET'], options?.LATEST); + return (0, _1.pushFilterArgument)(args, filter); +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return reply.map(([key, _, sample]) => ({ + key, + sample: (0, _1.transformSampleReply)(sample) + })); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/time-series/dist/commands/MGET_WITHLABELS.d.ts b/node_modules/@redis/time-series/dist/commands/MGET_WITHLABELS.d.ts new file mode 100644 index 0000000..7b32b64 --- /dev/null 
+++ b/node_modules/@redis/time-series/dist/commands/MGET_WITHLABELS.d.ts @@ -0,0 +1,13 @@ +import { SelectedLabels, Labels, Filter } from '.'; +import { MGetOptions, MGetRawReply, MGetReply } from './MGET'; +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +export declare const IS_READ_ONLY = true; +interface MGetWithLabelsOptions extends MGetOptions { + SELECTED_LABELS?: SelectedLabels; +} +export declare function transformArguments(filter: Filter, options?: MGetWithLabelsOptions): RedisCommandArguments; +export interface MGetWithLabelsReply extends MGetReply { + labels: Labels; +} +export declare function transformReply(reply: MGetRawReply): Array; +export {}; diff --git a/node_modules/@redis/time-series/dist/commands/MGET_WITHLABELS.js b/node_modules/@redis/time-series/dist/commands/MGET_WITHLABELS.js new file mode 100644 index 0000000..fcda8f9 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MGET_WITHLABELS.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const _1 = require("."); +exports.IS_READ_ONLY = true; +function transformArguments(filter, options) { + const args = (0, _1.pushWithLabelsArgument)(['TS.MGET'], options?.SELECTED_LABELS); + return (0, _1.pushFilterArgument)(args, filter); +} +exports.transformArguments = transformArguments; +; +function transformReply(reply) { + return reply.map(([key, labels, sample]) => ({ + key, + labels: (0, _1.transformLablesReply)(labels), + sample: (0, _1.transformSampleReply)(sample) + })); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/time-series/dist/commands/MRANGE.d.ts b/node_modules/@redis/time-series/dist/commands/MRANGE.d.ts new file mode 100644 index 0000000..874d1e5 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MRANGE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from 
'@redis/client/dist/lib/commands'; +import { MRangeOptions, Timestamp, Filter } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(fromTimestamp: Timestamp, toTimestamp: Timestamp, filters: Filter, options?: MRangeOptions): RedisCommandArguments; +export { transformMRangeReply as transformReply } from '.'; diff --git a/node_modules/@redis/time-series/dist/commands/MRANGE.js b/node_modules/@redis/time-series/dist/commands/MRANGE.js new file mode 100644 index 0000000..0c7cdeb --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MRANGE.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const _1 = require("."); +exports.IS_READ_ONLY = true; +function transformArguments(fromTimestamp, toTimestamp, filters, options) { + return (0, _1.pushMRangeArguments)(['TS.MRANGE'], fromTimestamp, toTimestamp, filters, options); +} +exports.transformArguments = transformArguments; +var _2 = require("."); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _2.transformMRangeReply; } }); diff --git a/node_modules/@redis/time-series/dist/commands/MRANGE_WITHLABELS.d.ts b/node_modules/@redis/time-series/dist/commands/MRANGE_WITHLABELS.d.ts new file mode 100644 index 0000000..99c86e5 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MRANGE_WITHLABELS.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { Timestamp, MRangeWithLabelsOptions } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(fromTimestamp: Timestamp, toTimestamp: Timestamp, filters: string | Array, options?: MRangeWithLabelsOptions): RedisCommandArguments; +export { transformMRangeWithLabelsReply as transformReply } from '.'; diff --git 
a/node_modules/@redis/time-series/dist/commands/MRANGE_WITHLABELS.js b/node_modules/@redis/time-series/dist/commands/MRANGE_WITHLABELS.js new file mode 100644 index 0000000..8a70547 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MRANGE_WITHLABELS.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const _1 = require("."); +exports.IS_READ_ONLY = true; +function transformArguments(fromTimestamp, toTimestamp, filters, options) { + return (0, _1.pushMRangeWithLabelsArguments)(['TS.MRANGE'], fromTimestamp, toTimestamp, filters, options); +} +exports.transformArguments = transformArguments; +var _2 = require("."); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _2.transformMRangeWithLabelsReply; } }); diff --git a/node_modules/@redis/time-series/dist/commands/MREVRANGE.d.ts b/node_modules/@redis/time-series/dist/commands/MREVRANGE.d.ts new file mode 100644 index 0000000..874d1e5 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MREVRANGE.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { MRangeOptions, Timestamp, Filter } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(fromTimestamp: Timestamp, toTimestamp: Timestamp, filters: Filter, options?: MRangeOptions): RedisCommandArguments; +export { transformMRangeReply as transformReply } from '.'; diff --git a/node_modules/@redis/time-series/dist/commands/MREVRANGE.js b/node_modules/@redis/time-series/dist/commands/MREVRANGE.js new file mode 100644 index 0000000..f433633 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MREVRANGE.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = 
exports.IS_READ_ONLY = void 0; +const _1 = require("."); +exports.IS_READ_ONLY = true; +function transformArguments(fromTimestamp, toTimestamp, filters, options) { + return (0, _1.pushMRangeArguments)(['TS.MREVRANGE'], fromTimestamp, toTimestamp, filters, options); +} +exports.transformArguments = transformArguments; +var _2 = require("."); +Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _2.transformMRangeReply; } }); diff --git a/node_modules/@redis/time-series/dist/commands/MREVRANGE_WITHLABELS.d.ts b/node_modules/@redis/time-series/dist/commands/MREVRANGE_WITHLABELS.d.ts new file mode 100644 index 0000000..0cdbf08 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MREVRANGE_WITHLABELS.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { Timestamp, MRangeWithLabelsOptions, Filter } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(fromTimestamp: Timestamp, toTimestamp: Timestamp, filters: Filter, options?: MRangeWithLabelsOptions): RedisCommandArguments; +export { transformMRangeWithLabelsReply as transformReply } from '.'; diff --git a/node_modules/@redis/time-series/dist/commands/MREVRANGE_WITHLABELS.js b/node_modules/@redis/time-series/dist/commands/MREVRANGE_WITHLABELS.js new file mode 100644 index 0000000..81b04c0 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/MREVRANGE_WITHLABELS.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = void 0; +const _1 = require("."); +exports.IS_READ_ONLY = true; +function transformArguments(fromTimestamp, toTimestamp, filters, options) { + return (0, _1.pushMRangeWithLabelsArguments)(['TS.MREVRANGE'], fromTimestamp, toTimestamp, filters, options); +} +exports.transformArguments = transformArguments; +var _2 = require("."); 
+Object.defineProperty(exports, "transformReply", { enumerable: true, get: function () { return _2.transformMRangeWithLabelsReply; } }); diff --git a/node_modules/@redis/time-series/dist/commands/QUERYINDEX.d.ts b/node_modules/@redis/time-series/dist/commands/QUERYINDEX.d.ts new file mode 100644 index 0000000..10f6c1c --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/QUERYINDEX.d.ts @@ -0,0 +1,5 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { Filter } from '.'; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(filter: Filter): RedisCommandArguments; +export declare function transformReply(): Array; diff --git a/node_modules/@redis/time-series/dist/commands/QUERYINDEX.js b/node_modules/@redis/time-series/dist/commands/QUERYINDEX.js new file mode 100644 index 0000000..37ea026 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/QUERYINDEX.js @@ -0,0 +1,9 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformArguments = exports.IS_READ_ONLY = void 0; +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +exports.IS_READ_ONLY = true; +function transformArguments(filter) { + return (0, generic_transformers_1.pushVerdictArguments)(['TS.QUERYINDEX'], filter); +} +exports.transformArguments = transformArguments; diff --git a/node_modules/@redis/time-series/dist/commands/RANGE.d.ts b/node_modules/@redis/time-series/dist/commands/RANGE.d.ts new file mode 100644 index 0000000..07fa6f3 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/RANGE.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { RangeOptions, Timestamp, SampleRawReply, SampleReply } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, fromTimestamp: 
Timestamp, toTimestamp: Timestamp, options?: RangeOptions): RedisCommandArguments; +export declare function transformReply(reply: Array): Array; diff --git a/node_modules/@redis/time-series/dist/commands/RANGE.js b/node_modules/@redis/time-series/dist/commands/RANGE.js new file mode 100644 index 0000000..0136280 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/RANGE.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, fromTimestamp, toTimestamp, options) { + return (0, _1.pushRangeArguments)(['TS.RANGE', key], fromTimestamp, toTimestamp, options); +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return (0, _1.transformRangeReply)(reply); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/time-series/dist/commands/REVRANGE.d.ts b/node_modules/@redis/time-series/dist/commands/REVRANGE.d.ts new file mode 100644 index 0000000..07fa6f3 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/REVRANGE.d.ts @@ -0,0 +1,6 @@ +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +import { RangeOptions, Timestamp, SampleRawReply, SampleReply } from '.'; +export declare const FIRST_KEY_INDEX = 1; +export declare const IS_READ_ONLY = true; +export declare function transformArguments(key: string, fromTimestamp: Timestamp, toTimestamp: Timestamp, options?: RangeOptions): RedisCommandArguments; +export declare function transformReply(reply: Array): Array; diff --git a/node_modules/@redis/time-series/dist/commands/REVRANGE.js b/node_modules/@redis/time-series/dist/commands/REVRANGE.js new file mode 100644 index 0000000..e3abdac --- /dev/null +++ 
b/node_modules/@redis/time-series/dist/commands/REVRANGE.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.transformReply = exports.transformArguments = exports.IS_READ_ONLY = exports.FIRST_KEY_INDEX = void 0; +const _1 = require("."); +exports.FIRST_KEY_INDEX = 1; +exports.IS_READ_ONLY = true; +function transformArguments(key, fromTimestamp, toTimestamp, options) { + return (0, _1.pushRangeArguments)(['TS.REVRANGE', key], fromTimestamp, toTimestamp, options); +} +exports.transformArguments = transformArguments; +function transformReply(reply) { + return (0, _1.transformRangeReply)(reply); +} +exports.transformReply = transformReply; diff --git a/node_modules/@redis/time-series/dist/commands/index.d.ts b/node_modules/@redis/time-series/dist/commands/index.d.ts new file mode 100644 index 0000000..0db3157 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/index.d.ts @@ -0,0 +1,193 @@ +import * as ADD from './ADD'; +import * as ALTER from './ALTER'; +import * as CREATE from './CREATE'; +import * as CREATERULE from './CREATERULE'; +import * as DECRBY from './DECRBY'; +import * as DEL from './DEL'; +import * as DELETERULE from './DELETERULE'; +import * as GET from './GET'; +import * as INCRBY from './INCRBY'; +import * as INFO_DEBUG from './INFO_DEBUG'; +import * as INFO from './INFO'; +import * as MADD from './MADD'; +import * as MGET from './MGET'; +import * as MGET_WITHLABELS from './MGET_WITHLABELS'; +import * as QUERYINDEX from './QUERYINDEX'; +import * as RANGE from './RANGE'; +import * as REVRANGE from './REVRANGE'; +import * as MRANGE from './MRANGE'; +import * as MRANGE_WITHLABELS from './MRANGE_WITHLABELS'; +import * as MREVRANGE from './MREVRANGE'; +import * as MREVRANGE_WITHLABELS from './MREVRANGE_WITHLABELS'; +import { RedisCommandArguments } from '@redis/client/dist/lib/commands'; +declare const _default: { + ADD: typeof ADD; + add: typeof ADD; + ALTER: typeof ALTER; + alter: typeof 
ALTER; + CREATE: typeof CREATE; + create: typeof CREATE; + CREATERULE: typeof CREATERULE; + createRule: typeof CREATERULE; + DECRBY: typeof DECRBY; + decrBy: typeof DECRBY; + DEL: typeof DEL; + del: typeof DEL; + DELETERULE: typeof DELETERULE; + deleteRule: typeof DELETERULE; + GET: typeof GET; + get: typeof GET; + INCRBY: typeof INCRBY; + incrBy: typeof INCRBY; + INFO_DEBUG: typeof INFO_DEBUG; + infoDebug: typeof INFO_DEBUG; + INFO: typeof INFO; + info: typeof INFO; + MADD: typeof MADD; + mAdd: typeof MADD; + MGET: typeof MGET; + mGet: typeof MGET; + MGET_WITHLABELS: typeof MGET_WITHLABELS; + mGetWithLabels: typeof MGET_WITHLABELS; + QUERYINDEX: typeof QUERYINDEX; + queryIndex: typeof QUERYINDEX; + RANGE: typeof RANGE; + range: typeof RANGE; + REVRANGE: typeof REVRANGE; + revRange: typeof REVRANGE; + MRANGE: typeof MRANGE; + mRange: typeof MRANGE; + MRANGE_WITHLABELS: typeof MRANGE_WITHLABELS; + mRangeWithLabels: typeof MRANGE_WITHLABELS; + MREVRANGE: typeof MREVRANGE; + mRevRange: typeof MREVRANGE; + MREVRANGE_WITHLABELS: typeof MREVRANGE_WITHLABELS; + mRevRangeWithLabels: typeof MREVRANGE_WITHLABELS; +}; +export default _default; +export declare enum TimeSeriesAggregationType { + AVG = "AVG", + AVERAGE = "AVG", + FIRST = "FIRST", + LAST = "LAST", + MIN = "MIN", + MINIMUM = "MIN", + MAX = "MAX", + MAXIMUM = "MAX", + SUM = "SUM", + RANGE = "RANGE", + COUNT = "COUNT", + STD_P = "STD.P", + STD_S = "STD.S", + VAR_P = "VAR.P", + VAR_S = "VAR.S", + TWA = "TWA" +} +export declare enum TimeSeriesDuplicatePolicies { + BLOCK = "BLOCK", + FIRST = "FIRST", + LAST = "LAST", + MIN = "MIN", + MAX = "MAX", + SUM = "SUM" +} +export declare enum TimeSeriesReducers { + AVG = "AVG", + SUM = "SUM", + MIN = "MIN", + MINIMUM = "MIN", + MAX = "MAX", + MAXIMUM = "MAX", + RANGE = "range", + COUNT = "COUNT", + STD_P = "STD.P", + STD_S = "STD.S", + VAR_P = "VAR.P", + VAR_S = "VAR.S" +} +export type Timestamp = number | Date | string; +export declare function 
transformTimestampArgument(timestamp: Timestamp): string; +export declare function pushIgnoreArgument(args: RedisCommandArguments, ignore?: ADD.TsIgnoreOptions): void; +export declare function pushRetentionArgument(args: RedisCommandArguments, retention?: number): RedisCommandArguments; +export declare enum TimeSeriesEncoding { + COMPRESSED = "COMPRESSED", + UNCOMPRESSED = "UNCOMPRESSED" +} +export declare function pushEncodingArgument(args: RedisCommandArguments, encoding?: TimeSeriesEncoding): RedisCommandArguments; +export declare function pushChunkSizeArgument(args: RedisCommandArguments, chunkSize?: number): RedisCommandArguments; +export declare function pushDuplicatePolicy(args: RedisCommandArguments, duplicatePolicy?: TimeSeriesDuplicatePolicies): RedisCommandArguments; +export type RawLabels = Array<[label: string, value: string]>; +export type Labels = { + [label: string]: string; +}; +export declare function transformLablesReply(reply: RawLabels): Labels; +export declare function pushLabelsArgument(args: RedisCommandArguments, labels?: Labels): RedisCommandArguments; +export interface IncrDecrOptions { + TIMESTAMP?: Timestamp; + RETENTION?: number; + UNCOMPRESSED?: boolean; + CHUNK_SIZE?: number; + LABELS?: Labels; +} +export declare function transformIncrDecrArguments(command: 'TS.INCRBY' | 'TS.DECRBY', key: string, value: number, options?: IncrDecrOptions): RedisCommandArguments; +export type SampleRawReply = [timestamp: number, value: string]; +export interface SampleReply { + timestamp: number; + value: number; +} +export declare function transformSampleReply(reply: SampleRawReply): SampleReply; +export declare enum TimeSeriesBucketTimestamp { + LOW = "-", + HIGH = "+", + MID = "~" +} +export interface RangeOptions { + LATEST?: boolean; + FILTER_BY_TS?: Array; + FILTER_BY_VALUE?: { + min: number; + max: number; + }; + COUNT?: number; + ALIGN?: Timestamp; + AGGREGATION?: { + type: TimeSeriesAggregationType; + timeBucket: Timestamp; + BUCKETTIMESTAMP?: 
TimeSeriesBucketTimestamp; + EMPTY?: boolean; + }; +} +export declare function pushRangeArguments(args: RedisCommandArguments, fromTimestamp: Timestamp, toTimestamp: Timestamp, options?: RangeOptions): RedisCommandArguments; +interface MRangeGroupBy { + label: string; + reducer: TimeSeriesReducers; +} +export declare function pushMRangeGroupByArguments(args: RedisCommandArguments, groupBy?: MRangeGroupBy): RedisCommandArguments; +export type Filter = string | Array; +export declare function pushFilterArgument(args: RedisCommandArguments, filter: string | Array): RedisCommandArguments; +export interface MRangeOptions extends RangeOptions { + GROUPBY?: MRangeGroupBy; +} +export declare function pushMRangeArguments(args: RedisCommandArguments, fromTimestamp: Timestamp, toTimestamp: Timestamp, filter: Filter, options?: MRangeOptions): RedisCommandArguments; +export type SelectedLabels = string | Array; +export declare function pushWithLabelsArgument(args: RedisCommandArguments, selectedLabels?: SelectedLabels): RedisCommandArguments; +export interface MRangeWithLabelsOptions extends MRangeOptions { + SELECTED_LABELS?: SelectedLabels; +} +export declare function pushMRangeWithLabelsArguments(args: RedisCommandArguments, fromTimestamp: Timestamp, toTimestamp: Timestamp, filter: Filter, options?: MRangeWithLabelsOptions): RedisCommandArguments; +export declare function transformRangeReply(reply: Array): Array; +type MRangeRawReply = Array<[ + key: string, + labels: RawLabels, + samples: Array +]>; +interface MRangeReplyItem { + key: string; + samples: Array; +} +export declare function transformMRangeReply(reply: MRangeRawReply): Array; +export interface MRangeWithLabelsReplyItem extends MRangeReplyItem { + labels: Labels; +} +export declare function transformMRangeWithLabelsReply(reply: MRangeRawReply): Array; +export declare function pushLatestArgument(args: RedisCommandArguments, latest?: boolean): RedisCommandArguments; diff --git 
a/node_modules/@redis/time-series/dist/commands/index.js b/node_modules/@redis/time-series/dist/commands/index.js new file mode 100644 index 0000000..0123582 --- /dev/null +++ b/node_modules/@redis/time-series/dist/commands/index.js @@ -0,0 +1,313 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.pushLatestArgument = exports.transformMRangeWithLabelsReply = exports.transformMRangeReply = exports.transformRangeReply = exports.pushMRangeWithLabelsArguments = exports.pushWithLabelsArgument = exports.pushMRangeArguments = exports.pushFilterArgument = exports.pushMRangeGroupByArguments = exports.pushRangeArguments = exports.TimeSeriesBucketTimestamp = exports.transformSampleReply = exports.transformIncrDecrArguments = exports.pushLabelsArgument = exports.transformLablesReply = exports.pushDuplicatePolicy = exports.pushChunkSizeArgument = exports.pushEncodingArgument = exports.TimeSeriesEncoding = exports.pushRetentionArgument = exports.pushIgnoreArgument = exports.transformTimestampArgument = exports.TimeSeriesReducers = exports.TimeSeriesDuplicatePolicies = exports.TimeSeriesAggregationType = void 0; +const ADD = require("./ADD"); +const ALTER = require("./ALTER"); +const CREATE = require("./CREATE"); +const CREATERULE = require("./CREATERULE"); +const DECRBY = require("./DECRBY"); +const DEL = require("./DEL"); +const DELETERULE = require("./DELETERULE"); +const GET = require("./GET"); +const INCRBY = require("./INCRBY"); +const INFO_DEBUG = require("./INFO_DEBUG"); +const INFO = require("./INFO"); +const MADD = require("./MADD"); +const MGET = require("./MGET"); +const MGET_WITHLABELS = require("./MGET_WITHLABELS"); +const QUERYINDEX = require("./QUERYINDEX"); +const RANGE = require("./RANGE"); +const REVRANGE = require("./REVRANGE"); +const MRANGE = require("./MRANGE"); +const MRANGE_WITHLABELS = require("./MRANGE_WITHLABELS"); +const MREVRANGE = require("./MREVRANGE"); +const MREVRANGE_WITHLABELS = 
require("./MREVRANGE_WITHLABELS"); +const generic_transformers_1 = require("@redis/client/dist/lib/commands/generic-transformers"); +exports.default = { + ADD, + add: ADD, + ALTER, + alter: ALTER, + CREATE, + create: CREATE, + CREATERULE, + createRule: CREATERULE, + DECRBY, + decrBy: DECRBY, + DEL, + del: DEL, + DELETERULE, + deleteRule: DELETERULE, + GET, + get: GET, + INCRBY, + incrBy: INCRBY, + INFO_DEBUG, + infoDebug: INFO_DEBUG, + INFO, + info: INFO, + MADD, + mAdd: MADD, + MGET, + mGet: MGET, + MGET_WITHLABELS, + mGetWithLabels: MGET_WITHLABELS, + QUERYINDEX, + queryIndex: QUERYINDEX, + RANGE, + range: RANGE, + REVRANGE, + revRange: REVRANGE, + MRANGE, + mRange: MRANGE, + MRANGE_WITHLABELS, + mRangeWithLabels: MRANGE_WITHLABELS, + MREVRANGE, + mRevRange: MREVRANGE, + MREVRANGE_WITHLABELS, + mRevRangeWithLabels: MREVRANGE_WITHLABELS +}; +var TimeSeriesAggregationType; +(function (TimeSeriesAggregationType) { + TimeSeriesAggregationType["AVG"] = "AVG"; + // @deprecated + TimeSeriesAggregationType["AVERAGE"] = "AVG"; + TimeSeriesAggregationType["FIRST"] = "FIRST"; + TimeSeriesAggregationType["LAST"] = "LAST"; + TimeSeriesAggregationType["MIN"] = "MIN"; + // @deprecated + TimeSeriesAggregationType["MINIMUM"] = "MIN"; + TimeSeriesAggregationType["MAX"] = "MAX"; + // @deprecated + TimeSeriesAggregationType["MAXIMUM"] = "MAX"; + TimeSeriesAggregationType["SUM"] = "SUM"; + TimeSeriesAggregationType["RANGE"] = "RANGE"; + TimeSeriesAggregationType["COUNT"] = "COUNT"; + TimeSeriesAggregationType["STD_P"] = "STD.P"; + TimeSeriesAggregationType["STD_S"] = "STD.S"; + TimeSeriesAggregationType["VAR_P"] = "VAR.P"; + TimeSeriesAggregationType["VAR_S"] = "VAR.S"; + TimeSeriesAggregationType["TWA"] = "TWA"; +})(TimeSeriesAggregationType || (exports.TimeSeriesAggregationType = TimeSeriesAggregationType = {})); +var TimeSeriesDuplicatePolicies; +(function (TimeSeriesDuplicatePolicies) { + TimeSeriesDuplicatePolicies["BLOCK"] = "BLOCK"; + TimeSeriesDuplicatePolicies["FIRST"] = 
"FIRST"; + TimeSeriesDuplicatePolicies["LAST"] = "LAST"; + TimeSeriesDuplicatePolicies["MIN"] = "MIN"; + TimeSeriesDuplicatePolicies["MAX"] = "MAX"; + TimeSeriesDuplicatePolicies["SUM"] = "SUM"; +})(TimeSeriesDuplicatePolicies || (exports.TimeSeriesDuplicatePolicies = TimeSeriesDuplicatePolicies = {})); +var TimeSeriesReducers; +(function (TimeSeriesReducers) { + TimeSeriesReducers["AVG"] = "AVG"; + TimeSeriesReducers["SUM"] = "SUM"; + TimeSeriesReducers["MIN"] = "MIN"; + // @deprecated + TimeSeriesReducers["MINIMUM"] = "MIN"; + TimeSeriesReducers["MAX"] = "MAX"; + // @deprecated + TimeSeriesReducers["MAXIMUM"] = "MAX"; + TimeSeriesReducers["RANGE"] = "range"; + TimeSeriesReducers["COUNT"] = "COUNT"; + TimeSeriesReducers["STD_P"] = "STD.P"; + TimeSeriesReducers["STD_S"] = "STD.S"; + TimeSeriesReducers["VAR_P"] = "VAR.P"; + TimeSeriesReducers["VAR_S"] = "VAR.S"; +})(TimeSeriesReducers || (exports.TimeSeriesReducers = TimeSeriesReducers = {})); +function transformTimestampArgument(timestamp) { + if (typeof timestamp === 'string') + return timestamp; + return (typeof timestamp === 'number' ? 
+ timestamp : + timestamp.getTime()).toString(); +} +exports.transformTimestampArgument = transformTimestampArgument; +function pushIgnoreArgument(args, ignore) { + if (ignore !== undefined) { + args.push('IGNORE', ignore.MAX_TIME_DIFF.toString(), ignore.MAX_VAL_DIFF.toString()); + } +} +exports.pushIgnoreArgument = pushIgnoreArgument; +function pushRetentionArgument(args, retention) { + if (retention !== undefined) { + args.push('RETENTION', retention.toString()); + } + return args; +} +exports.pushRetentionArgument = pushRetentionArgument; +var TimeSeriesEncoding; +(function (TimeSeriesEncoding) { + TimeSeriesEncoding["COMPRESSED"] = "COMPRESSED"; + TimeSeriesEncoding["UNCOMPRESSED"] = "UNCOMPRESSED"; +})(TimeSeriesEncoding || (exports.TimeSeriesEncoding = TimeSeriesEncoding = {})); +function pushEncodingArgument(args, encoding) { + if (encoding !== undefined) { + args.push('ENCODING', encoding); + } + return args; +} +exports.pushEncodingArgument = pushEncodingArgument; +function pushChunkSizeArgument(args, chunkSize) { + if (chunkSize !== undefined) { + args.push('CHUNK_SIZE', chunkSize.toString()); + } + return args; +} +exports.pushChunkSizeArgument = pushChunkSizeArgument; +function pushDuplicatePolicy(args, duplicatePolicy) { + if (duplicatePolicy !== undefined) { + args.push('DUPLICATE_POLICY', duplicatePolicy); + } + return args; +} +exports.pushDuplicatePolicy = pushDuplicatePolicy; +function transformLablesReply(reply) { + const labels = {}; + for (const [key, value] of reply) { + labels[key] = value; + } + return labels; +} +exports.transformLablesReply = transformLablesReply; +function pushLabelsArgument(args, labels) { + if (labels) { + args.push('LABELS'); + for (const [label, value] of Object.entries(labels)) { + args.push(label, value); + } + } + return args; +} +exports.pushLabelsArgument = pushLabelsArgument; +function transformIncrDecrArguments(command, key, value, options) { + const args = [ + command, + key, + value.toString() + ]; + if 
(options?.TIMESTAMP !== undefined && options?.TIMESTAMP !== null) { + args.push('TIMESTAMP', transformTimestampArgument(options.TIMESTAMP)); + } + pushRetentionArgument(args, options?.RETENTION); + if (options?.UNCOMPRESSED) { + args.push('UNCOMPRESSED'); + } + pushChunkSizeArgument(args, options?.CHUNK_SIZE); + pushLabelsArgument(args, options?.LABELS); + return args; +} +exports.transformIncrDecrArguments = transformIncrDecrArguments; +function transformSampleReply(reply) { + return { + timestamp: reply[0], + value: Number(reply[1]) + }; +} +exports.transformSampleReply = transformSampleReply; +var TimeSeriesBucketTimestamp; +(function (TimeSeriesBucketTimestamp) { + TimeSeriesBucketTimestamp["LOW"] = "-"; + TimeSeriesBucketTimestamp["HIGH"] = "+"; + TimeSeriesBucketTimestamp["MID"] = "~"; +})(TimeSeriesBucketTimestamp || (exports.TimeSeriesBucketTimestamp = TimeSeriesBucketTimestamp = {})); +function pushRangeArguments(args, fromTimestamp, toTimestamp, options) { + args.push(transformTimestampArgument(fromTimestamp), transformTimestampArgument(toTimestamp)); + pushLatestArgument(args, options?.LATEST); + if (options?.FILTER_BY_TS) { + args.push('FILTER_BY_TS'); + for (const ts of options.FILTER_BY_TS) { + args.push(transformTimestampArgument(ts)); + } + } + if (options?.FILTER_BY_VALUE) { + args.push('FILTER_BY_VALUE', options.FILTER_BY_VALUE.min.toString(), options.FILTER_BY_VALUE.max.toString()); + } + if (options?.COUNT) { + args.push('COUNT', options.COUNT.toString()); + } + if (options?.ALIGN) { + args.push('ALIGN', transformTimestampArgument(options.ALIGN)); + } + if (options?.AGGREGATION) { + args.push('AGGREGATION', options.AGGREGATION.type, transformTimestampArgument(options.AGGREGATION.timeBucket)); + if (options.AGGREGATION.BUCKETTIMESTAMP) { + args.push('BUCKETTIMESTAMP', options.AGGREGATION.BUCKETTIMESTAMP); + } + if (options.AGGREGATION.EMPTY) { + args.push('EMPTY'); + } + } + return args; +} +exports.pushRangeArguments = pushRangeArguments; 
+function pushMRangeGroupByArguments(args, groupBy) { + if (groupBy) { + args.push('GROUPBY', groupBy.label, 'REDUCE', groupBy.reducer); + } + return args; +} +exports.pushMRangeGroupByArguments = pushMRangeGroupByArguments; +function pushFilterArgument(args, filter) { + args.push('FILTER'); + return (0, generic_transformers_1.pushVerdictArguments)(args, filter); +} +exports.pushFilterArgument = pushFilterArgument; +function pushMRangeArguments(args, fromTimestamp, toTimestamp, filter, options) { + args = pushRangeArguments(args, fromTimestamp, toTimestamp, options); + args = pushFilterArgument(args, filter); + return pushMRangeGroupByArguments(args, options?.GROUPBY); +} +exports.pushMRangeArguments = pushMRangeArguments; +function pushWithLabelsArgument(args, selectedLabels) { + if (!selectedLabels) { + args.push('WITHLABELS'); + } + else { + args.push('SELECTED_LABELS'); + args = (0, generic_transformers_1.pushVerdictArguments)(args, selectedLabels); + } + return args; +} +exports.pushWithLabelsArgument = pushWithLabelsArgument; +function pushMRangeWithLabelsArguments(args, fromTimestamp, toTimestamp, filter, options) { + args = pushRangeArguments(args, fromTimestamp, toTimestamp, options); + args = pushWithLabelsArgument(args, options?.SELECTED_LABELS); + args = pushFilterArgument(args, filter); + return pushMRangeGroupByArguments(args, options?.GROUPBY); +} +exports.pushMRangeWithLabelsArguments = pushMRangeWithLabelsArguments; +function transformRangeReply(reply) { + return reply.map(transformSampleReply); +} +exports.transformRangeReply = transformRangeReply; +function transformMRangeReply(reply) { + const args = []; + for (const [key, _, sample] of reply) { + args.push({ + key, + samples: sample.map(transformSampleReply) + }); + } + return args; +} +exports.transformMRangeReply = transformMRangeReply; +function transformMRangeWithLabelsReply(reply) { + const args = []; + for (const [key, labels, samples] of reply) { + args.push({ + key, + labels: 
transformLablesReply(labels), + samples: samples.map(transformSampleReply) + }); + } + return args; +} +exports.transformMRangeWithLabelsReply = transformMRangeWithLabelsReply; +function pushLatestArgument(args, latest) { + if (latest) { + args.push('LATEST'); + } + return args; +} +exports.pushLatestArgument = pushLatestArgument; diff --git a/node_modules/@redis/time-series/dist/index.d.ts b/node_modules/@redis/time-series/dist/index.d.ts new file mode 100644 index 0000000..308850a --- /dev/null +++ b/node_modules/@redis/time-series/dist/index.d.ts @@ -0,0 +1,2 @@ +export { default } from './commands'; +export { TimeSeriesDuplicatePolicies, TimeSeriesEncoding, TimeSeriesAggregationType, TimeSeriesReducers, TimeSeriesBucketTimestamp } from './commands'; diff --git a/node_modules/@redis/time-series/dist/index.js b/node_modules/@redis/time-series/dist/index.js new file mode 100644 index 0000000..363237e --- /dev/null +++ b/node_modules/@redis/time-series/dist/index.js @@ -0,0 +1,11 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.TimeSeriesBucketTimestamp = exports.TimeSeriesReducers = exports.TimeSeriesAggregationType = exports.TimeSeriesEncoding = exports.TimeSeriesDuplicatePolicies = exports.default = void 0; +var commands_1 = require("./commands"); +Object.defineProperty(exports, "default", { enumerable: true, get: function () { return commands_1.default; } }); +var commands_2 = require("./commands"); +Object.defineProperty(exports, "TimeSeriesDuplicatePolicies", { enumerable: true, get: function () { return commands_2.TimeSeriesDuplicatePolicies; } }); +Object.defineProperty(exports, "TimeSeriesEncoding", { enumerable: true, get: function () { return commands_2.TimeSeriesEncoding; } }); +Object.defineProperty(exports, "TimeSeriesAggregationType", { enumerable: true, get: function () { return commands_2.TimeSeriesAggregationType; } }); +Object.defineProperty(exports, "TimeSeriesReducers", { enumerable: true, get: function 
() { return commands_2.TimeSeriesReducers; } }); +Object.defineProperty(exports, "TimeSeriesBucketTimestamp", { enumerable: true, get: function () { return commands_2.TimeSeriesBucketTimestamp; } }); diff --git a/node_modules/@redis/time-series/package.json b/node_modules/@redis/time-series/package.json new file mode 100644 index 0000000..65ee1e9 --- /dev/null +++ b/node_modules/@redis/time-series/package.json @@ -0,0 +1,41 @@ +{ + "name": "@redis/time-series", + "version": "1.1.0", + "license": "MIT", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/" + ], + "scripts": { + "test": "nyc -r text-summary -r lcov mocha -r source-map-support/register -r ts-node/register './lib/**/*.spec.ts'", + "build": "tsc", + "documentation": "typedoc" + }, + "peerDependencies": { + "@redis/client": "^1.0.0" + }, + "devDependencies": { + "@istanbuljs/nyc-config-typescript": "^1.0.2", + "@redis/test-utils": "*", + "@types/node": "^20.6.2", + "nyc": "^15.1.0", + "release-it": "^16.1.5", + "source-map-support": "^0.5.21", + "ts-node": "^10.9.1", + "typedoc": "^0.25.1", + "typescript": "^5.2.2" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis/tree/master/packages/time-series", + "keywords": [ + "redis", + "RedisTimeSeries" + ] +} diff --git a/node_modules/abbrev/LICENSE b/node_modules/abbrev/LICENSE new file mode 100644 index 0000000..9bcfa9d --- /dev/null +++ b/node_modules/abbrev/LICENSE @@ -0,0 +1,46 @@ +This software is dual-licensed under the ISC and MIT licenses. +You may use this software under EITHER of the following licenses. + +---------- + +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------- + +Copyright Isaac Z. Schlueter and Contributors +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/abbrev/README.md b/node_modules/abbrev/README.md new file mode 100644 index 0000000..99746fe --- /dev/null +++ b/node_modules/abbrev/README.md @@ -0,0 +1,23 @@ +# abbrev-js + +Just like [ruby's Abbrev](http://apidock.com/ruby/Abbrev). + +Usage: + + var abbrev = require("abbrev"); + abbrev("foo", "fool", "folding", "flop"); + + // returns: + { fl: 'flop' + , flo: 'flop' + , flop: 'flop' + , fol: 'folding' + , fold: 'folding' + , foldi: 'folding' + , foldin: 'folding' + , folding: 'folding' + , foo: 'foo' + , fool: 'fool' + } + +This is handy for command-line scripts, or other cases where you want to be able to accept shorthands. diff --git a/node_modules/abbrev/abbrev.js b/node_modules/abbrev/abbrev.js new file mode 100644 index 0000000..7b1dc5d --- /dev/null +++ b/node_modules/abbrev/abbrev.js @@ -0,0 +1,61 @@ +module.exports = exports = abbrev.abbrev = abbrev + +abbrev.monkeyPatch = monkeyPatch + +function monkeyPatch () { + Object.defineProperty(Array.prototype, 'abbrev', { + value: function () { return abbrev(this) }, + enumerable: false, configurable: true, writable: true + }) + + Object.defineProperty(Object.prototype, 'abbrev', { + value: function () { return abbrev(Object.keys(this)) }, + enumerable: false, configurable: true, writable: true + }) +} + +function abbrev (list) { + if (arguments.length !== 1 || !Array.isArray(list)) { + list = Array.prototype.slice.call(arguments, 0) + } + for (var i = 0, l = list.length, args = [] ; i < l ; i ++) { + args[i] = typeof list[i] === "string" ? 
list[i] : String(list[i]) + } + + // sort them lexicographically, so that they're next to their nearest kin + args = args.sort(lexSort) + + // walk through each, seeing how much it has in common with the next and previous + var abbrevs = {} + , prev = "" + for (var i = 0, l = args.length ; i < l ; i ++) { + var current = args[i] + , next = args[i + 1] || "" + , nextMatches = true + , prevMatches = true + if (current === next) continue + for (var j = 0, cl = current.length ; j < cl ; j ++) { + var curChar = current.charAt(j) + nextMatches = nextMatches && curChar === next.charAt(j) + prevMatches = prevMatches && curChar === prev.charAt(j) + if (!nextMatches && !prevMatches) { + j ++ + break + } + } + prev = current + if (j === cl) { + abbrevs[current] = current + continue + } + for (var a = current.substr(0, j) ; j <= cl ; j ++) { + abbrevs[a] = current + a += current.charAt(j) + } + } + return abbrevs +} + +function lexSort (a, b) { + return a === b ? 0 : a > b ? 1 : -1 +} diff --git a/node_modules/abbrev/package.json b/node_modules/abbrev/package.json new file mode 100644 index 0000000..bf4e801 --- /dev/null +++ b/node_modules/abbrev/package.json @@ -0,0 +1,21 @@ +{ + "name": "abbrev", + "version": "1.1.1", + "description": "Like ruby's abbrev module, but in js", + "author": "Isaac Z. 
Schlueter ", + "main": "abbrev.js", + "scripts": { + "test": "tap test.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": "http://github.com/isaacs/abbrev-js", + "license": "ISC", + "devDependencies": { + "tap": "^10.1" + }, + "files": [ + "abbrev.js" + ] +} diff --git a/node_modules/agent-base/README.md b/node_modules/agent-base/README.md new file mode 100644 index 0000000..256f1f3 --- /dev/null +++ b/node_modules/agent-base/README.md @@ -0,0 +1,145 @@ +agent-base +========== +### Turn a function into an [`http.Agent`][http.Agent] instance +[![Build Status](https://github.com/TooTallNate/node-agent-base/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-agent-base/actions?workflow=Node+CI) + +This module provides an `http.Agent` generator. That is, you pass it an async +callback function, and it returns a new `http.Agent` instance that will invoke the +given callback function when sending outbound HTTP requests. + +#### Some subclasses: + +Here's some more interesting uses of `agent-base`. +Send a pull request to list yours! + + * [`http-proxy-agent`][http-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTP endpoints + * [`https-proxy-agent`][https-proxy-agent]: An HTTP(s) proxy `http.Agent` implementation for HTTPS endpoints + * [`pac-proxy-agent`][pac-proxy-agent]: A PAC file proxy `http.Agent` implementation for HTTP and HTTPS + * [`socks-proxy-agent`][socks-proxy-agent]: A SOCKS proxy `http.Agent` implementation for HTTP and HTTPS + + +Installation +------------ + +Install with `npm`: + +``` bash +$ npm install agent-base +``` + + +Example +------- + +Here's a minimal example that creates a new `net.Socket` connection to the server +for every HTTP request (i.e. 
the equivalent of `agent: false` option): + +```js +var net = require('net'); +var tls = require('tls'); +var url = require('url'); +var http = require('http'); +var agent = require('agent-base'); + +var endpoint = 'http://nodejs.org/api/'; +var parsed = url.parse(endpoint); + +// This is the important part! +parsed.agent = agent(function (req, opts) { + var socket; + // `secureEndpoint` is true when using the https module + if (opts.secureEndpoint) { + socket = tls.connect(opts); + } else { + socket = net.connect(opts); + } + return socket; +}); + +// Everything else works just like normal... +http.get(parsed, function (res) { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + +Returning a Promise or using an `async` function is also supported: + +```js +agent(async function (req, opts) { + await sleep(1000); + // etc… +}); +``` + +Return another `http.Agent` instance to "pass through" the responsibility +for that HTTP request to that agent: + +```js +agent(function (req, opts) { + return opts.secureEndpoint ? https.globalAgent : http.globalAgent; +}); +``` + + +API +--- + +## Agent(Function callback[, Object options]) → [http.Agent][] + +Creates a base `http.Agent` that will execute the callback function `callback` +for every HTTP request that it is used as the `agent` for. The callback function +is responsible for creating a `stream.Duplex` instance of some kind that will be +used as the underlying socket in the HTTP request. + +The `options` object accepts the following properties: + + * `timeout` - Number - Timeout for the `callback()` function in milliseconds. Defaults to Infinity (optional). + +The callback function should have the following signature: + +### callback(http.ClientRequest req, Object options, Function cb) → undefined + +The ClientRequest `req` can be accessed to read request headers and +and the path, etc. 
The `options` object contains the options passed +to the `http.request()`/`https.request()` function call, and is formatted +to be directly passed to `net.connect()`/`tls.connect()`, or however +else you want a Socket to be created. Pass the created socket to +the callback function `cb` once created, and the HTTP request will +continue to proceed. + +If the `https` module is used to invoke the HTTP request, then the +`secureEndpoint` property on `options` _will be set to `true`_. + + +License +------- + +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +[http-proxy-agent]: https://github.com/TooTallNate/node-http-proxy-agent +[https-proxy-agent]: https://github.com/TooTallNate/node-https-proxy-agent +[pac-proxy-agent]: https://github.com/TooTallNate/node-pac-proxy-agent +[socks-proxy-agent]: https://github.com/TooTallNate/node-socks-proxy-agent +[http.Agent]: https://nodejs.org/api/http.html#http_class_http_agent diff --git a/node_modules/agent-base/dist/src/index.d.ts b/node_modules/agent-base/dist/src/index.d.ts new file mode 100644 index 0000000..bc4ab74 --- /dev/null +++ b/node_modules/agent-base/dist/src/index.d.ts @@ -0,0 +1,78 @@ +/// +import net from 'net'; +import http from 'http'; +import https from 'https'; +import { Duplex } from 'stream'; +import { EventEmitter } from 'events'; +declare function createAgent(opts?: createAgent.AgentOptions): createAgent.Agent; +declare function createAgent(callback: createAgent.AgentCallback, opts?: createAgent.AgentOptions): createAgent.Agent; +declare namespace createAgent { + interface ClientRequest extends http.ClientRequest { + _last?: boolean; + _hadError?: boolean; + method: string; + } + interface AgentRequestOptions { + host?: string; + path?: string; + port: number; + } + interface HttpRequestOptions extends AgentRequestOptions, Omit { + secureEndpoint: false; + } + interface HttpsRequestOptions extends AgentRequestOptions, Omit { + secureEndpoint: true; + } + type RequestOptions = HttpRequestOptions | HttpsRequestOptions; + type AgentLike = Pick | http.Agent; + type AgentCallbackReturn = Duplex | AgentLike; + type AgentCallbackCallback = (err?: Error | null, socket?: createAgent.AgentCallbackReturn) => void; + type AgentCallbackPromise = (req: createAgent.ClientRequest, opts: createAgent.RequestOptions) => createAgent.AgentCallbackReturn | Promise; + type AgentCallback = typeof Agent.prototype.callback; + type AgentOptions = { + timeout?: number; + }; + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. 
+ * + * @param {Function} callback + * @api public + */ + class Agent extends EventEmitter { + timeout: number | null; + maxFreeSockets: number; + maxTotalSockets: number; + maxSockets: number; + sockets: { + [key: string]: net.Socket[]; + }; + freeSockets: { + [key: string]: net.Socket[]; + }; + requests: { + [key: string]: http.IncomingMessage[]; + }; + options: https.AgentOptions; + private promisifiedCallback?; + private explicitDefaultPort?; + private explicitProtocol?; + constructor(callback?: createAgent.AgentCallback | createAgent.AgentOptions, _opts?: createAgent.AgentOptions); + get defaultPort(): number; + set defaultPort(v: number); + get protocol(): string; + set protocol(v: string); + callback(req: createAgent.ClientRequest, opts: createAgent.RequestOptions, fn: createAgent.AgentCallbackCallback): void; + callback(req: createAgent.ClientRequest, opts: createAgent.RequestOptions): createAgent.AgentCallbackReturn | Promise; + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req: ClientRequest, _opts: RequestOptions): void; + freeSocket(socket: net.Socket, opts: AgentOptions): void; + destroy(): void; + } +} +export = createAgent; diff --git a/node_modules/agent-base/dist/src/index.js b/node_modules/agent-base/dist/src/index.js new file mode 100644 index 0000000..bfd9e22 --- /dev/null +++ b/node_modules/agent-base/dist/src/index.js @@ -0,0 +1,203 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +const events_1 = require("events"); +const debug_1 = __importDefault(require("debug")); +const promisify_1 = __importDefault(require("./promisify")); +const debug = debug_1.default('agent-base'); +function isAgent(v) { + return Boolean(v) && typeof v.addRequest === 'function'; +} +function isSecureEndpoint() { + const { stack } = new Error(); + if (typeof stack !== 'string') + return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} +function createAgent(callback, opts) { + return new createAgent.Agent(callback, opts); +} +(function (createAgent) { + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. + * + * @param {Function} callback + * @api public + */ + class Agent extends events_1.EventEmitter { + constructor(callback, _opts) { + super(); + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } + else if (callback) { + opts = callback; + } + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + get defaultPort() { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 443 : 80; + } + set defaultPort(v) { + this.explicitDefaultPort = v; + } + get protocol() { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 
'https:' : 'http:'; + } + set protocol(v) { + this.explicitProtocol = v; + } + callback(req, opts, fn) { + throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); + } + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req, _opts) { + const opts = Object.assign({}, _opts); + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + if (opts.host == null) { + opts.host = 'localhost'; + } + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. + delete opts.path; + } + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + let timedOut = false; + let timeoutId = null; + const timeoutMs = opts.timeout || this.timeout; + const onerror = (err) => { + if (req._hadError) + return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. 
+ req._hadError = true; + }; + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); + err.code = 'ETIMEOUT'; + onerror(err); + }; + const callbackError = (err) => { + if (timedOut) + return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + const onsocket = (socket) => { + if (timedOut) + return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug('Callback returned another Agent instance %o', socket.constructor.name); + socket.addRequest(req, opts); + return; + } + if (socket) { + socket.once('free', () => { + this.freeSocket(socket, opts); + }); + req.onSocket(socket); + return; + } + const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); + onerror(err); + }; + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + if (!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify_1.default(this.callback); + } + else { + this.promisifiedCallback = this.callback; + } + } + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + try { + debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); + Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); + } + catch (err) { + Promise.reject(err).catch(callbackError); + } + } + freeSocket(socket, opts) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + 
socket.destroy(); + } + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + createAgent.Agent = Agent; + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +})(createAgent || (createAgent = {})); +module.exports = createAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/agent-base/dist/src/index.js.map b/node_modules/agent-base/dist/src/index.js.map new file mode 100644 index 0000000..bd118ab --- /dev/null +++ b/node_modules/agent-base/dist/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;AAIA,mCAAsC;AACtC,kDAAgC;AAChC,4DAAoC;AAEpC,MAAM,KAAK,GAAG,eAAW,CAAC,YAAY,CAAC,CAAC;AAExC,SAAS,OAAO,CAAC,CAAM;IACtB,OAAO,OAAO,CAAC,CAAC,CAAC,IAAI,OAAO,CAAC,CAAC,UAAU,KAAK,UAAU,CAAC;AACzD,CAAC;AAED,SAAS,gBAAgB;IACxB,MAAM,EAAE,KAAK,EAAE,GAAG,IAAI,KAAK,EAAE,CAAC;IAC9B,IAAI,OAAO,KAAK,KAAK,QAAQ;QAAE,OAAO,KAAK,CAAC;IAC5C,OAAO,KAAK,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,YAAY,CAAC,KAAK,CAAC,CAAC,IAAK,CAAC,CAAC,OAAO,CAAC,aAAa,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC;AACxG,CAAC;AAOD,SAAS,WAAW,CACnB,QAA+D,EAC/D,IAA+B;IAE/B,OAAO,IAAI,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE,IAAI,CAAC,CAAC;AAC9C,CAAC;AAED,WAAU,WAAW;IAmDpB;;;;;;OAMG;IACH,MAAa,KAAM,SAAQ,qBAAY;QAmBtC,YACC,QAA+D,EAC/D,KAAgC;YAEhC,KAAK,EAAE,CAAC;YAER,IAAI,IAAI,GAAG,KAAK,CAAC;YACjB,IAAI,OAAO,QAAQ,KAAK,UAAU,EAAE;gBACnC,IAAI,CAAC,QAAQ,GAAG,QAAQ,CAAC;aACzB;iBAAM,IAAI,QAAQ,EAAE;gBACpB,IAAI,GAAG,QAAQ,CAAC;aAChB;YAED,0DAA0D;YAC1D,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC;YACpB,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,OAAO,KAAK,QAAQ,EAAE;gBAC7C,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,OAAO,CAAC;aAC5B;YAED,+DAA+D;YAC/D,0DAA0D;YAC1D,IAAI,CAAC,cAAc,GAAG,CAAC,CAAC;YACxB,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC;YACpB,IAAI,CAAC,eAAe,GAAG,QAAQ,CAAC;YAChC,IAAI,CAAC,OAAO,GAAG,EAAE,CAAC;YAClB,IAAI,CAAC,WAAW,GAAG,EAAE,CAAC;YACtB,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAC;YACnB,IAAI,CAAC,OA
AO,GAAG,EAAE,CAAC;QACnB,CAAC;QAED,IAAI,WAAW;YACd,IAAI,OAAO,IAAI,CAAC,mBAAmB,KAAK,QAAQ,EAAE;gBACjD,OAAO,IAAI,CAAC,mBAAmB,CAAC;aAChC;YACD,OAAO,gBAAgB,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;QACtC,CAAC;QAED,IAAI,WAAW,CAAC,CAAS;YACxB,IAAI,CAAC,mBAAmB,GAAG,CAAC,CAAC;QAC9B,CAAC;QAED,IAAI,QAAQ;YACX,IAAI,OAAO,IAAI,CAAC,gBAAgB,KAAK,QAAQ,EAAE;gBAC9C,OAAO,IAAI,CAAC,gBAAgB,CAAC;aAC7B;YACD,OAAO,gBAAgB,EAAE,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;QAChD,CAAC;QAED,IAAI,QAAQ,CAAC,CAAS;YACrB,IAAI,CAAC,gBAAgB,GAAG,CAAC,CAAC;QAC3B,CAAC;QAaD,QAAQ,CACP,GAA8B,EAC9B,IAA8B,EAC9B,EAAsC;YAKtC,MAAM,IAAI,KAAK,CACd,yFAAyF,CACzF,CAAC;QACH,CAAC;QAED;;;;;WAKG;QACH,UAAU,CAAC,GAAkB,EAAE,KAAqB;YACnD,MAAM,IAAI,qBAAwB,KAAK,CAAE,CAAC;YAE1C,IAAI,OAAO,IAAI,CAAC,cAAc,KAAK,SAAS,EAAE;gBAC7C,IAAI,CAAC,cAAc,GAAG,gBAAgB,EAAE,CAAC;aACzC;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;gBACtB,IAAI,CAAC,IAAI,GAAG,WAAW,CAAC;aACxB;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,EAAE;gBACtB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;aAC3C;YAED,IAAI,IAAI,CAAC,QAAQ,IAAI,IAAI,EAAE;gBAC1B,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,cAAc,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC;aACzD;YAED,IAAI,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE;gBAC3B,2DAA2D;gBAC3D,0DAA0D;gBAC1D,4DAA4D;gBAC5D,8CAA8C;gBAC9C,OAAO,IAAI,CAAC,IAAI,CAAC;aACjB;YAED,OAAO,IAAI,CAAC,KAAK,CAAC;YAClB,OAAO,IAAI,CAAC,QAAQ,CAAC;YACrB,OAAO,IAAI,CAAC,aAAa,CAAC;YAC1B,OAAO,IAAI,CAAC,WAAW,CAAC;YACxB,OAAO,IAAI,CAAC,gBAAgB,CAAC;YAE7B,kCAAkC;YAClC,2CAA2C;YAC3C,GAAG,CAAC,KAAK,GAAG,IAAI,CAAC;YACjB,GAAG,CAAC,eAAe,GAAG,KAAK,CAAC;YAE5B,IAAI,QAAQ,GAAG,KAAK,CAAC;YACrB,IAAI,SAAS,GAAyC,IAAI,CAAC;YAC3D,MAAM,SAAS,GAAG,IAAI,CAAC,OAAO,IAAI,IAAI,CAAC,OAAO,CAAC;YAE/C,MAAM,OAAO,GAAG,CAAC,GAA0B,EAAE,EAAE;gBAC9C,IAAI,GAAG,CAAC,SAAS;oBAAE,OAAO;gBAC1B,GAAG,CAAC,IAAI,CAAC,OAAO,EAAE,GAAG,CAAC,CAAC;gBACvB,yDAAyD;gBACzD,iEAAiE;gBACjE,GAAG,CAAC,SAAS,GAAG,IAAI,CAAC;YACtB,CAAC,CAAC;YAEF,MAAM,SAAS,GAAG,GAAG,EAAE;gBACtB,SAAS,GAAG,IAAI,CAAC;gBACjB,QAAQ,GAAG,IAAI,CAAC;gBAChB,MAAM,GAAG
,GAA0B,IAAI,KAAK,CAC3C,sDAAsD,SAAS,IAAI,CACnE,CAAC;gBACF,GAAG,CAAC,IAAI,GAAG,UAAU,CAAC;gBACtB,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,MAAM,aAAa,GAAG,CAAC,GAA0B,EAAE,EAAE;gBACpD,IAAI,QAAQ;oBAAE,OAAO;gBACrB,IAAI,SAAS,KAAK,IAAI,EAAE;oBACvB,YAAY,CAAC,SAAS,CAAC,CAAC;oBACxB,SAAS,GAAG,IAAI,CAAC;iBACjB;gBACD,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,MAAM,QAAQ,GAAG,CAAC,MAA2B,EAAE,EAAE;gBAChD,IAAI,QAAQ;oBAAE,OAAO;gBACrB,IAAI,SAAS,IAAI,IAAI,EAAE;oBACtB,YAAY,CAAC,SAAS,CAAC,CAAC;oBACxB,SAAS,GAAG,IAAI,CAAC;iBACjB;gBAED,IAAI,OAAO,CAAC,MAAM,CAAC,EAAE;oBACpB,oDAAoD;oBACpD,wDAAwD;oBACxD,eAAe;oBACf,KAAK,CACJ,6CAA6C,EAC7C,MAAM,CAAC,WAAW,CAAC,IAAI,CACvB,CAAC;oBACD,MAA4B,CAAC,UAAU,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC;oBACpD,OAAO;iBACP;gBAED,IAAI,MAAM,EAAE;oBACX,MAAM,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,EAAE;wBACxB,IAAI,CAAC,UAAU,CAAC,MAAoB,EAAE,IAAI,CAAC,CAAC;oBAC7C,CAAC,CAAC,CAAC;oBACH,GAAG,CAAC,QAAQ,CAAC,MAAoB,CAAC,CAAC;oBACnC,OAAO;iBACP;gBAED,MAAM,GAAG,GAAG,IAAI,KAAK,CACpB,qDAAqD,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,IAAI,IAAI,CAC/E,CAAC;gBACF,OAAO,CAAC,GAAG,CAAC,CAAC;YACd,CAAC,CAAC;YAEF,IAAI,OAAO,IAAI,CAAC,QAAQ,KAAK,UAAU,EAAE;gBACxC,OAAO,CAAC,IAAI,KAAK,CAAC,2BAA2B,CAAC,CAAC,CAAC;gBAChD,OAAO;aACP;YAED,IAAI,CAAC,IAAI,CAAC,mBAAmB,EAAE;gBAC9B,IAAI,IAAI,CAAC,QAAQ,CAAC,MAAM,IAAI,CAAC,EAAE;oBAC9B,KAAK,CAAC,gDAAgD,CAAC,CAAC;oBACxD,IAAI,CAAC,mBAAmB,GAAG,mBAAS,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;iBACpD;qBAAM;oBACN,IAAI,CAAC,mBAAmB,GAAG,IAAI,CAAC,QAAQ,CAAC;iBACzC;aACD;YAED,IAAI,OAAO,SAAS,KAAK,QAAQ,IAAI,SAAS,GAAG,CAAC,EAAE;gBACnD,SAAS,GAAG,UAAU,CAAC,SAAS,EAAE,SAAS,CAAC,CAAC;aAC7C;YAED,IAAI,MAAM,IAAI,IAAI,IAAI,OAAO,IAAI,CAAC,IAAI,KAAK,QAAQ,EAAE;gBACpD,IAAI,CAAC,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;aAC9B;YAED,IAAI;gBACH,KAAK,CACJ,qCAAqC,EACrC,IAAI,CAAC,QAAQ,EACb,GAAG,GAAG,CAAC,MAAM,IAAI,GAAG,CAAC,IAAI,EAAE,CAC3B,CAAC;gBACF,OAAO,CAAC,OAAO,CAAC,IAAI,CAAC,mBAAmB,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC,IAAI,CACxD,QAAQ,EACR,aAAa,CACb,CAAC;aACF;YAAC,OAAO,GAAG,EAAE;gBACb,OAAO,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,CAAC,aAAa
,CAAC,CAAC;aACzC;QACF,CAAC;QAED,UAAU,CAAC,MAAkB,EAAE,IAAkB;YAChD,KAAK,CAAC,sBAAsB,EAAE,MAAM,CAAC,WAAW,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;YAC7D,MAAM,CAAC,OAAO,EAAE,CAAC;QAClB,CAAC;QAED,OAAO;YACN,KAAK,CAAC,qBAAqB,EAAE,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,CAAC;QACrD,CAAC;KACD;IAxPY,iBAAK,QAwPjB,CAAA;IAED,uCAAuC;IACvC,WAAW,CAAC,SAAS,GAAG,WAAW,CAAC,KAAK,CAAC,SAAS,CAAC;AACrD,CAAC,EAtTS,WAAW,KAAX,WAAW,QAsTpB;AAED,iBAAS,WAAW,CAAC"} \ No newline at end of file diff --git a/node_modules/agent-base/dist/src/promisify.d.ts b/node_modules/agent-base/dist/src/promisify.d.ts new file mode 100644 index 0000000..0268869 --- /dev/null +++ b/node_modules/agent-base/dist/src/promisify.d.ts @@ -0,0 +1,4 @@ +import { ClientRequest, RequestOptions, AgentCallbackCallback, AgentCallbackPromise } from './index'; +declare type LegacyCallback = (req: ClientRequest, opts: RequestOptions, fn: AgentCallbackCallback) => void; +export default function promisify(fn: LegacyCallback): AgentCallbackPromise; +export {}; diff --git a/node_modules/agent-base/dist/src/promisify.js b/node_modules/agent-base/dist/src/promisify.js new file mode 100644 index 0000000..b2f6132 --- /dev/null +++ b/node_modules/agent-base/dist/src/promisify.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +function promisify(fn) { + return function (req, opts) { + return new Promise((resolve, reject) => { + fn.call(this, req, opts, (err, rtn) => { + if (err) { + reject(err); + } + else { + resolve(rtn); + } + }); + }); + }; +} +exports.default = promisify; +//# sourceMappingURL=promisify.js.map \ No newline at end of file diff --git a/node_modules/agent-base/dist/src/promisify.js.map b/node_modules/agent-base/dist/src/promisify.js.map new file mode 100644 index 0000000..4bff9bf --- /dev/null +++ b/node_modules/agent-base/dist/src/promisify.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"promisify.js","sourceRoot":"","sources":["../../src/promisify.ts"],"names":[],"mappings":";;AAeA,SAAwB,SAAS,CAAC,EAAkB;IACnD,OAAO,UAAsB,GAAkB,EAAE,IAAoB;QACpE,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;YACtC,EAAE,CAAC,IAAI,CACN,IAAI,EACJ,GAAG,EACH,IAAI,EACJ,CAAC,GAA6B,EAAE,GAAyB,EAAE,EAAE;gBAC5D,IAAI,GAAG,EAAE;oBACR,MAAM,CAAC,GAAG,CAAC,CAAC;iBACZ;qBAAM;oBACN,OAAO,CAAC,GAAG,CAAC,CAAC;iBACb;YACF,CAAC,CACD,CAAC;QACH,CAAC,CAAC,CAAC;IACJ,CAAC,CAAC;AACH,CAAC;AAjBD,4BAiBC"} \ No newline at end of file diff --git a/node_modules/agent-base/node_modules/debug/LICENSE b/node_modules/agent-base/node_modules/debug/LICENSE new file mode 100644 index 0000000..1a9820e --- /dev/null +++ b/node_modules/agent-base/node_modules/debug/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/node_modules/agent-base/node_modules/debug/README.md b/node_modules/agent-base/node_modules/debug/README.md new file mode 100644 index 0000000..9ebdfbf --- /dev/null +++ b/node_modules/agent-base/node_modules/debug/README.md @@ -0,0 +1,481 @@ +# debug +[![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny JavaScript debugging utility modelled after Node.js core's debugging +technique. Works in Node.js and web browsers. + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. + +Example [_app.js_](./examples/node/app.js): + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %o', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example [_worker.js_](./examples/node/worker.js): + +```js +var a = require('debug')('worker:a') + , b = require('debug')('worker:b'); + +function work() { + a('doing lots of uninteresting work'); + setTimeout(work, Math.random() * 1000); +} + +work(); + +function workb() { + b('doing some work'); + setTimeout(workb, Math.random() * 2000); +} + +workb(); +``` + +The `DEBUG` environment variable is then used to enable these based on space or +comma-delimited names. 
+ +Here are some examples: + +screen shot 2017-08-08 at 12 53 04 pm +screen shot 2017-08-08 at 12 53 38 pm +screen shot 2017-08-08 at 12 53 25 pm + +#### Windows command prompt notes + +##### CMD + +On Windows the environment variable is set using the `set` command. + +```cmd +set DEBUG=*,-not_this +``` + +Example: + +```cmd +set DEBUG=* & node app.js +``` + +##### PowerShell (VS Code default) + +PowerShell uses different syntax to set environment variables. + +```cmd +$env:DEBUG = "*,-not_this" +``` + +Example: + +```cmd +$env:DEBUG='app';node app.js +``` + +Then, run the program to be debugged as usual. + +npm script example: +```js + "windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js", +``` + +## Namespace Colors + +Every debug instance has a color generated for it based on its namespace name. +This helps when visually parsing the debug output to identify which debug instance +a debug line belongs to. + +#### Node.js + +In Node.js, colors are enabled when stderr is a TTY. You also _should_ install +the [`supports-color`](https://npmjs.org/supports-color) module alongside debug, +otherwise debug will only use a small handful of basic colors. + + + +#### Web Browser + +Colors are also enabled on "Web Inspectors" that understand the `%c` formatting +option. These are WebKit web inspectors, Firefox ([since version +31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) +and the Firebug plugin for Firefox (any version). + + + + +## Millisecond diff + +When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. 
+ + + +When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below: + + + + +## Conventions + +If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output. + +## Wildcards + +The `*` character may be used as a wildcard. Suppose for example your library has +debuggers named "connect:bodyParser", "connect:compress", "connect:session", +instead of listing all three with +`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do +`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + +You can also exclude specific debuggers by prefixing them with a "-" character. +For example, `DEBUG=*,-connect:*` would include all debuggers except those +starting with "connect:". + +## Environment Variables + +When running through Node.js, you can set a few environment variables that will +change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + +__Note:__ The environment variables beginning with `DEBUG_` end up being +converted into an Options object that gets used with `%o`/`%O` formatters. 
+See the Node.js documentation for +[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) +for the complete list. + +## Formatters + +Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. +Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + + +### Custom formatters + +You can add custom formatters by extending the `debug.formatters` object. +For example, if you wanted to add support for rendering a Buffer as hex with +`%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + + +## Browser Support + +You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), +or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), +if you don't want to build it yourself. + +Debug's enable state is currently persisted by `localStorage`. +Consider the situation shown below where you have `worker:a` and `worker:b`, +and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. 
+ +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + +In Chromium-based web browsers (e.g. Brave, Chrome, and Electron), the JavaScript console will—by default—only show messages logged by `debug` if the "Verbose" log level is _enabled_. + + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example [_stdout.js_](./examples/node/stdout.js): + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! +log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + +## Extend +You can simply extend debugger +```js +const log = require('debug')('auth'); + +//creates new debug instance with extended namespace +const logSign = log.extend('sign'); +const logLogin = log.extend('login'); + +log('hello'); // auth hello +logSign('hello'); //auth:sign hello +logLogin('hello'); //auth:login hello +``` + +## Set dynamically + +You can also enable debug dynamically by calling the `enable()` method : + +```js +let debug = require('debug'); + +console.log(1, debug.enabled('test')); + +debug.enable('test'); +console.log(2, debug.enabled('test')); + +debug.disable(); +console.log(3, debug.enabled('test')); + +``` + +print : +``` +1 false +2 true +3 false +``` + +Usage : +`enable(namespaces)` +`namespaces` can include modes separated by a colon and wildcards. 
+ +Note that calling `enable()` completely overrides previously set DEBUG variable : + +``` +$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))' +=> false +``` + +`disable()` + +Will disable all namespaces. The functions returns the namespaces currently +enabled (and skipped). This can be useful if you want to disable debugging +temporarily without knowing what was enabled to begin with. + +For example: + +```js +let debug = require('debug'); +debug.enable('foo:*,-foo:bar'); +let namespaces = debug.disable(); +debug.enable(namespaces); +``` + +Note: There is no guarantee that the string will be identical to the initial +enable string, but semantically they will be identical. + +## Checking whether a debug target is enabled + +After you've created a debug instance, you can determine whether or not it is +enabled by checking the `enabled` property: + +```javascript +const debug = require('debug')('http'); + +if (debug.enabled) { + // do stuff... +} +``` + +You can also manually toggle this property to force the debug instance to be +enabled or disabled. + +## Usage in child processes + +Due to the way `debug` detects if the output is a TTY or not, colors are not shown in child processes when `stderr` is piped. A solution is to pass the `DEBUG_COLORS=1` environment variable to the child process. +For example: + +```javascript +worker = fork(WORKER_WRAP_PATH, [workerPath], { + stdio: [ + /* stdin: */ 0, + /* stdout: */ 'pipe', + /* stderr: */ 'pipe', + 'ipc', + ], + env: Object.assign({}, process.env, { + DEBUG_COLORS: 1 // without this settings, colors won't be shown + }), +}); + +worker.stderr.pipe(process.stderr, { end: false }); +``` + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + - Josh Junon + +## Backers + +Support us with a monthly donation and help us continue our activities. 
[[Become a backer](https://opencollective.com/debug#backer)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca> +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/agent-base/node_modules/debug/package.json b/node_modules/agent-base/node_modules/debug/package.json new file mode 100644 index 0000000..ee8abb5 --- /dev/null +++ b/node_modules/agent-base/node_modules/debug/package.json @@ -0,0 +1,64 @@ +{ + "name": "debug", + "version": "4.4.3", + "repository": { + "type": "git", + "url": "git://github.com/debug-js/debug.git" + }, + "description": "Lightweight debugging utility for Node.js and the browser", + "keywords": [ + "debug", + "log", + "debugger" + ], + "files": [ + "src", + "LICENSE", + "README.md" + ], + "author": "Josh Junon (https://github.com/qix-)", + "contributors": [ + "TJ Holowaychuk ", + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "scripts": { + "lint": "xo", + "test": "npm run test:node && npm run test:browser && npm run lint", + "test:node": "mocha test.js test.node.js", + "test:browser": "karma start --single-run", + "test:coverage": "cat ./coverage/lcov.info | coveralls" + }, + "dependencies": { + "ms": "^2.1.3" + }, + "devDependencies": { + "brfs": "^2.0.1", + "browserify": "^16.2.3", + "coveralls": "^3.0.2", + "karma": "^3.1.4", + "karma-browserify": "^6.0.0", + "karma-chrome-launcher": "^2.2.0", + "karma-mocha": "^1.3.0", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "sinon": "^14.0.0", + "xo": "^0.23.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "engines": { + "node": ">=6.0" + }, + "xo": { + "rules": { + "import/extensions": "off" + } + } +} diff --git a/node_modules/agent-base/node_modules/debug/src/browser.js b/node_modules/agent-base/node_modules/debug/src/browser.js new file mode 100644 index 0000000..5993451 --- /dev/null +++ b/node_modules/agent-base/node_modules/debug/src/browser.js @@ -0,0 +1,272 @@ +/* eslint-env browser */ + +/** + * This is the web browser implementation of `debug()`. 
+ */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. 
Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + let m; + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + // eslint-disable-next-line no-return-assign + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && (m = navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)) && parseInt(m[1], 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? 
'%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug') || exports.storage.getItem('DEBUG') ; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. 
+ * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; diff --git a/node_modules/agent-base/node_modules/debug/src/common.js b/node_modules/agent-base/node_modules/debug/src/common.js new file mode 100644 index 0000000..141cb57 --- /dev/null +++ b/node_modules/agent-base/node_modules/debug/src/common.js @@ -0,0 +1,292 @@ + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". 
+ */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) 
+ createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + const split = (typeof namespaces === 'string' ? namespaces : '') + .trim() + .replace(/\s+/g, ',') + .split(',') + .filter(Boolean); + + for (const ns of split) { + if (ns[0] === '-') { + createDebug.skips.push(ns.slice(1)); + } else { + createDebug.names.push(ns); + } + } + } + + /** + * Checks if the given string matches a namespace template, honoring + * asterisks as wildcards. 
+ * + * @param {String} search + * @param {String} template + * @return {Boolean} + */ + function matchesTemplate(search, template) { + let searchIndex = 0; + let templateIndex = 0; + let starIndex = -1; + let matchIndex = 0; + + while (searchIndex < search.length) { + if (templateIndex < template.length && (template[templateIndex] === search[searchIndex] || template[templateIndex] === '*')) { + // Match character or proceed with wildcard + if (template[templateIndex] === '*') { + starIndex = templateIndex; + matchIndex = searchIndex; + templateIndex++; // Skip the '*' + } else { + searchIndex++; + templateIndex++; + } + } else if (starIndex !== -1) { // eslint-disable-line no-negated-condition + // Backtrack to the last '*' and try to match more characters + templateIndex = starIndex + 1; + matchIndex++; + searchIndex = matchIndex; + } else { + return false; // No match + } + } + + // Handle trailing '*' in template + while (templateIndex < template.length && template[templateIndex] === '*') { + templateIndex++; + } + + return templateIndex === template.length; + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names, + ...createDebug.skips.map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + for (const skip of createDebug.skips) { + if (matchesTemplate(name, skip)) { + return false; + } + } + + for (const ns of createDebug.names) { + if (matchesTemplate(name, ns)) { + return true; + } + } + + return false; + } + + /** + * Coerce `val`. 
+ * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; diff --git a/node_modules/agent-base/node_modules/debug/src/index.js b/node_modules/agent-base/node_modules/debug/src/index.js new file mode 100644 index 0000000..bf4c57f --- /dev/null +++ b/node_modules/agent-base/node_modules/debug/src/index.js @@ -0,0 +1,10 @@ +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} diff --git a/node_modules/agent-base/node_modules/debug/src/node.js b/node_modules/agent-base/node_modules/debug/src/node.js new file mode 100644 index 0000000..715560a --- /dev/null +++ b/node_modules/agent-base/node_modules/debug/src/node.js @@ -0,0 +1,263 @@ +/** + * Module dependencies. + */ + +const tty = require('tty'); +const util = require('util'); + +/** + * This is the Node.js implementation of `debug()`. + */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. 
+ */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = require('supports-color'); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? 
+ Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.formatWithOptions()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.formatWithOptions(exports.inspectOpts, ...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. 
+ */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; diff --git a/node_modules/agent-base/node_modules/ms/index.js b/node_modules/agent-base/node_modules/ms/index.js new file mode 100644 index 0000000..ea734fb --- /dev/null +++ b/node_modules/agent-base/node_modules/ms/index.js @@ -0,0 +1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function (val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. 
+ */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} diff --git a/node_modules/agent-base/node_modules/ms/license.md b/node_modules/agent-base/node_modules/ms/license.md new file mode 100644 index 0000000..fa5d39b --- /dev/null +++ b/node_modules/agent-base/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Vercel, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/agent-base/node_modules/ms/package.json b/node_modules/agent-base/node_modules/ms/package.json new file mode 100644 index 0000000..4997189 --- /dev/null +++ b/node_modules/agent-base/node_modules/ms/package.json @@ -0,0 +1,38 @@ +{ + "name": "ms", + "version": "2.1.3", + "description": "Tiny millisecond conversion utility", + "repository": "vercel/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "4.18.2", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1", + "prettier": "2.0.5" + } +} diff --git a/node_modules/agent-base/node_modules/ms/readme.md b/node_modules/agent-base/node_modules/ms/readme.md new file mode 100644 index 0000000..0fc1abb --- /dev/null +++ b/node_modules/agent-base/node_modules/ms/readme.md @@ -0,0 +1,59 @@ +# ms + +![CI](https://github.com/vercel/ms/workflows/CI/badge.svg) + +Use this package to easily convert various time formats to milliseconds. 
+ +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! 
+ +As always, you can run the tests using: `npm test` diff --git a/node_modules/agent-base/package.json b/node_modules/agent-base/package.json new file mode 100644 index 0000000..fadce3a --- /dev/null +++ b/node_modules/agent-base/package.json @@ -0,0 +1,64 @@ +{ + "name": "agent-base", + "version": "6.0.2", + "description": "Turn a function into an `http.Agent` instance", + "main": "dist/src/index", + "typings": "dist/src/index", + "files": [ + "dist/src", + "src" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "postbuild": "cpy --parents src test '!**/*.ts' dist", + "test": "mocha --reporter spec dist/test/*.js", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-agent-base.git" + }, + "keywords": [ + "http", + "agent", + "base", + "barebones", + "https" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/node-agent-base/issues" + }, + "dependencies": { + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/mocha": "^5.2.7", + "@types/node": "^14.0.20", + "@types/semver": "^7.1.0", + "@types/ws": "^6.0.3", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "async-listen": "^1.2.0", + "cpy-cli": "^2.0.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.0", + "rimraf": "^3.0.0", + "semver": "^7.1.2", + "typescript": "^3.5.3", + "ws": "^3.0.0" + }, + "engines": { + "node": ">= 6.0.0" + } +} diff --git a/node_modules/agent-base/src/index.ts b/node_modules/agent-base/src/index.ts new file mode 100644 index 0000000..a47ccd4 --- /dev/null +++ b/node_modules/agent-base/src/index.ts @@ -0,0 +1,345 
@@ +import net from 'net'; +import http from 'http'; +import https from 'https'; +import { Duplex } from 'stream'; +import { EventEmitter } from 'events'; +import createDebug from 'debug'; +import promisify from './promisify'; + +const debug = createDebug('agent-base'); + +function isAgent(v: any): v is createAgent.AgentLike { + return Boolean(v) && typeof v.addRequest === 'function'; +} + +function isSecureEndpoint(): boolean { + const { stack } = new Error(); + if (typeof stack !== 'string') return false; + return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); +} + +function createAgent(opts?: createAgent.AgentOptions): createAgent.Agent; +function createAgent( + callback: createAgent.AgentCallback, + opts?: createAgent.AgentOptions +): createAgent.Agent; +function createAgent( + callback?: createAgent.AgentCallback | createAgent.AgentOptions, + opts?: createAgent.AgentOptions +) { + return new createAgent.Agent(callback, opts); +} + +namespace createAgent { + export interface ClientRequest extends http.ClientRequest { + _last?: boolean; + _hadError?: boolean; + method: string; + } + + export interface AgentRequestOptions { + host?: string; + path?: string; + // `port` on `http.RequestOptions` can be a string or undefined, + // but `net.TcpNetConnectOpts` expects only a number + port: number; + } + + export interface HttpRequestOptions + extends AgentRequestOptions, + Omit { + secureEndpoint: false; + } + + export interface HttpsRequestOptions + extends AgentRequestOptions, + Omit { + secureEndpoint: true; + } + + export type RequestOptions = HttpRequestOptions | HttpsRequestOptions; + + export type AgentLike = Pick | http.Agent; + + export type AgentCallbackReturn = Duplex | AgentLike; + + export type AgentCallbackCallback = ( + err?: Error | null, + socket?: createAgent.AgentCallbackReturn + ) => void; + + export type AgentCallbackPromise = ( + req: createAgent.ClientRequest, + opts: createAgent.RequestOptions + 
) => + | createAgent.AgentCallbackReturn + | Promise; + + export type AgentCallback = typeof Agent.prototype.callback; + + export type AgentOptions = { + timeout?: number; + }; + + /** + * Base `http.Agent` implementation. + * No pooling/keep-alive is implemented by default. + * + * @param {Function} callback + * @api public + */ + export class Agent extends EventEmitter { + public timeout: number | null; + public maxFreeSockets: number; + public maxTotalSockets: number; + public maxSockets: number; + public sockets: { + [key: string]: net.Socket[]; + }; + public freeSockets: { + [key: string]: net.Socket[]; + }; + public requests: { + [key: string]: http.IncomingMessage[]; + }; + public options: https.AgentOptions; + private promisifiedCallback?: createAgent.AgentCallbackPromise; + private explicitDefaultPort?: number; + private explicitProtocol?: string; + + constructor( + callback?: createAgent.AgentCallback | createAgent.AgentOptions, + _opts?: createAgent.AgentOptions + ) { + super(); + + let opts = _opts; + if (typeof callback === 'function') { + this.callback = callback; + } else if (callback) { + opts = callback; + } + + // Timeout for the socket to be returned from the callback + this.timeout = null; + if (opts && typeof opts.timeout === 'number') { + this.timeout = opts.timeout; + } + + // These aren't actually used by `agent-base`, but are required + // for the TypeScript definition files in `@types/node` :/ + this.maxFreeSockets = 1; + this.maxSockets = 1; + this.maxTotalSockets = Infinity; + this.sockets = {}; + this.freeSockets = {}; + this.requests = {}; + this.options = {}; + } + + get defaultPort(): number { + if (typeof this.explicitDefaultPort === 'number') { + return this.explicitDefaultPort; + } + return isSecureEndpoint() ? 
443 : 80; + } + + set defaultPort(v: number) { + this.explicitDefaultPort = v; + } + + get protocol(): string { + if (typeof this.explicitProtocol === 'string') { + return this.explicitProtocol; + } + return isSecureEndpoint() ? 'https:' : 'http:'; + } + + set protocol(v: string) { + this.explicitProtocol = v; + } + + callback( + req: createAgent.ClientRequest, + opts: createAgent.RequestOptions, + fn: createAgent.AgentCallbackCallback + ): void; + callback( + req: createAgent.ClientRequest, + opts: createAgent.RequestOptions + ): + | createAgent.AgentCallbackReturn + | Promise; + callback( + req: createAgent.ClientRequest, + opts: createAgent.AgentOptions, + fn?: createAgent.AgentCallbackCallback + ): + | createAgent.AgentCallbackReturn + | Promise + | void { + throw new Error( + '"agent-base" has no default implementation, you must subclass and override `callback()`' + ); + } + + /** + * Called by node-core's "_http_client.js" module when creating + * a new HTTP request with this Agent instance. + * + * @api public + */ + addRequest(req: ClientRequest, _opts: RequestOptions): void { + const opts: RequestOptions = { ..._opts }; + + if (typeof opts.secureEndpoint !== 'boolean') { + opts.secureEndpoint = isSecureEndpoint(); + } + + if (opts.host == null) { + opts.host = 'localhost'; + } + + if (opts.port == null) { + opts.port = opts.secureEndpoint ? 443 : 80; + } + + if (opts.protocol == null) { + opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; + } + + if (opts.host && opts.path) { + // If both a `host` and `path` are specified then it's most + // likely the result of a `url.parse()` call... we need to + // remove the `path` portion so that `net.connect()` doesn't + // attempt to open that as a unix socket file. 
+ delete opts.path; + } + + delete opts.agent; + delete opts.hostname; + delete opts._defaultAgent; + delete opts.defaultPort; + delete opts.createConnection; + + // Hint to use "Connection: close" + // XXX: non-documented `http` module API :( + req._last = true; + req.shouldKeepAlive = false; + + let timedOut = false; + let timeoutId: ReturnType | null = null; + const timeoutMs = opts.timeout || this.timeout; + + const onerror = (err: NodeJS.ErrnoException) => { + if (req._hadError) return; + req.emit('error', err); + // For Safety. Some additional errors might fire later on + // and we need to make sure we don't double-fire the error event. + req._hadError = true; + }; + + const ontimeout = () => { + timeoutId = null; + timedOut = true; + const err: NodeJS.ErrnoException = new Error( + `A "socket" was not created for HTTP request before ${timeoutMs}ms` + ); + err.code = 'ETIMEOUT'; + onerror(err); + }; + + const callbackError = (err: NodeJS.ErrnoException) => { + if (timedOut) return; + if (timeoutId !== null) { + clearTimeout(timeoutId); + timeoutId = null; + } + onerror(err); + }; + + const onsocket = (socket: AgentCallbackReturn) => { + if (timedOut) return; + if (timeoutId != null) { + clearTimeout(timeoutId); + timeoutId = null; + } + + if (isAgent(socket)) { + // `socket` is actually an `http.Agent` instance, so + // relinquish responsibility for this `req` to the Agent + // from here on + debug( + 'Callback returned another Agent instance %o', + socket.constructor.name + ); + (socket as createAgent.Agent).addRequest(req, opts); + return; + } + + if (socket) { + socket.once('free', () => { + this.freeSocket(socket as net.Socket, opts); + }); + req.onSocket(socket as net.Socket); + return; + } + + const err = new Error( + `no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\`` + ); + onerror(err); + }; + + if (typeof this.callback !== 'function') { + onerror(new Error('`callback` is not defined')); + return; + } + + if 
(!this.promisifiedCallback) { + if (this.callback.length >= 3) { + debug('Converting legacy callback function to promise'); + this.promisifiedCallback = promisify(this.callback); + } else { + this.promisifiedCallback = this.callback; + } + } + + if (typeof timeoutMs === 'number' && timeoutMs > 0) { + timeoutId = setTimeout(ontimeout, timeoutMs); + } + + if ('port' in opts && typeof opts.port !== 'number') { + opts.port = Number(opts.port); + } + + try { + debug( + 'Resolving socket for %o request: %o', + opts.protocol, + `${req.method} ${req.path}` + ); + Promise.resolve(this.promisifiedCallback(req, opts)).then( + onsocket, + callbackError + ); + } catch (err) { + Promise.reject(err).catch(callbackError); + } + } + + freeSocket(socket: net.Socket, opts: AgentOptions) { + debug('Freeing socket %o %o', socket.constructor.name, opts); + socket.destroy(); + } + + destroy() { + debug('Destroying agent %o', this.constructor.name); + } + } + + // So that `instanceof` works correctly + createAgent.prototype = createAgent.Agent.prototype; +} + +export = createAgent; diff --git a/node_modules/agent-base/src/promisify.ts b/node_modules/agent-base/src/promisify.ts new file mode 100644 index 0000000..60cc662 --- /dev/null +++ b/node_modules/agent-base/src/promisify.ts @@ -0,0 +1,33 @@ +import { + Agent, + ClientRequest, + RequestOptions, + AgentCallbackCallback, + AgentCallbackPromise, + AgentCallbackReturn +} from './index'; + +type LegacyCallback = ( + req: ClientRequest, + opts: RequestOptions, + fn: AgentCallbackCallback +) => void; + +export default function promisify(fn: LegacyCallback): AgentCallbackPromise { + return function(this: Agent, req: ClientRequest, opts: RequestOptions) { + return new Promise((resolve, reject) => { + fn.call( + this, + req, + opts, + (err: Error | null | undefined, rtn?: AgentCallbackReturn) => { + if (err) { + reject(err); + } else { + resolve(rtn); + } + } + ); + }); + }; +} diff --git a/node_modules/ansi-regex/index.d.ts 
b/node_modules/ansi-regex/index.d.ts new file mode 100644 index 0000000..2dbf6af --- /dev/null +++ b/node_modules/ansi-regex/index.d.ts @@ -0,0 +1,37 @@ +declare namespace ansiRegex { + interface Options { + /** + Match only the first ANSI escape. + + @default false + */ + onlyFirst: boolean; + } +} + +/** +Regular expression for matching ANSI escape codes. + +@example +``` +import ansiRegex = require('ansi-regex'); + +ansiRegex().test('\u001B[4mcake\u001B[0m'); +//=> true + +ansiRegex().test('cake'); +//=> false + +'\u001B[4mcake\u001B[0m'.match(ansiRegex()); +//=> ['\u001B[4m', '\u001B[0m'] + +'\u001B[4mcake\u001B[0m'.match(ansiRegex({onlyFirst: true})); +//=> ['\u001B[4m'] + +'\u001B]8;;https://github.com\u0007click\u001B]8;;\u0007'.match(ansiRegex()); +//=> ['\u001B]8;;https://github.com\u0007', '\u001B]8;;\u0007'] +``` +*/ +declare function ansiRegex(options?: ansiRegex.Options): RegExp; + +export = ansiRegex; diff --git a/node_modules/ansi-regex/index.js b/node_modules/ansi-regex/index.js new file mode 100644 index 0000000..616ff83 --- /dev/null +++ b/node_modules/ansi-regex/index.js @@ -0,0 +1,10 @@ +'use strict'; + +module.exports = ({onlyFirst = false} = {}) => { + const pattern = [ + '[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)', + '(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))' + ].join('|'); + + return new RegExp(pattern, onlyFirst ? 
undefined : 'g'); +}; diff --git a/node_modules/ansi-regex/license b/node_modules/ansi-regex/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/ansi-regex/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/ansi-regex/package.json b/node_modules/ansi-regex/package.json new file mode 100644 index 0000000..017f531 --- /dev/null +++ b/node_modules/ansi-regex/package.json @@ -0,0 +1,55 @@ +{ + "name": "ansi-regex", + "version": "5.0.1", + "description": "Regular expression for matching ANSI escape codes", + "license": "MIT", + "repository": "chalk/ansi-regex", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd", + "view-supported": "node fixtures/view-codes.js" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "cli", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "command-line", + "text", + "regex", + "regexp", + "re", + "match", + "test", + "find", + "pattern" + ], + "devDependencies": { + "ava": "^2.4.0", + "tsd": "^0.9.0", + "xo": "^0.25.3" + } +} diff --git a/node_modules/ansi-regex/readme.md b/node_modules/ansi-regex/readme.md new file mode 100644 index 0000000..4d848bc --- /dev/null +++ b/node_modules/ansi-regex/readme.md @@ -0,0 +1,78 @@ +# ansi-regex + +> Regular expression for matching [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) + + +## Install + +``` +$ npm install ansi-regex +``` + + +## Usage + +```js +const ansiRegex = require('ansi-regex'); + +ansiRegex().test('\u001B[4mcake\u001B[0m'); +//=> true + +ansiRegex().test('cake'); +//=> false + +'\u001B[4mcake\u001B[0m'.match(ansiRegex()); +//=> ['\u001B[4m', '\u001B[0m'] + +'\u001B[4mcake\u001B[0m'.match(ansiRegex({onlyFirst: true})); +//=> ['\u001B[4m'] + +'\u001B]8;;https://github.com\u0007click\u001B]8;;\u0007'.match(ansiRegex()); +//=> ['\u001B]8;;https://github.com\u0007', '\u001B]8;;\u0007'] +``` + + +## API + +### ansiRegex(options?) 
+ +Returns a regex for matching ANSI escape codes. + +#### options + +Type: `object` + +##### onlyFirst + +Type: `boolean`
+Default: `false` *(Matches any ANSI escape codes in a string)* + +Match only the first ANSI escape. + + +## FAQ + +### Why do you test for codes not in the ECMA 48 standard? + +Some of the codes we run as a test are codes that we acquired finding various lists of non-standard or manufacturer specific codes. We test for both standard and non-standard codes, as most of them follow the same or similar format and can be safely matched in strings without the risk of removing actual string content. There are a few non-standard control codes that do not follow the traditional format (i.e. they end in numbers) thus forcing us to exclude them from the test because we cannot reliably match them. + +On the historical side, those ECMA standards were established in the early 90's whereas the VT100, for example, was designed in the mid/late 70's. At that point in time, control codes were still pretty ungoverned and engineers used them for a multitude of things, namely to activate hardware ports that may have been proprietary. Somewhere else you see a similar 'anarchy' of codes is in the x86 architecture for processors; there are a ton of "interrupts" that can mean different things on certain brands of processors, most of which have been phased out. + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + + +--- + +

+ + Get professional support for this package with a Tidelift subscription + +
+ + Tidelift helps make open source sustainable for maintainers while giving companies
assurances about security, maintenance, and licensing for their dependencies. +
+
diff --git a/node_modules/aproba/LICENSE b/node_modules/aproba/LICENSE new file mode 100644 index 0000000..f4be44d --- /dev/null +++ b/node_modules/aproba/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/aproba/README.md b/node_modules/aproba/README.md new file mode 100644 index 0000000..0bfc594 --- /dev/null +++ b/node_modules/aproba/README.md @@ -0,0 +1,94 @@ +aproba +====== + +A ridiculously light-weight function argument validator + +``` +var validate = require("aproba") + +function myfunc(a, b, c) { + // `a` must be a string, `b` a number, `c` a function + validate('SNF', arguments) // [a,b,c] is also valid +} + +myfunc('test', 23, function () {}) // ok +myfunc(123, 23, function () {}) // type error +myfunc('test', 23) // missing arg error +myfunc('test', 23, function () {}, true) // too many args error + +``` + +Valid types are: + +| type | description +| :--: | :---------- +| * | matches any type +| A | `Array.isArray` OR an `arguments` object +| S | typeof == string +| N | typeof == number +| F | typeof == function +| O | typeof == object and not type A and not type E +| B | typeof == boolean +| E | `instanceof Error` OR `null` **(special: see below)** +| Z | == `null` + +Validation failures throw one of three exception 
types, distinguished by a +`code` property of `EMISSINGARG`, `EINVALIDTYPE` or `ETOOMANYARGS`. + +If you pass in an invalid type then it will throw with a code of +`EUNKNOWNTYPE`. + +If an **error** argument is found and is not null then the remaining +arguments are optional. That is, if you say `ESO` then that's like using a +non-magical `E` in: `E|ESO|ZSO`. + +### But I have optional arguments?! + +You can provide more than one signature by separating them with pipes `|`. +If any signature matches the arguments then they'll be considered valid. + +So for example, say you wanted to write a signature for +`fs.createWriteStream`. The docs for it describe it thusly: + +``` +fs.createWriteStream(path[, options]) +``` + +This would be a signature of `SO|S`. That is, a string and and object, or +just a string. + +Now, if you read the full `fs` docs, you'll see that actually path can ALSO +be a buffer. And options can be a string, that is: +``` +path | +options | +``` + +To reproduce this you have to fully enumerate all of the possible +combinations and that implies a signature of `SO|SS|OO|OS|S|O`. The +awkwardness is a feature: It reminds you of the complexity you're adding to +your API when you do this sort of thing. + + +### Browser support + +This has no dependencies and should work in browsers, though you'll have +noisier stack traces. + +### Why this exists + +I wanted a very simple argument validator. It needed to do two things: + +1. Be more concise and easier to use than assertions + +2. Not encourage an infinite bikeshed of DSLs + +This is why types are specified by a single character and there's no such +thing as an optional argument. + +This is not intended to validate user data. This is specifically about +asserting the interface of your functions. + +If you need greater validation, I encourage you to write them by hand or +look elsewhere. 
+ diff --git a/node_modules/aproba/index.js b/node_modules/aproba/index.js new file mode 100644 index 0000000..5a58e81 --- /dev/null +++ b/node_modules/aproba/index.js @@ -0,0 +1,105 @@ +'use strict' +module.exports = validate + +function isArguments (thingy) { + return thingy != null && typeof thingy === 'object' && thingy.hasOwnProperty('callee') +} + +const types = { + '*': {label: 'any', check: () => true}, + A: {label: 'array', check: _ => Array.isArray(_) || isArguments(_)}, + S: {label: 'string', check: _ => typeof _ === 'string'}, + N: {label: 'number', check: _ => typeof _ === 'number'}, + F: {label: 'function', check: _ => typeof _ === 'function'}, + O: {label: 'object', check: _ => typeof _ === 'object' && _ != null && !types.A.check(_) && !types.E.check(_)}, + B: {label: 'boolean', check: _ => typeof _ === 'boolean'}, + E: {label: 'error', check: _ => _ instanceof Error}, + Z: {label: 'null', check: _ => _ == null} +} + +function addSchema (schema, arity) { + const group = arity[schema.length] = arity[schema.length] || [] + if (group.indexOf(schema) === -1) group.push(schema) +} + +function validate (rawSchemas, args) { + if (arguments.length !== 2) throw wrongNumberOfArgs(['SA'], arguments.length) + if (!rawSchemas) throw missingRequiredArg(0, 'rawSchemas') + if (!args) throw missingRequiredArg(1, 'args') + if (!types.S.check(rawSchemas)) throw invalidType(0, ['string'], rawSchemas) + if (!types.A.check(args)) throw invalidType(1, ['array'], args) + const schemas = rawSchemas.split('|') + const arity = {} + + schemas.forEach(schema => { + for (let ii = 0; ii < schema.length; ++ii) { + const type = schema[ii] + if (!types[type]) throw unknownType(ii, type) + } + if (/E.*E/.test(schema)) throw moreThanOneError(schema) + addSchema(schema, arity) + if (/E/.test(schema)) { + addSchema(schema.replace(/E.*$/, 'E'), arity) + addSchema(schema.replace(/E/, 'Z'), arity) + if (schema.length === 1) addSchema('', arity) + } + }) + let matching = arity[args.length] + 
if (!matching) { + throw wrongNumberOfArgs(Object.keys(arity), args.length) + } + for (let ii = 0; ii < args.length; ++ii) { + let newMatching = matching.filter(schema => { + const type = schema[ii] + const typeCheck = types[type].check + return typeCheck(args[ii]) + }) + if (!newMatching.length) { + const labels = matching.map(_ => types[_[ii]].label).filter(_ => _ != null) + throw invalidType(ii, labels, args[ii]) + } + matching = newMatching + } +} + +function missingRequiredArg (num) { + return newException('EMISSINGARG', 'Missing required argument #' + (num + 1)) +} + +function unknownType (num, type) { + return newException('EUNKNOWNTYPE', 'Unknown type ' + type + ' in argument #' + (num + 1)) +} + +function invalidType (num, expectedTypes, value) { + let valueType + Object.keys(types).forEach(typeCode => { + if (types[typeCode].check(value)) valueType = types[typeCode].label + }) + return newException('EINVALIDTYPE', 'Argument #' + (num + 1) + ': Expected ' + + englishList(expectedTypes) + ' but got ' + valueType) +} + +function englishList (list) { + return list.join(', ').replace(/, ([^,]+)$/, ' or $1') +} + +function wrongNumberOfArgs (expected, got) { + const english = englishList(expected) + const args = expected.every(ex => ex.length === 1) + ? 
'argument' + : 'arguments' + return newException('EWRONGARGCOUNT', 'Expected ' + english + ' ' + args + ' but got ' + got) +} + +function moreThanOneError (schema) { + return newException('ETOOMANYERRORTYPES', + 'Only one error type per argument signature is allowed, more than one found in "' + schema + '"') +} + +function newException (code, msg) { + const err = new TypeError(msg) + err.code = code + /* istanbul ignore else */ + if (Error.captureStackTrace) Error.captureStackTrace(err, validate) + return err +} diff --git a/node_modules/aproba/package.json b/node_modules/aproba/package.json new file mode 100644 index 0000000..71c7fca --- /dev/null +++ b/node_modules/aproba/package.json @@ -0,0 +1,35 @@ +{ + "name": "aproba", + "version": "2.1.0", + "description": "A ridiculously light-weight argument validator (now browser friendly)", + "main": "index.js", + "directories": { + "test": "test" + }, + "dependencies": {}, + "devDependencies": { + "standard": "^11.0.1", + "tap": "^12.0.1" + }, + "files": [ + "index.js" + ], + "scripts": { + "pretest": "standard", + "test": "tap --100 -J test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/aproba" + }, + "keywords": [ + "argument", + "validate" + ], + "author": "Rebecca Turner ", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/aproba/issues" + }, + "homepage": "https://github.com/iarna/aproba" +} diff --git a/node_modules/are-we-there-yet/LICENSE.md b/node_modules/are-we-there-yet/LICENSE.md new file mode 100644 index 0000000..845be76 --- /dev/null +++ b/node_modules/are-we-there-yet/LICENSE.md @@ -0,0 +1,18 @@ +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/are-we-there-yet/README.md b/node_modules/are-we-there-yet/README.md new file mode 100644 index 0000000..caae19b --- /dev/null +++ b/node_modules/are-we-there-yet/README.md @@ -0,0 +1,208 @@ +are-we-there-yet +---------------- + +Track complex hierarchies of asynchronous task completion statuses. This is +intended to give you a way of recording and reporting the progress of the big +recursive fan-out and gather type workflows that are so common in async. + +What you do with this completion data is up to you, but the most common use case is to +feed it to one of the many progress bar modules. + +Most progress bar modules include a rudimentary version of this, but my +needs were more complex. 
+ +Usage +===== + +```javascript +var TrackerGroup = require("are-we-there-yet").TrackerGroup + +var top = new TrackerGroup("program") + +var single = top.newItem("one thing", 100) +single.completeWork(20) + +console.log(top.completed()) // 0.2 + +fs.stat("file", function(er, stat) { + if (er) throw er + var stream = top.newStream("file", stat.size) + console.log(top.completed()) // now 0.1 as single is 50% of the job and is 20% complete + // and 50% * 20% == 10% + fs.createReadStream("file").pipe(stream).on("data", function (chunk) { + // do stuff with chunk + }) + top.on("change", function (name) { + // called each time a chunk is read from "file" + // top.completed() will start at 0.1 and fill up to 0.6 as the file is read + }) +}) +``` + +Shared Methods +============== + +* var completed = tracker.completed() + +Implemented in: `Tracker`, `TrackerGroup`, `TrackerStream` + +Returns the ratio of completed work to work to be done. Range of 0 to 1. + +* tracker.finish() + +Implemented in: `Tracker`, `TrackerGroup` + +Marks the tracker as completed. With a TrackerGroup this marks all of its +components as completed. + +Marks all of the components of this tracker as finished, which in turn means +that `tracker.completed()` for this will now be 1. + +This will result in one or more `change` events being emitted. + +Events +====== + +All tracker objects emit `change` events with the following arguments: + +``` +function (name, completed, tracker) +``` + +`name` is the name of the tracker that originally emitted the event, +or if it didn't have one, the first containing tracker group that had one. + +`completed` is the percent complete (as returned by `tracker.completed()` method). + +`tracker` is the tracker object that you are listening for events on. + +TrackerGroup +============ + +* var tracker = new TrackerGroup(**name**) + + * **name** *(optional)* - The name of this tracker group, used in change + notifications if the component updating didn't have a name. 
Defaults to undefined. + +Creates a new empty tracker aggregation group. These are trackers whose +completion status is determined by the completion status of other trackers added to this aggregation group. + +Ex. + +```javascript +var tracker = new TrackerGroup("parent") +var foo = tracker.newItem("firstChild", 100) +var bar = tracker.newItem("secondChild", 100) + +foo.finish() +console.log(tracker.completed()) // 0.5 +bar.finish() +console.log(tracker.completed()) // 1 +``` + +* tracker.addUnit(**otherTracker**, **weight**) + + * **otherTracker** - Any of the other are-we-there-yet tracker objects + * **weight** *(optional)* - The weight to give the tracker, defaults to 1. + +Adds the **otherTracker** to this aggregation group. The weight determines +how long you expect this tracker to take to complete in proportion to other +units. So for instance, if you add one tracker with a weight of 1 and +another with a weight of 2, you're saying the second will take twice as long +to complete as the first. As such, the first will account for 33% of the +completion of this tracker and the second will account for the other 67%. + +Returns **otherTracker**. + +* var subGroup = tracker.newGroup(**name**, **weight**) + +The above is exactly equivalent to: + +```javascript + var subGroup = tracker.addUnit(new TrackerGroup(name), weight) +``` + +* var subItem = tracker.newItem(**name**, **todo**, **weight**) + +The above is exactly equivalent to: + +```javascript + var subItem = tracker.addUnit(new Tracker(name, todo), weight) +``` + +* var subStream = tracker.newStream(**name**, **todo**, **weight**) + +The above is exactly equivalent to: + +```javascript + var subStream = tracker.addUnit(new TrackerStream(name, todo), weight) +``` + +* console.log( tracker.debug() ) + +Returns a tree showing the completion of this tracker group and all of its +children, including recursively entering all of the children. 
+ +Tracker +======= + +* var tracker = new Tracker(**name**, **todo**) + + * **name** *(optional)* The name of this counter to report in change + events. Defaults to undefined. + * **todo** *(optional)* The amount of work todo (a number). Defaults to 0. + +Ordinarily these are constructed as a part of a tracker group (via +`newItem`). + +* var completed = tracker.completed() + +Returns the ratio of completed work to work to be done. Range of 0 to 1. If +total work to be done is 0 then it will return 0. + +* tracker.addWork(**todo**) + + * **todo** A number to add to the amount of work to be done. + +Increases the amount of work to be done, thus decreasing the completion +percentage. Triggers a `change` event. + +* tracker.completeWork(**completed**) + + * **completed** A number to add to the work complete + +Increase the amount of work complete, thus increasing the completion percentage. +Will never increase the work completed past the amount of work todo. That is, +percentages > 100% are not allowed. Triggers a `change` event. + +* tracker.finish() + +Marks this tracker as finished, tracker.completed() will now be 1. Triggers +a `change` event. + +TrackerStream +============= + +* var tracker = new TrackerStream(**name**, **size**, **options**) + + * **name** *(optional)* The name of this counter to report in change + events. Defaults to undefined. + * **size** *(optional)* The number of bytes being sent through this stream. + * **options** *(optional)* A hash of stream options + +The tracker stream object is a pass through stream that updates an internal +tracker object each time a block passes through. It's intended to track +downloads, file extraction and other related activities. You use it by piping +your data source into it and then using it as your data source. + +If your data has a length attribute then that's used as the amount of work +completed when the chunk is passed through. 
If it does not (eg, object +streams) then each chunk counts as completing 1 unit of work, so your size +should be the total number of objects being streamed. + +* tracker.addWork(**todo**) + + * **todo** Increase the expected overall size by **todo** bytes. + +Increases the amount of work to be done, thus decreasing the completion +percentage. Triggers a `change` event. diff --git a/node_modules/are-we-there-yet/lib/index.js b/node_modules/are-we-there-yet/lib/index.js new file mode 100644 index 0000000..57d8743 --- /dev/null +++ b/node_modules/are-we-there-yet/lib/index.js @@ -0,0 +1,4 @@ +'use strict' +exports.TrackerGroup = require('./tracker-group.js') +exports.Tracker = require('./tracker.js') +exports.TrackerStream = require('./tracker-stream.js') diff --git a/node_modules/are-we-there-yet/lib/tracker-base.js b/node_modules/are-we-there-yet/lib/tracker-base.js new file mode 100644 index 0000000..6f43687 --- /dev/null +++ b/node_modules/are-we-there-yet/lib/tracker-base.js @@ -0,0 +1,11 @@ +'use strict' +var EventEmitter = require('events').EventEmitter +var util = require('util') + +var trackerId = 0 +var TrackerBase = module.exports = function (name) { + EventEmitter.call(this) + this.id = ++trackerId + this.name = name +} +util.inherits(TrackerBase, EventEmitter) diff --git a/node_modules/are-we-there-yet/lib/tracker-group.js b/node_modules/are-we-there-yet/lib/tracker-group.js new file mode 100644 index 0000000..9da13f8 --- /dev/null +++ b/node_modules/are-we-there-yet/lib/tracker-group.js @@ -0,0 +1,116 @@ +'use strict' +var util = require('util') +var TrackerBase = require('./tracker-base.js') +var Tracker = require('./tracker.js') +var TrackerStream = require('./tracker-stream.js') + +var TrackerGroup = module.exports = function (name) { + TrackerBase.call(this, name) + this.parentGroup = null + this.trackers = [] + this.completion = {} + this.weight = {} + this.totalWeight = 0 + this.finished = false + this.bubbleChange = bubbleChange(this) +} 
+util.inherits(TrackerGroup, TrackerBase) + +function bubbleChange (trackerGroup) { + return function (name, completed, tracker) { + trackerGroup.completion[tracker.id] = completed + if (trackerGroup.finished) { + return + } + trackerGroup.emit('change', name || trackerGroup.name, trackerGroup.completed(), trackerGroup) + } +} + +TrackerGroup.prototype.nameInTree = function () { + var names = [] + var from = this + while (from) { + names.unshift(from.name) + from = from.parentGroup + } + return names.join('/') +} + +TrackerGroup.prototype.addUnit = function (unit, weight) { + if (unit.addUnit) { + var toTest = this + while (toTest) { + if (unit === toTest) { + throw new Error( + 'Attempted to add tracker group ' + + unit.name + ' to tree that already includes it ' + + this.nameInTree(this)) + } + toTest = toTest.parentGroup + } + unit.parentGroup = this + } + this.weight[unit.id] = weight || 1 + this.totalWeight += this.weight[unit.id] + this.trackers.push(unit) + this.completion[unit.id] = unit.completed() + unit.on('change', this.bubbleChange) + if (!this.finished) { + this.emit('change', unit.name, this.completion[unit.id], unit) + } + return unit +} + +TrackerGroup.prototype.completed = function () { + if (this.trackers.length === 0) { + return 0 + } + var valPerWeight = 1 / this.totalWeight + var completed = 0 + for (var ii = 0; ii < this.trackers.length; ii++) { + var trackerId = this.trackers[ii].id + completed += + valPerWeight * this.weight[trackerId] * this.completion[trackerId] + } + return completed +} + +TrackerGroup.prototype.newGroup = function (name, weight) { + return this.addUnit(new TrackerGroup(name), weight) +} + +TrackerGroup.prototype.newItem = function (name, todo, weight) { + return this.addUnit(new Tracker(name, todo), weight) +} + +TrackerGroup.prototype.newStream = function (name, todo, weight) { + return this.addUnit(new TrackerStream(name, todo), weight) +} + +TrackerGroup.prototype.finish = function () { + this.finished = true + if 
(!this.trackers.length) { + this.addUnit(new Tracker(), 1, true) + } + for (var ii = 0; ii < this.trackers.length; ii++) { + var tracker = this.trackers[ii] + tracker.finish() + tracker.removeListener('change', this.bubbleChange) + } + this.emit('change', this.name, 1, this) +} + +var buffer = ' ' +TrackerGroup.prototype.debug = function (depth) { + depth = depth || 0 + var indent = depth ? buffer.substr(0, depth) : '' + var output = indent + (this.name || 'top') + ': ' + this.completed() + '\n' + this.trackers.forEach(function (tracker) { + if (tracker instanceof TrackerGroup) { + output += tracker.debug(depth + 1) + } else { + output += indent + ' ' + tracker.name + ': ' + tracker.completed() + '\n' + } + }) + return output +} diff --git a/node_modules/are-we-there-yet/lib/tracker-stream.js b/node_modules/are-we-there-yet/lib/tracker-stream.js new file mode 100644 index 0000000..e1cf850 --- /dev/null +++ b/node_modules/are-we-there-yet/lib/tracker-stream.js @@ -0,0 +1,36 @@ +'use strict' +var util = require('util') +var stream = require('readable-stream') +var delegate = require('delegates') +var Tracker = require('./tracker.js') + +var TrackerStream = module.exports = function (name, size, options) { + stream.Transform.call(this, options) + this.tracker = new Tracker(name, size) + this.name = name + this.id = this.tracker.id + this.tracker.on('change', delegateChange(this)) +} +util.inherits(TrackerStream, stream.Transform) + +function delegateChange (trackerStream) { + return function (name, completion, tracker) { + trackerStream.emit('change', name, completion, trackerStream) + } +} + +TrackerStream.prototype._transform = function (data, encoding, cb) { + this.tracker.completeWork(data.length ? 
data.length : 1) + this.push(data) + cb() +} + +TrackerStream.prototype._flush = function (cb) { + this.tracker.finish() + cb() +} + +delegate(TrackerStream.prototype, 'tracker') + .method('completed') + .method('addWork') + .method('finish') diff --git a/node_modules/are-we-there-yet/lib/tracker.js b/node_modules/are-we-there-yet/lib/tracker.js new file mode 100644 index 0000000..a8f8b3b --- /dev/null +++ b/node_modules/are-we-there-yet/lib/tracker.js @@ -0,0 +1,32 @@ +'use strict' +var util = require('util') +var TrackerBase = require('./tracker-base.js') + +var Tracker = module.exports = function (name, todo) { + TrackerBase.call(this, name) + this.workDone = 0 + this.workTodo = todo || 0 +} +util.inherits(Tracker, TrackerBase) + +Tracker.prototype.completed = function () { + return this.workTodo === 0 ? 0 : this.workDone / this.workTodo +} + +Tracker.prototype.addWork = function (work) { + this.workTodo += work + this.emit('change', this.name, this.completed(), this) +} + +Tracker.prototype.completeWork = function (work) { + this.workDone += work + if (this.workDone > this.workTodo) { + this.workDone = this.workTodo + } + this.emit('change', this.name, this.completed(), this) +} + +Tracker.prototype.finish = function () { + this.workTodo = this.workDone = 1 + this.emit('change', this.name, 1, this) +} diff --git a/node_modules/are-we-there-yet/package.json b/node_modules/are-we-there-yet/package.json new file mode 100644 index 0000000..5714e09 --- /dev/null +++ b/node_modules/are-we-there-yet/package.json @@ -0,0 +1,53 @@ +{ + "name": "are-we-there-yet", + "version": "2.0.0", + "description": "Keep track of the overall completion of many disparate processes", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint '**/*.js'", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git 
push origin --follow-tags", + "snap": "tap" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/are-we-there-yet.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/are-we-there-yet/issues" + }, + "homepage": "https://github.com/npm/are-we-there-yet", + "devDependencies": { + "@npmcli/eslint-config": "^1.0.0", + "@npmcli/template-oss": "^1.0.2", + "eslint": "^7.32.0", + "eslint-plugin-node": "^11.1.0", + "tap": "^15.0.9" + }, + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "files": [ + "bin", + "lib" + ], + "engines": { + "node": ">=10" + }, + "tap": { + "branches": 68, + "statements": 92, + "functions": 86, + "lines": 92 + }, + "templateVersion": "1.0.2" +} diff --git a/node_modules/balanced-match/.github/FUNDING.yml b/node_modules/balanced-match/.github/FUNDING.yml new file mode 100644 index 0000000..cea8b16 --- /dev/null +++ b/node_modules/balanced-match/.github/FUNDING.yml @@ -0,0 +1,2 @@ +tidelift: "npm/balanced-match" +patreon: juliangruber diff --git a/node_modules/balanced-match/LICENSE.md b/node_modules/balanced-match/LICENSE.md new file mode 100644 index 0000000..2cdc8e4 --- /dev/null +++ b/node_modules/balanced-match/LICENSE.md @@ -0,0 +1,21 @@ +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/README.md b/node_modules/balanced-match/README.md new file mode 100644 index 0000000..d2a48b6 --- /dev/null +++ b/node_modules/balanced-match/README.md @@ -0,0 +1,97 @@ +# balanced-match + +Match balanced string pairs, like `{` and `}` or `` and ``. Supports regular expressions as well! + +[![build status](https://secure.travis-ci.org/juliangruber/balanced-match.svg)](http://travis-ci.org/juliangruber/balanced-match) +[![downloads](https://img.shields.io/npm/dm/balanced-match.svg)](https://www.npmjs.org/package/balanced-match) + +[![testling badge](https://ci.testling.com/juliangruber/balanced-match.png)](https://ci.testling.com/juliangruber/balanced-match) + +## Example + +Get the first matching pair of braces: + +```js +var balanced = require('balanced-match'); + +console.log(balanced('{', '}', 'pre{in{nested}}post')); +console.log(balanced('{', '}', 'pre{first}between{second}post')); +console.log(balanced(/\s+\{\s+/, /\s+\}\s+/, 'pre { in{nest} } post')); +``` + +The matches are: + +```bash +$ node example.js +{ start: 3, end: 14, pre: 'pre', body: 'in{nested}', post: 'post' } +{ start: 3, + end: 9, + pre: 'pre', + body: 'first', + post: 'between{second}post' } +{ start: 3, end: 17, pre: 'pre', body: 'in{nest}', post: 'post' } +``` + +## API + +### var m = balanced(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +object with those keys: + +* **start** the index of the first match of `a` +* **end** the index of the 
matching `b` +* **pre** the preamble, `a` and `b` not included +* **body** the match, `a` and `b` not included +* **post** the postscript, `a` and `b` not included + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `['{', 'a', '']` and `{a}}` will match `['', 'a', '}']`. + +### var r = balanced.range(a, b, str) + +For the first non-nested matching pair of `a` and `b` in `str`, return an +array with indexes: `[ , ]`. + +If there's no match, `undefined` will be returned. + +If the `str` contains more `a` than `b` / there are unmatched pairs, the first match that was closed will be used. For example, `{{a}` will match `[ 1, 3 ]` and `{a}}` will match `[0, 2]`. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install balanced-match +``` + +## Security contact information + +To report a security vulnerability, please use the +[Tidelift security contact](https://tidelift.com/security). +Tidelift will coordinate the fix and disclosure. + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/balanced-match/index.js b/node_modules/balanced-match/index.js new file mode 100644 index 0000000..c67a646 --- /dev/null +++ b/node_modules/balanced-match/index.js @@ -0,0 +1,62 @@ +'use strict'; +module.exports = balanced; +function balanced(a, b, str) { + if (a instanceof RegExp) a = maybeMatch(a, str); + if (b instanceof RegExp) b = maybeMatch(b, str); + + var r = range(a, b, str); + + return r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + a.length, r[1]), + post: str.slice(r[1] + b.length) + }; +} + +function maybeMatch(reg, str) { + var m = str.match(reg); + return m ? m[0] : null; +} + +balanced.range = range; +function range(a, b, str) { + var begs, beg, left, right, result; + var ai = str.indexOf(a); + var bi = str.indexOf(b, ai + 1); + var i = ai; + + if (ai >= 0 && bi > 0) { + if(a===b) { + return [ai, bi]; + } + begs = []; + left = str.length; + + while (i >= 0 && !result) { + if (i == ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } else if (begs.length == 1) { + result = [ begs.pop(), bi ]; + } else { + beg = begs.pop(); + if (beg < left) { + left = beg; + right = bi; + } + + bi = str.indexOf(b, i + 1); + } + + i = ai < bi && ai >= 0 ? 
ai : bi; + } + + if (begs.length) { + result = [ left, right ]; + } + } + + return result; +} diff --git a/node_modules/balanced-match/package.json b/node_modules/balanced-match/package.json new file mode 100644 index 0000000..ce6073e --- /dev/null +++ b/node_modules/balanced-match/package.json @@ -0,0 +1,48 @@ +{ + "name": "balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "1.0.2", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/balanced-match.git" + }, + "homepage": "https://github.com/juliangruber/balanced-match", + "main": "index.js", + "scripts": { + "test": "tape test/test.js", + "bench": "matcha test/bench.js" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/bcrypt/.editorconfig b/node_modules/bcrypt/.editorconfig new file mode 100644 index 0000000..4e12f93 --- /dev/null +++ b/node_modules/bcrypt/.editorconfig @@ -0,0 +1,19 @@ +root = true + +[*] +indent_style = space +indent_size = 4 +end_of_line = lf +charset = utf-8 +trim_trailing_whitespace = true +insert_final_newline = true + +[{package.json,*.yml}] +indent_style = space +indent_size = 2 + +[appveyor.yml] +end_of_line = crlf + +[*.md] +trim_trailing_whitespace = false diff --git a/node_modules/bcrypt/.github/workflows/ci.yaml b/node_modules/bcrypt/.github/workflows/ci.yaml new file mode 100644 index 0000000..dc3f12f --- /dev/null +++ 
b/node_modules/bcrypt/.github/workflows/ci.yaml @@ -0,0 +1,59 @@ +name: ci + +on: + push: + branches: + - master + pull_request: + branches: + - master + +jobs: + build: + strategy: + matrix: + os: [ubuntu-20.04, macos-11.0, windows-2019] + nodeVersion: [14, 16, 18, 20] + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v3 + - name: Use Node.js ${{ matrix.nodeVersion }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.nodeVersion }} + - name: Test + run: npm test + - name: Package + if: startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master') + run: npx node-pre-gyp package + - name: Upload + uses: actions/upload-artifact@v3 + if: matrix.nodeVersion == '14' && (startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master')) + with: + name: bcrypt-lib-${{ matrix.os }}-${{ matrix.nodeVersion }} + path: build/stage/**/bcrypt_lib*.tar.gz + + build-alpine: + runs-on: ubuntu-latest + strategy: + matrix: + nodeVersion: [14, 16, 18, 20] + container: + image: node:${{ matrix.nodeVersion }}-alpine + steps: + - uses: actions/checkout@v3 + - name: Install dependencies + run: | + apk add make g++ python3 + - name: Test + run: | + npm test --unsafe-perm + - name: Package + if: startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master') + run: npx node-pre-gyp package --unsafe-perm + - name: Upload + if: matrix.nodeVersion == '14' && (startsWith(github.ref, 'refs/tags/') || startsWith(github.ref, 'refs/heads/master')) + uses: actions/upload-artifact@v3 + with: + name: bcrypt-lib-alpine-${{ matrix.nodeVersion }} + path: build/stage/**/bcrypt_lib*.tar.gz diff --git a/node_modules/bcrypt/.travis.yml b/node_modules/bcrypt/.travis.yml new file mode 100644 index 0000000..ed78c27 --- /dev/null +++ b/node_modules/bcrypt/.travis.yml @@ -0,0 +1,62 @@ +language: node_js + +services: +- docker + +env: +- LINUX_CXX=g++-4.8 + +os: +- linux +- osx + +arch: +- amd64 +- arm64 + +node_js: +- '14' 
+- '16' +- '17' +- '18' + +addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - g++-4.8 + - bc + +before_install: +- echo Building for Node $TRAVIS_NODE_VERSION +- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then export CXX=$LINUX_CXX; $CXX --version; + fi; +- if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then c++ --version; fi; +- npm install -g npm@latest + +install: true + +script: +- npm test +- "./node_modules/.bin/node-pre-gyp configure" +- "./node_modules/.bin/node-pre-gyp build" +- "./node_modules/.bin/node-pre-gyp package" +- | + if [[ "$TRAVIS_OS_NAME" == "linux" ]] + then + docker image pull public.ecr.aws/docker/library/node:${TRAVIS_NODE_VERSION}-alpine + docker run -w /src --entrypoint /bin/sh -v`pwd`:/src "node:${TRAVIS_NODE_VERSION}-alpine" test_alpine.sh + fi + +deploy: + provider: releases + skip_cleanup: true + api_key: + secure: j4gQ+m02izaw56EOd0gEStHAjCRfSCkohDWvpABiPzh1YPM9MvfEMSIvzzjV/0oMqi3Sy7eGyFv47EgQHZvouW0I8BIUzxuTCE5wP8z2SjABXCa/rz4WTppTc9d9ABq8JSdz80JxEwjmuwnYeMwWgOd7sT/VDiMxLYaXj0JWO7w= + file_glob: true + file: build/stage/kelektiv/node.bcrypt.js/releases/download/*/* + on: + node_js: '14' + repo: kelektiv/node.bcrypt.js + tags: true diff --git a/node_modules/bcrypt/CHANGELOG.md b/node_modules/bcrypt/CHANGELOG.md new file mode 100644 index 0000000..f2fcb47 --- /dev/null +++ b/node_modules/bcrypt/CHANGELOG.md @@ -0,0 +1,178 @@ +# 5.1.0 (2022-10-06) + * Update `node-pre-gyp` to 1.0.11 + +# 5.1.0 (2022-10-06) + * Update `node-pre-gyp` to 1.0.10 + * Replace `nodeunit` with `jest` as the testing library + +# 5.0.1 (2021-02-22) + + * Update `node-pre-gyp` to 1.0.0 + +# 5.0.0 (2020-06-02) + + * Fix the bcrypt "wrap-around" bug. It affects passwords with lengths >= 255. + It is uncommon but it's a bug nevertheless. Previous attempts to fix the bug + was unsuccessful. 
+ * Experimental support for z/OS + * Fix a bug related to NUL in password input + * Update `node-pre-gyp` to 0.15.0 + +# 4.0.1 (2020-02-27) + + * Fix compilation errors in Alpine linux + +# 4.0.0 (2020-02-17) + + * Switch to NAPI bcrypt + * Drop support for NodeJS 8 + +# 3.0.8 (2019-12-31) + + * Update `node-pre-gyp` to 0.14 + * Pre-built binaries for NodeJS 13 + +# 3.0.7 (2019-10-18) + + * Update `nan` to 2.14.0 + * Update `node-pre-gyp` to 0.13 + +# 3.0.6 (2019-04-11) + + * Update `nan` to 2.13.2 + +# 3.0.5 (2019-03-19) + + * Update `nan` to 2.13.1 + * NodeJS 12 compatibility + * Remove `node-pre-gyp` from bundled dependencies + +# 3.0.4-napi (2019-03-08) + + * Sync N-API bcrypt with NAN bcrypt + +# 3.0.4 (2019-02-07) + + * Fix GCC, NAN and V8 deprecation warnings + +# 3.0.3 (2018-12-19) + + * Update `nan` to 2.12.1 + +# 3.0.2 (2018-10-18) + + * Update `nan` to 2.11.1 + +# 3.0.1 (2018-09-20) + + * Update `nan` to 2.11.0 + +# 3.0.0 (2018-07-06) + + * Drop support for NodeJS <= 4 + +# 2.0.1 (2018-04-20) + + * Update `node-pre-gyp` to allow downloading prebuilt modules + +# 2.0.0 (2018-04-07) + + * Make `2b` the default bcrypt version + +# 1.1.0-napi (2018-01-21) + + * Initial support for [N-API](https://nodejs.org/api/n-api.html) + +# 1.0.3 (2016-08-23) + + * update to nan v2.6.2 for NodeJS 8 support + * Fix: use npm scripts instead of node-gyp directly. 
+ +# 1.0.2 (2016-12-31) + + * Fix `compare` promise rejection with invalid arguments + +# 1.0.1 (2016-12-07) + + * Fix destructuring imports with promises + +# 1.0.0 (2016-12-04) + + * add Promise support (commit 2488473) + +# 0.8.7 (2016-06-09) + + * update nan to 2.3.5 for improved node v6 support + +# 0.8.6 (2016-04-20) + + * update nan for node v6 support + +# 0.8.5 (2015-08-12) + + * update to nan v2 (adds support for iojs 3) + +# 0.8.4 (2015-07-24) + + * fix deprecation warning for the Encode API + +# 0.8.3 (2015-05-06) + + * update nan to 1.8.4 for iojs 2.x support + +# 0.8.2 (2015-03-28) + + * always use callback for generating random bytes to avoid blocking + +# 0.8.1 (2015-01-18) + * update NaN to 1.5.0 for iojs support + +# 0.8.0 (2014-08-03) + * migrate to NAN for bindings + +# v0.5.0 + * Fix for issue around empty string params throwing Errors. + * Method deprecation. + * Upgrade from libeio/ev to libuv. (shtylman) + ** --- NOTE --- Breaks 0.4.x compatability + * EV_MULTIPLICITY compile flag. + +# v0.4.1 + * Thread safety fix around OpenSSL (GH-32). (bnoordhuis - through node) + * C++ code changes using delete and new instead of malloc and free. (shtylman) + * Compile options for speed, zoom. (shtylman) + * Move much of the type and variable checking to the JS. 
(shtylman) + +# v0.4.0 + * Added getRounds function that will tell you the number of rounds within a hash/salt + +# v0.3.2 + * Fix api issue with async salt gen first param + +# v0.3.1 + * Compile under node 0.5.x + +# v0.3.0 + * Internal Refactoring + * Remove pthread dependencies and locking + * Fix compiler warnings and a memory bug + +# v0.2.4 + * Use threadsafe functions instead of pthread mutexes + * salt validation to make sure the salt is of the correct size and format + +# v0.2.3 + * cygwin support + +# v0.2.2 + * Remove dependency on libbsd, use libssl instead + +# v0.2.0 + * Added async functionality + * API changes + * hashpw -> encrypt + * all old sync methods now end with _sync + * Removed libbsd(arc4random) dependency...now uses openssl which is more widely spread + +# v0.1.2 + * Security fix. Wasn't reading rounds in properly and was always only using 4 rounds diff --git a/node_modules/bcrypt/ISSUE_TEMPLATE.md b/node_modules/bcrypt/ISSUE_TEMPLATE.md new file mode 100644 index 0000000..b4baa00 --- /dev/null +++ b/node_modules/bcrypt/ISSUE_TEMPLATE.md @@ -0,0 +1,18 @@ +Thanks for reporting a new issue with the node bcrypt module! + +To help you resolve your issue faster please make sure you have done the following: + +* Searched existing issues (even closed ones) for your same problem +* Make sure you have installed the required dependencies listed on the readme +* Read your npm error log for lines telling you what failed, usually it is a problem with not having the correct dependencies installed to build the native module + +Once you have done the above and are still confident that the issue is with the module, please describe it below. Some things that really help get your issue resolved faster are: + +* What went wrong? +* What did you expect to happen? +* Which version of nodejs and OS? +* If you find a bug, please write a failing test. + +Thanks! + +P.S. 
If it doesn't look like you read the above then your issue will likely be closed without further explanation. Sorry, but there are just too many issues opened with no useful information or questions which have been previously addressed. diff --git a/node_modules/bcrypt/LICENSE b/node_modules/bcrypt/LICENSE new file mode 100644 index 0000000..94e2ba5 --- /dev/null +++ b/node_modules/bcrypt/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2010 Nicholas Campbell + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/bcrypt/Makefile b/node_modules/bcrypt/Makefile new file mode 100644 index 0000000..0b49b3d --- /dev/null +++ b/node_modules/bcrypt/Makefile @@ -0,0 +1,19 @@ +TESTS = test/*.js + +all: test + +build: clean compile + +compile: + npm install . 
+ npm run install + +test: build + @./node_modules/nodeunit/bin/nodeunit \ + $(TESTS) + +clean: + rm -Rf lib/bindings/ + + +.PHONY: clean test build diff --git a/node_modules/bcrypt/README.md b/node_modules/bcrypt/README.md new file mode 100644 index 0000000..059aa3a --- /dev/null +++ b/node_modules/bcrypt/README.md @@ -0,0 +1,388 @@ +# node.bcrypt.js + +[![ci](https://github.com/kelektiv/node.bcrypt.js/actions/workflows/ci.yaml/badge.svg)](https://github.com/kelektiv/node.bcrypt.js/actions/workflows/ci.yaml) + +[![Build Status](https://ci.appveyor.com/api/projects/status/github/kelektiv/node.bcrypt.js)](https://ci.appveyor.com/project/defunctzombie/node-bcrypt-js-pgo26/branch/master) + +A library to help you hash passwords. + +You can read about [bcrypt in Wikipedia][bcryptwiki] as well as in the following article: +[How To Safely Store A Password][codahale] + +## If You Are Submitting Bugs or Issues + +Please verify that the NodeJS version you are using is a _stable_ version; Unstable versions are currently not supported and issues created while using an unstable version will be closed. + +If you are on a stable version of NodeJS, please provide a sufficient code snippet or log files for installation issues. The code snippet does not require you to include confidential information. However, it must provide enough information so the problem can be replicable, or it may be closed without an explanation. + + +## Version Compatibility + +_Please upgrade to atleast v5.0.0 to avoid security issues mentioned below._ + +| Node Version | Bcrypt Version | +| -------------- | ------------------| +| 0.4 | <= 0.4 | +| 0.6, 0.8, 0.10 | >= 0.5 | +| 0.11 | >= 0.8 | +| 4 | <= 2.1.0 | +| 8 | >= 1.0.3 < 4.0.0 | +| 10, 11 | >= 3 | +| 12 onwards | >= 3.0.6 | + +`node-gyp` only works with stable/released versions of node. Since the `bcrypt` module uses `node-gyp` to build and install, you'll need a stable version of node to use bcrypt. 
If you do not, you'll likely see an error that starts with: + +``` +gyp ERR! stack Error: "pre" versions of node cannot be installed, use the --nodedir flag instead +``` + +## Security Issues And Concerns + +> Per bcrypt implementation, only the first 72 bytes of a string are used. Any extra bytes are ignored when matching passwords. Note that this is not the first 72 *characters*. It is possible for a string to contain less than 72 characters, while taking up more than 72 bytes (e.g. a UTF-8 encoded string containing emojis). + +As should be the case with any security tool, anyone using this library should scrutinise it. If you find or suspect an issue with the code, please bring it to the maintainers' attention. We will spend some time ensuring that this library is as secure as possible. + +Here is a list of BCrypt-related security issues/concerns that have come up over the years. + +* An [issue with passwords][jtr] was found with a version of the Blowfish algorithm developed for John the Ripper. This is not present in the OpenBSD version and is thus not a problem for this module. HT [zooko][zooko]. +* Versions `< 5.0.0` suffer from bcrypt wrap-around bug and _will truncate passwords >= 255 characters leading to severely weakened passwords_. Please upgrade at earliest. See [this wiki page][wrap-around-bug] for more details. +* Versions `< 5.0.0` _do not handle NUL characters inside passwords properly leading to all subsequent characters being dropped and thus resulting in severely weakened passwords_. Please upgrade at earliest. See [this wiki page][improper-nuls] for more details. + +## Compatibility Note + +This library supports `$2a$` and `$2b$` prefix bcrypt hashes. `$2x$` and `$2y$` hashes are specific to bcrypt implementation developed for John the Ripper. In theory, they should be compatible with `$2b$` prefix. + +Compatibility with hashes generated by other languages is not 100% guaranteed due to difference in character encodings. 
However, it should not be an issue for most cases. + +### Migrating from v1.0.x + +Hashes generated in earlier version of `bcrypt` remain 100% supported in `v2.x.x` and later versions. In most cases, the migration should be a bump in the `package.json`. + +Hashes generated in `v2.x.x` using the defaults parameters will not work in earlier versions. + +## Dependencies + +* NodeJS +* `node-gyp` + * Please check the dependencies for this tool at: https://github.com/nodejs/node-gyp + * Windows users will need the options for c# and c++ installed with their visual studio instance. + * Python 2.x/3.x +* `OpenSSL` - This is only required to build the `bcrypt` project if you are using versions <= 0.7.7. Otherwise, we're using the builtin node crypto bindings for seed data (which use the same OpenSSL code paths we were, but don't have the external dependency). + +## Install via NPM + +``` +npm install bcrypt +``` +***Note:*** OS X users using Xcode 4.3.1 or above may need to run the following command in their terminal prior to installing if errors occur regarding xcodebuild: ```sudo xcode-select -switch /Applications/Xcode.app/Contents/Developer``` + +_Pre-built binaries for various NodeJS versions are made available on a best-effort basis._ + +Only the current stable and supported LTS releases are actively tested against. + +_There may be an interval between the release of the module and the availabilty of the compiled modules._ + +Currently, we have pre-built binaries that support the following platforms: + +1. Windows x32 and x64 +2. Linux x64 (GlibC and musl) +3. macOS + +If you face an error like this: + +``` +node-pre-gyp ERR! Tried to download(404): https://github.com/kelektiv/node.bcrypt.js/releases/download/v1.0.2/bcrypt_lib-v1.0.2-node-v48-linux-x64.tar.gz +``` + +make sure you have the appropriate dependencies installed and configured for your platform. 
You can find installation instructions for the dependencies for some common platforms [in this page][depsinstall]. + +## Usage + +### async (recommended) + +```javascript +const bcrypt = require('bcrypt'); +const saltRounds = 10; +const myPlaintextPassword = 's0/\/\P4$$w0rD'; +const someOtherPlaintextPassword = 'not_bacon'; +``` + +#### To hash a password: + +Technique 1 (generate a salt and hash on separate function calls): + +```javascript +bcrypt.genSalt(saltRounds, function(err, salt) { + bcrypt.hash(myPlaintextPassword, salt, function(err, hash) { + // Store hash in your password DB. + }); +}); +``` + +Technique 2 (auto-gen a salt and hash): + +```javascript +bcrypt.hash(myPlaintextPassword, saltRounds, function(err, hash) { + // Store hash in your password DB. +}); +``` + +Note that both techniques achieve the same end-result. + +#### To check a password: + +```javascript +// Load hash from your password DB. +bcrypt.compare(myPlaintextPassword, hash, function(err, result) { + // result == true +}); +bcrypt.compare(someOtherPlaintextPassword, hash, function(err, result) { + // result == false +}); +``` + +[A Note on Timing Attacks](#a-note-on-timing-attacks) + +### with promises + +bcrypt uses whatever `Promise` implementation is available in `global.Promise`. NodeJS >= 0.12 has a native `Promise` implementation built in. However, this should work in any Promises/A+ compliant implementation. + +Async methods that accept a callback, return a `Promise` when callback is not specified if Promise support is available. + +```javascript +bcrypt.hash(myPlaintextPassword, saltRounds).then(function(hash) { + // Store hash in your password DB. +}); +``` +```javascript +// Load hash from your password DB. 
+bcrypt.compare(myPlaintextPassword, hash).then(function(result) { + // result == true +}); +bcrypt.compare(someOtherPlaintextPassword, hash).then(function(result) { + // result == false +}); +``` + +This is also compatible with `async/await` + +```javascript +async function checkUser(username, password) { + //... fetch user from a db etc. + + const match = await bcrypt.compare(password, user.passwordHash); + + if(match) { + //login + } + + //... +} +``` + +### ESM import +```javascript +import bcrypt from "bcrypt"; + +// later +await bcrypt.compare(password, hash); +``` + +### sync + +```javascript +const bcrypt = require('bcrypt'); +const saltRounds = 10; +const myPlaintextPassword = 's0/\/\P4$$w0rD'; +const someOtherPlaintextPassword = 'not_bacon'; +``` + +#### To hash a password: + +Technique 1 (generate a salt and hash on separate function calls): + +```javascript +const salt = bcrypt.genSaltSync(saltRounds); +const hash = bcrypt.hashSync(myPlaintextPassword, salt); +// Store hash in your password DB. +``` + +Technique 2 (auto-gen a salt and hash): + +```javascript +const hash = bcrypt.hashSync(myPlaintextPassword, saltRounds); +// Store hash in your password DB. +``` + +As with async, both techniques achieve the same end-result. + +#### To check a password: + +```javascript +// Load hash from your password DB. +bcrypt.compareSync(myPlaintextPassword, hash); // true +bcrypt.compareSync(someOtherPlaintextPassword, hash); // false +``` + +[A Note on Timing Attacks](#a-note-on-timing-attacks) + +### Why is async mode recommended over sync mode? +We recommend using async API if you use `bcrypt` on a server. Bcrypt hashing is CPU intensive which will cause the sync APIs to block the event loop and prevent your application from servicing any inbound requests or events. The async version uses a thread pool which does not block the main event loop. + +## API + +`BCrypt.` + + * `genSaltSync(rounds, minor)` + * `rounds` - [OPTIONAL] - the cost of processing the data. 
(default - 10) + * `minor` - [OPTIONAL] - minor version of bcrypt to use. (default - b) + * `genSalt(rounds, minor, cb)` + * `rounds` - [OPTIONAL] - the cost of processing the data. (default - 10) + * `minor` - [OPTIONAL] - minor version of bcrypt to use. (default - b) + * `cb` - [OPTIONAL] - a callback to be fired once the salt has been generated. uses eio making it asynchronous. If `cb` is not specified, a `Promise` is returned if Promise support is available. + * `err` - First parameter to the callback detailing any errors. + * `salt` - Second parameter to the callback providing the generated salt. + * `hashSync(data, salt)` + * `data` - [REQUIRED] - the data to be encrypted. + * `salt` - [REQUIRED] - the salt to be used to hash the password. if specified as a number then a salt will be generated with the specified number of rounds and used (see example under **Usage**). + * `hash(data, salt, cb)` + * `data` - [REQUIRED] - the data to be encrypted. + * `salt` - [REQUIRED] - the salt to be used to hash the password. if specified as a number then a salt will be generated with the specified number of rounds and used (see example under **Usage**). + * `cb` - [OPTIONAL] - a callback to be fired once the data has been encrypted. uses eio making it asynchronous. If `cb` is not specified, a `Promise` is returned if Promise support is available. + * `err` - First parameter to the callback detailing any errors. + * `encrypted` - Second parameter to the callback providing the encrypted form. + * `compareSync(data, encrypted)` + * `data` - [REQUIRED] - data to compare. + * `encrypted` - [REQUIRED] - data to be compared to. + * `compare(data, encrypted, cb)` + * `data` - [REQUIRED] - data to compare. + * `encrypted` - [REQUIRED] - data to be compared to. + * `cb` - [OPTIONAL] - a callback to be fired once the data has been compared. uses eio making it asynchronous. If `cb` is not specified, a `Promise` is returned if Promise support is available. 
+ * `err` - First parameter to the callback detailing any errors. + * `same` - Second parameter to the callback providing whether the data and encrypted forms match [true | false]. + * `getRounds(encrypted)` - return the number of rounds used to encrypt a given hash + * `encrypted` - [REQUIRED] - hash from which the number of rounds used should be extracted. + +## A Note on Rounds + +A note about the cost: when you are hashing your data, the module will go through a series of rounds to give you a secure hash. The value you submit is not just the number of rounds the module will go through to hash your data. The module will use the value you enter and go through `2^rounds` hashing iterations. + +From @garthk, on a 2GHz core you can roughly expect: + + rounds=8 : ~40 hashes/sec + rounds=9 : ~20 hashes/sec + rounds=10: ~10 hashes/sec + rounds=11: ~5 hashes/sec + rounds=12: 2-3 hashes/sec + rounds=13: ~1 sec/hash + rounds=14: ~1.5 sec/hash + rounds=15: ~3 sec/hash + rounds=25: ~1 hour/hash + rounds=31: 2-3 days/hash + + +## A Note on Timing Attacks + +Because it's come up multiple times in this project and other bcrypt projects, it needs to be said. The `bcrypt` library is not susceptible to timing attacks. From codahale/bcrypt-ruby#42: + +> One of the desired properties of a cryptographic hash function is preimage attack resistance, which means there is no shortcut for generating a message which, when hashed, produces a specific digest. + +A great thread on this, in much more detail can be found @ codahale/bcrypt-ruby#43 + +If you're unfamiliar with timing attacks and want to learn more you can find a great writeup @ [A Lesson In Timing Attacks][timingatk] + +However, timing attacks are real. And the comparison function is _not_ time safe. That means that it may exit the function early in the comparison process. Timing attacks happen because of the above. 
We don't need to be careful that an attacker will learn anything, and our comparison function provides a comparison of hashes. It is a utility to the overall purpose of the library. If you end up using it for something else, we cannot guarantee the security of the comparator. Keep that in mind as you use the library. + +## Hash Info + +The characters that comprise the resultant hash are `./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789$`. + +Resultant hashes will be 60 characters long and they will include the salt among other parameters, as follows: + +`$[algorithm]$[cost]$[salt][hash]` + +- 2 chars hash algorithm identifier prefix. `"$2a$" or "$2b$"` indicates BCrypt +- Cost-factor (n). Represents the exponent used to determine how many iterations 2^n +- 16-byte (128-bit) salt, base64 encoded to 22 characters +- 24-byte (192-bit) hash, base64 encoded to 31 characters + +Example: +``` +$2b$10$nOUIs5kJ7naTuTFkBy1veuK0kSxUFXfuaOKdOKf9xYT0KKIGSJwFa + | | | | + | | | hash-value = K0kSxUFXfuaOKdOKf9xYT0KKIGSJwFa + | | | + | | salt = nOUIs5kJ7naTuTFkBy1veu + | | + | cost-factor => 10 = 2^10 rounds + | + hash-algorithm identifier => 2b = BCrypt +``` + +## Testing + +If you create a pull request, tests better pass :) + +``` +npm install +npm test +``` + +## Credits + +The code for this comes from a few sources: + +* blowfish.cc - OpenBSD +* bcrypt.cc - OpenBSD +* bcrypt::gen_salt - [gen_salt inclusion to bcrypt][bcryptgs] +* bcrypt_node.cc - me + +## Contributors + +* [Antonio Salazar Cardozo][shadowfiend] - Early MacOS X support (when we used libbsd) +* [Ben Glow][pixelglow] - Fixes for thread safety with async calls +* [Van Nguyen][thegoleffect] - Found a timing attack in the comparator +* [NewITFarmer][newitfarmer] - Initial Cygwin support +* [David Trejo][dtrejo] - packaging fixes +* [Alfred Westerveld][alfredwesterveld] - packaging fixes +* [Vincent Côté-Roy][vincentr] - Testing around concurrency issues +* [Lloyd Hilaiel][lloyd] - Documentation fixes 
+* [Roman Shtylman][shtylman] - Code refactoring, general rot reduction, compile options, better memory management with delete and new, and an upgrade to libuv over eio/ev. +* [Vadim Graboys][vadimg] - Code changes to support 0.5.5+ +* [Ben Noordhuis][bnoordhuis] - Fixed a thread safety issue in nodejs that was perfectly mappable to this module. +* [Nate Rajlich][tootallnate] - Bindings and build process. +* [Sean McArthur][seanmonstar] - Windows Support +* [Fanie Oosthuysen][weareu] - Windows Support +* [Amitosh Swain Mahapatra][recrsn] - $2b$ hash support, ES6 Promise support +* [Nicola Del Gobbo][NickNaso] - Initial implementation with N-API + +## License +Unless stated elsewhere, file headers or otherwise, the license as stated in the LICENSE file. + +[bcryptwiki]: https://en.wikipedia.org/wiki/Bcrypt +[bcryptgs]: http://mail-index.netbsd.org/tech-crypto/2002/05/24/msg000204.html +[codahale]: http://codahale.com/how-to-safely-store-a-password/ +[gh13]: https://github.com/ncb000gt/node.bcrypt.js/issues/13 +[jtr]: http://www.openwall.com/lists/oss-security/2011/06/20/2 +[depsinstall]: https://github.com/kelektiv/node.bcrypt.js/wiki/Installation-Instructions +[timingatk]: https://codahale.com/a-lesson-in-timing-attacks/ +[wrap-around-bug]: https://github.com/kelektiv/node.bcrypt.js/wiki/Security-Issues-and-Concerns#bcrypt-wrap-around-bug-medium-severity +[improper-nuls]: https://github.com/kelektiv/node.bcrypt.js/wiki/Security-Issues-and-Concerns#improper-nul-handling-medium-severity + +[shadowfiend]:https://github.com/Shadowfiend +[thegoleffect]:https://github.com/thegoleffect +[pixelglow]:https://github.com/pixelglow +[dtrejo]:https://github.com/dtrejo +[alfredwesterveld]:https://github.com/alfredwesterveld +[newitfarmer]:https://github.com/newitfarmer +[zooko]:https://twitter.com/zooko +[vincentr]:https://twitter.com/vincentcr +[lloyd]:https://github.com/lloyd +[shtylman]:https://github.com/shtylman +[vadimg]:https://github.com/vadimg 
+[bnoordhuis]:https://github.com/bnoordhuis +[tootallnate]:https://github.com/tootallnate +[seanmonstar]:https://github.com/seanmonstar +[weareu]:https://github.com/weareu +[recrsn]:https://github.com/recrsn +[NickNaso]: https://github.com/NickNaso diff --git a/node_modules/bcrypt/SECURITY.md b/node_modules/bcrypt/SECURITY.md new file mode 100644 index 0000000..c132dc8 --- /dev/null +++ b/node_modules/bcrypt/SECURITY.md @@ -0,0 +1,15 @@ +# Security Policy + +As with any software, `bcrypt` is likely to have bugs. Please report any security vulnerabilities responsibly + +## Supported Versions + +| Version | Supported | +| ------- | ------------------ | +| 5.0.x | :white_check_mark: | +| < 5.0 | :x: | + +## Reporting a Vulnerability + +If you are reporting a security vulnerability, please refrain from opening a GitHub issue and instead mail it to +one of the maintainers listed in the README. diff --git a/node_modules/bcrypt/appveyor.yml b/node_modules/bcrypt/appveyor.yml new file mode 100644 index 0000000..795d3f6 --- /dev/null +++ b/node_modules/bcrypt/appveyor.yml @@ -0,0 +1,39 @@ +environment: + matrix: + - nodejs_version: "14" + platform: x64 + - nodejs_version: "14" + platform: x86 + - nodejs_version: "16" + platform: x64 + - nodejs_version: "16" + platform: x86 + - nodejs_version: "18" + platform: x64 + +install: + - where npm + - where node + - ps: Install-Product node $env:nodejs_version $env:platform + +build: off + +artifacts: + - path: 'build/stage/**/bcrypt*.tar.gz' + +test_script: + - node --version + - npm --version + - npm test + +after_test: + - .\node_modules\.bin\node-pre-gyp package + +on_success: + - ps: > + if ($env:NODE_PRE_GYP_GITHUB_TOKEN -ne $null -and $env:APPVEYOR_REPO_TAG_NAME -match '^v(0|[1-9]+)\.(0|[1-9]+)\.(0|[1-9]+)(-\w)?$') { + echo "Publishing $env:APPVEYOR_REPO_TAG_NAME" + npm install node-pre-gyp-github@1.4.3 + ./node_modules/.bin/node-pre-gyp-github publish --release + } + diff --git a/node_modules/bcrypt/bcrypt.js 
b/node_modules/bcrypt/bcrypt.js new file mode 100644 index 0000000..612f9dc --- /dev/null +++ b/node_modules/bcrypt/bcrypt.js @@ -0,0 +1,236 @@ +'use strict'; + +var nodePreGyp = require('@mapbox/node-pre-gyp'); +var path = require('path'); +var binding_path = nodePreGyp.find(path.resolve(path.join(__dirname, './package.json'))); +var bindings = require(binding_path); + +var crypto = require('crypto'); + +var promises = require('./promises'); + +/// generate a salt (sync) +/// @param {Number} [rounds] number of rounds (default 10) +/// @return {String} salt +module.exports.genSaltSync = function genSaltSync(rounds, minor) { + // default 10 rounds + if (!rounds) { + rounds = 10; + } else if (typeof rounds !== 'number') { + throw new Error('rounds must be a number'); + } + + if(!minor) { + minor = 'b'; + } else if(minor !== 'b' && minor !== 'a') { + throw new Error('minor must be either "a" or "b"'); + } + + return bindings.gen_salt_sync(minor, rounds, crypto.randomBytes(16)); +}; + +/// generate a salt +/// @param {Number} [rounds] number of rounds (default 10) +/// @param {Function} cb callback(err, salt) +module.exports.genSalt = function genSalt(rounds, minor, cb) { + var error; + + // if callback is first argument, then use defaults for others + if (typeof arguments[0] === 'function') { + // have to set callback first otherwise arguments are overriden + cb = arguments[0]; + rounds = 10; + minor = 'b'; + // callback is second argument + } else if (typeof arguments[1] === 'function') { + // have to set callback first otherwise arguments are overriden + cb = arguments[1]; + minor = 'b'; + } + + if (!cb) { + return promises.promise(genSalt, this, [rounds, minor]); + } + + // default 10 rounds + if (!rounds) { + rounds = 10; + } else if (typeof rounds !== 'number') { + // callback error asynchronously + error = new Error('rounds must be a number'); + return process.nextTick(function() { + cb(error); + }); + } + + if(!minor) { + minor = 'b' + } else if(minor !== 'b' 
&& minor !== 'a') { + error = new Error('minor must be either "a" or "b"'); + return process.nextTick(function() { + cb(error); + }); + } + + crypto.randomBytes(16, function(error, randomBytes) { + if (error) { + cb(error); + return; + } + + bindings.gen_salt(minor, rounds, randomBytes, cb); + }); +}; + +/// hash data using a salt +/// @param {String|Buffer} data the data to encrypt +/// @param {String} salt the salt to use when hashing +/// @return {String} hash +module.exports.hashSync = function hashSync(data, salt) { + if (data == null || salt == null) { + throw new Error('data and salt arguments required'); + } + + if (!(typeof data === 'string' || data instanceof Buffer) || (typeof salt !== 'string' && typeof salt !== 'number')) { + throw new Error('data must be a string or Buffer and salt must either be a salt string or a number of rounds'); + } + + if (typeof salt === 'number') { + salt = module.exports.genSaltSync(salt); + } + + return bindings.encrypt_sync(data, salt); +}; + +/// hash data using a salt +/// @param {String|Buffer} data the data to encrypt +/// @param {String} salt the salt to use when hashing +/// @param {Function} cb callback(err, hash) +module.exports.hash = function hash(data, salt, cb) { + var error; + + if (typeof data === 'function') { + error = new Error('data must be a string or Buffer and salt must either be a salt string or a number of rounds'); + return process.nextTick(function() { + data(error); + }); + } + + if (typeof salt === 'function') { + error = new Error('data must be a string or Buffer and salt must either be a salt string or a number of rounds'); + return process.nextTick(function() { + salt(error); + }); + } + + // cb exists but is not a function + // return a rejecting promise + if (cb && typeof cb !== 'function') { + return promises.reject(new Error('cb must be a function or null to return a Promise')); + } + + if (!cb) { + return promises.promise(hash, this, [data, salt]); + } + + if (data == null || salt == 
null) { + error = new Error('data and salt arguments required'); + return process.nextTick(function() { + cb(error); + }); + } + + if (!(typeof data === 'string' || data instanceof Buffer) || (typeof salt !== 'string' && typeof salt !== 'number')) { + error = new Error('data must be a string or Buffer and salt must either be a salt string or a number of rounds'); + return process.nextTick(function() { + cb(error); + }); + } + + + if (typeof salt === 'number') { + return module.exports.genSalt(salt, function(err, salt) { + return bindings.encrypt(data, salt, cb); + }); + } + + return bindings.encrypt(data, salt, cb); +}; + +/// compare raw data to hash +/// @param {String|Buffer} data the data to hash and compare +/// @param {String} hash expected hash +/// @return {bool} true if hashed data matches hash +module.exports.compareSync = function compareSync(data, hash) { + if (data == null || hash == null) { + throw new Error('data and hash arguments required'); + } + + if (!(typeof data === 'string' || data instanceof Buffer) || typeof hash !== 'string') { + throw new Error('data must be a string or Buffer and hash must be a string'); + } + + return bindings.compare_sync(data, hash); +}; + +/// compare raw data to hash +/// @param {String|Buffer} data the data to hash and compare +/// @param {String} hash expected hash +/// @param {Function} cb callback(err, matched) - matched is true if hashed data matches hash +module.exports.compare = function compare(data, hash, cb) { + var error; + + if (typeof data === 'function') { + error = new Error('data and hash arguments required'); + return process.nextTick(function() { + data(error); + }); + } + + if (typeof hash === 'function') { + error = new Error('data and hash arguments required'); + return process.nextTick(function() { + hash(error); + }); + } + + // cb exists but is not a function + // return a rejecting promise + if (cb && typeof cb !== 'function') { + return promises.reject(new Error('cb must be a function or 
null to return a Promise')); + } + + if (!cb) { + return promises.promise(compare, this, [data, hash]); + } + + if (data == null || hash == null) { + error = new Error('data and hash arguments required'); + return process.nextTick(function() { + cb(error); + }); + } + + if (!(typeof data === 'string' || data instanceof Buffer) || typeof hash !== 'string') { + error = new Error('data and hash must be strings'); + return process.nextTick(function() { + cb(error); + }); + } + + return bindings.compare(data, hash, cb); +}; + +/// @param {String} hash extract rounds from this hash +/// @return {Number} the number of rounds used to encrypt a given hash +module.exports.getRounds = function getRounds(hash) { + if (hash == null) { + throw new Error('hash argument required'); + } + + if (typeof hash !== 'string') { + throw new Error('hash must be a string'); + } + + return bindings.get_rounds(hash); +}; diff --git a/node_modules/bcrypt/binding.gyp b/node_modules/bcrypt/binding.gyp new file mode 100644 index 0000000..181dca0 --- /dev/null +++ b/node_modules/bcrypt/binding.gyp @@ -0,0 +1,61 @@ +{ + "variables": { + "NODE_VERSION%":" { + const start = Date.now(); + + // genSalt + const salt = await bcrypt.genSalt(10) + console.log('salt: ' + salt); + console.log('salt cb end: ' + (Date.now() - start) + 'ms'); + + // hash + const crypted = await bcrypt.hash('test', salt) + console.log('crypted: ' + crypted); + console.log('crypted cb end: ' + (Date.now() - start) + 'ms'); + console.log('rounds used from hash:', bcrypt.getRounds(crypted)); + + // compare + const res = await bcrypt.compare('test', crypted) + console.log('compared true: ' + res); + console.log('compared true cb end: ' + (Date.now() - start) + 'ms'); + + // compare + const res = await bcrypt.compare('bacon', crypted) + console.log('compared false: ' + res); + console.log('compared false cb end: ' + (Date.now() - start) + 'ms'); + + console.log('end: ' + (Date.now() - start) + 'ms'); +})(); diff --git 
a/node_modules/bcrypt/examples/forever_gen_salt.js b/node_modules/bcrypt/examples/forever_gen_salt.js new file mode 100644 index 0000000..761686a --- /dev/null +++ b/node_modules/bcrypt/examples/forever_gen_salt.js @@ -0,0 +1,8 @@ +var bcrypt = require('../bcrypt'); + +(function printSalt() { + bcrypt.genSalt(10, function(err, salt) { + console.log('salt: ' + salt); + printSalt(); + }); +})() diff --git a/node_modules/bcrypt/lib/binding/napi-v3/bcrypt_lib.node b/node_modules/bcrypt/lib/binding/napi-v3/bcrypt_lib.node new file mode 100644 index 0000000000000000000000000000000000000000..ba87d282512b984efb464d9a7403b11747317040 GIT binary patch literal 190464 zcmdqK33yaRx;NfQx}kx@(F@V@>vX4CMDHE{|L=Le zJkY1kskh#$dh4xst2%Ycu2|y8aX1`K{7)twjup7_FCf1M|8ZwI9ETkD>>-Y4`n+`f zimb>>$6q$}##z3~nKP!$y!K|_q-&>7pAq$4cY|+c)pXyD(|w^)V|+Kyxc-JS^78un zZPXQ=HAnjE{}E38Z|wfmJRbLr-CgD*a{a_S1J_&s`H6X&Tt78`D%UPEPx3!APr=n+ z|5=#uAN=!U(`v120D5){^Rd~#rW|^vPPaTo$YY+l1DvPM*vhMQSq1Ur~s%gl)CJHub&h?pY51{ zGWEC?u&^)3v5gh&$aehw2tKg?3UeIgy&aDAiEKw@>e&IVC|yCcg{iS}N# zEc<|Cd`9$!>L?(0Y_*%rzT5psq4zl)6=%%6{@UoZ4#%=r2@IImaD4~YjK2Wb=nM_! 
zXgCfT9jN#aT%W-;<1gTFG@enZ0i}=VtK&!jH17u&n0RR@cv;Zl_?G28pfN$-+8^$W znX_h2LYB}DG!(dZ4g`0@v>5;-Eghh#!_7kb!41XJ|0n;<_*H(l8K3L-gsh*FIV0BA zP|=~L^+wQY4qM$#$`dk{wd1Bqxq}Ov92nwKXstaUn&tzb(XGiqlPhS}{=2R5q+65G6x|ykleJd5hkj*tYQNSD_;Fg3 zpKi^+MVq64oYwgM7p>tqr+CL6aQ~0fnu7mDYtk+0VZYOVoYoZnIITH5-J1Q_uZgqN zT5}-w+iy-4|2VA~l5WipnBf@aCM_%=8$u|#??ev}TfASWXp?C{GKJzlF)awEmi6|Z zEw85eJ!U-W&mV8bwigy9BZ&toyG(0m$XLGddFrdg#6yka0*;XNx@o;Md9ra_t;p}m zr6uiF+vK1zv?g#0E3i&6*hka`%u5=^mix2BIW1nSJy=GeP z7{@tQ)u0)ZzpMQ8RQViT)M>_A+*15?Gxlj#RTGI4Fs*5R)3l=gax>OiI5ABdlBkD1 zlSA>4KY!eKsLOFdp_8%vAwR2+Bt+7e7JXP|Z8fd0Bi5c$V|i;Z5%nKhYOQ~kOh(J zW&X-wqTIRUVDQqAcS1$&dQdPTm7#dV?+aTS)qp)189uP^AU^KfQ2Y=xwi>kkG|Q-) zl1wJU)>olKmGgPFDr0>9Z?*UO?#%Wc9*SQA((ct>5Zb6KL(9n%737JjWQX9y;5EUk zCSGH6g_&^u_VsK>nnxmJiU_GPX6!g}MRC~LVJ4>etD*8uYmZt-jPYsyf{3*tVzroU z;9-yia|;s*^jedp9{becsB2saz@%X|q0-`s*Ia$2t|BmY3|Ni~mw3|e-jPhM=mrz0 zFPpO*=D=;H<+^8iw!XAKG`CS(~N*X7f*9c9)YW0`9TQo9UK-gsOi z)-I!e-3q{(Xmv4e679IDuj|0?e7~8<>rbeGdoaaA;M-+c!Fs1*Cc;^@ja554?}0rR zi9hBq><_tOyf7&%RDZp{Fj(`gKY9ViHr9D@K}+HV#_6dP%Iu`j+7;4_mg zUGCCB9}23PIZl7{Sd?CDm)@rVeOVweGFu(F%i)N1A7?Cj1EwbIf>kt@H-C&y1!76h ztkZyEL{9@!^AGh8t#=Mi_vbkjHrpD7_Uv!X+?-y(AIPfu(o8r5W^CKHW~@6fi~Z@I zcEJAZh3_Wdi^QMw@1XchB({L4k;KT}k%agCkCMsBOCTJB|N2XE$b#8@h8oN6^>`L# z)6NSfgG=S%aAR4mJgoUTY1IA#T?*E8BXt*3F!mbX&u&yse@@@#`YbcqWF}cyCDK1&Zi+Xy0JtHfN*C_8Y;zjr*25kCn678R?o?Vf47h_j}pMgMB-~u_kAI zf7VyhZro7}YGK5$ot4KVG0$zv#$h? 
zCL;cF^>7==0!dqJChim3SQ{W$uZPH&?EiqgE_yR$eU|1I3R1~4t=H5j2pwa2OC*sf z2VH~D9*PRB&HKo|ZjZGN;;$Xr$XMQFEW69aiY6x;`>nNJ77_Sn@T@-UQJ8{u6%6*X zgNf=sxS~}L6LW^VqUgip%lv2S79RbDXr!~_IkrM7rf#A_deux^FlmBM< zrT=F6XHdR0Q5gWokd&+7rD4ninCuurYqMsS&vrugr!0<-WIQ(-%Y^ocQQ7r%&!MCx zw=gs7>z>7}Bvz&p95@spvFqzr;n$M10KJd`dOnr-I}(5+9O^Ye`z5SEqnpOeI>805r)~U$+jwmZSxGZ3<{jDnX3`XcDQuZX7r4nkv2YN?X&Mz=v!a2_t}iy_Ijn#^Rg*`%^--pgrx&k2dO24b?- z(*W^Xk7BkI?J!~sIgrcx0^Dk#} zFO-PAcTu{AV7-Is{&sE2mW>xB}9&>loO8Iv=oIM*6OPSXS8DvN|U%Pt4@%V9k*8 z996r+IbWf>!XX**M*2Ja;Qggw=b)h3Mr_d!>+f*(mxaQKO6f23#t-XnFElx$Lh0}7 zUXHF%x|8TK9~7k@)DK6mcLY~E99^^oYHqOguC_-q^=-t`rif`J6nj*(UR`kmt_xdf zCee1Ac+Hhpr>!;E^Rex2=#-9#wI-6YDO|G2h^+wy%=pnTeU5ntHHo>B)<2+USt57< zdTyoB9<0v-idoV97^X!tj13gbDodCGdCuNcYXO&NJm5L!5k!}40>+v3rggMGxEe-Y ztkvBDT04)DC-sg4Jq%`%RsxzTlJ@Tsu9ZK7_Kjzm)=&hdf^O4l%A|)i+6|lb9CX~Q z+hiK0%^8_M5*R~|5O(Gt8vz0LrmiuXo$N5J9L&IExd&P(ZPJz&H5P3uTBF>sXH)ez z_axa*=%TGR)sbv4~ZJY0Dr^Rslnh>ybAr8V%i4TOcwoDkV6ywT*=1o=+;JUx+cvBi zJ%@SZ>d$tz4GVye9jk{Gpv|E1n0yRNG{0t!&r#Lav_^W8-s4vYT)q?&N~y_Rsg|bO zgK@@SZHoS+<~EO`s*h>)NqRq~uh2FO7PN2+X$<&<+n4a(a+!ZbHeHEYe~ml=YY37m zXHjF<;XomdVA6XgT2R+mbt0YWShv%-x6!oj(q%9Kw_Xcu3tNfw$`;c)jN~pFb*(90>WkCA0zy{v z`SdS+bQ95!r9^GH=KfcjmJRX*&gf1PZhxTYQ$QD*V%NfX^!h0=525Oi~vv_=#^%9ugh#r1xf5-!M$aoe)3CTuY$)4Fi zG2?Ua0F!!7zGo&_ItMBn;-6g`_;SP=mQNxNglo|;+fUet^?5kf4bX^H6wZOu++4It zJp*b$*PrL;>K8#c7$BF2q8>s9J+Ey(wi?`sT#=2t-Jg}2k__M ziS)@H@{>|w>&n8gbrYQS52z{tcR1-?uFhsB6W*7w9HbvCs*&Nav%Na|sdV;KW>Y?3 zJlu=}l+*tQxvBvPa_ylHQ9zmL9gO7~QusJh7*h!H27wf086JA+rpM?HD#I~{<&*Rd zv!PvpE)MxCqel@O(FI^yW#=`aicT)15gmrsdvXPuUDs&yE2Mfy%0JM&o87~^KvQg^ zSq2Q?-B7GEJ9;vrcG2TwojK8?>CXQo!nsa%uVy3>h35P<+tKCS@9}y(=G0^c8o3!W zL*+|HC8*K{(OyoL_dYj~l+E+wqV7k<>+x)ae&*pbNC$cL!8HQ+EiP(?{Vb`UDL%W> zel|vzeTC0P@R|M-d02=E9BylZoKHf(4yzgBM*lHMmj4B~IS162zPob@N?DEtH zc2O{5*m^nX{V$ZVb2G-#t@LMY(Ogj!9ZZq54~Vj5Ee>0Y`1crPyo>ir#>jGHM?| z&5=YnA1*T*P?{K?mGr)XMl>F?JYQ~t~M==4-t@jHbW}BSZC%(;4W8y=9q#7n1Yyw z_7qHbzhb8mKf*=Lf!j;ocvL@ohR>$h&z9?F4SY6+&-8d8ioI}qqx8oWeGI6yPScW+ 
zK{*IJ5GU+`eyqwho0UgZ!@j&9(X6$N^1~Pt0zZ23P#{02-rG8X8$Ot?@ddy5oJOUFdDE67T5(%HQg>$ z&O*hel`eDx3h5SA&Eq65MytDWO=|>r2O^b1Bt8sMPXwKG6oRh&3XbtZ{%!2S!q!Gy zF!ijLDbOm^rQg73N_e~KNozj%-5kz1^hMte()sJs`E@$qE`FHKelndsnb{)$7$m1E z+QE7)*Um@ML$FPrP@gjR=ra?y$g+dav@S;C5VLK#*e0uod%)=tEg3=cub$F0Z;|5T z#UwgkY!T7~{08c9Oj;%XG9ZTGb3`O@i93Wyp6G&Cp4K3UKcP*RaW(>f(%UM}t0rKo zU3&%ENh&5>(ut;itv{SVybt+(lywVR*>$Z@65iV+`jxjB z7xgDl4K(Jsg60AB5(XTDFd1&Be&D;^;aH*v;V%CYU{Qb2Aj1U2e)jckS~zZClM{I# za{_QYHT^x*Kb4^T?B61r%pXE5J4N%@ZN(0w?q#++*5ol`gB?b#5Eg2sSVf%dhY3~! z0fen!oKWMKY+r%8>nk>R4w@grqmu4u7&a=xq%eF>H!UO6?xoF3+c(qholCoS3pBUk zL3yKpZ^IMuPcZtA@^rya3PB)H^P|FY3|j;>z;U>^t82dVvD+HTUsxq1i09rHlvyiHo0o`lmW#kBs8aD$dcaa zPm+tOrqGsgM2{m~z)0)Votz9-IjBYhJ^xPvb)7wb!RSfvRr=WxrgfJ_jutK*m*y21 z$_a1zcS}K6dC0)p}Y5jI)d=QSF?pt#$dMEOtDad94R37vzGyt>Z)9+ z&(!PxL~~=++8m?bSde!5XupU(Y}4fI-|R&bnrfs+%CZopx3@hEf^>+5@E+U=5bHKd z!9tOOw(0N${S0d}@=C;hfLc&rFoKhCeaX2J?@d*iL8wJz^-zi-p(ht@2)mE*-7M-o zp_ZhvpKZOB6GX@10&0qea0mp6_bjvJud`&A=#3PSYX-B$V}ty{1_5zJ zY~^X%g4;#ngReyjU2~Z>Z8><*t5~U<1>O3A7+kx?kv@4j3*jA2P^DRe@)*n+ONAF?T0 z8ZfP0lucG25>!M@G5T#&PQvV6ys1J}8ubJRz)E27}rc9RUf33i{C+=uH>N?D9!HN$*qiqO5*Iu~SAs zBqAjr+8Uq?3mE--3DT}2){a#^JmbWN_Liz1m8y)o4k^zbLeFW1JshFgKM+0{Cm@(GjwotdK$}W~g$( zQtlFtsF1)*5UXfH>!3_xM4cbPTH9U0gjZrTI) z5hExLK^aFPRqBDqH5XmT`nAZ%=qJ5T$nz?4QQ`hvG(mF_hIcA!)m)@mRqQuxYhZol zW+XT4 za~gr@gAR)dGkM35AbIEFqWWV9g_%J2vPLzCix&y+y7`2_{hha@naQKPI`b)==}|W# zbG>?ZJBmR1C(0_+=UA&o@w+rcg&IK!o82nat2%eO$q{s+KmQ#ojsYuW?o205*?yv*A9U7Ys#@AOEYD}^#S ztxv=KytJYLFiRxU6SS`GFcBhg}K-d{<{R;2wCq?;?LluYvlR&bJDR= zvB;x;2q*l{Iq5LF2`?RllRkb_#3L42D38j?M$SuweMrr8R2{?nD%?Ks&u(1Q%NUb1 z|14Fz{vixt%@zJxs)oaRw}p{Iy{a>(>r96_6`3jSs8GvwZn@;rHR-FfZP()goqbk1 zyKB0Xwz;E1&DH>(G{9N{$R8}a3bSsFQ8%W{!Cf4t1>ZWJZCZBRoAyyU7WR8h2bWk# z=T@s1H69MYk9H)9`d))uiEaVY6X_MURMNXe&>MBXOAo!U5HJQz>sZ)a6kQMmOLa4@5)DK{9nu0;HW5Ln zY@|vqk03Zl&pu+URt!}L~hmLZXa&U=XCkjJ(zX>c76t~(j*_xfC)(B`smeKBcrmUETS<0XB`{tO 
zsfR(VK{e{kE*ZA=YCW*>Y{5;bX?&NzgFt;X(4#hxJi9^zy_UxG(IY9dA1wP0;Z+C;$$qx*hK#96a8p%VU;%K)RMovK>X(p#^_a z)=OZVg+a1=>Gw$BdJKG0y{t3R%ye%Lw5)SA-P=7FToj#v;gw+x)X#w$A5%;Nc0izb zPCyoNV1-WjwbTKfGDad6%q)=1{G_);XZmzkByOk{yu24JND;`gXg@4UJuCd2^ram+ zn`WdJcty5$rWXe${yaUy8iZrn9 z8}CLNA@Ams-5^P1(`EphvkFxe+_@ztY?lHyaFDj82EJ^@-tYcCm;N-h0`XhZ>IvA_ z<31x`kM)9C@ND4KsOjJ~RD(q_>RYuy376&HN`B>oO;h(JnE3mR(rMR(C22A&X0zD^Ki?gb}>i>dg#;iwFKEqsm%4f?*B;Bw}$k!wgzL}{i5e^hUrR$$GrK!5<*?hNXn4&WkkVyeM^SGEjl)K{CTJ@U>> z+*ZkU#TMwe4K@zM*=1wdJrCegv&T8hxzyQZ1Xu9YAiM^0K6YY{Xyn@_X2NSlzsFFyWG!q$s?|v2Rm22%VtoapZ@>L4UnPb5Nza`FgS}^2*ga@3L_nRC|r+ zhWf_`0=r0H?PnJPyl2W}j<4&KaJB=h%~}^(Kd&bxQdU;P zF#nJw9&KM#I@C9i%z@o)T%l@YHs+iug`oOVW89*=>+DAM=#R0fo$q@`at^cXL6=BQ z#M+5%@Z8Y}#c?|ZX)XpScDv7M-0?VV6l*BvwLHe~LOU1*7^um{0vRsMcQp+Zti?K& zK*#3$vTw~dt>GwSdT>QaLb7Dn8K!j&?%8HA5URtXn*c=Usql7{8$%|G_H= z)Vv0%XU68WdyKl#XiQ}AWZ$jW@4$Y$e?Yg}exbYlDLZoDZl`EGwt<}LQ?0_OV^;`& zod++k0fJ&Md8{Yme#8MH?#800QHk;fus6$vBJ&zZxvcak{gCG7+s(b{JDcmK3N7p* z9}RetZ=H;CBRnQbic48u#_gB`^N8r|uA&s96pN%06!+OEPPdx?_7TRSuY{_L4bOoG zj}qv%5*fXEjNTGfpOfx9S@nP{3WiIu=uvjfTeA;7R^=->J=HT80?gW?`CbIzABv^= z22{V)Y%ApkK*#FR28S)Ot+9|rV5wneLMmZhwirF6%#Q^rufzjfr*0LYm~btFBH%h# zVq~S67~7!EI0Lm!62~G&-4d#Qj=IIPY(_at7^JM>dzs_}^iJ&nn?rD8;?fDZ?jAjA zdF(hvzIb1`aQ0wAo)?DGMTH-ZU*Zn;gh3OWDT#;t<&ew>Xd}upE5Lxp~^a8h(>~|JxquL(|X|8oEKf ztBr?z4c(w-^+12PRhj_Y7pI{Atf4>onc!|vr77q_wM;|HK&^dS8aye5^w=J-yVGDh z)T0_~NUEh@U2XTPP)*GMo2$XRDXWCTZlRYCp~*G&>)?;F4g=y zQxz7gueS8iyPpPoEQMoY3e2ZEGQd83OL{pk1y(HWda9M}65WoWYSb{7r(gyUW}iQb zw|&-^e_jg$YqMjUFzf$#effSk0u(4MQhfjQ<=?|!Vm?|6h9>Lox4s;;LH1o=zFwZo z`f}C~w3F-0Z3kIZPPlfFf4Ha*h1|1NSW69Y;PvGrE+B*Dora4l!)S=H0*3=Z1a5i~RN~&czz!gQ*0upZyY0T-lk% znt*8uZ^DQHOoFQcDtojY;j7!-Fa~G<1dKb5rjDfU$E{Bc0D6XKkT1gkP?fK1-7nCT z>t-Cww^GG5?ptB7K_6=E24etH+NTYHGqp59PNB&5mGEsiS~*gwE!H9X?RZHci~v@R zJuHt&C`q)Pgiw`T@|k~L1aqlI z6dARDk1?Q%&f$CAA2J(~{0T~PwQ&&8#O6H#GvM!(lD2{46MF!G32hJT+lV!Av)JEj zzoQY6_}@i-O25a}2-&qcpk6?yA>1D@6Qwn<0cvOsn8vUsDE4A(WZlOa;MzOa;Ct5J zjYWr{Qq>zONz8zBi_NZf$oJ{m?-N|eaJ)! 
z?<3b!R^a~X@wT!Fs12LQUmfV*Lxb(yU(xVPcwd=h-=En7-H?VJr=e@q%^Lb~4PB$M zdZ1^fp|=zZt{QcbhOSOQZysm2q)>%4G^d-I@R~GXXA11e6qrvPlLqThXKAptsg~CD zfbAyjWudD>_0?dHrogTtn0n=TyK0|WlL7oTd~5Xn))eqrf>ShZjcU}Hm#5k_pr^vR z3>*(?ut6ytpJI1}$b7iNAph+;`><5@24<)K$cVB(s2p;8{5%kja+_u^x2QfyqYqOp zy;rxN>cUzI95UI%z+jUtPYb!{ub8%HJ)U5j)y3#m@;nX%-HR9f;uDWTlUSgnJ$kg3 z(%3RrOc4Vm%D7me`jtRpdKHT+a)t;UnP&&z`7=`eobxiZsTjLrM^z$%r(+}7D1P@+ zw2d#^TKCr=rT*H=A>+)1zkjYw>8t#F!x!68)yaQP(I_*XQ9RcsPp$tu;u+V}>uG(1{_uFlAMs+-lFWEUw$!tCJYyUc$->rjJY)CS9GJY`=Rl92 z2L8y5XI#8lSn`7kJ$9Aq;W6oW#vvPRKDT2R$7n z@dP{0c*fbLC7~;hwY|IR=mUh)jYW^31j8DSpM{cbu?F1o4@u$@&qw?e(DRp zX(C+^vqacnM1h12Mr0Iav}7XguhQYRfEY?k?$jK*i-rTTd{kNiZNk{rOOFel8Nt}5 zs<7@7iC4MtAEsE@HZ|ZhlJaVrJKI!m+{jj7{!FE$sN`6M>ajamrsP-@!iuE&wGX9G z)r99<$nUz99XR@c9S}J}S6=s`(Don^^I2P`dojlD1uQdLMroT5W{+4qQly&o7OK@G z9O|upt}p5ZgwY?4v~0aELsX^`uRH|gm50?_UCNG@$3FNc>?FAR;NC&Dh^qcxFZfef z1L_m_)F4wmeDx*5v8Yy-tS;azhRn)9IpKRzHW20gbkk!Cbl8QX&Hb7O9&Sv-l(G6- z_(?RJpcs0F+qPKsaBD)L2rWX2@n640VS}0^3@0^t4qB6Dtn=)ePR^`pFUHa}1y*uW zsg9qLcXW&crow|~Q4-n9FY86z!G0*h!*rx#g{nP9gp?%;3++|21S*3UHbN~qzRf6*j@efK5{jSO^`(2f*TU6!WAa?ADuu%=7 z@dH_T#Ts5|b9o9$NX*Amw24x2g?;~AnG|Hi`g9EqutF8UXhVx{)nJ9{-V|7&a;3l$ zHE5YSTthxBkowUS054H*Kf~YG@hdSGpIYB8tuYfC>h1MX#jP0y&tO5#Zof$ZqcMNg zkdahldP`$;sGv~Yr*qFp<#tBWHC~Z{WV#0HodRnH*b-ID=GlKDuTxtPAgvql8Uds= z6dXpvb#4?Cqk&XcE_}30;@im(3}o0(I=)>AW@jzj@?idtAlZDD^9oYWC@ybUjRs*LB4VGp`Rk^nS zJJTT$DW`3GH5pU+z(MlMiEl>%=mih;2Smqxw!j04zQ&$54)M|7BRr)@g`POO;T_5x zsxV<-gC@o>Myk)m@)K=?e}Q@Fvf8;3V;Ixe)Q926|Eeu!>|ZRi4kv4fQeaw4F4K^s1X4fh4d5ke-qZY@jbB^7PTRZSJYDec zjDj0R*z&bdU6gM6*wdt`h3YLz%Y^r%%k29GK*KwrXJ4q!)Y#fnnK7bLd!7PF>W}gh z{$zS0oRF#aqgy+#{YE#(-~t_EYykt>c`BYvd_p3}5!dGDM~eXFF~Z$53A` zLo+jcJz97Jx8VFQV;PP>%*`M8GPZ6f8|&6o{iALZ-bb!qgt5cG_G}^vDVFL+~K^c^*#}KI4%MJWSFjXCo zFrF~>7z%V)4(tsq2W*;|=ka00=zxhQBZAZ700{kO`)o70XEpu6i=?nP#~w>@o#7vTaTKXy`-dOn<_GzQ z^AANSF#W&dAO2ys?#1PHFMf!B_&5oNdjCK64?l;tQEPodsvpTee4Hp}1LgnRKU|1y za~y806+wU7;ednthx1qcul&QG*fpJ=SyLd>icR^4oK$Kl!sQwMVV0k=$@UMQ6gil1 
z4LJ!9GyKErVIsgH>hTZ9;KAw`1ut-=)}%MUzGs)e!%Lh|rL%t|FL9fm6#uT5_#n{i z?Ilhj;_vbjFaA9RGw9{x?$}%jH@2&@pA%sYFY%M(MbvLoS=tmhKVskCcaapNm)J`~ zi&1bS#vG#kuNtgSO;3Rps&}6iq$y`_#|r`(7f5Zz1OdE6Eq|Q9f5xwE#XP=u!R5MO zc}77u3uLC=k}HvovonyG8tlE1((yv|5WtqG zL)pCa`)jH9mzMqueZp&Oj_+%6jX&n%wA;UimnAR28Z7HtdvJ8nW6N}4J`$hA1 zN|Ur}xQc9gP}guiOb)EAa$rr_Nzpu0GwU6!EM6Ukd*yN3QF`h|`8|YDgpNFWa(#|Dm9gMiTIQi^^~I}o_ETLmLlbP(!2t1PUcTJJ;bTnLo8Q$SPY0@xY{=N15z!9zVmpq4T(e4PV$w zp^$POrFZuG&H;E5?BAV~5t#o6?_X*KEVF+Hj2Dhqcgwt;3txWkzFA{2USE5RJIc8B zz*T$>WNJOlxZ^B7KET`b{W^bw9F_lDk$?D2>5UEQm9;2>=;F7}@XF}obqG+1d{{%j zAmPm(Zr@)$*p};9|JBd{^Qre&2-qAA=2PdUzzWsx(_oeAISqE12CGyCz?P`G2L3L< zudOkn8Aa-Kk>gWE-YN#LxFV?bCY|X_Rl5qA+D}2|B|7tC+3t(`N0_-_a$cl5gngEA$0Z!))IL|fo&#ZD$Y}x*2C=U$hAWGih}Bb0n{v-g zZ0z6-IHdM{R)VAAYKr|v-K)64k*+v(l{2(W-xM*=3pkxP(;kO+K~8)F`D`Z&n+;fKeg4-R8qmR~{_>X;Crv$)oI@wl z9*^;%)Pw%#rQz>L!~1&RwJow}SPK5Vb5lc4bTb$=NO;|8bj`@#Yk2v-s8;RM{Foox zSFp8~>%wd7Q7*++i#H;PvgLeZG~s>i2%L&z?F_ES6F5-xTW(h4Sq9jSD&p%Izd|17 z$EG(}dD$ux+i%D!Y9TaK*t#fe&Bijr#)wtA90&A7uy&L59&V2d{9zO=L&iERspq1` zC~8c4f6-IRi2~vZb!zNtAh6oF1S`?C0h}%BzaP4D1l@=pbp13s?^iauxBBdluG&Vo z2b|s0y1r=L?tHec8yLHX){$W)M*%nlbE6Hm5Kg`7j;cg{@>mY7!h>iH5$e9{(#_ z?7c|#@Z$b!>|cNoW0dZ771x<3q%z-@Oy_g7pgm+#lvp8LBtN*Qe$b9$68GcXWrIHs5^PmrpY`;tml0r z9Do6XAG@I8oZ4^79qa_jqqr~PKRgZxH|5~u0_-b|g> z_c+jsnWy0R%u~#grdum$iTn~}afRNDUJPUH6dcvQ&5WDfU42PMm%1JKkwj^h?4^C3 zC44ubgq%)@vl(ZBAZ!Y@aX`UMgC~W4K|>wIeyLYs+RDK9^bfBoOBYS+Mg&uLMC4e= zNXd({5pqRz`=8Rk!Djpxbatb9=4bE^a9m;MXQUp~l-;?KuTlo(21dY8>l$=*s6k5^YSO=z-F^V%{~>!&!0xjCGDm$6v}5rmU)4Y;T%{9AVa%x=hMKA?yfuqSNzqe zI}br+5Qep|1JL=(Q^K+T-WHAz@9y%FPI;ulb%2RjJGyeP?F4UsgAyJdPY|DjUTsj{ z-U!LF+IcR989l;Trv0!IhY>i)S*C-?EKXvL7xB=I0LfWyhd@n|H8>GRE2`iA0W?)} z^=a3 z5W&0~nq6Hhm}A=~;0yyCnu$j92+u=#f(`aI)1<_kyfFP>e4?Ht*fQYm%Ek*T|D+uBir&|4#A8CXO{6L&ODD^HozDR* zk_duDZ>#XkbYcgZ3FdpG0;RYLAF;w9Eodskv>AaP3=OXe5FT{U#m03 z$lMW0f$2IAlCOE6k<5ti@Mp5y;5f6L-6&nZ$Djq>26Ek}$?lH6XIv-Y!sivG9 z03b2I&?40vG7Xcn!QTE_uC6#OMf$)PZUcm7j95?%2&3A1fWl-?c;ITYq}jM5AI2e@ 
zi{9KQ!q8;At-LHTDOgb5Xa^Der(2BcNQMTCKh$cBc9FYuJ{=+5}We14XRS^*6?4s=%? zlZYWhJK#mHZvBS9Rx}1lybFyjds`tR)CaxM5Xh(3)q^aC(=edzpe-=v(D4Wl>kw6O z%|x*7FEC$me&u6S7(e50`j7nU8^>=j9gwJH)_Wu>UOXXPOZTQL-bMY<==$NlbnS=! z30+%-uGOIH$Dr$oOuBwd4!62NSIm?7o=Xx#a3;t#`EuCQMj>rP4szM7s;=X~C#^-B z_^qWCr=l3>JZ!b|6wGg16(5M$BW+^eii*093&qOo>h%^#%Jjleib9=}SQ)q%7 zgWy%r*ZuG2Agp_YgYXf5`3L(KMlOuok!}n?u{L^#DHy=`FGlYU1q%uyv9*{=M&z-U zzAkl1TE^4}$Zdrn$JQ!X8Kbc@F4{$o_=Y~8d!=ZUfp&m`=Q_%pHN!Az@A6A=Ox5WK zHQTy_$0i&`QC+C2AD8La|7=tz57wQWnvQzCpBk@*ykWU6>uFP^Ji&257C0T!1&8WF zvVoG|R-SeZ&I|C7i6;(Yd?R+1*7~JucvvoQ5M~vWMH9?KtzXE1n*z!>QzRQZSwT@) zuH{zJ>z97^91mA+mY@r_j$45#=rF%}&(y~jn?|6C#@l=JBXClXR4Wys?gBQ5E%PxE zu_WM=uA%vg9@8gc>1Kst3J{Gn5ef|^!d-GGc`)fg^w{WM>wkhcL29W<4C>=qHxMjE zPD=9#%`gxWut&7#0n%_`D~ZdaW+mulpA+*w<}rlioIxBF``MtAh|-4gp^*B!h8k-_ z$+KKQ3G=1+i7mj`J-~Gxt=}8u2~L&mAHi8bWmZR9&_hWZvEC?JW5#(V$4E%lkv^W| zj-dpN@mUb~CopBg)+Sy94On-Qde&{LuezOiX|v@qVJsu zp`g!fRGJCP)#r~&cken&Ghw+J?q@GdYpv*z&dn%={$LV;wVQdE1pEjy9~8Q+4~)AH z?_wmcDi5uXwc>LECp~&o7F|8uz(jJrIzHRci3-6C5q@v#dw9omokxqx0K1HVkOdqAwLzp&n zx|r!4oj#lCued0ea1}D$rPHS~{jN?IFiranFn*@nblS)Cb2@!A(@*PkKGTot^Z=&s z*J%&amQLp~y+EhkOwZD3C(}Hi8g)6CzEY>Zg-g>pN~gP-9;VY@Fnyj*?_heMPH$)W zIGt8ZAEwhEGTmFJw=wnEp(sw=n&IPQSvmocEe=buhh7KVQ%EDxF>etH1NF zI{kM(|2@*F2`AT{?<<-mdB=xwe7whOYVu6}3Zeh1tQNCrji>Vvh==v-J7+);>Uj#I z2hzO@Z%?jMpDrdlM-O9qv;{80&BtIRL=w!{H_2!(6@&gn{ENSxt1z}bIB=dPc%Par z^{tS6@S*xoG5}_xAdz_lGkGvR=@l*D&*8DJ@mY}-GG94q)RC~*x?HLYf)(FUlZh0@ zWI$dgO>dr@ejh)+SJ0`=v8y<*qJGE-tgvZw1MjsW`in1|ECwzH<=fei{=m3+3toc? 
z1eRyTE;t?%-C@+dh^#hOf84MM>M8&sa6d}j1iw-J8r@XS;|J6Imp?=25_z+5QFjQL zs!mmVkOy!d@-Nl-CCon-`E9PhL!zS&M_^EL@j5lVFH*J1s*BV!NO7M;tk?m&3fkg1 zX=W5Jz^s0j1z@m@Z7IyS&61N)KuVJ zf-#{Ml>J+UfyH|)*Y8liu5m#g{Sd0h7{8yHY>&I{LWcSZydaeE#V+W}MpvDu`hgJY zDP*XJ;rPW)+66J|KsSDmwiay)EZ(S^klMZFs5fIDWT`wrTPvh`t4VVr+S+tweSueTJJ}DJTW>uXF zq=S%0vu%U=GxE&3-QX&Il4b(Ttcjg>qv<_!(b`(HCg~k<1Lq@j#pD{Ey9;TnGj`x=9 z9bV$DQCMOjBvi6%3?8+)PQ?>e+vYkEDUM~E>!)@~n@~43a(11>l5Nf`*4*>l${Wk2 zp?JjUOFe|$msgy6h=SKo!L&->oa{~f2>AiUpNA9_9HU)Mc%z=$K)D7p$@;Sz4kQuw) zk5kZK3L*9Z09cb8#zJoOz{3jO6wsssTAb%3==6J(0r{|n#FJ9BC9h*`e`Re&_+DWn ztE>QzCB2_A2lWl^sc$OltLUi@9}S}ovR8dCpjfgkmdu{}XEHbAMc~cOd+PSqIXuKI zk=F!vgPCsPeC`BKk22jx|7_KpTP_l=SA;)v(h9_kUF+fV_3~W4xAA95f+6@ZsZ<;t zzASOya6sIDJVJ0cd_ct~a}hp{XJMiPMEx2d`lLjvPX^=4yD61qy0>xO%6@2B+<+^lJ|PPwyd(W!NsNR~ z)kz)c{FB81^ZqrRe}m4y%}qL9#_EmIpc}LkW*KW*+rfrDaQU_XP(C;4k#~Rk;6Svb zq@^lq#%{<*u`(Pgc*3=8cqy#)QkLRkmq3B5TSOVM&567q=VZTjtD!}W| z1Bx1Vn8u&;zQv5M`iC6)R?xCEtLrPuYT})!%l1}(fpG3RLeZXvN)N;Zr4t{->aCJ+ z66D-C%yX21VR8Y)1Pqh=w0)Dp^)W^qU+-n1L)ANQ>v7XteT1i7+cV0m*O8?*;IaLX z(rj`-XjDq84|PPGC`4tnYE*f&{K{Y9;r7NS&bN$*S z!4(2gEjt#1mh9^+K@#pvwfk9xmI_!oTC3+KzH_`B zh3nQlM^R~nTIyZD$Gw_~h^2ZS8px)=0M-5OgE_ z3OuvfKT|Jb5HOMjb#GSj6UpO<_*AqIU1zJW;r$VEA3Xz9E`mqp`ZEa-YfpY|EO4C! 
z0PxuvxbT)&+@6QaYF8G1vxMTXhF5|VICBm^!Ur(jnpTuufzP)NcK!Bd7~9q6q`X&< zz|8zPFF9)$4lXanx5y^0;8OnN!NL5h%OV4tjpe}sW&*2zIUPvii;uZAkworV=m>wf zggy>QJIHqCF* z^j^GGJ;z}1CeWo{?VH%$wa9x0wm@-gue4!mE;x~!q84wBI&1N%9d##21Y2*x;Wmo_ zkUW-fU40eiQ|zRl5?XYY8?R$L=?`Fy6|tH|5XIVRtJn9h67}k|*1dG9d?Hk}Vrrn> zIPEF8q5}h^76CtRliqm%Hm!#h=N`TdicHuoqVaBG4w^nBrGXOLBv*iXv=E%_gklEu z5OFf$qY*SnP}j9H6Y{a#jMuRmHbfot*Cw z*i*m>E0q=y@X^polX|CqV6U`UGl-_xT#aTWCsGOxKfkt58}@+F_iw{R zy72Xxg(aGR%^CU=SJ?ZfO_T=NsYny18HFHktky`}6#vn*9Y}uV6!EUDv5I!I#oPwp zQmMcIc9R+mD~16WAiudH%Yn%n0PP)J4v;zkT2^Wq_5*QlKWrbIBM*X8>t4hlWCB~4 z=W@@%&bl|FCnwjbM+8&uuh6s#GclmvoHX2H<}{a7JLkV$Qte)_WgnLwLxfS+zB4pB zwgAT~VPsV!wy_xB${L+je`L_w8A+TocnUVQSPG$2*pItgoKZi%KifCKl zeS*mE%E9T!G;U8qQ{B3$_wLiw=Ac#n?dDH{@i7B98$6MLSbxHI_qei+X@N;N*I$5m zN6H4^!WtL}0js0I2Z$6Q^)t#?28tTBmRrAmakgJR2y5F(sj*_9-^2M-mX=KM}WL3R_u{FsqXUHV`tKu>^%-MA41~4`HOyPsB*DYNq$F1g+}a2$ssM_Aoz&!Obk{Tr{lA z8sNvs#}7wHA{qumC2@Y#fK_JV=q2HtiSCk{obx{_xyij?TNt1EqW>tW)Q8WlL?#XM zU==q9iYsJ|%nex>GaN<|PL$`Gld{7l%~4Mh(Y59Q!ICvIKgG|ihr*T&a`=CcK&eO_E%V-qCph;|E~bz?E62pgwVa3?e=D=PX~uK!CVB zpngsPC|%`XSIS9v(j3wTU%8}=k=KpigK@hJgn?7M?ks3&E@qV)5o_#hsMlM@LGO9M ztyoOQ*y)#AFt7nA&oGwh%tE7o(1-f~?wO6R2n7SUufTn|;i<0yW<09EBj~o8CHl6~ z=pSc}8DGdJsOW(+85+h{FiYGeCIuY03<9*~MyPsP(;-yb>O|#I9k6jh!1|;hGw$hD0*F8sI9SzG&cmcuS)`}NCraj-f4fVn{ z-H!%sh5ORrB^bv4ztEtjkQCXQ0Maq5bOs60LjV)&^#NZv=VEtR4o+v9R5}IuA0LP? 
zX1r{<&{fC+Y_Hp7+KWC{l=sl**E}*dO&@GX@?gsR*F>yiB5Z_D*7s<)+P%0uGO!V< z;M+1lZB{{Sy8NMlM}H6CW)-{L?I2!Y^@S4wXVAhfSKfyGMlDxLLJ(nLEZ`q>YK7yY z?Uwy8u!B$3G&a}-&?!!|&Ch2C#l2Y(gv9w_UKye)DO@g_ z!cLKQPfEUy;>+-xP1uEpcYP{rQyLkq2dGfU_B-a=@q_|(laz<7uo2lw8Cam$r5xxBOD}-@X3w-xMEgHJ ziF)uVe+3i;w82fj^u|%xH?FrY%62RfTZiM)MgK+n=zhFbq@J70VU%yaOsU6`KlHM> zaPXvAH*c#NgQE|fRb^Jt_fv}9Sa~4oAdIZ1s+&+;>&c0(yjOU1RmU`O$^*95Si}X6%BjQ9v9$7QS>hdEiaF)rc`Y zM1SzddMW9B509|bfZwoO`XbsFwiftcy@dyU8!Xvr#QuawrB;*mS`h0&Ih(A|X5Gbl zJjV^YIBP-3y4o3xec+3RU`FF5&*<4fYpt2H#tN--;#0vhk3j-KrtVn-P{cDhw9Zr2 z7pYvNa-*>HnoCPMX08ufYf4LAuDXygz>$jrRsK>~)~9zJgN=xP&I-mym?4%!Kg)7j zgGqRb{ZI;X&}u?s?-++}NE|=IGN!fLb9B5|ys6l>y~dxf*Z6C&ei7mM)jOjR&IU9g zWX3k)HOjV{A;maY7pK%@sl?AX3AQj7Cs!i8Iv%b`yP64K4?;Y+dbk@t307liB7O;2 zY{3Msc=qa{SRLtc>)>(*$iZR~+H;Hzz+{L*X!cPnSx@Xld=?B30jOgQ;9*EwSlo;Y zl#8Rfa}}pOJ&R*@XM3Xg^bup7z=3v6z+?#^Vz>=MO}XSltCHSNM&M3NH7E{acaFQt zh3-_jRyhI>+Mko6B~DKkyn?vVO0p#9tVrcRC^0lA>3xz|&>-; zyD)X|U6AScLP$kpcwu)9?q_>CZ{p*g^1EH9B7B!!bwA#ciR8e8Zi3W*Z%1Tc6F$Qf zZtCnEy&zJO_Y_XQFcwvysBvnRaVlQCS=dw~cfkhSE)6Hw;L?cy9r(X(GJbYcy%LUX zTcEKxqlek&N240nfdW-uAm51g8jADLvv#0!Ej<6r7?@=Y?7=?}_y=nI1A%`a@DB`b z!~Y#suNqHvvxcsR(4EP~akXd<<;u(0MgH1l$>cDrcA>+Q@cnY!?~0H98u0%@o`?S@Lz~x^ z_f6%?J)cv{OALjYu0Q;q9p}N_z*?M3T{?x+i~*Ch?PKL#>qj7VPcm9WMm$9Q575vYWY61*r&hN*lJ&*bCX04sZ*z1CCk(!NzN71ED{na<)KKK(*(J}` zplsseg{riQ4gMAKY)OSZK^^KS4F%%icj6EZDmIkRHj|ElU2LBP>k zs}=)W=J-))1rUBy|J=_0!Al!>XMA_X?er8sOBO99So@Q1yx3859309you zdSRd9C3D@*`OY|gH8I5az@F&rPWFe_skmw$V)PG;c4=hN?2c z2af5sms*!2@Y)5P8@&mBEo@xRL7;;sI=B^y{ja0=PqDF>l1lgeDY&%i*F45S^PY3i0@g-kUnY;3^e>6C_ z+8H&~pg zU`FkkD8oHpwI?$XN1^tviO{W4yslZsZy_4B{Zk-r0Xd}WB4EslUL3Z@0s!s&lFXb6 zL6%J#jv(g-bv@=pFgAaH+o<~^^9NsO)IP$5bx1fd&aLi(4}$tSv9#?74xXPMHNumw z$ydj)r-|GgY&x(uTJNF(RcD6>&nqx$?@iTWApzI{M8ig^AW`9n2gaf|s4g6L-BrRe z(HqgK==I=={_4e@LP3OxvBIn=NM;`qT-?j=#OA#GjX!Vl=QaMkjGrZH9e;HU8(f~Y(^3K5{m0)eqgD0Wx?_ZdISbbDTy3<`V9lbNg7sa%~0}=FH@P-!NvUIiNORa{xCK z%IKZKgyw*X-NT&!J_}rn5Y@q0AkMMgp9PMT*VF%JEO4PrkQ583GY`Z9&7b{vEb#u# 
z81uLJvxPsK`Lh8(OH@05>GNv8EzJY+`VSm??#F#II5>YE7<^Y(@9?Bb4Cx@X1{((u ziHQ!uYI@ZanCD>aOA6Fv@CDv|9Xv5Vnya$dfk}9+DQ8O%3&r&HpyRj)QVl9VkMUB3 zUF^)vVr%(vXnHdT)!?0H_cHw*4Kf!5tAZS&B*9Iv!c7PtB1HZ#yraKVm zY017FxCQOW=)m;MVtsmw^>pCugLEJ?%^m}-Z@BN4Cnoo-tW-qsRpv&aAO__9NWz5t zhA)j^I5yC(SF`a6#fYWf`9SM=20}ZbAu+fWR^B3gNgV;>zy#D5WFatP4OSyRC8;mT zEweW8HH4+eKu}@vUH)#StXJ!??E?7uQkO~j5TJ8{8@&EHjZO<3Ty>8k)`$EJED~$2 z&{7909{7R@`Rae$rx`z}Pp1BU2%nwcCh>rYQ0g)>h063J9}p9tmGV9Tcv_vQy+v?IP%0n1{iy7B~ue0RXg zGw!<7>2Q>cprk}%VF~?1oWlW3IH}cX)bS8AYgjrUWh{CM4{(ZWA-2aqillm%nkH85 zLrkQBZrDcAJQI9swIoIsvsQeKxEf0hIJ8R5K|oAbQ*|t>sLIbkKf9ksUHf;Us@15m z6Ml%QMjA+p)eI!~LZyWdoKzG$)G6o-Tu3ipvg>yu(?;!;DBU>-KYS$-q!~FRfRnuVT=xK42D0%batsJo(Z|*xHauZ5m;={xQ04(P$JE3|Du8WM z4SZCLym}@e%S*eNfKvC=vW!{^EPL6$>0SM`9EQ-+0CjmFyP)W= zs%7eSn3F~Ti>wUe*oSrM;p2q+>Jsc~qO6|Vt*zH6J4@J|PaK$e3`A;5k*0tvF0AUbJsKPOCiniKIskcUDQW3O7IScdW#7R#8m9MDNs*I<}3Wg-#&8eNqlIF6>qN> zwst)@ASKGcW`Em3%JJoo-?=3lA`EY@;XMpS9O5@LKz4}?E3=du1M?;}--K5k{fW8) zEr?ZD|I4Ub2q|XGH)Ee=cO9pO*^i?GR56lPb+szQwJjh%a|{#rE`IqH1{vao370dJ zOh!))PZ|gFV_e2URF=JDot3#{O@p|k+el#Erp0C}+W&vpd-M3Hs=M)jCXt%rk zqs?EFkV2b;&$5|ogmAIY=FNiZzON`TCi8CfkU^XC%S=BC#m~o>?NZC7R;>zSUc$4& zn3H%Ij4?kvWdLLD8)IS2@q#fODU7)^AQ%%nNu2o2;Y9=E`&I^PE;t%%;*xI*uh?(X z?!ZtE*c6*OQ09KMU84@U9w^|}qQhGk9 zvA#s^jT4;MUem4JEinNF9WwoQD+=AfYAQOD4X3(I8&0!#1ikRNQWqItPenH)Ol<9E z?r9oJiUytPdJgb1>i|!tHnO4DeB@1mRaHT?w;7XZgh9Z~!!2}{g_Vvmn9@OatWxF}DHZe{vmsKCmbE^ug8saGX$|y0%_qo#`drCQ zso7FN<9{xVTv-avn_trq({3LH?uL(c2WP105!=ls!mr7BE_`mHR={iV`OQzs?0^Hm z$6sp(6l`5DIB+wQ0S??N=#v?<#vcVB%__M4JQ?9RmekBP$LT zH%|qUn6^8`Tt(g|gZ-Z~d*%uXd>vv&p}>7S1R`wKhgJHE@;617W^5D()7#Pc5~(g` zV{cWn>!1TL;p&GG3Vy?kQWqq{Erjj-DqSwJ1w&#s`~|pDgn;HZq|NGCMZr0qmWz~< z?Ft@5>*h7Sz9iVdak)|f&&=r@3hEkfFsM2}y_^)O!}G}(lS#PV(}b)R3jiiUdNVK) zvo}h0Dcb2FXs2Jv8Km8DKAEk0qBDOb?Z`3X6Kba6RdX^6#*TVbcJ||gcJ`q~--8f( z)oZMH27{A&y7+j84#Fqt*mTg|6dly}I(2 z-kKGMwA(edbo{>nZ8LV=G0()COM8HYDp>?0r@b(JAeN;e<1ib2ErUg!$r<^8ng=dr z1*r(7zf@}_!nMu3k|Wz|{NiCgdyS`r6|i9EYJr`%Cs?p^uAJ)vJKhxN)LM`eoT%`Z 
zQgc%!N;$b62g)lAH`fa}0{S+4pBK2%+AtxfLxNDyNODZPnJ4(4eR)SNUj*vPK*?m; zq00H7g@{CJewkSN0LW?qkmf-EvPKI(!~`(&t5Gd1tL3ovwJS!3ANEv{k=?8t!xkLV=XHkI^S$F_Jf!81bVP@xam#SjGISY=f*~ zj+u>`Tw+2ewHo=Jh+Yc*o}29SN8C=3_1v0?oo+$DLbzp0+>POLQD zct4ef)YvCdW8%{Vr&iC&Ci8>G$Si3Aa{9xuqJRS^x}=B`lB097_So^Z@@H|E;kuID zp?8siWMeT={cu*R&lb!%nCryeS_Hy8+JmEc%$CP&>|I-j;YO1i%ux8LFqT;Z0$|?EA=ggloC3SB4rqg`!<$?L@v*ttQ&uKozhok3DSp0+a2t`v9 z_>fg#q`6u$s|kc4PEDYdcGLtqnZTxFWR|r5WCABy6F7|CID==Z8I)&x@MJZG@{D#s z<}jzVo+%YLC@KU!eRF$%5 zsi@XMMd6htT4;pcvU09y?h{$nC*qamusF}Lq%6!oSXYWI+1)HMou8+*-bIaaJZY`V zNDDuy;&DLDeBH|73eK)RF?0cxtP9=|7&jLoqf&wz;wRW@>JZf6KMRWQ!H`PDp517c zz>3GmlS-FP^myoA2NoH!QFZ&~S*jEm=BU{w)b z>a+!CnwNw81ETw@N1mWVD(?!qwZ~Se2sTg>!v88^W-4F}!4xfacmo2(c3R*Ks&0oI zxaezMgm+C40?7)bqbL#1WGWy)@5mov+EBd9pM{hRTB; z3H#LSStrm_y}%tRxA?f=IB&HB=vfBzXnM;s1wFwL-s<7Oyo0$et?d9yhrR)LIZ2jv z348lh9`vRwkrqw578@VkF>}zNP@Osyss|qm!teZAid&Let?fRmR+rZL2YN?qp(YMo z01ffQ4s#)b2Eaoh3P?&57D$RP>v;tF3M3_JU9-w7 z@hER9!{iB$<_$SlF#=t2IY4RYrYmDORJ9YERV2M0RO6mf48;+fd)(fLU$35A5cHuP zFTi;L*+v<$d9bq>1T)8!YJhQ+Exv=lMdnI`M~SQmBB6{;Yxr9Q%sML2PWm`d{g!k8 zt?Di7UCgRxaN}3n9qE`I`K+DqzsfdDB+6GlYKMK5>HXAoPtS@k>En76D|iY~Erp+8Ul9jJ z5lH$1J+ex5(z9056{8*~)y-L>WSjO%VQ{pC{}M}av~#J_>nI(b>n4lUZw@}G8HZ01 zIBqxJg{=@&F2hIlgb2r7Vl-wO&O~7#I*r#$N{lPAjS

4OeU z6o)_lG<1A-&PuXa$(r6a?%c3UR^+m5m}Nck9T#gDoic3R`@cmN@C=`fj{@Z}NDd9t zv<^>+r`_wU&|8uM1ud7;X- zP&_X2p-OO%2c(grDnn+;>TZ9Z4? zxSAdy+P&j#@)h%UI7G|d-{(OE(Mv-yZ;54y?5RZ-_?I5|mtk}aB^t!2Ob_42Msp2F z5fwZ^g*=^Sag==?%=-=o=Jyf0s)B^(f%TG1U8OW|+duQko=(0eoi&;yw=4_qYKW0c zB^pl3B&t7t>0*I43LXL$1sn=k6mTeDQNW>qMFEEb76lv%Sk&hQoe6=Ah;$27RQh`_ z*d?K0Nin>h&eQBMAKML0V~&phFX~aMkN>6~W6N0BW82L084fOgMmMcw-xV;eqjAAVRGcTPd`Di_UM^WIMu6-#!5!?r}7!Ng2t4 znHXIqtOE6l+kZ*(Q#UD-U*_NTsx$U+3%tw5^_&#GP^@K~Y#kI%IVg@}NFG|ikbtR^ zLdNBM+9aCVu_abkddzX4pq*9nTa+y2FjIgCW%4g;{>W^CO0ryNS5OU^3LjkI|$yJwwyDuH6) zIGWYN6cT&9xK#2R?Gm2cB)bq;11bTYh$LG^kktzuetSL%1r95Ly?y3ca>?Cc^a{|G zw^%~)aet$oe&e&Cu_pfg99Te&m(v5k^9!&OEGGB4=cgLdCU-EcaXb;)i(A0o9{mj$LV-2y3U3%;>0iLlNO6JCH7^TPW;$&)F#R=e$V~D#%8}%o5lT8*)sX9ZR)LfGjK2>|c$AK^$C5RD~ zE%6`c#79)rht4l?Ils~?-;8|Dftg!#LIjD3i$|{NBY%A!RGqbY<>ts2#^tVtGbVizw>z2fYg@_ULP5#>f_Ok6+`kpiU_2}E=&^gHzo}d!75iac zGM|~7INE?Ws5SLalK`^bBl{J)Os*vBYS~alq4?xlR)p_Wfo`GrC~q*a+g0n_2qg8@ zoDjaS=GvU_L{7Y1IrRLV=7r?}UI#@?O%Cn4<9*8p`<-ijdH!=s{LVM+Gz+@qkf<3M zK{+*%2mK^e`9(yD_62!9bLX=8>t5~+B8t(;JN6dNHTzj9mQAU>u+7`JIQN7| z36vn339zwPL*Hmxhj1^pdR8(ratAyTqq1;1uHp!8soM<2uaaR`_5_nZX6k4MdpKQu z`cGwM-KqOTjd`5)v4@X-UUI{Mb6KwoOXCf1cvz*qLlzeJRzJx~@bMtYe0TJ~>5;Q| z!=tj{z|Y@eag{b4c=~-F%{{T?(0pl_hqS$Jtd5v#Jaa(pK*q%(_Lb z13}c^tmx?SCasgQm!=K0B0i5ujodad!lMCNCe>Ji1&Jx zFm1XxhZORXg*x5k*p~}3!g+IfcXPCD^kweEa6ky1@mm96H*4YLMI4dcB5P5jSEj^; zs<<{+@BE53_#uHqm;vpoS#z?**06cs@tM(zGTKp0snJtnaT(qnBO#Mw2 zOChRQDbFfR@IjG~Y@jr5sE}~BnByeFP;!n^1wk!_79sZMl&x2}A3)_e3W+_lmIu-g zIN(f0JrJqlFjt#?Mp&I@J_4yL!^KMZM2>c-nVKRM^B@ttShV@nB!Fnp?n3f&#E z5}reK!}ZD!pqtdt6J#nryI6Xxo~x~r#?=T()oUT^{d+9iuI#%w2m$(y14v5#>KA=! 
zWw*`JVdYbCCvNy1=6IN^f*PQBp_Y9 zn-c6bI2+;u9bNTEUply?(P~+AtxHcH!tKE1RV8FW*rFXj{a=e?D)39OrI>wd?ws)M@y)NW zZuGXqK{_WZ;X3y1RK|GaxLilG3Q#G$WH;+sd2{(ERlDqaRVNqti_9$K1RPpG|{}= zu6ZOizq|FEX%{|m54vddMR0}&%1-6mADfm*Zo&0f?lsaU>uySY*|iQ_pN7F)F}S#o zn!l?mOj7mL;)@s0#8J<8n{R=4h13EJ-$>QFuAcVoF~8*6yQ2IFn#EmGH||qPC_+p# z8q4bmm4{HghO({8p0hPVftFSH1)UUXn~1{zNK3Jgn=Vcue3Ci`FkoypCzAlCnX!?= zB(V_wIMpaFTRPd&#s9Fo$r2vGEkX58a6e0?g9|=(b&&lPPF}qD5qztTks{;nCHT#Q z&}(HjkEdhjY@c2>=)Jv4tq(gI(jYeQM)UNGGNY*mQo)VI%w2r)ihF?q$V)H5;oS$r zea9{MP?Xu<8Qpo(XZjz6)0k)3`>}ZcyvTAEB2zClkxqDl>p8JS_G(bN_@WrqM9cWW zC5yYOq-@Xj$aYK6#G-qsv$#hR^4d+z?V{p-TRY=ECyTN!r25~#wroWJ%s|{C!vKMY z9*a%YU(K4e=CKtYq>HCAl4uPIC$UI4bUXMH_5hs1=MdDZNAKu$gh$#r#ekycnhPVp z`>%(^+TviDg(wIT!db#2*^1a%E{#6g`m<$9715aHpl&m@R~mCfkg-&2>I(TC>t+pJB?7;oDM{c8OCatf$B3J0fb2Iep$TLz|! z*^GZ9TXw$%Ba}_CSmC2mVyy5n%X!>#p1{fAYt~gmxK9wsG2L1Hk$>#1<%@hTIoNt*-n)x?1J?Bdh#6I`*o0%rd3~RT(?R9j@dH-GAW8__q z<5|VNmy3qb%|rPMCxbu!t?vAw5deB+fD34EFnV(y3GyOWng_OVd-fIqBNhofF_V0^ zd5eIED#1C__{~S8=UuL#6O_s*yi9$nflzcThCr~d*{HuUAgL5Np1nyXdvI!y)f8#^ zP^&lZiyVs>ITZBmC*ViYiZZi~TG<)4Oym|m2+Us{nDzuakFOB<}#%=-glD=^6@{%Q`x zx}%Ykd}ebq`CGbZ?RlYc(oqlCW&jh!{wwl|Pz3bI;(V+o4>E z`d@lnIn7is<*5Nti%_V)U=fI%Go?<=!(b`buBj!_JXW`6 zo<@xpc(FHgf7@)Wwb_cr&`s;!d&hzdKN8Zz9QU_zLh%Xegs2I6-q#w{R-p;9a&+T3 zfktBuJ=^@^fz2PLnxB_OY7!aNmvrN*BHg&0CJs{k5xEw4r666{7O-ii3pOm}5>_F9 z6wL@Va!JQ+zegs%c9@<$1R1g*SS01!qp6B6@Qv zJ*n-|EfGGb)d5I!O49*}er4Df>HgXh^-75-S_J-XzCkVZcNRH}K&ci8Apbc~-v4mi zfU)4D*sCT|&EMtBALe*Gm3Q#wR}q`2j{R7TZI5&1jNF54TFAhFKBwSY5Io=Y2?ZoP zv^#p)h$4VeYPd0VM8eGFH8gj3bk2ycETr)$NV$D>EzZ87F z(8kxiRPd*JkMdH%&s88NntEmgpFMb;5YbL0U zJ+V?sOR^XKogH-Rc{!1OY7XBHk#56xs01MS176|FH2Yg(+Ikjm*jf$8t{)FfM~0hy ze~~3@^wNHF?-*Kb7*=1hDgHJf#T%t6g%oP884$iw`JOR&z_SXwp`blAl+ z`Tlc?u0mGGf$pxK(+~)*Me6DRy}Dr*ZAv${u4KX9t!!9^t1KNQTA`8zJser0$dy`4)DE-PC_?ILjhg3XFfaud z6#i%q5mB@FN+>A~i#l((M}ko_waaNShI@773`NE=yEr>Jnu>o>M+1y4W`#_&vE7P1 zUSx{Y9sDrV(A@YZb)fdwClA%Ihc1>8^djLNzJPw$xC6 zQC0Z#Xy5qoxYTi$rG`-wbUZvC|G=WT9!{R%Qg#d5#1gBUC3}UFy68$v2vp2-H~g~- 
z!lmCBZt1-TK%yFJA&eR$xqOs1?a&%8;>(xb5T7cl&rKgOB7WoBE@Wb&U*<$kF|JX} z*6GmMU9mHnEUw+tR&QcL%X>05% zmsfjmZS2e>bKvm3#+LIC_C;CVZYfZryJ^JrrlX^F!z#qdAmf{2vZQyOr-9n4>+ zCq69I_kRj06SV?oFIgfx>}ClvGCg*tY8}i5m-$k$q8j;Q3tZ@Qm$J~C*2gZ-liJf# z|HV>2rrVZy@C1yO;GC`gO3^BihY~$+FAhpC^tkI4X-Tc|eyDQ0(8}WlKQ(fE6J7a{ z;lA{GJ-tEaena{@ld0o*pT$?%j3`fVPQnP?>+($uJ>;r$`qIs;I<0Xllfg(=e-x0& z_|(3DF^fdnD+zxLOo`a=Nw0CxaBC>#Z)LB)@}gGN^iqX616_&WO>Qzr z9b&`tQy9ZMJr(q{EjU3}M~kc&IK3h@t34yAs1vIjZ3`zr$B`jaM-{WFJ(E~2h<{>d zX)u2yXa0g=LFTKnCFE=+{es;fIHedV(Hh@kOhY_&ZM+BUyDfyZw?&lW#{`68Vn)}v zGV9T2xXR_7W|x#6a&}COyQ*0^G%vBYR62kG4Y3;Hfz&>AD@(zbZjJeW;_`cT%!eRt z6=;pG7?qRa0faJ!K~GCvt>1W)7EBF+HcJaaNWw;2T?6(5^_}BOx5!|lb#<@L;4Q)H zTns?S$r764!n#bUAMXF#^fVBk0VWGK)0f^1>#3BrB76XbYr8M{F>e48rNt_N@*ylr z$yEE|-^?*Kc#f+Zk?F6INGD>q-s|!VoBw(2IAHLw^oEJIKIDoFlCQ`&+teh&gHVm}cBBHs7 zH!qj^S$<*jp9plYE@t~MNeSHU8Z&7Ow#fX5mZdb6-aOC4tSuFoD3lhpRgAV;O5rZI zBYs68rX3XJ*cWrb?nWxaoIsfnRD6~}N9^ z#3f|6GyVu&Z8r0!H9bW*#bBFIo_KCcolbqXA2+}h4TOooO8JkbvJ(`XtGOxN+JU&dK4+m3iV%b<=0r_$og$Ue|#_l^LVE<^DH#b{?`{b#rC!6pZ*pd+W3% z4Kx5DXuszJpZ4%NzjI%}*$wx+ZsGTR@kL0qUX}K9bm=~f5mh$F^)y>~u-^zZtw=dJ zn^0LAWK55*pwO+pvNw#^x9#HL8vT{uH zruYhn&$$Kpr^shgQjb3tSspMr5ncfgb+k1>*l{pcj^r4|B-`7~|3B`nti|Nn`~^KZ zPiyou0AII8%8CEiH8M;>2d)wAmDbFxGs=~{o<5)Uvvt1c#x!#+6Rq7VOfBMGJ&*C0 zY4BBkuE*oDB8|T4t%*l2g?8gOc0)OT^p`R_}~n3|QWC@+r9(1A6|RGF;*Jj#7ggcMYQz z_Ns_A$po8|Xb?n)aa5uw_T*|WHm)iq6OW=5p}T9Rl}AgQh@*|T$9(x)(%=UrFR1Op z>kI|fxEkm27Isod0h|q%uuh=q?=+MU7I?_OjO6+$#(iq(2rE~6W&^ZwXrohNR-j-8 z0q2&0XH(?uyt97? 
zGNW|}pBr49)d_j-M2(N7Y1Q@M5p@}jDun7d3t6nO`_#8xtlZfA z#yb!U{s&}kl2xEQQzq95g``w5Cw|XHt!Xcz{ebmr*RC|dy!wXzwfo38)^5*Z*|qC2 zvTIkaEdYq`@_cD!sN8SC1JF}1_6a#+K}D4aRNPhXY_4qA8pWt3woi@5dsec=1&sWZ z5Vc=_hDYt!n0U}w|G(R>vI9=HcYqf4P{zO81J3jcm23f#Z@abJ4>PMXeD z%7S%OMh5_Gk~H!GV9hE4b)HV6MZ1Jo9*99%#OUQM();QK*5P@+UVz=#fx2&$D^Pa< zb^S-6u5;F5pl-%MP}h_ET$ZJPItNQ$ZP5LM3;=bZ_=+Ro93fdC_j=^L!>FS6_7#F(@?vRnWgx(f=h z&8B_f>jnU}|Fvy0p{EG+s4LDw4hju8_X}>3(l}fNhT#Q|1lh2lS1UuIU{>_z<#^st z#sr*s-hwN@SU{Li_{hINnc8{_$gr4atmua@(||B>env}f^UO$BS z5fDbDO?JuXem#U)cQgnCD&n*tOa`u4P)6X5vqNGBLKs!oVGw58VG!mcw!VTes;+;5 zFcw5n5QftGA&e^aa0uhIAPiDcK$zNkbqmS>m=u8l0|WDdxkdiX(ovx0%WL|Vj(03e z=MhMlHZsx$UqxmUFp1ij<5bjn0y~GZ)tAwO*~PBuX5p3rLd7<@QykQ+X~p zmp9}r#U_4WDMLRqw#e0#UNpRc@l4;na0s;Dup^S|+=8sTT*Cz}9uNr`ZA!21?8A7i z$h|f0lP~S|H*VerG#INFxd8rg~*7&#eWuD?n7Y zD@nf*jF0pc8m<^icFTjATv5{q=PCtFq2HL-p2o0{+lUL1M`VS+Jjm(LuttbL8Di0O zM^Q(2H^Nx5`KKAVGG)uK8x{s6J0%T#m2eY^&4~}E)0s!A?u@eTT2s6=B;pyKL-Vnl z3f=K7^QNoVmm3kk0$qJ-G+UemYemYkz&!8}`Q^5ddR4vGWOKT*;$}*2*!;s;oFVPd z+AESP@CwS^DfJHt#w<~}R>)y4lPl$t% z{gg){O?er}@27MnSHao6&6(s_Q)Av@s(})v46{kZAOrT?b zd-Q7kTqq4iT2jO(?tsb&{qa^d>1^4ZNLqU^Tb{EZ*pQGRMaXnJQkq{Sf4xev{r{qOGLX_cIQny{)EuP4CBm5giA)FYoTfRY zdijg2`%_Y(U7wPs1`CF+dFzpO6olu)90@@RBK%VGWEAD{#>N(tR7mCajRA zi_iF$j85S{tiS{28xP2_>3&nbqvgF=IZ7?D8uDWr!V;ti)P7l2$~0QWo{kN8uOBQ<~@|y-7sgwYn;7IeGHMjm=l{j6aiOQWci~9`rsH}K)N=EfZo)U zruB=?^b{_=HpfwSiauvO$opc|K+9&BiOsMA*s-n_?FD~(O|;9KI>tQovrKl=d$+2I zEv|Ug7V4R?sY#nqz%KmFCF+IFe~=56>{3Gh+X~rcg;Wr-+zLrpA?P3#-fM+CV};ZZ zve*iF)C%zuGT#a@tPq`$Tda`Vt&m#?Szv{H#|oK8$gNh$3@c<2A>Xz_rm7G_tx-?m z3@h+VD{!eSTu-5H1)g99$|+5t#|m^wpc-k5|30^W9EOmp>0&hu@BdT{1HzJpcUU2t ztPmve3V&pUthPdC67r-K@{|=an~+DXkcTA1npakg zl%H5InjA`wQeclrQP>9tg2Bo>@hcoMEYf$3H%;XKAj4c%gBN@(9gnyX!Hr)w&%94X z7#*bKl(@_KssdESXB;wxzJ{EJmrwJv<;t&%|3k|^P5e8=A2$T^q43F80YA6C&aw-a zueVIFK}jim626YLzLxVPRk`HTFrMJkTxu2nl=v50{%6HM&+-fGG;dOVB^NCs8H~gY zt=<^}K=f>ME(G3RKU}2l9+X1^XYBWf3s8)EaAyEhlaTz#y}wzzkX~=@zpC0R~G&V=zLT?~*u(2_9z; 
z`dF-iZjm4^<7)gQ5j-Mjit7Nz$=gJn5b_I%-=pGrgh zAYVqSAX=kBM!a6yB)rqj>Eb)DXHt5J#3)-cS{Wq40Gi(2Fi|RtxXsUhBJyS>>dp9o z!?r>om`!b|UcK2Iw-2<&V5|+s+F-2XH)l=V^xL65LN0B!P7C6f_L|T91;Ar#(uA+m z<4-vd?3Pz)(KK8HetLzx5o6hsTi(G)M6bZ^Yy65~_1`J%g>)D@zwMGdhhyl_b(a)i z7w>GPUCjf2^FKp^M@Mj>gR&wkc|@CYr;?7?7!G2aqC%pSN$jnpWNj|B_fZD%(&Nvm z2C*3h_kCRiUy;*4?K+tTD)970K9=F{fb4f9^3-3HGAyCqHmNc`WhsjiwaUB40o6e(PnSV{e8*F>6BO{(82s0CBm;;rq97( zzx#5EK~6Nf%eF9C00-LD%&8%k_)fc4WXx2$Q@$?U5S32gn37w!`mF$xJ_mBvqWPZD zd#>b!`X0*-b&_|lpk){(=ZW*g$DB};nyIs=$KP9nyonxN+*^?jUu3qQA*XRpW+|Vor&RK_~xfcRgv{bYg2h0^C?G+%RjB#ep z)?)KjE3Nq!u6@Q{P7luU6qZX5PUnne)Jo_<#xAFvu_M;YsP*_e=mCJl-%*hoW!Bk6 z8QG%Dj)k(8X@@#-q0IBof?%+TrvWN}Sf8Iy*}^7nEH5Gilv5d>N($;SQt(0We^nw9 zCJys1N=!Zt(j&q^duky!@6@9QbG3UOBuqynLYGpJYw#-f`SKd&zFb~~8-hDuA#VJ~ z>00fXzL4{PsE&ooO9$}ZMJ54@U2B8;0dX_Y-Q|gxC5T}2M#xtSBA3M{oEbDw=;{l4 z_AfkL8{%Ehyq**ZCTNEM}Rm_#o&cWqQFowGbDKw}TML%Y&64 z45&|OhZYN^nhbDY9U=Z>eke zfF7BL6^25bYS==nuz>aRR0f^zqXkk|)?W!% zLLio^ zF#hmKA!8#pA^u%^bq_j-5ZnVK?oM7Tv*!7tu3~wx@?+ie@q#f&w8xsbt5rSW9jS37 z3wl2Km*!Z>s_bNlk?B}Vs@%x><@{BPXK`i7^HE(f*RD+<3x8CLhWn&iPYi?>eGKYQ z$VerfxCGP{19n+oF>8Hgl2V8kty4+AN^XaWCC{hGrqX$MN^LUkOKQ_0*&>^e1*$fi zZrrCfw$KK;pyNXukezR7d$d}28w-vqfysdEzwHnRg$8i800I7N4UaG0ZU%al8ojUa*uEDY*BcNPHMI| zxv6W!ai+d0jw^MUIPR2RoI$C{;tWomCr)1KEOCaUD#RI@8Y@nIYNR*?sbj?{OcjVT zEaetQ!=R;al?KG=e;iqBtOgU;Uhb%cR5K_Lzh+-*1QcweMJWdcE75p`)B7_X>P}Q5 z!IjXKY$3mK-@T@4`|IhQlzbpZiqAEy)H z;qsCjliz1k%XoG8YpzBB3Xv!FbboY+XzgWK#e8^m#{ZXR@{rpcE(6%=PsYKl06Qyy{hQWL}(k{U10(9|j7 zXE-@N7lDhlr3iut%ZzR95{3`gJ z#qT_Rlll4iJ;v`({9fj_k>9)g_VC-suaDo*$8#LV@jIE{nfxx`cQLUy-}5vZnHFa48T!-3fr#x!5KUm(j}-2x1VSug+d$C#xGSucPYRaN8F;b%ESF zyTOjldum8oxl@nJr3SIsliLQX+wF0PotT}Fj3GA4y)%2kZDR{^hvDt14S+2_VuBS` zYn3?z3z#F{cR8&THtugpBsX)fT-U`@!y(%y9-iIgO-18okr1n(8yqq>K_@hyZ=q5# zub?%e?CV%9ZTC0r4S&-J%#*;=3G{Sp_h0t)CBrSoz7Nv>0muw~*2OH!6?48?lgS7t zMNUsl9PrF;+{f6pXNUsrvi>|97M}a>2m)7}FGHpX0t^`pR{^V8qQIkf%b0CHY|J1E zuukWH%{U;{s9GzRNAoWUrAc+RHCzx8#zGHgkQo5@2CNEB@iPdkkcd+AqL;0XMNWlw 
z)K44O4%tYq?;oTpWh}A-k>b&UY_cx%Ig+K`!JvPSv=Q<`HqLE6NSxq_wRu8KczdU! zCa@)0@dhhCesfXA3IWuF0cSAi!aQ`MVT~w*DyYdJ8)0hSr?kfMW*J`sX3{Dz=qEud z3Gyo)^cvA_v)%S&=|kG^%K+gbS*6g87p>vv6hGcbUD+p)#=${EV8G9Ra?)EGuGGT6LY{KL76buAcgp04UBp#2v7uA)H5gyyB|)m(od zl#0g~AhsGPVxd8mf|?6wC|w+>`lmAW)by$mTh9BHkbSf`a zDmvHUdgS29D2!HoqI8Cw`kXvg2QWyU&EUR%%SBKeAaTS;(5}oyz07n2#ehZW;^mhz zb~{-5qtnHsf?z<~&A;A(o?wajI*vsLKr|8+&yPQ5p{bM69_3BkL-QD;8t5d^gF>Ev zB%395E2M*&p5o*bNC(rHLz?p5g_h1PSh{-BqrfH|v_m=oin4BDj8Ef-G_*+lplLc& z0U4#GDvMh;zv^T5k2TN9R%9;x4kQjmSE%6%A$myzt)ju%FE6Kw#m< za4$VoP8EhTa6GS&hhl*jLv!|VqJ)ee+#QPj;2GvskJWxCX0D~NvA~ls7%scIFD+Wf zvD!~U@kwwO8e2N{2b!r2vy-F$z#_!0&)7I?fm?W?(y2MW0}o13oV zYGBmzK%uX|9}E1Nl^|S>ywJp(U(}+%f^5-W-3P|o%Z4olJ&)ZP^}P&Kg;X(m<5l3_ z4ZUKg3;6*8h4qVYb7QMCEZ(y za?1*O-ojL6?9SIv`ejZ6ZAKeXzAm1%OE>$CE~vMPJBX?>yH z$4*`5KXBpFz+fK63FOu9JT!GFF%|=qVh5*!SO6P@O@I7PjWBZ(n@s@eeVZdv&8 zfTlVZfa4kuMA6NvecYV=SGhMUd_bN#RcHo-y+lSMK0^b8fXuTgUA*XGfni$Xmy~3@ zRXO8D=aqRFCiPE@EYa@ygh0*<@6d>MWg-5#a`^NY755@GjX@Pv&{=jt|3kVcrbo1U zp2L?$OMa@$xDu5r8G^?MVlqDP8-ZUZzZB9!)7HD7lJ#U(5KC;zGS_)imNL#*)2F|4 z+H_CL;tz7JbfxCVN~~tRF*zRwc?OcA*r)}~nYE5V)b?erbC&vtlbJ`O&k$LIjzNu_zkUT(O?eTJH9{sK??qBf4HH}8hfwpH3 z?i(??Kj2I$5&iy^(?&qGyyZ+1gXS3SL1i`sYAp8>6Pm-Jh=Ji2pOLiSW8ohsV!#OC z9dw$&DBW-+E(45*jBrPVfR4ejOL!(=ZQX6cMDYpecb3n5hS78N(3Do7B5%xa#nv9!dm4y|Kg<5uNd;=*t%*zFhdACVzA zG=(7mr*6}Y`&6T47>;|VPd%3--2+J|4I1CC0!dI70FFKeH;vDXLGH=_+Za4m^Ph}? 
z7QIJSPG%g=)?S24vo3coB1w^==p}i%tE}aT;gHr`xco{KfI+#?9uyGwo4}IQpkO#x zjf#i{$f!7ZNmz_krHgN61#q4pIW$XUXu5ny8Ja_Hf8Eeza69BJGPWL9&Z9x5ANs$w zMs~5bY>N17V$>SJ_EAd!Aq;hf7A}QmE^DOL*CzPj<+i_mM@7@`A1BL%Ci;VI%)&+u@l^TJmAzVcoXA9=Hz@xF2y8d-_=y=4uz3?eI$ zRrq~iPrrFWa05<%5WGbtq1f!wn4agu=#9ejNcT>_=#sZn<{XHo`y!)!>wGmizUa3m zIPi%yi0=$xT_s=Rk7yO#i|d&@UVCgkFy7~K&oj*5So z(dDxSM?EZaOEt{a-q0L`fIyM6e zd65UE^!SrCrSb+9TBMjt&NTzW0lZ|7-PvV!zQ>ti%%BYS4=Grhc|715)VE_w&0@1g zi|6s_t+}Hj0%dojIq{p>^cRv|$6AX%aV0PZ3on^W6Sc-|bcDz6poPrnzSIaME7HZc zT_kH1L2#mwhtnDx#MAe>w;roNOFWctMZEk%8r4y2z`*%ji;gDDh`hmj;e)jo+j+aJ z+w3a>Z+Wq{doC)2=K-){kvG(aJW;_b^8=zS$VL2d>2*YN=&@^SJzr|k%~Y6rE?s={ zWcEU!Bl!y7LJpAIt}r5a7|mzAVeb7rokr!)VJyl^K28#Qc&>mxDqUh&k~MdJ!+2eU z`p$#d6CRD3CatN_E{#$sObR+1v9Z)&rH1~niH8XL<>6s&1h!IfHB4X>P}Tw*7-P9# z$_rXFG#1#NE-nRA<<>DIUHrRAs`1HHrm$D^9UKL};q;1hKg5?_xOuy6+K z?#S^+?3CEl3CUl71*EAZ^EPucIs}71a&jgH8Og>zGKwc(6 zHXb8GI#I@DE^{$p!UD$vnXAl|Kt_hCSdQ+A&PXBdgo#sK1FWf9m04w3ARYTOHL5mjDYGWZZw$8-bFmdQ#9T5=T5VfrwaR1bD{WHlRq`6hDw4{-yTuAq zSt6qt>z&HOGd{8V#))!#Cbau*J{JH%Hzc1smhP-#O8D1(5y5qpay|bxSRL<*2sK0g z)eA10r*v5JQWxX8|M6?~!6YhMRZTo=Wy0z0E7@z9QR|X-FnFV=tYtth%Hyp|lvpTb zru#{9%f*p@++}CZ)$Ya~X>{^40t2+BByei3REolyf7bXraJ9xJD^JH~+MdtmYFB?WS37I1cJEm#bVsq&w@;fj?yTQP{u^xyR|_x+>`l@%WnfexC0ekjLB%Hil14y z(TXQbrD>PAw^i6oN&h##H&V{{saCqeL%&kxeN6w7R9@ubjl<#>U?Y%dL<@h$4Y5-0 zA?F%?Z}Owo+Zb||(#%$v;RE>LWGL=cMb2_NtZF-GBF(sg>WjvGsltlJkD$4PchjU# zDR4dhPWw3O>KOR|1&2yIzXrng9&wiVk1To&ChqQQ-0FMxHKXs6??W@!@MCCZ zwhm~ijOsHIotjnrq_~}RTPLhd?kFBA9|tO0)^;8of2Q={Ad6A-@YMQ5_5fJEx!1r&4Ohi zj_!b7oH~KKp;vuzn5OdQqST0XgkSTcpP2BicFZR0OAb~!dBdkpc9hb*6DB*nIEAEL zCAIU+R%#gPjHw~3966Mdi$NAKb)HD<%^l~U+_JaxIrkG~Z3GCiU6kK}Pbw&D^)>9Q zCzFIYF$|#ocI2a#HxBvsbiCj}2mkUDiLT@FTj-?fmK?9#^=K$^Yh6XyHpulwyYh?; z{`}_X&RqGHh`hv3Cn4f>`8v(~_UNu0=laAhmp?z@&xgR!oaib+Ufvoz;d%R92NwvjM?C{E z%(sexa-d$aN+!iTn!?P6Xp8d$9!JsFfZflT~ZaKHBSx+z>3rzk}Q@YH@=P*`$>#`LQqn|`3U zKF^mw!AOMS#jRKtKo|nkr@YLJ{2Y{QtnTcp)dHx5Z1wSagsue+`i-v$>BR_0?TJ01 
zvQKM~6)w=*nuUsLmH=TvMDZ~ag1--wS&-M?wgdWEqR$_-V5H-?)e|dd%<=s`g=Uu3#p*z6O6;xYE!qQ zPO_RuEzOhawWb&en30_qCAtO~o%tQy-Gh~Dg5acYhdi6>MoIO))8Cov!@!RALaVrg ze6eGaPT7?M^mzO{q~sXxsGB4iAcK_FQ!sA1Yz`rdU_-{?nci43f20}|U+lR3?_w}W zF+?|;6NHhGuXxuR%(M7VQ~;r3ti%2z$dGj(=T!yEzD|~Qp(_qPJUrxYkY`#SIn!PO za@p2n)4KJsx|zA~o3YyLb>G&@=GY#a#~7RPMvz7fLpIqj?9R z2+OV&yjamq^7Pm|OY@|^?r~C~-0)SM%>!Bj;iu55Tj`uY@vt;MCYu-kQ98TP=r;ep zS^3P_+&jQ|d57?O+-RDbZ+DYC#r>SemAPFFHRpVbgwZ)jqtnE9htfpYr&-D2sHmCP zd{SVnZE)--5`L+-M6vt zfK7006kSE}{|egcZ|bc1${xF0`}vQSo=)-#);RNx$Q=rAa!nG$x&XeRwLA-gdo1>+ za^t?&c#Fy(5Aqb%YkuvL^?b!IE#*|xF4<6-h~%Rp80PU?WLMX;ZZh0AREq7;>ShRF zyc^JhweECt{iHlcWS_v$xi%BA!aPaMeRzy?xJF4lShv~*RyZ^hfEf2q(FS8|(c7`^ zvF12g5Zlcf9F}5F>qX(qSYEA2q3p_$LytRqv>k2?XNI4{uRXfnRh|dOU-P#cozV@s zu#sZOS%fk~1;Gr?$>aB$#bJvESHbyyI*5 z`xs}Q(|mv?ia`Z{c9Iyr}-K&Cb7@gYcy~mcVvy;6hXDC*t!Y3bzth7b6h7EF_rw04- z-%91?zvXM#CFim)y4M>X?Q7WEd^c!KfiHT{8##tTOHu`7a5$WxU90z=5I|;Te}^9& zs_bw6{8sGEXpK(@P-iH27J+MkP=NO2zs1ogO>Q@G)>otvCx8r2kfI&N!J+MnGb- zQ^F9L76% z?<-%5ceP+ZF&)Tay?y{;$IJbHxNh=i=62qU>oD(Qr^((dHRt|D5sU^iV5bsDDSo9} zFiqKnvteIGwM67r_CsaH{Px(U0L%*@PhG7{oO&A%ATgt@-ZP|b zsQC*ObtpB-+tZfATPXw&ttF> z!W$IEr)tPn)sK-K0RwNlc`MuBsJ&50PP&YN*B%$pcU=+6ye2|FLX?dx1ju@$cix>F z8Eyn#i+1fMKJrYo-5CqK1{3r~iz7ur6~c=`zb9}TXMzm=G%3E+T;!%RFa9c>7Qsg( z%|HyO6^gA@r?Cb1`rC{d-;v~&(YFfffZ$?&0g)j!3QoeU03AU;bk!5{Pkd%tT>FWj zB-O9$TksT`td|=eG;LBTr>j__2b|MZ$T&td0BHaUh*>Jx30T zU&UOAClxtLBtuy{W8^~aqW}f;Gpg5XImkHgD%HWw2rN}68<)Y6i)+q}(V$=~Kx2FX+7!R)*M3I`&`#B);lls-3p){mfl zCrfH(2B6NMW;c--H)LiOnL?!2D%NQgmgLVZ9D-_}9BHE-nV#tzvobxRfUa!S+j2dH z%TKiUepkd@BG{9FEg~oE5Hm%+yihvK#>H5Q4oiOLOiDYd{LakK94sXw#M|2!DOoqA z2CE;JP8g6q8K(@yd1m`P?i0qq@47l@z+fpZqzT~BA|~4}&(mVDx+ciNoh|d{^)m^J zWc5~wPnnWmq|SJm9YtlHXJRNIojOD6X0=H5)$64JlD~r6jdVhu+ZK@X;uH&`6>_2A zrmP(>(+rFOwF1bnrpxKALaJQVJ5txOYi}Hx<4{u-P3J_0cpF!?Q)r~nb0AXMQ(uoi zCw#p5F(%a&NJ7@o+{uT|P4!iZdsF;^fz@+hv1fmuF8;Vo2Aa695H_=Lu0tmr4d+_t z@2tUyVOSF>yp85LSSJwVPAq8(&`}^L19mn=gC$A>n!fw;Vu(=2e53ePEb)!7_z+(D 
z@@vE_xxUGSohbuQ<&RHw;sPWJZ+lXqK{D2*tLc_EUHnG5Yz6lZ$R$Ia8`}jeVe2I?@WVHVZXM zfG<)%y>#(rwnDUbP~=8pKh2lAt@0&4{UF_V6F#x3O(f}NS}S{+4CZX@nZ~;+^WI0!e zZ=MBq2GJd=(UIX)_a`-m@SaO`7|-@41>nX5d7oBqeXY>pXpViu1-GjxHZ9L`3Vc1y zMPh0^Q1sgU#7A{k4netg&;wRbEnyg@_JhG_?8_T;-TeNo5IM2DPx> zQ*gbYcic1*5w;dZ{4jqOsFvi(?Fn z>C;lWjD&CHZt@tuQX#Ch=f-?0Gtyv^j)gHG`-~!E%4|E?6jhGA^AVdi&HN`T3vd{( zEi$&i0nbSne-q?GiZ^_DPswO$tE&f#UxpF?5b?|S;vXvhT+5#?exA0F@B$+z+T{#a z$$2e_-w;_-IXYAM<_z^Ar!G0T=4I}AlfPu- zwnukgWY&sLK$yCgsWSlbB{ zp2p?30;Z&8$wvuL@YRe;6l%X#ckKS`cW?g{eU!UEBb(G=vUwdZQN8G}ARy?TJPMc@ zFT5BhT|A8$PMz@Ca>CD*a6s(7Ze4qB^r?JBl7?GcXKTzo2G`RXLFBx+1h4gIz#ul9 zdA3zFPei!)a&%L4N*rk`GT5J-#CW&4-ylf-Wd>KBulBIIe@*0T5|2@&aGkW_yOc8S zm&oxIzX&tklF*k|20ln}vKLnp1IZ>`eDY{KuyOYgc%}r#+&4+d$wq39N2b z{W>2nC7~rdRBCg3m%*Grr`b9(X)^%cZW~XPOOuR)zLizt0o5pFLej-wS|y_wuAi{^ zv>t4F`orH;xO2L=gE-+5Ot0RG-9&8CgdoYsgK4Ps8_U|92^h!lTro&4vg++ZB;5Zg z#+-}<#EoIt1I}~Ql1PXgNDcp@9FQuQaXn6!76ncsH1(3u9QWlI-bQ~eA1CBw4ZVw~ zpD((yk>rI7^nkIv8y{yM_8Y?!A+&f$1mk*1KzniKASw+Q)6XdoON%Fo;IOA-{!rr^ zdCnH`o#aD*Gn%Lfcs9=8Wg!%R5~-OTSJE0g-td*joMb~CFSS$fC3O~oFyJ|mJ0X0RX@DQw8(EbIW zCpAiKfC&;V8-Uw{td+tt0wQ~&?vo{Dq{t{dNm2;PX%^5U^L(pJv~S$qC92F%agb-^ zk^m95&HX-~^3U3#vaXoA3{Rxr(uNX+LSkx{ZhUH3YkXN-c_5fbI|D`< znX&_%xEDpcbC9+PkC-Cg@xtX5tU=Z+;y0+h*D17@;NwW=++WRSewHv}pt?NE{K zpFBhP$bv~DkC8?ytUp~mEYm4;A|Ig>6K@z1K4anyqhLSIxnxqqq?%~~R`f`twrXOi zCJzAWSzJ>U8G4y9>nl3EBl10jk{B;4nyxBRyM&(YIX=^~TYoXIXP3Fs#sAGL+54%# zXG8jXc6YXC%PGa&jpP)TMa*Dq6r^PLnm=3RohU0t^+a7|rEjt8&2;f3N;h9(87kuv zmX1yhIy&B6M9=F)%n03VJA}6)_}?BGuf4JXC2oldIyW?J4QYY( zLE0hdhGr5I{dNa9$u=Xge|uRX-Cou~w*K_6ihbIv4o^!h><(W&Z>j$ijb|={TI54o ztMsFN+68kQ-nzojob|QNc1o7d@R}eh-l<(jtS&ek?ZirUx|4-|B6T-0n>AGIN=Iht>;X~X%zUi*RV~*V4>2Vhvs~A#t zGCjVmltq}F;J1LTcLvy1sXT`mr~=?I1%=U@tGET%dd(tuow7yLg{(h+h2$^4*2*vB zH`u9G85u$^M^YwRroPWnihMd;hU)_N!HUwJ^NjioOjr_ zo}Bj(@$qGMqxSQQA(SHxLzes)Q#q-1$^A87ikY*8vs#=8C58gdCgH)>KR=9 z4q9>H^F)gXltD)t0BAa&f?7@u8NS|NWjmVAz0ljnEObLJO${N5hCWMb^$VJ4j=f7hfj5Nxmu}@nt_3|6lND 
z8??GgA#1cYKik?&BmV0;GFc5`3g#tmx2sm|$(E3vTYX>%=S*rC|O_(kI zvj&!=ire(B<*^^^{x}8}ruy^o1O5Hkg+H5nwM47Y_xvV12=<_^XQ>bD&(mbID(7#u z1Qt_vJiRiW^UQjwO>Hd3aJh8ghgSX5#Q$wo3^6tGHQg@s@ck*v;o8vA7FVVLZOQmx0ls=RTq;V{+Ur3 z5L{}$Nyu`m|NRkG;`9ELNo+MWhsPM9;8Xw%N|F9N>(|_=(cw2{+(I{In+w4>r6jv% z=^WZ94K<+7mY%w_=t_ZVy7W9&JB^=i*0;&Tsf7g@ZXV<~qVseR8}nv~ip`YbbzY#V z$+3vOifXMJO;0c+isBnG;NUy zO3Vqw1Odf?iiR46ghAU5R74k*xFVOS&_$)C?gvhgr%6e&sC2A+2vJ);PUa(0Zsw}a zQc6tcyplp^=9wRfAXIE-g}LSe_+V{;Qu>M3FCcbJg&MT%(G;siWt<4(K^Y5kC5Ga& zi%CtjYPwNdV|KoT2KSw%j>rYN@u(!W5y#5t9kh3xF|~BOG&b6syRby$kIbr9`^0)c z{$C*IZZ!W4BXqerT^eS@o5?N>HvQ_G#U0Ztj?y?ahfr%v{f07&m+S-!a**An z<_%g}g zoyDbM#vUG{7F>>z7ppC=Zf-ar%MRW>mQJnZaahACW@e_CcW)S2fs!(`%1;~?L6%!G zS)M#BW|g{ut0w*Au!#CQ=V32&mGH34Ffso!ezH9Q23B<^p>{T~qfI|2U#7-$4=Y#^ zaIIXU4v)wqj}_rOEMnleP5e`)BAE){BN?i$Po8{OFijJ{O!&Q!-UqfT$AX*Tf`%IC zrOjc~kp^h~9C17xmhmuW|)L?cPM2?j(E6hVv( zVaQ=52q-ET@iC!dRsER z`PHt})z#JAwR?t%paGz`8uW#@DCFBM@Gg3`jf3 z{Nw}D#tU`*Td^_Y100uJ^1dRP&IEaC{@{@qMF_{8PW=Aww zJMf|o>C-#dS9IWv4!jAN+aJ3-*mHro_8jV9&+ot|I;1b?z=a*S2pC_~;Bd8ry|@G4 z?!YC$T>m`kU@z@pf6>8S-oakc!Cu*czjk0rhy1l2xS<0#cVMcOj7P5hvK?5l1FHga z{l)LV-8!V#>cBc3SOCn8f6ET+0L+a~7hrC@_yVJ^!(fBBgB^WS0Xx52_PF26N!KgcfxMvgc(qyh_obAi#L4s0j}HUO4%$j_(9{3bmg z0CWA{9~fV=<6zk#y#p}Uz93+3{t*Ln{htclg%n3+0CU@u56o?U39vq-ZvgHOEZ~v$ zb9NVC6R;-%bLDRW7J$77xG!)euo1A93@LvAunRDLG!}&Q#!XX2gn?F0w zm)RZC7XWkRmjZL+O9IUG55E&>A9oV(SboR(t>gUE@%ZjIA9S3bI*xx!s9%$8F6HMJ z;2$Rn;<^uUV2~&<5~*`k|BXi%D=&m5s&3Xe~GOq$UHC<~-`{Gy_$;iwFhoT8DMA~iCmsCh_dpjQfd8K6ul z=#K%&Zh~HB=w*Rk`shWYOv6zD2I%h;^qPTQ!db!b{sEC9LzEW>;jDlTjC<6O62>Qs zMKQr#GD?UqVIu``@#s1>P!N*f9~;Elgt*h9L)y~K!uuQb3W`s_I|wX7vb#ukp$r{U z2!lky{t1!sg20$)E~_9gGA1q|mTjvLj|KnOf1e-9Z=XFZp7c3xLRa?A`U5G zOjLB30FR-#Fgz-RZJk7pl=eFnx38mp>hwQS2&4R?{X;}S|EW_uru)ZE#r3uI>%a6| zTkrpoj_dyp{oFn^N*|OQ?H`2>jj-sj_%Jkc%ofE8f@5L@+_>QE{_&!uu=sYvzkMoH zj&$(+m(@wr{kIy2xBFlE@jsOR$MGH?8Y@CKIBkbL#m#4ezI_F#_%;rV@$*mg4})n` z8b90~&thoqoQUQX!3a@YU~HJUZF-;tPI%gIj`WXvt+;erqJ;j@G119UF$r-3wEskLVsuF3 
zj6yh%WrLQ3`cq;m49(b4R2+IwM1Myk4WweISW1Nc3Pv#j=&xAT9!U*Eapc8rKJ}mS z$5F|s#6T((}d2JNe!mBvl!K(T#^=FV~>iAbHvLJkewBX9~CPS zwdc{Icpv}B1QLf0kj|@pJiaf&@nfXJ+&LBvcyz#fq25L7AtRG;*0w+{{O~(_OU1r* zqA<%-GKgel9tNv=v3%wIGh@jhMSv#*DtP`ww1;7zpK~ffDA=u95P_Db;W3aNUvUh0BRTF9CXq(Mt!tV$ln?H5I*ZThl-nAjRQe2)%HAbR4lqbQW+9 zQrxcH=!NUqhh8|{Y4pPFx{F@e{uI5ieKP7md=H?7ItBL+7aWwy{TrtvfH`#m=F|Hz8q>I)hQ8V{NTnhKf$nhVN< z4lV#L0xbqD0WAeB2dxB^fKqB?d-$LNP-{?6P$6g%XewwrXa?ve&}`6L(0tGW&?3-c z&=Sy6&`MB>5A6fx$`=4zf;xbDf`)=7fo6d220a8?09pdt0LoV<^?D^lQ|`#?GZ9@U zO+sq}6GIcIQL$l^mw!BkY(f6X=<(keqIj2}ba6NmrHv+kksXEd#j*y3C_kGITTnR( zu|Tfq_!}h(pxpgqDMxWE(qzgN)$f`RNjWBjP+lT2<$)F~#>LE{Mu-C0f>9oHZ=Q_v z#j<~(Sd>376e%hr49|b43=vL@%0aC~@d+Ubad5$o%8VC91&Hu`hyyA=1{12IeKf95 z%8u?7<%wQ6xVu{f1z9AsqNx3k(1DS5X+$`2K5!1sl$+Zp(|e8WRfok5h<7CZ(~wU94dpPAm!of=e9Uh45|;8 zkFVOflsJ~FuPu($i&M76;dHL3eBAC)sN?{o?uc<8I-)ZSR?N7CuCSe0@xE|G8iLdd z<-_M94|J7-w`&~ghgnERpm@AzN%;wEskj_cPb|uf>ml2YqM=@V|3Ct%Ka8yl$B_Ej z-OnZiF|#`ZRQH#r@!qy#056+Ly3^YWui^FZ*lxij}KYXRKMfE_3~cjhi-a*}83e){g9)ox67L*}E@y|AB*t z4j;+OKYHx=iIb;J7o0hJuJHVYi$#|%U%7hi`i;RY1y;q zFJ8WSUH<0nyNdT8K30DE{N?Mn?>{6})it$s^*F9?wEm`eGM!}QWAOwemhJ{B&Mn%VnXU4|GC(KGrN}fG++VmNI!hdg{lQMVS{Qv3g|DUe^f4To1 zot#IE95vcyjO$o8_i-Mc<0p7cL|>8UJ9!G}|9@})-=hB=dXVclPf#JK7&IK6PtDL- z65mhaFq>V8b4KT55+cxf*^C;?-e)Uy}l2bF+w>ywH)#4Z301%>9Jt^2RnFY#3kKHJBmtE?!b+$touhWVfJ@BDO-9d=B5^>*geej>k!)4$r+-P)F*gikkqW_#f8P$yQ2cvdz zCA9V0f67l*v|~{#xV*UaT*H4U%uw6o(RFPc>c?1A zH)&B2+M2|FXwyHJhw}!aGH_4Eqbo!+R2uFVavd9tEO?C_k8ymr@#mgu+g3cxL{t(= z*Kxi7&lU!ua`1?aLHjHKy(j%cJGmkI-zl&C7UMSK-u&|@Xxk@vq|HJp@KN2~AAnoP zt&wtdWBup;!m(m>yvLwC*t1}UdJgx|Kg|MUBy;%;2i?Cvl2VtvT0=K!yfvGLg5EEZX1&6tWj_6N_WimR5>*`cBI@v$SRO9p(%m4#oq@NscK^_ybN`O`$Lk1P`tx?b zPS{3OM7LhK6RuhqJV@8ZypznxDf$f$X8O2ZzVZyM2l7{&=PEiMJCLHSx60w}Uo|Rs zuP!oF`I2%aQ~&Uot|+&I_RD7zcig_{Kl#PMgVS!Pj*zPh>0Wq$Lf+{n0sMy_P-*$lj|xYbHyGp(POlud*a@97-ZmZSk^PPPvhhJna1btOby;(ZnyLK++U(Y zUau0j?iA#EHD0`aP(O35`#V`G&Fo53ZAsq=o@aXF?YJnlg&LRH|SWe zrJi&9w=-pLU&pG6+{PLu 
zb}z4wy1d4D{Jop&4ZRP(ePZ|_ciY~h=z$5eF|H+FYbELZ=3k3a_KlIf=zL|mS%ZtB z`gfhl*}se=wRf03<~_4#J}F!}B5W=%x{<%#M>{!T_iBoIbL`CUg!BdLx-)koY)>qE zwKPj{)JXSG)xFPK)~xIQTH|Y|rQwJZo4-4zow{;M#xVtPs&t3nMwtpG#1=@aP8^bC}xk2`{_Hc#M__QJ9a-k?pDdnO&S$XGi)6^-ZBRp zV-npBKIAFmQN|zWeNgFvuj<~v4g7tUF4PIXRJ$}`7Wz+E_eN(sX2~huq~*_MR~Fy< z=w0;d&9M=@ANlhH{p0jw@252uR2-4hRB#Beo9!`u_uavcZ#|--)by$c`@HwjxTL@F z=@+9#Up72Uv>Vgsh?(waf%V}H_wWAt>-n8m#n_ zNA#p40bdU+>-*zn)6P>BKi>4HJ|8?dHT{GYZCY?~&8tH_6@zLYjSK%hv`e^8NVhZ3 z9d6kTNDAFLD5C6IUR_~WkOA7Bidw(aro8*!-*N@6TN@^AJgD4ygg0lescYE!i+zlB zuHHEC;Bo1!8^i9adOus2vHixD+`q@_Yt52$R!YzP8PC_XZB}=Ae|S~kWd6s$H&%u{ zHK1cFPAoZ)F}1JF;B5&Cv&XLaG}#7x1O<9t3SR*(9J68{%fcE zmtOUyH7C8!`^)crUE=avA9qciVA09mcxh}&@q>`^hTJ0|b~g_n_bHB4*}i=}b;)I_{a+VcYloFhaIbtm^O~Mcqf**P z*YMHo(7-!!K@_9=5l(a=#u|5)$=UK6mT*hU!jNcfOgBZQGCk zbbs995ogP~-s_~bRJ_&p#e~WU$KH=?>Y-q<*Zsnqrw5fiz6Gq9^YzSKJ%`{&)I!ai z4+-yXR~uU_O2f^s=#d!IRQGVE{jBUy7Tx9syVVXhuPUfn)mcp=EYmG6SIt1^puroi zoIdlmp4lF&A2*-C*Rzbf*D5n(YVPgblenwjh20CL?!0Xg%O5&_#P$mX8kcj& z4RX7AcyLzX+15j|2I;w(54xj4pIFl>9#%H(+~SShoLaPRk7{^n{o%n(^Xzva}D14CMdJr17b`Q}Ujb|(7ghiEJF9u&QiE3u2~`kXP}vFq*jWBzYn1P&;a zHOSohSnR(+|J~&?2`b(6Ond7J!!-(a+C5knH@8ao`suCfCa=Tft(F$_|0cgK_)GMd zo-xl?u1i=DKV#UI=xYN{XAHaMWK_H=`AGj)UGPo{a=f%9{qbf~=Q8b&sXNW>>m%%w z$1HH(|1sJ1LrdQ+;wR-l?%k4mx_jmMNGj;v@fYLvL@VazWQ|-MUp#SD^Q7zXw;x?8 zaEcBrn{<2X$)TTGH6s1TU)6qocKylip_}n*3aGXHyW>O{HPOdUEz0V+wvNPUtiN7AG$P!zI!`wv6#`_{xHg{ zQdMhikC@D`8E;Hi1-aPdtXD5r8{#{>$BE2(7sK+ISC95)>l2r|(v-&| z5r=YM4T4l9a5rEHF!vm912Fc1vtbzNPy7r34vN4%2~vDu?m25MVD33)0WkKx<6s0# zuD{W9<-mQ2nX(4vo?~|a7J%IaSP$3}m>g~>(vY;bAJ~1t-XB;9tPdOtj4wQJ5Cfy3 z$%Z6g&hecJJOJ$Jz$U;Mz^1^PfX#rjfh~Y@frkO-1LJcR4h6v01gRom8{lGK^3)wg zl>jqfF9mi0E(dl5t^{@lmH^{p42K3_d@QoTiuCVjKt&`bf?XB361X#P18^5$MQgG@)PS{s`M^fN>cHH( zXjfovT~q_u8q#+Ib^+D|_66<^915%joCMqxI2~9UcoT3h;9OuG-~!;@z{S9QfJ=dO zfh&Opzzx89z=}4cfBFJz0rvwo0`3oN4Xh9B0&D>63v37+3TzCV1UvvZ9oPhT6R;_8 zF0dJJ5wHbt3Gg7`a^NAr65yf0lr7obVZeM~D_{YzHLxYH4X^{SEwCrB9k39X1{MP| 
zz^T9vz!|`fz}djg!1=%lYo7I(}5=eZvvhIoC_=hE&z@IE(VSSE(J~lt_0@61xN$1EHLNq zPykjOPWnR$SPNJg*a%n^*czA*>;kL->6Ic~k2&@e(2DSrE2aW*V1k8gA zv|M0W-~wO;;9_7U;8I{^;7VXs;09oAU`3j2za6j^Fb^*9jDTf(}C@Pvw?YV!I%%M1Y88H3|s=N3S17X4J-k+16E{Uf5Qc(7O)bq5wJ3_ zHLy0Y3$Puq5SS+*7YJftCE!$GW#9}hKX5jezdy;J&*cX$;_?HRaQT7Dx%@^Xzl6&V zj250#@x6^QFdvv_O418}m4Gd|^uP{WdP|btlS>aQ`B-GST=>QC$KiK z5STp0MIpaV2%8Z5CUM~W!Y=S+Q6Tan;eh?8M9GyV_NL%~pF<)_(qTU-4#CLNfmbMjN0qG;4yg1|y#32F7jfeWhP)-QcgS}@sV9y^85wIVk zr2RqcQ4k*j+lTu(0;R`(L);VC$47b!mx8wk`zF2E=g=|Z@nJ?eQQZ9x9w%nhNa=HD zczl>q&J=e)g2#y&db*Fh|G|DY%p-AOC=f5)Xz6nXc)XaQr~bJ6DLig4Paw(J@neRp zFx(L*AiE2C@{I&Mp0M3Z%Gj$p20iyk0v=Ch=qU#-e>B=6%-sDK9$#kYsR+)F#~I#U zu6{h;%n)(u@wmgx)sM#?ZV#6pk3-yl-1g(~Xoh-|D?bkPHnwy9fybvAYAqbzsCHc5 zL}__=yyEt8<>7IQw-2TV)EZo$cf0+C``d~ATmbF?Jg#wlT>s){0`UIh>c`{U47HBC z-^Ak`GuMB3+~fMV?Ze~W45jAoXYp}>w+|kZMX6`AJO#bGA&QsI54hjj#tlAB%up_F zyKp))G>RNyza_E#(#Esdes5!Ze6>%Hk25ngO1bSKPYH~Y?l*kg;qBns9mQ^^yL3CT zXTANnAfJPCm-Zt*PVw_G+&IU_D`xJvz{f3Su6}&{;{DRGAMo+qe*6Tm+uhbKe4Mv$ zCqCZWGd}MBWZdqy{CFN{&r$6DZ?oh1p}jq>LwR_VgG1PddpUfxD(&L3Z z2iw*@{JaXT%@@*>`NT=OU+{Sc&sW^>M&^e$do;WK?dJ!a-m~2}!TsljV*VW8MX}d{3 zIZEd((ob#2Bk3n+>G2oGjuWz-?b|`x-?p7xyF8(XNxR&n#|>$hm$aRvU2g5#fyY~0 zy`){ErS0b0#qBRL-#WExXB4`IXghLA{UfB~ht%&bod>x3+vY=Df7^H?^*c-FJ5qmJ zd-1bXc=S4=_xAH!9D2$Gw}acWq#f?kc9Q-dBOQmN9#5&A)Z-3&7th;xoc(h>cn07` zD5+Q106{ukaAtyG42N+>Gt65$1&WKe+jq>pyP3CH1*Vx0BR2Q96!DdPnJgCF$MT zwFlQeMf$uWNzWZ^-0?QHogJt5l+GjE_HgHUQodum?Z>_2Ej=!X-Kj%<&kpwS((ysc z=VndZAGka(>3N&5OS^G^?c>^=_c41)x4-Q=pPcFOxM&}^bz^Sc=ho>>AyK{uxlbWi z+vqb7;78%gC<1#aM3ewO11<+X3M>K62Btho`Ky8X!1sU!z>k3~fgb=n0PhF(1U>^S z1ilI^27UpY3e5QlGJtP`JsbEUa6WJ`Fy{v`1}*|S{%;~UlmbT(qHMcOTo_hX*Pjf z5$u&<-v!(Nd15Vf&PT3&4IKxES~)a4GOp;7Z_Izzx7Bffc<-e_R6A0)7N+1iTyA8u&1< z3-D`TU*L7Xp}^(9NxMT3~kr=KP-Kz(!z?1I~y2s{(8d_Do; zz(;^Xfw}X067VUorvn!PbACw9@45->>0nocqe&1K2%)GoU;pU`21Tf5L%1!LAFe1$KX6?zx06 zz(!z)WjLw?>Q@81HQ3h!y8w>|E`{`bU|+Cv_uU219y73qf}J~Smx6sD*ptAX1T294 zV+))P_65M@L&^551G_+a26z*sUk02D?8W7W`Ue3QfSs(8QB)Gx^}${Y_7Gq(*!u&Q 
zf}O0Qq336SSAjhj(whJ`fISLW(T8l$LSSpy9`?ys6sHCDC15uKyB)`1Uj$qN^;rU2 zgMB-&3ou!|L(ky=CxAT^*c+I8E~P7Q64*BYrvpy}-UPf6STdAsp9XL)*jEBmusy?o z3&1`PxB>Ea11<);50`%!DPI$~6zr3LD}grwHvq2yR-8og?*P^U&gJY-es^Fau=@h@ z!QKPd8ti0M0X;`!Mamxzb{DWO2KEKs2OJ7K3pfdQ8?XTC>j|6=_Nl-^u=fSt1oj}{ zT;L191;AH;i-GfjOMwf3D}nz8ZUDXkYy|ad11tKH?VkqBt?R!7)&l!wU`ychz%Iah zx%|MJx%5z9FW@AwZvjpRo(^0I=?4RE0{d)W?l~0);9Ri(11Atv1e;;5Y zunU1DU>^c(4fZTxPvAMg>5$$KI27z*z)8R{TzcS6z?*>I0_Or}02ct~02c!v11<$V z09*+?3b+AS3{2K%@h9C#Aosie=)Sc*2cT8n_8bIW5AJ?j1l}o*!@#@4aX5HYIF11C z3CEFWwZDD&k!U5ZJ>xZx_8fy&A=>lI4tBg++TM;=JKHn2+RfFU+-`gEswY_$3~aX^ zNMgu)B(ifQV84_ZYCTsz@xyT5I_#Ih>ks(za3qj*sQ^?0{&ZSfImBOsR|iNS{+d8( zKjXHTv7hfEcH+MYLaW^z<96Wk+H>3YFp%{${NCPo)#qm6!y=0n+vo=Dd~Mx=^5WKNCCtyjuJGVbb;yd$@Ez6OQgszF0agiJkMparNO( ziM21kZJd+zv!&yZtjplh&!xxfF!0`{m=aPQ-kf(lkd{NF{Hk>{Y^MT zy8j4=O2-Z1DD>IB_T@!O_aCuy-bb!{al8J++e5w!q^*9jblj5scy+ve`3W8Dv)Yx1 z*V{$y82dkPDI^eoRG73siT{xEI%B^x_SbRVR^o@o{mqR}vd-EzemLglA;S2bs}HZk zMoPycu}7g#S&~5fA#Lp={+hPq68pdLamwvq;)iN)$K~VWmISiC&3U!4zZh>n`HmQz zg819~rQ?_Q137Ot@vr0hxc(ykj<(~KFz03#J|+m ze$M~H#b7o=vj^9I#7`C>oyUoPk@Ko!|2p3+~1OA>iJl+GO{Y992+JQpnJK_=IW((b_fPxy_S;MR`?F9hNFaXrw)POe0iI9D4AaKs2;!K_ zPdHTC9>Vw&*(4B-lg|KEN8W<$OV#obHELD>`~;g&BTM)F%~V*ilL z|C1X!*cGMb-`s^X#PFmR@(20fHfn#yN;+^`aa5Q02AXsJe*Q=MlG^dm^)rpE_UF>= z3iQ+%@lcvxpALA%{OHu)j*~jHOOO8Sxc%kQ{OIe6{vin4?5wpdy*C>kcd(a8dCO)T zZc3S?zb0k2xQ5k5T%|1v++XcA|IhT)_YPduf%($R@V|xTa}ggV;uf5grhjz(ENb5} zDeo9xrOdbetl0~d*I+F_d2%gQPY=ILtdy6+2COAl z4sXON**#%1RzYLk7Odhc8@FN2mbcErnt$!d4wiXmv#|=^EX~1MB;&o4)eya1SToeW zv6{*&*p0c|dchv7{4-wF*@Kuf zR?a(wmH*iBFxKpuvPZCrgRZlhe`IMM<|3p1?EaP*EMhg|!8=whO`VToyTg#3tO~zS z$1w9J__10ZdY)BJ%U;KEe0gpXtCkNRvznbZ=md^0p;xmiNc+O7W#VXdJP7r6vFec8 zN;GrwDV$z1{RFG|FT0$^Omz!mRmi)-s^wmt0ye(WELJVG?y>6d!SD=@&xlH8HTy^@ ztEhj@;`nU8bXLVBuUXA+9(E4LmsG4|)pA({s}7%S3vs+yaSf~diVv)2&mDdq$EQAC z!z%Ul1FPAW>@MK=e4h+fvkz9VT2y3xkxjp01*?{3<*a63?pef^@8Qg<@O&hzsj3@T zrR*=V%J-;bRpO+638&B2vSl^3B!Jb7@a3%XKOSe*a?}gL>9Ut``tn1jtd^XZ$f{&V 
z3ahEnyIIW_+-5c7R1K?WKVHH41go5gmV~jITA#tHkXOKJx#ShAme=I2vfDAwl+_X) zFT&Z$M3u5xrF^fmYPsSos}6g+vh$JUUTao8SIl5F!*>y@4k`y&74N=BxPKk1C4XsO z$Mt!ZGOU)UidaoG`irPZ9;@Y=kBI$a1FPbk-Z$9(aB^bR;dU^q+4jp>wcMD;D!=R@ ztDc=2SQT{FDaQFS)E!tYstaVb{PI#(spNyKqVdOSc6K$Z;;!A<`8PkxhSk(#(^#e6 z%xBfJaVM+drfaO`f2?Fxa$e;Yt}k_-IjchB@vLTFh+|buZ(voDTfnNI^%<*{bPLfi z-M?{uhxv}IicbTn%(^@t0mi?u_{q-V%0LP*Ik?+ z9Y?HswoGMJXpq8c`S5K-9ST`Z9rS|L67^;h|EkwLoWEqV9jn<+Q&~+do6V|t#uipH z?w?^*GN_Ez>;(<1dX}q{u=UUC#j2&O39I~NwyZkHxw2ZGG?`V&yHHleqmo!n&0fkX zzj_0!mUeqtmCQNGs^^KTtO_e061(a-$P?&JC$Ch&<))@9Yw*PK<#!=BX= zdpB0I`%h(6+$o$@&u2-jI_zJ{s!+Uv)ohDBtfKjW)uIiTSKvnqM@npJV)cUFap zEv#m*QF(yd7XmtO_+&vMQLcmDTKJ z`&lhNbDHSeYpmwWK4R5V_bsbMgCwj9m{wNBBUIVzt$ZhKR?BUSSmh5G#_Au}W2oJ1 zSS>i#w@~?*{iDw2gTvI0+E02gud?&{yY^vG0d^y59@}q^u_n}4qOyi+j@_&&*8BdaX+f<(2nc zG*qNXJG#?TXN2?Xt>o#k0V!1);azFlfYiIkXeIhprsk*nmfh%q*{k{dh~Bhg^;wl| zOS{u1g?nf46o1=ChFrg6f6ACPJ$SXyRK|oJA$mEtq(`HDmQ|-oLI+J+*lTv?^82dv zy!|PLqQgDt{eC;Hxu>hs3rggLSJiva4T=MYn=4t+wr@<{UF+6^-r^D0vhSP$y>L(YVahxN=*)uV?Q zPHs(7Yp}niQ>>ppflr6WN6qzGUv5wOM~fc5Zsgb8e0h5R^SWLQQ~0#zV{e0BZ+p-e zD~??JI=M5wiwS*ZvAj2JWo6s)vYc!4l^sv9DsGJKD(7Snti&Tdw z(@&CDpY&d(O>fygW1F7YC;RZ0`JF!mGIY1V@nv1Mn$xO71A1okGp5sKBw44&45IV) z9&`(zYDeqLTJ<8luf+bFw(Y>DuEXgbE8|isgEVQa;J>QAt{zMW&q}PzT5C@iWUrLi zx!BMKz6ZW{d1*(VwUU|q@~RDe3J74Km~Ph66|-f)Nl zEgAbxg_@!Y`_YhYSn=(IKBZ4*Kd8RD{65;g1Z7ooBR%@ozR81Y#GPq58HGG##J0xNq?+Tl(FSR}-#xv8VHVmz3Y$Z9!)#7d_ay z$d0~0^>TIo9s#|%erjLg6MNc9vURcAQAPUA-x(+SrPFl%hCVgw8pG%r2mGFK0~;>9pgsL6rrq|;~wEELQYDwk> z^t}eYb{YN6e({~L^IS#!XuZR!W0I!I(LvEiADLO`(W-SeWrwd?(aotd0~Yl#qW#Cb zxe+k_vppGq1L^cxIVSGIt>`rSPvv_m2he$X-6hxa2hhP&U&b%*V@YRDJohz5fc{h0 zZI9UHCnWa9Z~awt7cz8Sp~BUT+70#xPXDbFX++cOl>3fR2{fXoneM!#sq))?l#X^Cu0a z>v{f1lJDEmZ|kr4@OP=wO^VwB#75`~PeUH0TA#J0_s%u`d+;z*TITbQ&DM+b=nsm5 z?Kf}R(Vh>_kC3CR={Mm*gUf47=+`Ev3|`M3K;MjBQ0y^Fice@#&Gs>`!+X{bwxS~z(lz|k7PJOG=xlVl4XwQ*uO!r~$$qBs%3fhBhN5;v z^%Z5d+Ha&7`FXyBX{{YyWj<;RgX3rz?J87Uv{!+l^(JOkTpTi-RPztoAU-dlWv&3l#y|?TbZ|$vNw9dK#whOEcY0I@ex}>WQr&}jPt$blW 
zknZuR@3P0=(Kvk>v1sjNV|uxvxu5&<&h*ux@&l^&RM`(1ZkjG}8b!|-UB7SZ_W^W; z&~@Sbu?Dnxvh!P~&c-y)Lqkzu=tfVy_oTadk}K^J{49Bz=LlNwj{E0F``zfm$0-5! zbB$?@1BH##jtc0Pv+e~7rVgZC3=h>bw5+fQF0b!3miF2I>-B{1G<{xFK`BLy zg!#dp7QVGwvNPI+mh4om$`g&E@#d6E$nMFv~->o{oss)Wnqv0boGzqIX_l8(4Qu~u!*_hPEV>; z-M3EGjaJ>e=kAk*uJqYS*E)-zSklVpE4xPC96@XLd$+EqsXgtqz)wT(Mqhev{4;lY ziy>{Y&oOLAL$m!r#VVP7WAy0f327-~509gjKL)m1hK-{wzL#B)f8$86)u~!w5^6@L z?0%kJrC>|f$6hiU(&#|{yY(1Bxi0$o%}5lQdGm{YGhKi24(R0kW&{msx(#W+nUl}6 z5Ag$kGc%oBvqU2hEB6~wum77-)J>6GQuxittQy&H<5MeB^p$s9;dU$Yg?Dy#>XB9^ zXWK1%=L>SrbSmbGb&p;Q&*q`uUrgCx{`ys48yU0Eh#othHZn3f731z-Ze*VEy+8Lq(#VkYutw&0(W=fC ziyN8P0@a2JaU*ji@tw-KX^l+zrzg$%E{)6w)2;Kb4{2nK-k|%Xsxgn;RI{{DYGB zOB$FnYl{jE;~N;QCI0BQf$1inbeZyKU|wE0Hhbgn1}1y=0-oytWM@MIBd;Y)<;gcN z4)b{}a%aZU)XL3u^eg$vr0~qw9>4IDx$?DKvG3uZ%yOGk5eK&ZWQunu zrMdm}lj(eYi@IX+Pv+?UAr&t|elk%Byvw(IelnwaOpm`m@+Y&h)$y8S$WNy5)u{PK z{eLn_DchowG=4JTG?i)PGC!Ff!s_nA>U!pem2l z+Pjp;9Iam@!?zQ4yc8fhG z)H6YPDwB(y>X}zXe--!)u4fzqc!|II)iaq#28JH*R?ozI$4 zB?VJI)-ikDe!UV>TE|S{uQN%%S;wsTHTrtt**d0Z3NJ$LP#tsNcE2kB?RCsI-!#hy ztLhk`oG;HOwT`ik@ErbaW*zf(Ldw~VfprX7|EyzL7o7W`Ke~?jqE_poWK+kCmCN<{ zJ)n-!Y5O zwBS}P^Jewlm-P8sX6nTW*-!Fn8CTw{!}E96G9e|$-r20LWl~*uedW??nK|8#jD9n_ zmI)S49&$ITmf3Qz?#AE3TISZQ8*j_JYMG|808zuJTE_kRK}$p1T4t&4WBp*WT4wy@ z83rc>wM;iQp!}jP_ZKLj^cwjLuc?Mn$lK$z`g;u{v%jLy^lc4uJ>kHbvd1;d$Afcr zufJKtcpLXC6P~YOy4fAousd4A^evuk-G5IF6F6VXRA+Myvq~}eVc(U=Zk?uKomRtq zA9?fs)Px$wMd4M=%CH)yVZaW*yVGl!OGze8IumM`Q5xlkl1J4r#|kG+t+1_Oe)SIj zHqD}jxyUzheb=vsIrZg}Z=zNWlX2gY*RyjCBb2#1pqN*~gj(dBUsPYsjJ@BQGww?@ zvoUpSk$!nKlkI87Q+|xpG|j!~W;LU3HDy)fg=)sZgm+l>L^UIKsb;Ot{%R(xYlX36 zRyDInwqR=Px@txvKfP;ydNt#IH^=(d+-hd1VOoJ_Ts33Jdy;ZIw3?|}n=0-*qnbIE zxMtpVuWDv(|1s#dnwcu>vGFur&Acw$lH@a_nit32G`?HF1E0{mL_G=XrU%=S?cw5DscQp^GE3IOBr|mo} zf3J#}T&~mH_gWQW{B4wj>$xiC-P!7pl%rM5hB?~)XZBSwS?c9Id0AD=ww)_dy)vtq z=O!5o^Z%-1x@x7VYo%2&i=OoA`B!2UlN{%?Og_4b;SFl`Tp3iwbT&-z>oc{Ax%s$d z*O~EEjNKjOE`BamO#jhCR(4^k81>?z=(mbFRP=4xa?>hiu;uiRzWu5g1DAb0Ewz!I 
z4OL7kpLd~20kNc&FKLx9JN1wM`c@}lGPZq)sQW5m)P)PYAV<}-i87DRe z8A%vLS(!8kfrPP|J$_h!EtH-O5+)UW5xuOU1g~SDz$x-`$Ak>DL^00j6H55ATi)oC zN!+upI3@94FwUS?@Fz(ppp+xgr_Ato5u2k={JNnZE;t6xObCk=c}sW%gn*2YvRM3ug>i?(q{7XF$ z(Y)rL+nEIIO9#yWO?vld`n@Ux?;kq(deigR7K_ubwM)L`y(!o*&meqcM#|aE^G&Np zZ_ereGjh%Gx7Fy&0-IMYE)|3YhK-vvb62bH?s@j>YFEdv@9cYORjluw)kVuzxV%eNB>h*P{7Z?N zPYuToJparhFG)B}=v|}l{UP(9_kn;-E|VO(RR)IJ-10ufKR95>wA&}{RywNZEt#0J z?o(`vdbjV#2NcExPQSL_q<)}Qe0O@<-9GDi@LF%B8Dv$-~cT z&$kv;jky`?r7)-0=G4;UNBTcsRn}{Jw!X7-OFnhJJaXe%`6Ag<9{Vp{74C4joHcDm zVE^!*c`;3)C%q!+FUM>&V!lqW(;eR&WxC#S>xhwQx0TM%$dfI3ccH~~@GAu)wcz7p z&3jJD{kuplzIddKpUQ*hO%KnUFG}`S9wgMhPUSpFjx}{?)!z|vdzWrg!>2u~8u|`; z{Y!+@C#P zziCazk(sLwf6jiqE~4cAs*-a38+jH}hp#%FnXszqZT;#XldjcW3)0hdB%L#|XB_s> zxw1NRP1U{ohcD$G*XnCwk=Uclg;ukK&pEm>%hhgYzpspsIr*gKkk;n6%kGwYzpRuxHoirlWN|bFWJlJ zIS+d|J{dC)eI0WbeQL(>nSygvXy3f@f=OG41@-T9MoH3pO`!LIy{{)PbH0=`-)l$SI+7k`!|n1 zs~E1ZIQo-#_Sg}|$4{CoB|PtE(DQ2k-sr<~U+)`JApE{9_}I7AGgln=`n$)Lu*eBP zK5w#)Ufv@!tf#S?Qi6lVH3l-f|qUvU&M>9e_9YY<8~hzCL&bn=8v;M4=c)sg%@PRVH3sUA5Ne{#6*?1lR0^^{&T zYOJ|mZ?QimZuFo`-srl%Y46|LzIb-oYRIgU#rs~?Y#HT0VtnzX;1&j-#)Ca{?$~q(e&cktmxsIeZ(W*Ejb$V z>9g{TE=!INpr`13)J^;)Z$IwL3z4x}?~j|GETdEPi-RX5-E=b#KhEEFwu|QVq|a|N zy@ogMsWnsXvv1tbYI~F3!_*9HdJ2;-_f*{1Z^Egr8b7034}S^Pz4Cee$ijCEi|6dU zZ#Dm@(eD+!A1?=8*)t{b!eTvNk<6xpp=x{Xez#k1@%-@I9|IOyL{Bw3rKc(OS1?W( z{qiecbBDiW?rn_?=l#F*X&ut(USOvK8PnoU+Uyj&sbpn6?)K`y--SzVf8S>sAUrtj z;I`;RHOKP$EDyRRIp6b4Sifw~b5lfvri3~iKQUnV<4;)we-`K!^;DU-b#u1g(s`SI zuFsuoAFw6esv*(r-A2E4-_+B(hIThjAKK-K?uQ#WuU~E0eAYK<&!W0Xfw6si{l2nr z>xNO6wZAAIzcbO&+^fQ+Y_-wA!l)nBUwl9RzCOhNn^9}r@RaXm-CU=?I~`QnZ93nn zT!R*zdi#Ck=Yt1!X?jFHFY);Mms73Njg#lXfAqHV5XtG?am-5YQ+mUF@sjY-V=FuZ z((J2q0`846SUEL&VS4kfQ#USM>909|ZT#P1Dt|2+z4+K#AL`;%rR^>~562%3=tgx} z&~Ly>C*RL?-l}x%;okmYrr%mg>|n=v>sD5M^?UN%u5aXIt5+$)`0s!3t6uC7Jt*<+ zqEv;wS)L6&y-R;o4&7Mhe_&tO{>$>J*P0K?PI+-lCDnZPh^$3JMqZH%dE9a{t527Q z&*ProN%!U_Kl2N5a1MDV8glhosWm+_tHH=#@H?vNqaLm4YM;N_WXHP?KMyZ_9y-PT 
z(@%%AOdSKa`XR<)nLh=Zjz1sC%rcLtEexf0KAPEI@lklKVDRZjr=9csDNplyn?9pP z_TL!qq1VkJefh=S&FkIs)|}k%@MW!dq(ye{SEa&>i#ML{R1$S)zU<<^2Oc;4vcb`& z#rwl({byU0ZynT6@-(V7mi2nJ#8kN{>eQ!zZZFWIq%Kc)7j+5!zB0#fXDj2e;_%W+ z`SKMfPd%xfozmy-fv#{hy6ZF`!eMAutKiJynzDmWZN4ub*VXRlubaJnw4%li z4BlmMY~#Dn@xP*N2RToD`SYV=XpnFe&);nJaUHO?QGXz#Gp}|=%0S}`naYM1v4x@RG|h9V z?@W#4Tr@AO?;CZ!g3-9-@?weFjqf)b71VTP9d{>>cIXkEKK;vhzd`qlwmx2csjxa| zsJi2jq&fP+Z3Be6DjQz+iVW1fe4=ZYtU+&N?t291X{O!3RldGo#GAGB-~g5Wc74xW z_}*pbvBjG^pZ%~xHTIR!&z)u?le(6)WS(9hpg1>3Qg{1f%&svLf4R(EYS^;&iV4hZq{*g z8>{5W>bEz(HApoZXkNSTuv4g=N%-~KZV6p=s?3wDoVtd89KZJThjTgdo`FqcwfGU* zzrNj4FFmhsfAh+lx0Aa&jaZ`5RXO^rYT1C#2Op0M$qVw!UGgmb)5Qy|S1N)gr50{q zQuI^OFegdqF6<;6wXo%WzOHq>?2(a)x0{DO9US4baOS~2lBJHR`F=B~;TI-lC-y&5 zd499gC~tpr-;1kjeaG!=d0B1!Bfj?X`DIJ5GzNRcuR31rU7c{^Oj+|Ne}gZx7rpMi zJh@BwqPOe))~uQwJxV<3gkX*L;}veNWNyAWq;jT2ZoxU1fL!&g{qYH>dd@$s^kRQ` zj*?mM;8BCF%IHq1Tya67q@TUnmERiOySM!r?bo~asHjyh$qB0pHCp)W^ov$`n@L;m zzRj61|BeT5hPvmkh4TBZo|MTpEcwvca7Duw$0banyqoaKN%>rVquD(q{4)_YG zfVnR$pJshf5BzN1FZ1PPuOZzpU-tR__qkW63Nv!%Hh-Dbd-7eS3b~kcUdmW|wV&5$}v)w)R#MDdL%>`Kxle{N>JGrM>@nMR6=~cDE?lXRP_G$Rwf3&RI;+?}a z#z%g<@i^w{xHmp`FAf-HReb&BkU?9Xc3lv-cGa&gA?1|N!RoG!?Y+8R53Ld}AM9mz zZAw;1{xm16T^6ks4bCtRg(QC!&kr_F97mh|BI}trB z+~T0iq~z$#!UwZ$fm7{tq$34F8UgGxroQ<*PsI&Vk4b~r$eK+HR z&!(*!TOAEd`gC71f*fcRtXm&z{{S)!WYc7cF!T|FqRp^Zn{4Q-jVn z?ldrZV{5x_$@=|cKLvRIc=*HO#igd_xiAS3h8Q5>Iv_|?E+GaR5j9*5kBAW>q!vpqGnm+rjFSX{MU4K@rdBlK zE#4nX}Ki_`<8dsrmSUe@}bt?Pr2BuKdZl)_E;&-hboH8!c~L{Mzfu zGjH7T+O)?fJ-qMJR?ptIu3r8{_0`w6oE`YEDJk$xOUI=Te|66vw;y_==kS^flh0oN z=E>w)XKcRPvaa%~+=@Rgj=TS_cYObi_m6If8ujdk&#R~9)kW+NI-^EC_J@P@Yij0y zGIsg{=M0(G{P2cL#(golVB+7_Kk`M=XUESx{A z*B8E*u{~$pO_n2vpL>7r54K)c|IU(czdLVn?A(jrd}Z(RzE^+md2XBYtG4*{+b=Dh z|6;)h4Sgn701?R|V-bf&lD)mtvSr1X|Oj=7gj7;8B>-821(+);VQFS+NVb2ra> zZvV6;Y42T7?Y(UCt5-hrtJK-QdUld`Yv=gDUta&^^9M&-Ui;c=4UiUp4|NS8^Jw5EYsP|o2N9+q5BVLXx z`6O`RZ)!)s;H_Qm{J1hT^}b1uzq{j&z`1GOHzt01{f`%v$Bw%rt>H4q^)LLqJ9Xp1 
z?T^oWch>9&AHKA7=!X;j{QijSW6OVcclWW0uWinMwRG#nudII}adgy#8Q=W;$X(kW zobtE7toiKBFV5VUx^vE4-#yk>4zJF?IoR^lA1bbX;QY}ghs%ys@5sOZMAx#P-8`-) zYk9)zUtGWcm%sQhHsOZU=&tvcJoLj)4}P<1_pj6lU;b}joc7#bj_jWsRTO{y6l>&@ zTV8wl>2n|1_=^AHk*nT#KVx+6{ypiz)=Q^7;z)mXO}V)hjun4@`PT03 zJ-2s!x%-B#yT4q%_CRLR9k>6@`oVPvEnmL(hqsDO9*Q_^D^yXHK8cI{WQY(Fku@yUmi zlb38dqbvQj_1P~J5BoUnnX_B`?__QH)l+Y7yJSqvrBA*6=I9;c{u|G7$2 z;q)J^G>&)Q*8hIUtl6jkK;r2?UD@{)%uvBnwy7$)6;x63gObusB@S0*0bgz|;>*9@%Xv+iq zH0(NxN*wY5uZ=DTU;Qf1&npNmt26y7@VN5IUF%Gr*Hx6SPwyu;ybaG+>zC$V=i>9& z#-{x@Km>;Wd`PsY#I?@tDRsL%liy~nlQ<#Cl1$eM)>uClVcYh5ZW-pQNQX&1~nZ)Tb^zhLGCbDgf4=@-l?$X|c{`ExHwcg-{$TAe2=EKGaFrDd*lg@mG* zQWTJz$~1OX2|sV%*04Qsjx8*mtE?c;otID2eilrs9TaPj+FE=Radc`@f{bC`Z2{7+EYU*_c;c zGE1ZaAkVuc+9tvXrm4mz_OT5@LeS zyBk~TY5diGEGpgL&hxlV^2TVeAabk19!_j1#8WN>kP#L z@k0xWi}XU#m7zW{Kl|c&^LqDG?nUN!I2NjeaO=DsrHN9afc4C3~v(*f5bp#T+XhaBMj8EiA^U z;#k#yV{sh&S8w4zPdJWSmG4gSg-*ALzCY}ER~Rp8#71b76RPkTkk}31!t-?>vnt13 zR&{cK&OrTibV6HZ7%wuh5!%A@6}#bEc)ppWspYtPfKIr+r0xhi-U&Z2Z_yW9A=}~k zi~aB|q$9Fg(j?YcRd9e#xIRrmrJxhqvfxkWgXoCOkaoBql27;+?nftSyd19_pcAey zsrQ8)?}8sJ4q_{0J3N1}AHIcjL>45C<8JDIfKIr+#33D>(3S&#Iv+$wY=*SM{g8aZ zw{Slae?$A{c+~)%aD7Sb4?ErsKUf^ZR>*dE{$fA0B??o;-+*o9s>Q1wTh~>7eMZye zRmYA^DY-A{b+PrII49&+6cszy>K{u(X9rFOv)s;nBNurOOXyuJ@(z|u7iBM3EH|^i zW>K@sU6tiiHyBe=`@4M4i{l~L)RYvJh0GfplCiQrXi;v8Gpop%SDrW3c%=xxhpJ>r zmy%Mjp2=k``?5T6)}oc!Q*#zAfK}f9va3Yq>o2=nWF7rv%Z#-9kV)K3d&?GyURQrT zBW-s-S(cGDfJ|1_nQ60RcvivwdO4zavcJr*r#=fWN48I;qgP}8ZV`*{u203ZSx#E*P-5YMW!I^K&&+oUIG4zxdOkK9X%)6iL zYBTR}S+=Q{*iUBaIglw{5HqXN^wIHCtC}zQ8v9eJSf$oJO`im3Ii+%^riyF}vZ;Nt z^F-!FmSySHOA}e}8LRPKIGy%TA-jTe*0F(J~wnDwq9P9;RqhQ;b)aEKGu@pj8zHQF)ASqex==Q86T_0XAf87(}t<>D-(mG z0}1|k_sCpJe3XjMk8=;t9hNne2Q-pq+tftMBx_7?NFd4`nH!N}%_@G|qI&2MlWnxG zLuXmD=vW^>g|rVtc66du_9MqCwM}GrJ8(ZdjV02eB57Ka`4>s8cuDXKbl6DCfm3At zbu7_V6}{4$VqhvL_VW3Tw8kwBn0Uadk}u-lV#pd>b0$@0oyu4%ItwZ1xCj+viC0$2 zEaj`h=cGwdYIxchH5_@gJ4`-3Mvcmzphl&QSEDSWRa{`0duXmw)h8I|FkesF}gY#-7-O}CWR3M-z#5{+cOujyR?_idu2}5H9*K5~ 
zzgeirMR?^kFuBn6P^8U{sJSz0Jd=1#QSlrb zN*##3$Ops9IGmlxFRbb@>=2WLv=o(KIY-&tLvo|DO#L)e7Se~3FM?SNS!095BscP% zNJSGaooF!ci3&sY*)4EZAQqpdQsp?Hz6={i z;)2x4EEV9Noqw@^j8fM{|C{+jxewY6McU$PFqDe{>!a*j$HZqa zkI72Gw~Hgsw=x!lw;4YwXaAaiM<92kE!IdJS#Mn%*{>f;*@LLuB0fvo`I4Q;tyX#J z%k*<PIBD zH^TU!xHvDdI7bY;XT8pZc>;OXi%;|h?EE3WwWzV(YG#D>xT=ecK5R>mO&K;N?z<7{ zC2QpR=oPm6qjlfQrtd93rSE-0Rx2r5@)s~89+f(AVA9Cf5@Fi|e!guzp*6?PB*vD}y<>}4+RwBhYE1cAYE0TRHO4YUh1x+b*;qsEsED2D zcdXWhiiY=(V^oW(5u-Yo!%$qDJLKyrF>2$8*{II{ci2H;Z~V&t_SRnsI!k48pc6{VV^BCn2~XEWn7uJ`;RZN-V1C#9`y zfGLN}w&J$ctKpH>T%(^SO^#M)SjMO!!KgrJP9Sle%!eJvm5)|7=LFSpJ@@6IDojk~ zSpR+6HhOA+o@Izd4Y`HkaIQ_wbw;ZZ**3;Y#>~{A`urHnxE7=0Q{#ei)}0*19Ona@ z>f(O&xs`M!omc;T0yIG9bmx}b$HUH5MeIbrXP#SRo>0$RmUCVe;&eG*ZilIdFmJt2 za7_Hk-&j>f#`_kPZ3@Xz3pC72 z8??m+nXpu4kVWL*`!dSiLPq-|Y+2Ug$cfhNz4@(-&PqDX97V=-ulN-csbW&8JGV7u z@)^|S=dmiz8Ka_zw|%Qp7eEBAImY#0n64!*TE(ReRdK1DL;caYy>`n@ zySX+~>omjQ zS3qMvFm0@X&ks8;`vqagW5!0Sv1udJSP!#y_ZhijS5iKR&P}(|53nPs=fzo;5%eQ` zAA#>iKeBO6x=`AHMa2+Ta}LV6!owVt57pW{W-B%3#(3iEHB z=c*l)7s?|~L5r2iF6RyNnuBYNY?+fqtAtdpIYvbhzh{{*?Nc&0GyTG+v?w(qUS@a5 zx|%9Cr*v`7}q*o@qYZ7HcU+`j8aMEBUKVQN$4b@ll0`Mfa(8m{2zw@LsfzY z{RFv=8i~w~Y=la9((aB)B-HFls+y%(lCD+}O?h$*^Tb zciL;{LzjGLF;MmoKu-2$OFDO`N-B(_9Zgi61p={Mj5hA5+(=^VKSujQ=f-Z@CG<=3 z7ku>JmHJ9#+MhSXj>N(;O3tah7D9dW;@6e>F?5_f1!X_#h*Cp!`di_D0)>u;_Cv?b zcE|NK?Pq)%*V?IJ?Jg;nHb+}?($=^JPvTlIX$Ng?T&x=B9IeJ>kK%eHL5=$~As8Q^ z-5L5Z=-bdAfOf`*b>3*#^ zE0pK-L2V|BO?>MmV@#o5vxlBqw=zC1ME5jkwMME4`lB`1BCgVwW7Jqny&4+iD!#X* z=KMfKVaL2@;5Ozt%!3?%QEE2Sv&)=Qh3{LY#HuNrr>4+OrqE8N&`zeLvfS0UuP}^x z(@+(^(!8%=j%AE%+1wXmY?pgNNi&D3N#$p#NzQmRiF*W-cJ#Jc8T+#1 zNXPiNgZ3GAjm}~Vyul%v#a&3C5oTP8C-?Ct zWOJ>?m^>lXh^v1j=MA~vC4Q+P+-EYz1;zw)lh92nX=MmtfO6*`g94;RR)r|9UQ*V%FVo;y%nc zPnqo?o^~*j{=~Vuuifc$E^UoAl7PO^-lTjVFn*Tz8$W}VMEs^*CZd<=dCZ3~+X98noyfJS|O17!7 z<&2BYGZ-J^)!0w%M!$AT-HeG=NZqafH`gz zs{SLiIc}veAxJuR2zlj@&l<+JY~%V``YyM?^cX^2VlRR5LFQIT(_~IJk~y7SjrR1- z=_F0M_$U3!V@*jK(C?edykE(9zd|woxD#ZYH~jh>zdpmS 
zV6R{9A=H7A+<1D)SU8G~i~)%TY}<#cSYs7~nHke)H7oA;$dIup3inhlNr zSoi7EkM(}DeSAIWd990BTSmvI(UzrZM35NyW8AjfAz9HW`ZtB6OH@XMMg3%xMfuyz z^IZ78(N^i_V>theS$RfqWMD+saQ{%Zd94-Gd#xp2uDHvh4nuQ8KF4uuCN(z2k{Hcc zF@&)KOijp5nmK&#B>KiU`iAs_GtP^9GB#)nM0Z8{<$6ZiAwb)ee%g0UV_qA^G7slk zg>iG#%8|iPyw1Fzu^$TcP2*mR<#rVvi0rcZO`G;e%A6dll0UyyIm&Ni4E~X_XIFFY zkc+umi~1h4x800E_`YQ*_Y<7V;c0s^(>LzX)|)mn#80E0j-;L1)rcd^-DeKJs$cn5 zom##eNoV8QFhA56%cGHR8foRXr9)hAF+Pu{jzZU5Av<4p zSk#42a30onlFpNP*;M9ya_KhfJjB?SD%@P}-Vhg#=L+s4{%lT7cSI%`i z@r@$Ba^1t2&^JTlYEJS=k@WUxWvA}#)V+P>F!x9*)SkvZe5ky4ykt=iLN|rt7Q=P- zLgvH?`gb|>xjvS0*B?rA%P(mE&;z6qw8(F5J0eq(&f@%B$hnYMnZNr)r9=`Z@y9Rz z$T%6#Z(u`d8(iBCIe<*+l=2NWwoH53qJ9AN)~TG!t)v-eQ{&2sF>}dr%sa=WCS=7J zZAxDcruL1Iw{zq3Zb*C;G~-8HxxO@he>CQ1@oM;|++Q%_CBIz}x71;7%Osg|j$zI@ zT1`qDr6wKen{yiX5(sk4AY;+xT<29mUF0cf%(vt5n;3+)!aGeagWstKZX(G+UNzvD z9eL(4Q$O_lMIy2-+Xq@uz0VGX>!s;D!?i;hk z@(j&Dy~W&D7`TP}$=Pe_haVSzDv{fvfw2kc$ulyth5O^bKXkMYL;3gz$ww@7pC%7^ z21d4U`=7sJi2D8Om}Bm`pE|W146CXKdHU0o1-#@vhEL+Gw(c`7(r)+-o#P^V%wA+-;r>LpI zLG=7OmZB|vNS;TL=NII;26X#&^EGB^yiQ}K#@jSLrtyHr zZjFD^7^~wjQDdsc`@t2OS^7>fD5Yr~&dUtdvDU@ijq*p%~ldC7ps#-A{c$OFXr zrKLQX&+}P7>yw@G{C`cq9L#6!H)|OW>#ys}^Oz}RRY1Oe=KJ@iTW1;l{n>XZ_7W4d;DU}i$ zPjdHLIH0!bG{!Uer+K!1x2ad8KtS!lg#!mD#JS1y!qcDq95 zxx3#Ho{)2SO7igf;eK+x)}feH5hZifkNO=83#59q-;wYY5^tGDc+fnwz+tm`-;`}A zbyl#Pz~j2MsEq8Dde)Rn#7gouxSV|{8qFhvtd#iHJR(IaU9Y~E?&_Dq$)crVmr^g8Qsc4y&?EL{ z>kakv|BL@$MIdxu%-_!(pG-oO!~R+*7m{a8Js@}V%!L{95Uf0`uX?-Dy1paplsr=y z+S2^+!SESq=m>KFImXuba}yWp{nZ-X8new7z888Lu%_W2VM@jg=bj z(D<0fCpG#t9@Y4v#=mQfJ7DHBQR6I)OErF9<3^2-Y22-GpT=Kk?9zBbV^E{@pqYQ>LZaYFD?B+g?p)!&yDnISv$>QCj z3J%1EZZ7kOoc%dZC|lZRYg2A-iY;3%n?ROE^^-v8<%1 zyogn1H@H?U=LDKb5_KmYEq3KsTzjp{6Y}I;!$x@?!ErBRE#(?@i%7zwzecG)N(9Z& zEvZx=%U)JNcB$kimhsyYxiYV)%+(u9S<$mhN>t1%&0Nl@Q~pr(WlH zxkNi`f3Agva|~KprJRtld;$C5A`b;({}Np+X;yI{of(mba=zNC_P7Fw267WKBCn+HbR(AnB2l{l|p&O^%YM-Xtg zk*&Jg;7wQG6YW*yPP4-(^YAb(3W&t$vZ5=+&(TZ>*V=<3?Wh8?qI2BN44p z57Sa8Ws!4{RD`T4ecpn-1&gwjY7om;EV*3xmqofLOBxOjbP8X(f#ytEOI!=odjlkE 
z)VC3rR}_~QEo9AQcIg_H6)q~w^QcMDAvU|T_c%|6VQ)#v#v)H?iLCitWvm7+BL;ho zcn@}{(XzC+?sf`~3A46#$zE^WHOZb4#FrwLFSZ5qe+LCIBjTZuHkO! z2T&i1mYF7-_)3k6R#^NCjJjDx|Ks@*gJ0<~YpR*N=?r4gTy{zeIWKw*&D$k16)UzG z)XKQ-xuT-nT|s~qKf3lw>6}>ou`=pYSNN-Hkui-o>o3)`r>(3}pBnxeBZW~{e~i+@ zAtf!RLn-wiA&D_C@hnjPHp>Y7uiIX^#&Kl*(Qm&^t$%msgXi{L^ULf*&S0}0IeLJDJIpWRr{UJX?D3-(d zf0k4Ck^b)q7^wf>eSDxlDcytn>rF7_j?pH1V+Yq8Xm8-Y`*ky2K*wd#K!tz#zWHX- z!Ty_BIZnI%%U$_zF?jm7PnhnU{=F6dE$;r8Wja`kFlD9-r)?IF|1uZM2Z-c01J z!{Om-jNsn-Et_wdtc5$@HJQ356K{SzAWQL7#?~qWedxc`aIC>$9zNy&&3`vV85*DZ zPwFqw`cI?(TKxz0W7Y5Kd+NV;diz`Jj1*T!DRt$s?;mrhb;qtd#x1r1+s$uup9p-Z z)SRuRXMg9!6|T~$$MLlPFvr{fQ@lxUhqc{wZTEi_XW^vK|MboBPd4j-EkbXhau1wC zNd3tce!QYKwAVN1NW4RzKP$}lQDo`;KFxp3C4XkVhdQJ+>yibld8g1i?)aA$WR#UV zGct@rgX#6&r~fB9J)8RT!iawa$6~_C9df$nXv4VFh=`andFPAsQg?gCgV0JH=+GlgMnTbx(mvFFj z18qx%4W=iz&W)SlIP2VIwwEVvGx1}+|Aj`&t)_gq#w3l?G|tg@k;Y{juhN*Oaf8Oq z8gJEjx5h^`HfwxJ<4YP}(fGE;-)a1_#&0yn=yHtGI9cONjTdNKqVY=XUdE5WIq>1@t;5%1 z%Np&f@jPmTj^HLvkS*|n_e0Oad%hT+yIS-_k#PNsqlXAbtn^Fen0==R8zkSY}b4TIC~oF3D8LacWb@_ zEIEsH0?6IqH`BR;1}`s`xczL(0$&3jg2bOLum>{o0;is%R44Kja4Xl;N8zjF+Ij|h z@ba7cO>(lTMGiL1X5A5d2Y7D^>p9`Q;66xf`oTT?u6`J~{JtJ_9?Q_+?O@zo%Jm>T zc;k0?9v{96e3*4h)$p=t`%jSJ8#r;E$veOq7m^1$Dd1|BrXGdoy-ey}Nc@-Os`(e2 zJnv&t_gq4pA3_c~GiVROgReuy@M?ilPea}CesI}BT}R-Dkk|}@(-%|rhshT_42ir8 zypm@YQjp8D3-2yRhcbq*PnGqrq4lnUf5_tg6Z(R-E3pGFcs}$2yxP1J8S^cuv+#>i}OXV!0ynF7RNfQlAJ9He64=u;ws)ZKJGtypsDtwa5j3 z0Cm6%9;x7tD0~2PY$WFJDd0~a=@+t!_D`A*f`>P;))$@d^^CI4F>*7pVj_yb=B-o1q~!h6A?Te0P(O@g8Ie}YSHqJ1J4yb>ye z7hD5vgAZS;C+quuUB&NQ$OYem{P2Ruh2R6=@SBMtyd8W9lD_N(ul|2{jvaX}IB6TP zt)na;=PRQxxWSL0wa5dvv4##Rgtz^O_uN1d!{y*6%~yh*P$qK06V?fv8!4uG8P_z2?z zHvQmPkI@F;lfiQz#}51q@G`H-53A>0ijdgP1RsW^ypMowkl6144`}`ncpMUaL0Q8g zyx1Ka^gzW0KUL833umhRbQmZb%} zz(?C^9whlX!N_KKc(3LKcWT}TzNGm>;8D#7LA9HFv1tdr zdz4Cp7km*Cn|^S3i*6P@R|dZ7yVrDik~yq!DoTnf58~oM0*2|{LteU5pEqWj=Vv5&TC#;V5knlJY(R9)=_if~()6Euo(S zUi>zG!e@YQK{5_>gFin8kDX31>o=x+*m35T&~o&TgAMP{2jTr-H{_;V0q{Mj96kvC 
z8LEVzaf097AgR+_@Bv7Cs|7!Nmva(2J>blMDNg}6YQ7SDLGyla*78JO|#1 zKPjMns6`z@E_fRx<*f$Ohq2xOxnMn{ni*rkAS5;gFN;}Fo4g%d14$dm0Y8JpX39v5+6alf5|kIYjK`)8TmVVA7K4>gGV&_$B`6dA z5IFP<%7soE_*8;fmVICl+9$q&&$0Hd7XEqg=FzMbg5L%{4T=4IU=Jid2(B4pQQ4vc zE=s}{{9>>U68%Q-oN*R)9Qh3JaY%gdg6-q+e>dX;_-9D`{2Yv9&79b`gXd^|23QIW zL%$OIDO3o*7d$$VH7D?bV_2t`2A=^w42hk+;J<6W8|=}%;53KnXA0=n{5G%_lC*+8 z%?rMx`2ZNyyx`c$v@!A>4|*U;TMoVfNgRUUhAF%!2%QhWbI&y8Gr$#^&jQ_$q!p~u zyx?JIFKGqOnQErZo^DaMLSnxf+^=~*_-D<34xXK?^8(*J7hBi~fX8Q8jPX-&|18Q( zTlItY%_c_Z)PjFZF=HD97p7X&*T@%xSD$Cr^;&Sj`PdOT`1yCpYY+7Z7GGf2uiy?y z%5^XJ(|HzSJl_kBxX|=11H1`JCv6qDA8Lg6gHiKYTLy0fuZ61M3&FV;(az!1!5m29 zBY646tT{oR1wI3HlJ7q7OGsk%HTc>kW}oT;JsGCF9Gt(vlnZ{P`LDsb3r+vi!K*dD z7W_5TMPA)t^dj`S& zS5c4fTfj%N86V)iV99Fwcq?rZ{38@Z9t5k`nEnfvewVrv9q`@nS(F1l0M@RhyyzPU zFJ(LlCR}6kg0HV5hUgpxFU@8ABJEo6W=Qk{;PyQ7LtXye&H5FzUu(8=!3|I+I&QEIx|h5f!QVj=^Byp=(6nO*8zC8! z1%D4o+E2haMHbaf+B7g9lJ>9-v=&>`BgmcL-=PH7>nYX|2B36!!HY^5-{3RAUqf=P zISf8pN;@F!e78kCdA%t=4#s=THa{Qy0Fp8at}L^tTUqPS19|nirL0<5J_drtKdqE#0c`e^=QFlSo9%{hUyYLhJH1Ky&0_*X5 zz~mY;hJrtWByBag<8I21&b^@8VU|TO6%wD*!1?!BR0cYNZ{AD2!gqr=-e<<43haU^ zksEkF?F7CPyyXGxGVj_3ehEpw;cEkB&0z3h#zXW4;~rrj-VS~XNqN#zy$1|#du^RM8UO^i9nr-IKy4*2K6Hz3LPC^&ql$=ku(At~b>VA?Koj1_zl$|bEI zywbU{Xu(hT0QlQhe0YMo2ES=DdDYIjql0{r3*Pqx?G61}(6^U*hZh|Blvx+U zz!{nstkk?|{{_Zm_^cN>zr4g)4?hEJf@3G+ z1bu)!gY|&hp-oi7p0`1JNh>ePd;!|R{ZD!I;n4S(7jaKjUL+_l4-_52jhdI2 z=>0Lbky%KN6orl7oQN_ateUzG5I@*bzDluJOlDDN}!ai2j@-bW<7puD5!b9g~{ zKabcEl=tF@Tu|PLBXU7`Kaj`;<=sFc7nJw(2yeWv2VQ(I-qq7%^76h8S^q9Ng7Q8M z;RWU02*L}>T5I72WzD|ug7RJf;RR*wz3_svE?#&+S^F-$psaBhUQpJK3oj_^WQ7-$ z^~=Hw%GzG>Sy0x&3NLsN5*=Bm8q~b{7i=rIUve3=3Az-@hMZ72v>4g~(mwq=exHHrpnITO zq3fY*pk>fp=)({BeE@nLItV=l)j`{#&5#RP*u%XpXeKlX8VN-}Uwp)UU+8V<|84^# zKz~OBRrNo@7>Xce5IL{^Q<)r-_eXM`c>IWXQoegl$GG6 zTpqX8)~5UuYv)Zd(vh;vRZzk6z?=0x5|4}Zi9G1<%<>d%2sMsyPr?< zXZIG4OOv${1`v$QVeyx$7(ItLm%kYwErA`|ADmo%I2pCkoa(8j>5*8!{R) z8?qX58Y&yA8mb$-4ZenshOUP0hCoA4L$KjwLrzm}Q(=?4siw)>)Y0T`>S_u!1)J1P 
z`%cHsl${wnvv%h0bnmR%S+mo-vty@!XV=ca&fre9%f8F8D`i*4uB=_TyWG2~cGc|i z?&{d(-_^A%uq(Jr`RqQ2FU6PP%kt&=+`cMbjnC`r@cDgRzJM?2Q_c2fM{`PZMsrqk zZnL|&s=21w+uYIYZ|-UiG;_Cjw|%!`cgpUJ-C4VHce{62?XKDF-QBUUHkCAlM-}Oh8{{`^CkL{DMdQv$n^P}JMpd?|9bH7BtF{k zG7&$M@iZO(e0aDIA3O1~8$WyS^d!F8@HP>DlkqqmpEL0~2fqvPyb|B5@xB)SeUxAy zW$2_7-ISxpD2a`-BvP7W%9BosGAUCIr7EOcm6WWyrKY8}#oOX*>1f&4;&16}>1ye2 z3AFUI1Y1tFs8(C6y*07b(VE>?+(8tkD`h329zD{4a zug7=NXKPMuPHs+b&TP(UE^MxBu5PYv_BHP##@)pCWV4OPCKJ`n-8s7pcUSJN-d#(4 z_YvD};(Btojff@_%}gR$NEEAyppWQv61ko|C->NhSu*j;BvysQshSx1h)*Z6=^-vQ zVv&aGITVh*sTY6h&TTWYHTV-2yTWy=KZC_hw zTX$Pe+sQUtdt!TXdwP3jdro^{du4lddu_X~eP4TLdv|+J`$>LNP$^uO12%7>H`$x+ z&GhDY3%!-zYHzLA=iTS+^mcoDyeGZ3y2QHVy7aotx}3Vgy2`rhy4pHl-M+fcy6(E3 zx|4Ob`o#L=`tP)jf0*U(8X z@1cj=8WS6n8`B#z8*>^98!H>D8*3YVjr$rq8@n5O8c#Obni89mo6?&yn{YORd5VlF zbyamWb>6y;I)7bPU7#*lr|RwXj{21PjQXtl+y^S4>{>HAxKx42`HQAdS zO({(oO<9ac?xw2$-;YQ&^w|#jYZrYrNI$jHM^osZS*^LP?$)Z-npSUXN2|ZJt2NLX zY*lUcHb+}ZTSi+}TW*`Xt*Wi2&D++|=5On23$z8>RJ*<1(Vo(t(Vo?w+wN|!YOiVc zw#&yZWM1H7WbbUs+F8h0Udd>#s^oVC=7#ju|NSu}m!9LM?^MxyYUn>+dQb;_2*X`w z+jrBByJ^FTwBJH~mg=O%gvQh?Mu%L+2e&ao)G$VP86`RxC;UEhbj@V+ucg-qcPBD( W7ShZ8^lrz$Y#i(>1781++Wucx6{ofU literal 0 HcmV?d00001 diff --git a/node_modules/bcrypt/package.json b/node_modules/bcrypt/package.json new file mode 100644 index 0000000..621fc1b --- /dev/null +++ b/node_modules/bcrypt/package.json @@ -0,0 +1,67 @@ +{ + "name": "bcrypt", + "description": "A bcrypt library for NodeJS.", + "keywords": [ + "bcrypt", + "password", + "auth", + "authentication", + "encryption", + "crypt", + "crypto" + ], + "main": "./bcrypt", + "version": "5.1.1", + "author": "Nick Campbell (https://github.com/ncb000gt)", + "engines": { + "node": ">= 10.0.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/kelektiv/node.bcrypt.js.git" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/kelektiv/node.bcrypt.js/issues" + }, + "scripts": { + "test": "npm ci --build-from-source && jest", + "install": 
"node-pre-gyp install --fallback-to-build" + }, + "dependencies": { + "@mapbox/node-pre-gyp": "^1.0.11", + "node-addon-api": "^5.0.0" + }, + "devDependencies": { + "jest": "^29.6.2" + }, + "contributors": [ + "Antonio Salazar Cardozo (https://github.com/Shadowfiend)", + "Van Nguyen (https://github.com/thegoleffect)", + "David Trejo (https://github.com/dtrejo)", + "Ben Glow (https://github.com/pixelglow)", + "NewITFarmer.com <> (https://github.com/newitfarmer)", + "Alfred Westerveld (https://github.com/alfredwesterveld)", + "Vincent Côté-Roy (https://github.com/vincentcr)", + "Lloyd Hilaiel (https://github.com/lloyd)", + "Roman Shtylman (https://github.com/shtylman)", + "Vadim Graboys (https://github.com/vadimg)", + "Ben Noorduis <> (https://github.com/bnoordhuis)", + "Nate Rajlich (https://github.com/tootallnate)", + "Sean McArthur (https://github.com/seanmonstar)", + "Fanie Oosthuysen (https://github.com/weareu)", + "Amitosh Swain Mahapatra (https://github.com/Agathver)", + "Corbin Crutchley (https://github.com/crutchcorn)", + "Nicola Del Gobbo (https://github.com/NickNaso)" + ], + "binary": { + "module_name": "bcrypt_lib", + "module_path": "./lib/binding/napi-v{napi_build_version}", + "package_name": "{module_name}-v{version}-napi-v{napi_build_version}-{platform}-{arch}-{libc}.tar.gz", + "host": "https://github.com", + "remote_path": "kelektiv/node.bcrypt.js/releases/download/v{version}", + "napi_versions": [ + 3 + ] + } +} diff --git a/node_modules/bcrypt/promises.js b/node_modules/bcrypt/promises.js new file mode 100644 index 0000000..cd82014 --- /dev/null +++ b/node_modules/bcrypt/promises.js @@ -0,0 +1,42 @@ +'use strict'; + +var Promise = global.Promise; + +/// encapsulate a method with a node-style callback in a Promise +/// @param {object} 'this' of the encapsulated function +/// @param {function} function to be encapsulated +/// @param {Array-like} args to be passed to the called function +/// @return {Promise} a Promise encapsulating the function 
+module.exports.promise = function (fn, context, args) { + + if (!Array.isArray(args)) { + args = Array.prototype.slice.call(args); + } + + if (typeof fn !== 'function') { + return Promise.reject(new Error('fn must be a function')); + } + + return new Promise(function(resolve, reject) { + args.push(function(err, data) { + if (err) { + reject(err); + } else { + resolve(data); + } + }); + + fn.apply(context, args); + }); +}; + +/// @param {err} the error to be thrown +module.exports.reject = function (err) { + return Promise.reject(err); +}; + +/// changes the promise implementation that bcrypt uses +/// @param {Promise} the implementation to use +module.exports.use = function(promise) { + Promise = promise; +}; diff --git a/node_modules/bcrypt/src/bcrypt.cc b/node_modules/bcrypt/src/bcrypt.cc new file mode 100644 index 0000000..bd8c573 --- /dev/null +++ b/node_modules/bcrypt/src/bcrypt.cc @@ -0,0 +1,315 @@ +/* $OpenBSD: bcrypt.c,v 1.31 2014/03/22 23:02:03 tedu Exp $ */ + +/* + * Copyright (c) 1997 Niels Provos + * + * Permission to use, copy, modify, and distribute this software for any + * purpose with or without fee is hereby granted, provided that the above + * copyright notice and this permission notice appear in all copies. + * + * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES + * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF + * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR + * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES + * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN + * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF + * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + */ + +/* This password hashing algorithm was designed by David Mazieres + * and works as follows: + * + * 1. state := InitState () + * 2. state := ExpandKey (state, salt, password) + * 3. 
REPEAT rounds: + * state := ExpandKey (state, 0, password) + * state := ExpandKey (state, 0, salt) + * 4. ctext := "OrpheanBeholderScryDoubt" + * 5. REPEAT 64: + * ctext := Encrypt_ECB (state, ctext); + * 6. RETURN Concatenate (salt, ctext); + * + */ + +#include +#include +#include +#include + +#include "node_blf.h" + +#ifdef _WIN32 +#define snprintf _snprintf +#endif + +//#if !defined(__APPLE__) && !defined(__MACH__) +//#include "bsd/stdlib.h" +//#endif + +/* This implementation is adaptable to current computing power. + * You can have up to 2^31 rounds which should be enough for some + * time to come. + */ + +static void encode_base64(u_int8_t *, u_int8_t *, u_int16_t); +static void decode_base64(u_int8_t *, u_int16_t, u_int8_t *); + +const static char* error = ":"; + +const static u_int8_t Base64Code[] = +"./ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"; + +const static u_int8_t index_64[128] = { + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, + 255, 255, 255, 255, 255, 255, 0, 1, 54, 55, + 56, 57, 58, 59, 60, 61, 62, 63, 255, 255, + 255, 255, 255, 255, 255, 2, 3, 4, 5, 6, + 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, + 255, 255, 255, 255, 255, 255, 28, 29, 30, + 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, + 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, + 51, 52, 53, 255, 255, 255, 255, 255 +}; +#define CHAR64(c) ( (c) > 127 ? 
255 : index_64[(c)]) + +static void +decode_base64(u_int8_t *buffer, u_int16_t len, u_int8_t *data) +{ + u_int8_t *bp = buffer; + u_int8_t *p = data; + u_int8_t c1, c2, c3, c4; + while (bp < buffer + len) { + c1 = CHAR64(*p); + c2 = CHAR64(*(p + 1)); + + /* Invalid data */ + if (c1 == 255 || c2 == 255) + break; + + *bp++ = (c1 << 2) | ((c2 & 0x30) >> 4); + if (bp >= buffer + len) + break; + + c3 = CHAR64(*(p + 2)); + if (c3 == 255) + break; + + *bp++ = ((c2 & 0x0f) << 4) | ((c3 & 0x3c) >> 2); + if (bp >= buffer + len) + break; + + c4 = CHAR64(*(p + 3)); + if (c4 == 255) + break; + *bp++ = ((c3 & 0x03) << 6) | c4; + + p += 4; + } +} + +void +encode_salt(char *salt, u_int8_t *csalt, char minor, u_int16_t clen, u_int8_t logr) +{ + salt[0] = '$'; + salt[1] = BCRYPT_VERSION; + salt[2] = minor; + salt[3] = '$'; + + // Max rounds are 31 + snprintf(salt + 4, 4, "%2.2u$", logr & 0x001F); + + encode_base64((u_int8_t *) salt + 7, csalt, clen); +} + + +/* Generates a salt for this version of crypt. + Since versions may change. Keeping this here + seems sensible. + from: http://mail-index.netbsd.org/tech-crypto/2002/05/24/msg000204.html +*/ +void +bcrypt_gensalt(char minor, u_int8_t log_rounds, u_int8_t *seed, char *gsalt) +{ + if (log_rounds < 4) + log_rounds = 4; + else if (log_rounds > 31) + log_rounds = 31; + + encode_salt(gsalt, seed, minor, BCRYPT_MAXSALT, log_rounds); +} + +/* We handle $Vers$log2(NumRounds)$salt+passwd$ + i.e. $2$04$iwouldntknowwhattosayetKdJ6iFtacBqJdKe6aW7ou */ + +void +bcrypt(const char *key, size_t key_len, const char *salt, char *encrypted) +{ + blf_ctx state; + u_int32_t rounds, i, k; + u_int16_t j; + u_int8_t salt_len, logr, minor; + u_int8_t ciphertext[4 * BCRYPT_BLOCKS+1] = "OrpheanBeholderScryDoubt"; + u_int8_t csalt[BCRYPT_MAXSALT]; + u_int32_t cdata[BCRYPT_BLOCKS]; + int n; + + /* Discard "$" identifier */ + salt++; + + if (*salt > BCRYPT_VERSION) { + /* How do I handle errors ? 
Return ':' */ + strcpy(encrypted, error); + return; + } + + /* Check for minor versions */ + if (salt[1] != '$') { + switch (salt[1]) { + case 'a': /* 'ab' should not yield the same as 'abab' */ + case 'b': /* cap input length at 72 bytes */ + minor = salt[1]; + salt++; + break; + default: + strcpy(encrypted, error); + return; + } + } else + minor = 0; + + /* Discard version + "$" identifier */ + salt += 2; + + if (salt[2] != '$') { + /* Out of sync with passwd entry */ + strcpy(encrypted, error); + return; + } + + /* Computer power doesn't increase linear, 2^x should be fine */ + n = atoi(salt); + if (n > 31 || n < 0) { + strcpy(encrypted, error); + return; + } + logr = (u_int8_t)n; + if ((rounds = (u_int32_t) 1 << logr) < BCRYPT_MINROUNDS) { + strcpy(encrypted, error); + return; + } + + /* Discard num rounds + "$" identifier */ + salt += 3; + + if (strlen(salt) * 3 / 4 < BCRYPT_MAXSALT) { + strcpy(encrypted, error); + return; + } + + /* We dont want the base64 salt but the raw data */ + decode_base64(csalt, BCRYPT_MAXSALT, (u_int8_t *) salt); + salt_len = BCRYPT_MAXSALT; + if (minor <= 'a') + key_len = (u_int8_t)(key_len + (minor >= 'a' ? 
1 : 0)); + else + { + /* cap key_len at the actual maximum supported + * length here to avoid integer wraparound */ + if (key_len > 72) + key_len = 72; + key_len++; /* include the NUL */ + } + + + /* Setting up S-Boxes and Subkeys */ + Blowfish_initstate(&state); + Blowfish_expandstate(&state, csalt, salt_len, + (u_int8_t *) key, key_len); + for (k = 0; k < rounds; k++) { + Blowfish_expand0state(&state, (u_int8_t *) key, key_len); + Blowfish_expand0state(&state, csalt, salt_len); + } + + /* This can be precomputed later */ + j = 0; + for (i = 0; i < BCRYPT_BLOCKS; i++) + cdata[i] = Blowfish_stream2word(ciphertext, 4 * BCRYPT_BLOCKS, &j); + + /* Now do the encryption */ + for (k = 0; k < 64; k++) + blf_enc(&state, cdata, BCRYPT_BLOCKS / 2); + + for (i = 0; i < BCRYPT_BLOCKS; i++) { + ciphertext[4 * i + 3] = cdata[i] & 0xff; + cdata[i] = cdata[i] >> 8; + ciphertext[4 * i + 2] = cdata[i] & 0xff; + cdata[i] = cdata[i] >> 8; + ciphertext[4 * i + 1] = cdata[i] & 0xff; + cdata[i] = cdata[i] >> 8; + ciphertext[4 * i + 0] = cdata[i] & 0xff; + } + + i = 0; + encrypted[i++] = '$'; + encrypted[i++] = BCRYPT_VERSION; + if (minor) + encrypted[i++] = minor; + encrypted[i++] = '$'; + + snprintf(encrypted + i, 4, "%2.2u$", logr & 0x001F); + + encode_base64((u_int8_t *) encrypted + i + 3, csalt, BCRYPT_MAXSALT); + encode_base64((u_int8_t *) encrypted + strlen(encrypted), ciphertext, + 4 * BCRYPT_BLOCKS - 1); + memset(&state, 0, sizeof(state)); + memset(ciphertext, 0, sizeof(ciphertext)); + memset(csalt, 0, sizeof(csalt)); + memset(cdata, 0, sizeof(cdata)); +} + +u_int32_t bcrypt_get_rounds(const char * hash) +{ + /* skip past the leading "$" */ + if (!hash || *(hash++) != '$') return 0; + + /* skip past version */ + if (0 == (*hash++)) return 0; + if (*hash && *hash != '$') hash++; + if (*hash++ != '$') return 0; + + return atoi(hash); +} + +static void +encode_base64(u_int8_t *buffer, u_int8_t *data, u_int16_t len) +{ + u_int8_t *bp = buffer; + u_int8_t *p = data; + u_int8_t c1, 
c2; + while (p < data + len) { + c1 = *p++; + *bp++ = Base64Code[(c1 >> 2)]; + c1 = (c1 & 0x03) << 4; + if (p >= data + len) { + *bp++ = Base64Code[c1]; + break; + } + c2 = *p++; + c1 |= (c2 >> 4) & 0x0f; + *bp++ = Base64Code[c1]; + c1 = (c2 & 0x0f) << 2; + if (p >= data + len) { + *bp++ = Base64Code[c1]; + break; + } + c2 = *p++; + c1 |= (c2 >> 6) & 0x03; + *bp++ = Base64Code[c1]; + *bp++ = Base64Code[c2 & 0x3f]; + } + *bp = '\0'; +} diff --git a/node_modules/bcrypt/src/bcrypt_node.cc b/node_modules/bcrypt/src/bcrypt_node.cc new file mode 100644 index 0000000..2f072a4 --- /dev/null +++ b/node_modules/bcrypt/src/bcrypt_node.cc @@ -0,0 +1,288 @@ +#define NAPI_VERSION 3 + +#include + +#include +#include +#include +#include // atoi + +#include "node_blf.h" + +#define NODE_LESS_THAN (!(NODE_VERSION_AT_LEAST(0, 5, 4))) + +namespace { + + bool ValidateSalt(const char* salt) { + + if (!salt || *salt != '$') { + return false; + } + + // discard $ + salt++; + + if (*salt > BCRYPT_VERSION) { + return false; + } + + if (salt[1] != '$') { + switch (salt[1]) { + case 'a': + case 'b': + salt++; + break; + default: + return false; + } + } + + // discard version + $ + salt += 2; + + if (salt[2] != '$') { + return false; + } + + int n = atoi(salt); + if (n > 31 || n < 0) { + return false; + } + + if (((uint8_t)1 << (uint8_t)n) < BCRYPT_MINROUNDS) { + return false; + } + + salt += 3; + if (strlen(salt) * 3 / 4 < BCRYPT_MAXSALT) { + return false; + } + + return true; + } + + inline char ToCharVersion(const std::string& str) { + return str[0]; + } + + /* SALT GENERATION */ + + class SaltAsyncWorker : public Napi::AsyncWorker { + public: + SaltAsyncWorker(const Napi::Function& callback, const std::string& seed, ssize_t rounds, char minor_ver) + : Napi::AsyncWorker(callback, "bcrypt:SaltAsyncWorker"), seed(seed), rounds(rounds), minor_ver(minor_ver) { + } + + ~SaltAsyncWorker() {} + + void Execute() { + bcrypt_gensalt(minor_ver, rounds, (u_int8_t *)&seed[0], salt); + } + + void OnOK() { 
+ Napi::HandleScope scope(Env()); + Callback().Call({Env().Undefined(), Napi::String::New(Env(), salt)}); + } + + private: + std::string seed; + ssize_t rounds; + char minor_ver; + char salt[_SALT_LEN]; + }; + + Napi::Value GenerateSalt(const Napi::CallbackInfo& info) { + Napi::Env env = info.Env(); + if (info.Length() < 4) { + throw Napi::TypeError::New(env, "4 arguments expected"); + } + if (!info[0].IsString()) { + throw Napi::TypeError::New(env, "First argument must be a string"); + } + if (!info[2].IsBuffer() || (info[2].As>()).Length() != 16) { + throw Napi::TypeError::New(env, "Second argument must be a 16 byte Buffer"); + } + + const char minor_ver = ToCharVersion(info[0].As()); + const int32_t rounds = info[1].As(); + Napi::Buffer seed = info[2].As>(); + Napi::Function callback = info[3].As(); + SaltAsyncWorker* saltWorker = new SaltAsyncWorker(callback, std::string(seed.Data(), 16), rounds, minor_ver); + saltWorker->Queue(); + return env.Undefined(); + } + + Napi::Value GenerateSaltSync(const Napi::CallbackInfo& info) { + Napi::Env env = info.Env(); + if (info.Length() < 3) { + throw Napi::TypeError::New(env, "3 arguments expected"); + } + if (!info[0].IsString()) { + throw Napi::TypeError::New(env, "First argument must be a string"); + } + if (!info[2].IsBuffer() || (info[2].As>()).Length() != 16) { + throw Napi::TypeError::New(env, "Third argument must be a 16 byte Buffer"); + } + const char minor_ver = ToCharVersion(info[0].As()); + const int32_t rounds = info[1].As(); + Napi::Buffer buffer = info[2].As>(); + u_int8_t* seed = (u_int8_t*) buffer.Data(); + char salt[_SALT_LEN]; + bcrypt_gensalt(minor_ver, rounds, seed, salt); + return Napi::String::New(env, salt, strlen(salt)); + } + + inline std::string BufferToString(const Napi::Buffer &buf) { + return std::string(buf.Data(), buf.Length()); + } + + /* ENCRYPT DATA - USED TO BE HASHPW */ + + class EncryptAsyncWorker : public Napi::AsyncWorker { + public: + EncryptAsyncWorker(const Napi::Function& 
callback, const std::string& input, const std::string& salt) + : Napi::AsyncWorker(callback, "bcrypt:EncryptAsyncWorker"), input(input), salt(salt) { + } + + ~EncryptAsyncWorker() {} + + void Execute() { + if (!(ValidateSalt(salt.c_str()))) { + SetError("Invalid salt. Salt must be in the form of: $Vers$log2(NumRounds)$saltvalue"); + } + bcrypt(input.c_str(), input.length(), salt.c_str(), bcrypted); + } + + void OnOK() { + Napi::HandleScope scope(Env()); + Callback().Call({Env().Undefined(),Napi::String::New(Env(), bcrypted)}); + } + private: + std::string input; + std::string salt; + char bcrypted[_PASSWORD_LEN]; + }; + + Napi::Value Encrypt(const Napi::CallbackInfo& info) { + if (info.Length() < 3) { + throw Napi::TypeError::New(info.Env(), "3 arguments expected"); + } + std::string data = info[0].IsBuffer() + ? BufferToString(info[0].As>()) + : info[0].As(); + std::string salt = info[1].As(); + Napi::Function callback = info[2].As(); + EncryptAsyncWorker* encryptWorker = new EncryptAsyncWorker(callback, data, salt); + encryptWorker->Queue(); + return info.Env().Undefined(); + } + + Napi::Value EncryptSync(const Napi::CallbackInfo& info) { + Napi::Env env = info.Env(); + if (info.Length() < 2) { + throw Napi::TypeError::New(info.Env(), "2 arguments expected"); + } + std::string data = info[0].IsBuffer() + ? BufferToString(info[0].As>()) + : info[0].As(); + std::string salt = info[1].As(); + if (!(ValidateSalt(salt.c_str()))) { + throw Napi::Error::New(env, "Invalid salt. 
Salt must be in the form of: $Vers$log2(NumRounds)$saltvalue"); + } + char bcrypted[_PASSWORD_LEN]; + bcrypt(data.c_str(), data.length(), salt.c_str(), bcrypted); + return Napi::String::New(env, bcrypted, strlen(bcrypted)); + } + + /* COMPARATOR */ + inline bool CompareStrings(const char* s1, const char* s2) { + return strcmp(s1, s2) == 0; + } + + class CompareAsyncWorker : public Napi::AsyncWorker { + public: + CompareAsyncWorker(const Napi::Function& callback, const std::string& input, const std::string& encrypted) + : Napi::AsyncWorker(callback, "bcrypt:CompareAsyncWorker"), input(input), encrypted(encrypted) { + result = false; + } + + ~CompareAsyncWorker() {} + + void Execute() { + char bcrypted[_PASSWORD_LEN]; + if (ValidateSalt(encrypted.c_str())) { + bcrypt(input.c_str(), input.length(), encrypted.c_str(), bcrypted); + result = CompareStrings(bcrypted, encrypted.c_str()); + } + } + + void OnOK() { + Napi::HandleScope scope(Env()); + Callback().Call({Env().Undefined(), Napi::Boolean::New(Env(), result)}); + } + + private: + std::string input; + std::string encrypted; + bool result; + }; + + Napi::Value Compare(const Napi::CallbackInfo& info) { + if (info.Length() < 3) { + throw Napi::TypeError::New(info.Env(), "3 arguments expected"); + } + std::string input = info[0].IsBuffer() + ? BufferToString(info[0].As>()) + : info[0].As(); + std::string encrypted = info[1].As(); + Napi::Function callback = info[2].As(); + CompareAsyncWorker* compareWorker = new CompareAsyncWorker(callback, input, encrypted); + compareWorker->Queue(); + return info.Env().Undefined(); + } + + Napi::Value CompareSync(const Napi::CallbackInfo& info) { + Napi::Env env = info.Env(); + if (info.Length() < 2) { + throw Napi::TypeError::New(info.Env(), "2 arguments expected"); + } + std::string pw = info[0].IsBuffer() + ? 
BufferToString(info[0].As>()) + : info[0].As(); + std::string hash = info[1].As(); + char bcrypted[_PASSWORD_LEN]; + if (ValidateSalt(hash.c_str())) { + bcrypt(pw.c_str(), pw.length(), hash.c_str(), bcrypted); + return Napi::Boolean::New(env, CompareStrings(bcrypted, hash.c_str())); + } else { + return Napi::Boolean::New(env, false); + } + } + + Napi::Value GetRounds(const Napi::CallbackInfo& info) { + Napi::Env env = info.Env(); + if (info.Length() < 1) { + throw Napi::TypeError::New(env, "1 argument expected"); + } + std::string hash = info[0].As(); + u_int32_t rounds; + if (!(rounds = bcrypt_get_rounds(hash.c_str()))) { + throw Napi::Error::New(env, "invalid hash provided"); + } + return Napi::Number::New(env, rounds); + } + +} // anonymous namespace + +Napi::Object init(Napi::Env env, Napi::Object exports) { + exports.Set(Napi::String::New(env, "gen_salt_sync"), Napi::Function::New(env, GenerateSaltSync)); + exports.Set(Napi::String::New(env, "encrypt_sync"), Napi::Function::New(env, EncryptSync)); + exports.Set(Napi::String::New(env, "compare_sync"), Napi::Function::New(env, CompareSync)); + exports.Set(Napi::String::New(env, "get_rounds"), Napi::Function::New(env, GetRounds)); + exports.Set(Napi::String::New(env, "gen_salt"), Napi::Function::New(env, GenerateSalt)); + exports.Set(Napi::String::New(env, "encrypt"), Napi::Function::New(env, Encrypt)); + exports.Set(Napi::String::New(env, "compare"), Napi::Function::New(env, Compare)); + return exports; +} + +NODE_API_MODULE(NODE_GYP_MODULE_NAME, init) diff --git a/node_modules/bcrypt/src/blowfish.cc b/node_modules/bcrypt/src/blowfish.cc new file mode 100644 index 0000000..1fc6cf1 --- /dev/null +++ b/node_modules/bcrypt/src/blowfish.cc @@ -0,0 +1,679 @@ +/* $OpenBSD: blowfish.c,v 1.18 2004/11/02 17:23:26 hshoexer Exp $ */ +/* + * Blowfish block cipher for OpenBSD + * Copyright 1997 Niels Provos + * All rights reserved. + * + * Implementation advice by David Mazieres . 
+ * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * This product includes software developed by Niels Provos. + * 4. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +/* + * This code is derived from section 14.3 and the given source + * in section V of Applied Cryptography, second edition. + * Blowfish is an unpatented fast block cipher designed by + * Bruce Schneier. 
+ */ + +#include "node_blf.h" + +#undef inline +#ifdef __GNUC__ +#define inline __inline +#else /* !__GNUC__ */ +#define inline +#endif /* !__GNUC__ */ + +/* Function for Feistel Networks */ + +#define F(s, x) ((((s)[ (((x)>>24)&0xFF)] \ + + (s)[0x100 + (((x)>>16)&0xFF)]) \ + ^ (s)[0x200 + (((x)>> 8)&0xFF)]) \ + + (s)[0x300 + ( (x) &0xFF)]) + +#define BLFRND(s,p,i,j,n) (i ^= F(s,j) ^ (p)[n]) + +void +Blowfish_encipher(blf_ctx *c, u_int32_t *xl, u_int32_t *xr) +{ + u_int32_t Xl; + u_int32_t Xr; + u_int32_t *s = c->S[0]; + u_int32_t *p = c->P; + + Xl = *xl; + Xr = *xr; + + Xl ^= p[0]; + BLFRND(s, p, Xr, Xl, 1); BLFRND(s, p, Xl, Xr, 2); + BLFRND(s, p, Xr, Xl, 3); BLFRND(s, p, Xl, Xr, 4); + BLFRND(s, p, Xr, Xl, 5); BLFRND(s, p, Xl, Xr, 6); + BLFRND(s, p, Xr, Xl, 7); BLFRND(s, p, Xl, Xr, 8); + BLFRND(s, p, Xr, Xl, 9); BLFRND(s, p, Xl, Xr, 10); + BLFRND(s, p, Xr, Xl, 11); BLFRND(s, p, Xl, Xr, 12); + BLFRND(s, p, Xr, Xl, 13); BLFRND(s, p, Xl, Xr, 14); + BLFRND(s, p, Xr, Xl, 15); BLFRND(s, p, Xl, Xr, 16); + + *xl = Xr ^ p[17]; + *xr = Xl; +} + +void +Blowfish_decipher(blf_ctx *c, u_int32_t *xl, u_int32_t *xr) +{ + u_int32_t Xl; + u_int32_t Xr; + u_int32_t *s = c->S[0]; + u_int32_t *p = c->P; + + Xl = *xl; + Xr = *xr; + + Xl ^= p[17]; + BLFRND(s, p, Xr, Xl, 16); BLFRND(s, p, Xl, Xr, 15); + BLFRND(s, p, Xr, Xl, 14); BLFRND(s, p, Xl, Xr, 13); + BLFRND(s, p, Xr, Xl, 12); BLFRND(s, p, Xl, Xr, 11); + BLFRND(s, p, Xr, Xl, 10); BLFRND(s, p, Xl, Xr, 9); + BLFRND(s, p, Xr, Xl, 8); BLFRND(s, p, Xl, Xr, 7); + BLFRND(s, p, Xr, Xl, 6); BLFRND(s, p, Xl, Xr, 5); + BLFRND(s, p, Xr, Xl, 4); BLFRND(s, p, Xl, Xr, 3); + BLFRND(s, p, Xr, Xl, 2); BLFRND(s, p, Xl, Xr, 1); + + *xl = Xr ^ p[0]; + *xr = Xl; +} + +void +Blowfish_initstate(blf_ctx *c) +{ + /* P-box and S-box tables initialized with digits of Pi */ + + static const blf_ctx initstate = + { { + { + 0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, + 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, + 0x24a19947, 0xb3916cf7, 0x0801f2e2, 
0x858efc16, + 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, + 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, + 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, + 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, + 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, + 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, + 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, + 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, + 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, + 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, + 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, + 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, + 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, + 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, + 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, + 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, + 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, + 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, + 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, + 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, + 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, + 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, + 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, + 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, + 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, + 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, + 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, + 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, + 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, + 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, + 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, + 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, + 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, + 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, + 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, + 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, + 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, + 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, + 0x10fa3d98, 0xfd2183b8, 0x4afcb56c, 0x2dd1d35b, + 0x9a53e479, 0xb6f84565, 0xd28e49bc, 
0x4bfb9790, + 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, + 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, + 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, + 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, + 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, + 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, + 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, + 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, + 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, + 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, + 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, + 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, + 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, + 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, + 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, + 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, + 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, + 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, + 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, + 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, + 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a}, + { + 0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, + 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, + 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, + 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, + 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, + 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, + 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, + 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, + 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, + 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, + 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, + 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, + 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, + 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, + 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, + 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, + 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, + 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, + 0xde9a771f, 0xd9930810, 
0xb38bae12, 0xdccf3f2e, + 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, + 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, + 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, + 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, + 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, + 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, + 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, + 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, + 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, + 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, + 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, + 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, + 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, + 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, + 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, + 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, + 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, + 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, + 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, + 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, + 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, + 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, + 0x40685a32, 0x3c2ab4b3, 0x319ee9d5, 0xc021b8f7, + 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, + 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, + 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, + 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, + 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, + 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, + 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, + 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, + 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, + 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, + 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, + 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, + 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, + 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, + 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, + 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, + 0xa6078084, 0x19f8509e, 
0xe8efd855, 0x61d99735, + 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, + 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, + 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, + 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, + 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7}, + { + 0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, + 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, + 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, + 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, + 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, + 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504, + 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, + 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, + 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, + 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, + 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, + 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, + 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, + 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, + 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, + 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, + 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, + 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, + 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, + 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, + 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, + 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, + 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, + 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, + 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, + 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, + 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, + 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, + 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, + 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, + 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, + 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, + 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, + 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, + 0x37392eb3, 
0xcc115979, 0x8026e297, 0xf42e312d, + 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, + 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, + 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, + 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, + 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, + 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, + 0xd1fd8346, 0xf6381fb0, 0x7745ae04, 0xd736fccc, + 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, + 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, + 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, + 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, + 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, + 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, + 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, + 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, + 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, + 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, + 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, + 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, + 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, + 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, + 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, + 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, + 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, + 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, + 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, + 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, + 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, + 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0}, + { + 0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, + 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, + 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, + 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, + 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, + 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, + 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, + 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, + 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, + 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, + 
0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, + 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, + 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, + 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, + 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, + 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, + 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, + 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, + 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, + 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, + 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, + 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, + 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, + 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, + 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, + 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, + 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, + 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, + 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, + 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, + 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, + 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, + 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, + 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, + 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, + 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, + 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, + 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, + 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, + 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, + 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, + 0x0339c32a, 0xc6913667, 0x8df9317c, 0xe0b12b4f, + 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, + 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, + 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, + 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, + 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, + 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, + 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, + 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, + 
0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, + 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, + 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, + 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, + 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, + 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, + 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, + 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, + 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, + 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, + 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, + 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, + 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, + 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6} + }, + { + 0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, + 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, + 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, + 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, + 0x9216d5d9, 0x8979fb1b + } }; + + *c = initstate; +} + +u_int32_t +Blowfish_stream2word(const u_int8_t *data, u_int16_t databytes, + u_int16_t *current) +{ + u_int8_t i; + u_int16_t j; + u_int32_t temp; + + temp = 0x00000000; + j = *current; + + for (i = 0; i < 4; i++, j++) { + if (j >= databytes) + j = 0; + temp = (temp << 8) | data[j]; + } + + *current = j; + return temp; +} + +void +Blowfish_expand0state(blf_ctx *c, const u_int8_t *key, u_int16_t keybytes) +{ + u_int16_t i; + u_int16_t j; + u_int16_t k; + u_int32_t temp; + u_int32_t datal; + u_int32_t datar; + + j = 0; + for (i = 0; i < BLF_N + 2; i++) { + /* Extract 4 int8 to 1 int32 from keystream */ + temp = Blowfish_stream2word(key, keybytes, &j); + c->P[i] = c->P[i] ^ temp; + } + + j = 0; + datal = 0x00000000; + datar = 0x00000000; + for (i = 0; i < BLF_N + 2; i += 2) { + Blowfish_encipher(c, &datal, &datar); + + c->P[i] = datal; + c->P[i + 1] = datar; + } + + for (i = 0; i < 4; i++) { + for (k = 0; k < 256; k += 2) { + Blowfish_encipher(c, &datal, &datar); + + c->S[i][k] = datal; + c->S[i][k + 1] = datar; + } + } 
+} + + +void +Blowfish_expandstate(blf_ctx *c, const u_int8_t *data, u_int16_t databytes, + const u_int8_t *key, u_int16_t keybytes) +{ + u_int16_t i; + u_int16_t j; + u_int16_t k; + u_int32_t temp; + u_int32_t datal; + u_int32_t datar; + + j = 0; + for (i = 0; i < BLF_N + 2; i++) { + /* Extract 4 int8 to 1 int32 from keystream */ + temp = Blowfish_stream2word(key, keybytes, &j); + c->P[i] = c->P[i] ^ temp; + } + + j = 0; + datal = 0x00000000; + datar = 0x00000000; + for (i = 0; i < BLF_N + 2; i += 2) { + datal ^= Blowfish_stream2word(data, databytes, &j); + datar ^= Blowfish_stream2word(data, databytes, &j); + Blowfish_encipher(c, &datal, &datar); + + c->P[i] = datal; + c->P[i + 1] = datar; + } + + for (i = 0; i < 4; i++) { + for (k = 0; k < 256; k += 2) { + datal ^= Blowfish_stream2word(data, databytes, &j); + datar ^= Blowfish_stream2word(data, databytes, &j); + Blowfish_encipher(c, &datal, &datar); + + c->S[i][k] = datal; + c->S[i][k + 1] = datar; + } + } + +} + +void +blf_key(blf_ctx *c, const u_int8_t *k, u_int16_t len) +{ + /* Initialize S-boxes and subkeys with Pi */ + Blowfish_initstate(c); + + /* Transform S-boxes and subkeys with key */ + Blowfish_expand0state(c, k, len); +} + +void +blf_enc(blf_ctx *c, u_int32_t *data, u_int16_t blocks) +{ + u_int32_t *d; + u_int16_t i; + + d = data; + for (i = 0; i < blocks; i++) { + Blowfish_encipher(c, d, d + 1); + d += 2; + } +} + +void +blf_dec(blf_ctx *c, u_int32_t *data, u_int16_t blocks) +{ + u_int32_t *d; + u_int16_t i; + + d = data; + for (i = 0; i < blocks; i++) { + Blowfish_decipher(c, d, d + 1); + d += 2; + } +} + +void +blf_ecb_encrypt(blf_ctx *c, u_int8_t *data, u_int32_t len) +{ + u_int32_t l, r; + u_int32_t i; + + for (i = 0; i < len; i += 8) { + l = data[0] << 24 | data[1] << 16 | data[2] << 8 | data[3]; + r = data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]; + Blowfish_encipher(c, &l, &r); + data[0] = l >> 24 & 0xff; + data[1] = l >> 16 & 0xff; + data[2] = l >> 8 & 0xff; + data[3] = l & 0xff; + 
data[4] = r >> 24 & 0xff; + data[5] = r >> 16 & 0xff; + data[6] = r >> 8 & 0xff; + data[7] = r & 0xff; + data += 8; + } +} + +void +blf_ecb_decrypt(blf_ctx *c, u_int8_t *data, u_int32_t len) +{ + u_int32_t l, r; + u_int32_t i; + + for (i = 0; i < len; i += 8) { + l = data[0] << 24 | data[1] << 16 | data[2] << 8 | data[3]; + r = data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]; + Blowfish_decipher(c, &l, &r); + data[0] = l >> 24 & 0xff; + data[1] = l >> 16 & 0xff; + data[2] = l >> 8 & 0xff; + data[3] = l & 0xff; + data[4] = r >> 24 & 0xff; + data[5] = r >> 16 & 0xff; + data[6] = r >> 8 & 0xff; + data[7] = r & 0xff; + data += 8; + } +} + +void +blf_cbc_encrypt(blf_ctx *c, u_int8_t *iv, u_int8_t *data, u_int32_t len) +{ + u_int32_t l, r; + u_int32_t i, j; + + for (i = 0; i < len; i += 8) { + for (j = 0; j < 8; j++) + data[j] ^= iv[j]; + l = data[0] << 24 | data[1] << 16 | data[2] << 8 | data[3]; + r = data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]; + Blowfish_encipher(c, &l, &r); + data[0] = l >> 24 & 0xff; + data[1] = l >> 16 & 0xff; + data[2] = l >> 8 & 0xff; + data[3] = l & 0xff; + data[4] = r >> 24 & 0xff; + data[5] = r >> 16 & 0xff; + data[6] = r >> 8 & 0xff; + data[7] = r & 0xff; + iv = data; + data += 8; + } +} + +void +blf_cbc_decrypt(blf_ctx *c, u_int8_t *iva, u_int8_t *data, u_int32_t len) +{ + u_int32_t l, r; + u_int8_t *iv; + u_int32_t i, j; + + iv = data + len - 16; + data = data + len - 8; + for (i = len - 8; i >= 8; i -= 8) { + l = data[0] << 24 | data[1] << 16 | data[2] << 8 | data[3]; + r = data[4] << 24 | data[5] << 16 | data[6] << 8 | data[7]; + Blowfish_decipher(c, &l, &r); + data[0] = l >> 24 & 0xff; + data[1] = l >> 16 & 0xff; + data[2] = l >> 8 & 0xff; + data[3] = l & 0xff; + data[4] = r >> 24 & 0xff; + data[5] = r >> 16 & 0xff; + data[6] = r >> 8 & 0xff; + data[7] = r & 0xff; + for (j = 0; j < 8; j++) + data[j] ^= iv[j]; + iv -= 8; + data -= 8; + } + l = data[0] << 24 | data[1] << 16 | data[2] << 8 | data[3]; + r = data[4] << 24 | 
data[5] << 16 | data[6] << 8 | data[7]; + Blowfish_decipher(c, &l, &r); + data[0] = l >> 24 & 0xff; + data[1] = l >> 16 & 0xff; + data[2] = l >> 8 & 0xff; + data[3] = l & 0xff; + data[4] = r >> 24 & 0xff; + data[5] = r >> 16 & 0xff; + data[6] = r >> 8 & 0xff; + data[7] = r & 0xff; + for (j = 0; j < 8; j++) + data[j] ^= iva[j]; +} + +#if 0 +void +report(u_int32_t data[], u_int16_t len) +{ + u_int16_t i; + for (i = 0; i < len; i += 2) + printf("Block %0hd: %08lx %08lx.\n", + i / 2, data[i], data[i + 1]); +} +void +main(void) +{ + + blf_ctx c; + char key[] = "AAAAA"; + char key2[] = "abcdefghijklmnopqrstuvwxyz"; + + u_int32_t data[10]; + u_int32_t data2[] = + {0x424c4f57l, 0x46495348l}; + + u_int16_t i; + + /* First test */ + for (i = 0; i < 10; i++) + data[i] = i; + + blf_key(&c, (u_int8_t *) key, 5); + blf_enc(&c, data, 5); + blf_dec(&c, data, 1); + blf_dec(&c, data + 2, 4); + printf("Should read as 0 - 9.\n"); + report(data, 10); + + /* Second test */ + blf_key(&c, (u_int8_t *) key2, strlen(key2)); + blf_enc(&c, data2, 1); + printf("\nShould read as: 0x324ed0fe 0xf413a203.\n"); + report(data2, 2); + blf_dec(&c, data2, 1); + report(data2, 2); +} +#endif diff --git a/node_modules/bcrypt/src/node_blf.h b/node_modules/bcrypt/src/node_blf.h new file mode 100644 index 0000000..2d50a39 --- /dev/null +++ b/node_modules/bcrypt/src/node_blf.h @@ -0,0 +1,132 @@ +/* $OpenBSD: blf.h,v 1.7 2007/03/14 17:59:41 grunk Exp $ */ +/* + * Blowfish - a fast block cipher designed by Bruce Schneier + * + * Copyright 1997 Niels Provos + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. 
Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. All advertising materials mentioning features or use of this software + * must display the following acknowledgement: + * This product includes software developed by Niels Provos. + * 4. The name of the author may not be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. + * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, + * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT + * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF + * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#ifndef _NODE_BLF_H_ +#define _NODE_BLF_H_ + +#include + +/* Solaris compatibility */ +#ifdef __sun +#define u_int8_t uint8_t +#define u_int16_t uint16_t +#define u_int32_t uint32_t +#define u_int64_t uint64_t +#endif + +#ifdef _WIN32 +#define u_int8_t unsigned __int8 +#define u_int16_t unsigned __int16 +#define u_int32_t unsigned __int32 +#define u_int64_t unsigned __int64 +#endif + +/* Windows ssize_t compatibility */ +#if defined(_WIN32) || defined(_WIN64) +# if defined(_WIN64) + typedef __int64 LONG_PTR; +# else + typedef long LONG_PTR; +# endif + typedef LONG_PTR SSIZE_T; + typedef SSIZE_T ssize_t; +#endif + +/* z/OS compatibility */ +#ifdef __MVS__ +typedef unsigned char u_int8_t; +typedef unsigned short u_int16_t; +typedef unsigned int u_int32_t; +typedef unsigned long long u_int64_t; +#endif + +#define BCRYPT_VERSION '2' +#define BCRYPT_MAXSALT 16 /* Precomputation is just so nice */ +#define BCRYPT_BLOCKS 6 /* Ciphertext blocks */ +#define BCRYPT_MINROUNDS 16 /* we have log2(rounds) in salt */ + +/* Schneier specifies a maximum key length of 56 bytes. + * This ensures that every key bit affects every cipher + * bit. However, the subkeys can hold up to 72 bytes. + * Warning: For normal blowfish encryption only 56 bytes + * of the key affect all cipherbits. 
+ */ + +#define BLF_N 16 /* Number of Subkeys */ +#define BLF_MAXKEYLEN ((BLF_N-2)*4) /* 448 bits */ +#define BLF_MAXUTILIZED ((BLF_N+2)*4) /* 576 bits */ + +#define _PASSWORD_LEN 128 /* max length, not counting NUL */ +#define _SALT_LEN 32 /* max length */ + +/* Blowfish context */ +typedef struct BlowfishContext { + u_int32_t S[4][256]; /* S-Boxes */ + u_int32_t P[BLF_N + 2]; /* Subkeys */ +} blf_ctx; + +/* Raw access to customized Blowfish + * blf_key is just: + * Blowfish_initstate( state ) + * Blowfish_expand0state( state, key, keylen ) + */ + +void Blowfish_encipher(blf_ctx *, u_int32_t *, u_int32_t *); +void Blowfish_decipher(blf_ctx *, u_int32_t *, u_int32_t *); +void Blowfish_initstate(blf_ctx *); +void Blowfish_expand0state(blf_ctx *, const u_int8_t *, u_int16_t); +void Blowfish_expandstate +(blf_ctx *, const u_int8_t *, u_int16_t, const u_int8_t *, u_int16_t); + +/* Standard Blowfish */ + +void blf_key(blf_ctx *, const u_int8_t *, u_int16_t); +void blf_enc(blf_ctx *, u_int32_t *, u_int16_t); +void blf_dec(blf_ctx *, u_int32_t *, u_int16_t); + +void blf_ecb_encrypt(blf_ctx *, u_int8_t *, u_int32_t); +void blf_ecb_decrypt(blf_ctx *, u_int8_t *, u_int32_t); + +void blf_cbc_encrypt(blf_ctx *, u_int8_t *, u_int8_t *, u_int32_t); +void blf_cbc_decrypt(blf_ctx *, u_int8_t *, u_int8_t *, u_int32_t); + +/* Converts u_int8_t to u_int32_t */ +u_int32_t Blowfish_stream2word(const u_int8_t *, u_int16_t , u_int16_t *); + +/* bcrypt functions*/ +void bcrypt_gensalt(char, u_int8_t, u_int8_t*, char *); +void bcrypt(const char *, size_t key_len, const char *, char *); +void encode_salt(char *, u_int8_t *, char, u_int16_t, u_int8_t); +u_int32_t bcrypt_get_rounds(const char *); + +#endif diff --git a/node_modules/bcrypt/test-docker.sh b/node_modules/bcrypt/test-docker.sh new file mode 100644 index 0000000..7936cf7 --- /dev/null +++ b/node_modules/bcrypt/test-docker.sh @@ -0,0 +1,15 @@ +#!/bin/sh + +set -xe + +echo "Running on $(node -v)" + +# Cleanup +rm -rf node_modules 
build-tmp-* lib/binding + +# Install build dependencies +if [ -f /etc/alpine-release ]; then + apk add --no-cache --virtual .build-deps make gcc g++ python3 +fi + +su node -c "npm test; npx node-pre-gyp package" diff --git a/node_modules/bcrypt/test/async.test.js b/node_modules/bcrypt/test/async.test.js new file mode 100644 index 0000000..fb59367 --- /dev/null +++ b/node_modules/bcrypt/test/async.test.js @@ -0,0 +1,209 @@ +const bcrypt = require('../bcrypt'); + +test('salt_length', done => { + expect.assertions(1); + bcrypt.genSalt(10, function (err, salt) { + expect(salt).toHaveLength(29); + done(); + }); +}) + +test('salt_only_cb', () => { + expect.assertions(1); + expect(() => { + bcrypt.genSalt((err, salt) => { + }); + }).not.toThrow(); +}) + +test('salt_rounds_is_string_number', done => { + expect.assertions(2); + bcrypt.genSalt('10', void 0, function (err, salt) { + expect(err instanceof Error).toBe(true) + expect(err.message).toBe('rounds must be a number') + done(); + }); +}) + +test('salt_rounds_is_string_non_number', done => { + expect.assertions(2); + bcrypt.genSalt('z', function (err, salt) { + expect(err instanceof Error).toBe(true) + expect(err.message).toBe('rounds must be a number') + done(); + }); +}) + +test('salt_minor', done => { + expect.assertions(3); + bcrypt.genSalt(10, 'a', function (err, value) { + expect(value).toHaveLength(29); + const [_, minor, salt] = value.split('$'); + expect(minor).toEqual('2a'); + expect(salt).toEqual('10'); + done(); + }); +}) + +test('salt_minor_b', done => { + expect.assertions(3); + bcrypt.genSalt(10, 'b', function (err, value) { + expect(value).toHaveLength(29); + const [_, minor, salt] = value.split('$'); + expect(minor).toEqual('2b'); + expect(salt).toEqual('10'); + done(); + }); +}) + +test('hash', done => { + expect.assertions(2); + bcrypt.genSalt(10, function (err, salt) { + bcrypt.hash('password', salt, function (err, res) { + expect(res).toBeDefined(); + expect(err).toBeUndefined(); + done(); + }); + 
}); +}) + +test('hash_rounds', done => { + expect.assertions(1); + bcrypt.hash('bacon', 8, function (err, hash) { + expect(bcrypt.getRounds(hash)).toEqual(8); + done(); + }); +}) + +test('hash_empty_strings', done => { + expect.assertions(1); + bcrypt.genSalt(10, function (err, salt) { + bcrypt.hash('', salt, function (err, res) { + expect(res).toBeDefined(); + done(); + }); + }); +}) + +test('hash_fails_with_empty_salt', done => { + expect.assertions(1); + bcrypt.hash('', '', function (err, res) { + expect(err.message).toBe('Invalid salt. Salt must be in the form of: $Vers$log2(NumRounds)$saltvalue') + done(); + }); +}) + +test('hash_no_params', done => { + expect.assertions(1); + bcrypt.hash(function (err, hash) { + expect(err.message).toBe('data must be a string or Buffer and salt must either be a salt string or a number of rounds') + done(); + }); +}) + +test('hash_one_param', done => { + expect.assertions(1); + bcrypt.hash('password', function (err, hash) { + expect(err.message).toBe('data must be a string or Buffer and salt must either be a salt string or a number of rounds'); + done(); + }); +}) + +test('hash_salt_validity', done => { + expect.assertions(2); + bcrypt.hash('password', '$2a$10$somesaltyvaluertsetrse', function (err, enc) { + expect(err).toBeUndefined(); + bcrypt.hash('password', 'some$value', function (err, enc) { + expect(err.message).toBe("Invalid salt. 
Salt must be in the form of: $Vers$log2(NumRounds)$saltvalue"); + done(); + }); + }); +}) + +test('verify_salt', done => { + expect.assertions(2); + bcrypt.genSalt(10, function (err, value) { + const [_, version, rounds] = value.split('$'); + expect(version).toEqual('2b'); + expect(rounds).toEqual('10'); + done(); + }); +}) + +test('verify_salt_min_rounds', done => { + expect.assertions(2); + bcrypt.genSalt(1, function (err, value) { + const [_, version, rounds] = value.split('$'); + expect(version).toEqual('2b'); + expect(rounds).toEqual('04'); + done(); + }); +}) + +test('verify_salt_max_rounds', done => { + expect.assertions(2); + bcrypt.genSalt(100, function (err, value) { + const [_, version, rounds] = value.split('$'); + expect(version).toEqual('2b'); + expect(rounds).toEqual('31'); + done(); + }); +}) + +test('hash_compare', done => { + expect.assertions(2); + bcrypt.genSalt(10, function (err, salt) { + bcrypt.hash("test", salt, function (err, hash) { + bcrypt.compare("test", hash, function (err, res) { + expect(hash).toBeDefined(); + bcrypt.compare("blah", hash, function (err, res) { + expect(res).toBe(false); + done(); + }); + }); + }); + }); +}) + +test('hash_compare_empty_strings', done => { + expect.assertions(2); + const hash = bcrypt.hashSync("test", bcrypt.genSaltSync(10)); + + bcrypt.compare("", hash, function (err, res) { + expect(res).toEqual(false) + bcrypt.compare("", "", function (err, res) { + expect(res).toEqual(false); + done(); + }); + }); +}) + +test('hash_compare_invalid_strings', done => { + expect.assertions(2); + const fullString = 'envy1362987212538'; + const hash = '$2a$10$XOPbrlUPQdwdJUpSrIF6X.LbE14qsMmKGhM1A8W9iqaG3vv1BD7WC'; + const wut = ':'; + bcrypt.compare(fullString, hash, function (err, res) { + expect(res).toBe(true); + bcrypt.compare(fullString, wut, function (err, res) { + expect(res).toBe(false); + done(); + }); + }); +}) + +test('compare_no_params', done => { + expect.assertions(1); + bcrypt.compare(function (err, hash) 
{ + expect(err.message).toBe('data and hash arguments required'); + done(); + }); +}) + +test('hash_compare_one_param', done => { + expect.assertions(1); + bcrypt.compare('password', function (err, hash) { + expect(err.message).toBe('data and hash arguments required'); + done(); + }); +}) diff --git a/node_modules/bcrypt/test/implementation.test.js b/node_modules/bcrypt/test/implementation.test.js new file mode 100644 index 0000000..647f32a --- /dev/null +++ b/node_modules/bcrypt/test/implementation.test.js @@ -0,0 +1,48 @@ +const bcrypt = require('../bcrypt'); + +// some tests were adapted from https://github.com/riverrun/bcrypt_elixir/blob/master/test/base_test.exs +// which are under the BSD LICENSE + +test('openwall', () => { + expect(bcrypt.hashSync("U*U", "$2a$05$CCCCCCCCCCCCCCCCCCCCC.")).toStrictEqual("$2a$05$CCCCCCCCCCCCCCCCCCCCC.E5YPO9kmyuRGyh0XouQYb4YMJKvyOeW"); + expect(bcrypt.hashSync("U*U*", "$2a$05$CCCCCCCCCCCCCCCCCCCCC.")).toStrictEqual("$2a$05$CCCCCCCCCCCCCCCCCCCCC.VGOzA784oUp/Z0DY336zx7pLYAy0lwK"); + expect(bcrypt.hashSync("U*U*U", "$2a$05$XXXXXXXXXXXXXXXXXXXXXO")).toStrictEqual("$2a$05$XXXXXXXXXXXXXXXXXXXXXOAcXxm9kjPGEMsLznoKqmqw7tc8WCx4a"); + expect(bcrypt.hashSync("", "$2a$05$CCCCCCCCCCCCCCCCCCCCC.")).toStrictEqual("$2a$05$CCCCCCCCCCCCCCCCCCCCC.7uG0VCzI2bS7j6ymqJi9CdcdxiRTWNy"); + expect(bcrypt.hashSync("0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789", "$2a$05$abcdefghijklmnopqrstuu")).toStrictEqual("$2a$05$abcdefghijklmnopqrstuu5s2v8.iXieOjg/.AySBTTZIIVFJeBui"); +}) + +test('openbsd', () => { + expect(bcrypt.hashSync("000000000000000000000000000000000000000000000000000000000000000000000000", "$2a$05$CCCCCCCCCCCCCCCCCCCCC.")).toStrictEqual("$2a$05$CCCCCCCCCCCCCCCCCCCCC.6.O1dLNbjod2uo0DVcW.jHucKbPDdHS") + expect(bcrypt.hashSync("000000000000000000000000000000000000000000000000000000000000000000000000", "$2b$05$CCCCCCCCCCCCCCCCCCCCC.")).toStrictEqual("$2b$05$CCCCCCCCCCCCCCCCCCCCC.6.O1dLNbjod2uo0DVcW.jHucKbPDdHS") +}) + 
+test('long_passwords', () => { + // bcrypt wrap-around bug in $2a$ + expect(bcrypt.hashSync("012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234", "$2a$05$CCCCCCCCCCCCCCCCCCCCC.")).toStrictEqual("$2a$05$CCCCCCCCCCCCCCCCCCCCC.6.O1dLNbjod2uo0DVcW.jHucKbPDdHS") + expect(bcrypt.hashSync("01XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", "$2a$05$CCCCCCCCCCCCCCCCCCCCC.")).toStrictEqual("$2a$05$CCCCCCCCCCCCCCCCCCCCC.6.O1dLNbjod2uo0DVcW.jHucKbPDdHS") + + // tests for $2b$ which fixes wrap-around bugs + expect(bcrypt.hashSync("012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234", "$2b$05$CCCCCCCCCCCCCCCCCCCCC.")).toStrictEqual("$2b$05$CCCCCCCCCCCCCCCCCCCCC.XxrQqgBi/5Sxuq9soXzDtjIZ7w5pMfK") + expect(bcrypt.hashSync("0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345", "$2b$05$CCCCCCCCCCCCCCCCCCCCC.")).toStrictEqual("$2b$05$CCCCCCCCCCCCCCCCCCCCC.XxrQqgBi/5Sxuq9soXzDtjIZ7w5pMfK") +}) + +test('embedded_nulls', () => { + expect(bcrypt.hashSync("Passw\0rd123", "$2b$05$CCCCCCCCCCCCCCCCCCCCC.")).toStrictEqual("$2b$05$CCCCCCCCCCCCCCCCCCCCC.VHy/kzL4sCcX3Ib3wN5rNGiRt.TpfxS") + expect(bcrypt.hashSync("Passw\0 you can literally write anything after the NUL character", 
"$2b$05$CCCCCCCCCCCCCCCCCCCCC.")).toStrictEqual("$2b$05$CCCCCCCCCCCCCCCCCCCCC.4vJLJQ6nZ/70INTjjSZWQ0iyUek92tu") + expect(bcrypt.hashSync(Buffer.from("Passw\0 you can literally write anything after the NUL character"), "$2b$05$CCCCCCCCCCCCCCCCCCCCC.")).toStrictEqual("$2b$05$CCCCCCCCCCCCCCCCCCCCC.4vJLJQ6nZ/70INTjjSZWQ0iyUek92tu") +}) + +test('shorten_salt_to_128_bits', () => { + expect(bcrypt.hashSync("test", "$2a$10$1234567899123456789012")).toStrictEqual("$2a$10$123456789912345678901u.OtL1A1eGK5wmvBKUDYKvuVKI7h2XBu") + expect(bcrypt.hashSync("U*U*", "$2a$05$CCCCCCCCCCCCCCCCCCCCCh")).toStrictEqual("$2a$05$CCCCCCCCCCCCCCCCCCCCCeUQ7VjYZ2hd4bLYZdhuPpZMUpEUJDw1S") + expect(bcrypt.hashSync("U*U*", "$2a$05$CCCCCCCCCCCCCCCCCCCCCM")).toStrictEqual("$2a$05$CCCCCCCCCCCCCCCCCCCCC.VGOzA784oUp/Z0DY336zx7pLYAy0lwK") + expect(bcrypt.hashSync("U*U*", "$2a$05$CCCCCCCCCCCCCCCCCCCCCA")).toStrictEqual("$2a$05$CCCCCCCCCCCCCCCCCCCCC.VGOzA784oUp/Z0DY336zx7pLYAy0lwK") +}) + +test('consistency', () => { + expect(bcrypt.hashSync("ππππππππ", "$2a$10$.TtQJ4Jr6isd4Hp.mVfZeu")).toStrictEqual("$2a$10$.TtQJ4Jr6isd4Hp.mVfZeuh6Gws4rOQ/vdBczhDx.19NFK0Y84Dle") + expect(bcrypt.hashSync("p@5sw0rd", "$2b$12$zQ4CooEXdGqcwi0PHsgc8e")).toStrictEqual("$2b$12$zQ4CooEXdGqcwi0PHsgc8eAf0DLXE/XHoBE8kCSGQ97rXwuClaPam") + expect(bcrypt.hashSync("C'est bon, la vie!", "$2b$12$cbo7LZ.wxgW4yxAA5Vqlv.")).toStrictEqual("$2b$12$cbo7LZ.wxgW4yxAA5Vqlv.KR6QFPt4qCdc9RYJNXxa/rbUOp.1sw.") + expect(bcrypt.hashSync("ἓν οἶδα ὅτι οὐδὲν οἶδα", "$2b$12$LeHKWR2bmrazi/6P22Jpau")).toStrictEqual("$2b$12$LeHKWR2bmrazi/6P22JpauX5my/eKwwKpWqL7L5iEByBnxNc76FRW") + expect(bcrypt.hashSync(Buffer.from("ἓν οἶδα ὅτι οὐδὲν οἶδα"), "$2b$12$LeHKWR2bmrazi/6P22Jpau")).toStrictEqual("$2b$12$LeHKWR2bmrazi/6P22JpauX5my/eKwwKpWqL7L5iEByBnxNc76FRW") +}) diff --git a/node_modules/bcrypt/test/promise.test.js b/node_modules/bcrypt/test/promise.test.js new file mode 100644 index 0000000..0103418 --- /dev/null +++ b/node_modules/bcrypt/test/promise.test.js @@ 
-0,0 +1,168 @@ +const bcrypt = require('../bcrypt'); +const promises = require('../promises'); + +test('salt_returns_promise_on_no_args', () => { + // make sure test passes with non-native implementations such as bluebird + // http://stackoverflow.com/questions/27746304/how-do-i-tell-if-an-object-is-a-promise + expect(typeof bcrypt.genSalt().then).toEqual('function') +}) + +test('salt_returns_promise_on_null_callback', () => { + expect(typeof bcrypt.genSalt(13, null, null).then).toEqual('function') +}) + +test('salt_length', () => { + return expect(bcrypt.genSalt(10)).resolves.toHaveLength(29); +}) + +test('salt_rounds_is_string_number', () => { + return expect(bcrypt.genSalt('10')).rejects.toThrow('rounds must be a number'); +}) + +test('salt_rounds_is_string_non_number', () => { + return expect(bcrypt.genSalt('b')).rejects.toThrow('rounds must be a number'); +}) + +test('hash_returns_promise_on_null_callback', () => { + expect(typeof bcrypt.hash('password', 10, null).then).toStrictEqual('function') +}) + +test('hash', () => { + return expect(bcrypt.genSalt(10) + .then(salt => bcrypt.hash('password', salt))).resolves.toBeDefined() +}) + +test('hash_rounds', () => { + return bcrypt.hash('bacon', 8).then(hash => { + expect(bcrypt.getRounds(hash)).toStrictEqual(8) + }); +}) + +test('hash_empty_strings', () => { + expect.assertions(2); + return Promise.all([ + expect(bcrypt.genSalt(10) + .then(salt => bcrypt.hash('', salt))) + .resolves.toBeDefined(), + expect(bcrypt.hash('', '')).rejects.toThrow(''), + ]); +}) + +test('hash_no_params', () => { + expect.assertions(1); + return expect(bcrypt.hash()).rejects.toThrow('data and salt arguments required'); +}) + +test('hash_one_param', () => { + return expect(bcrypt.hash('password')).rejects.toThrow('data and salt arguments required'); +}) + +test('hash_salt_validity', () => { + expect.assertions(2); + return Promise.all( + [ + expect(bcrypt.hash('password', '$2a$10$somesaltyvaluertsetrse')).resolves.toBeDefined(), + 
expect(bcrypt.hash('password', 'some$value')).rejects.toThrow("Invalid salt. Salt must be in the form of: $Vers$log2(NumRounds)$saltvalue") + ]); +}) + +test('verify_salt', () => { + expect.assertions(2); + return bcrypt.genSalt(10).then(result => { + const [_, version, salt] = result.split('$'); + expect(version).toEqual('2b') + expect(salt).toEqual('10') + }); +}) + +test('verify_salt_min_rounds', () => { + expect.assertions(2); + return bcrypt.genSalt(1).then(value => { + const [_, version, rounds] = value.split('$'); + expect(version).toEqual('2b'); + expect(rounds).toEqual('04'); + }); +}) + +test('verify_salt_max_rounds', () => { + expect.assertions(2); + return bcrypt.genSalt(100).then(value => { + const [_, version, rounds] = value.split('$'); + expect(version).toEqual('2b'); + expect(rounds).toEqual('31'); + }); +}) + +test('hash_compare_returns_promise_on_null_callback', () => { + expect(typeof bcrypt.compare('password', 'something', null).then).toStrictEqual('function') +}) + +test('hash_compare', () => { + expect.assertions(3); + return bcrypt.genSalt(10).then(function (salt) { + expect(salt).toHaveLength(29); + return bcrypt.hash("test", salt); + }).then(hash => Promise.all( + [ + expect(bcrypt.compare("test", hash)).resolves.toEqual(true), + expect(bcrypt.compare("blah", hash)).resolves.toEqual(false) + ])); +}) + +test('hash_compare_empty_strings', () => { + expect.assertions(2); + const hash = bcrypt.hashSync("test", bcrypt.genSaltSync(10)); + return Promise.all([ + expect(bcrypt.compare("", hash)).resolves.toEqual(false), + expect(bcrypt.compare("", "")).resolves.toEqual(false) + ]); +}) + +test('hash_compare_invalid_strings', () => { + const fullString = 'envy1362987212538'; + const hash = '$2a$10$XOPbrlUPQdwdJUpSrIF6X.LbE14qsMmKGhM1A8W9iqaG3vv1BD7WC'; + const wut = ':'; + return Promise.all([ + expect(bcrypt.compare(fullString, hash)).resolves.toEqual(true), + expect(bcrypt.compare(fullString, wut)).resolves.toEqual(false), + ]); +}) + 
+test('hash_compare_no_params', () => { + expect.assertions(1); + return expect(bcrypt.compare()).rejects.toThrow('data and hash arguments required') +}) + +test('hash_compare_one_param', () => { + expect.assertions(1); + return expect(bcrypt.compare('password')).rejects.toThrow('data and hash arguments required') +}) + +test('change_promise_impl_reject', () => { + + promises.use({ + reject: function () { + return 'mock'; + } + }); + + expect(promises.reject()).toEqual('mock'); + + // need to reset the promise implementation because of require cache + promises.use(global.Promise); +}) + +test('change_promise_impl_promise', () => { + + promises.use({ + reject: function (err) { + expect(err.message).toEqual('fn must be a function'); + return 'mock'; + } + }); + + expect(promises.promise('', '', '')).toEqual('mock'); + + // need to reset the promise implementation because of require cache + promises.use(global.Promise); +}) diff --git a/node_modules/bcrypt/test/repetitions.test.js b/node_modules/bcrypt/test/repetitions.test.js new file mode 100644 index 0000000..66807f3 --- /dev/null +++ b/node_modules/bcrypt/test/repetitions.test.js @@ -0,0 +1,46 @@ +const bcrypt = require('../bcrypt'); + +const EXPECTED = 2500; //number of times to iterate these tests.) 
+ +test('salt_length', () => { + expect.assertions(EXPECTED); + + return Promise.all(Array.from({length: EXPECTED}, + () => bcrypt.genSalt(10) + .then(salt => expect(salt).toHaveLength(29)))); +}) + +test('test_hash_length', () => { + expect.assertions(EXPECTED); + const SALT = '$2a$04$TnjywYklQbbZjdjBgBoA4e'; + return Promise.all(Array.from({length: EXPECTED}, + () => bcrypt.hash('test', SALT) + .then(hash => expect(hash).toHaveLength(60)))); +}) + +test('test_compare', () => { + expect.assertions(EXPECTED); + const HASH = '$2a$04$TnjywYklQbbZjdjBgBoA4e9G7RJt9blgMgsCvUvus4Iv4TENB5nHy'; + return Promise.all(Array.from({length: EXPECTED}, + () => bcrypt.compare('test', HASH) + .then(match => expect(match).toEqual(true)))); +}) + +test('test_hash_and_compare', () => { + expect.assertions(EXPECTED * 3); + const salt = bcrypt.genSaltSync(4) + + return Promise.all(Array.from({length: EXPECTED}, + () => { + const password = 'secret' + Math.random(); + return bcrypt.hash(password, salt) + .then(hash => { + expect(hash).toHaveLength(60); + const goodCompare = bcrypt.compare(password, hash).then(res => expect(res).toEqual(true)); + const badCompare = bcrypt.compare('bad' + password, hash).then(res => expect(res).toEqual(false)); + + return Promise.all([goodCompare, badCompare]); + }); + })); +}, 10000); + diff --git a/node_modules/bcrypt/test/sync.test.js b/node_modules/bcrypt/test/sync.test.js new file mode 100644 index 0000000..2e6809a --- /dev/null +++ b/node_modules/bcrypt/test/sync.test.js @@ -0,0 +1,125 @@ +const bcrypt = require('../bcrypt') + +test('salt_length', () => { + const salt = bcrypt.genSaltSync(13); + expect(salt).toHaveLength(29); + const [_, version, rounds] = salt.split('$'); + expect(version).toStrictEqual('2b') + expect(rounds).toStrictEqual('13') +}) + +test('salt_no_params', () => { + const salt = bcrypt.genSaltSync(); + const [_, version, rounds] = salt.split('$'); + expect(version).toStrictEqual('2b') + expect(rounds).toStrictEqual('10') +}) + 
+test('salt_rounds_is_string_number', () => { + expect(() => bcrypt.genSaltSync('10')).toThrowError('rounds must be a number'); +}) + +test('salt_rounds_is_NaN', () => { + expect(() => bcrypt.genSaltSync('b')).toThrowError("rounds must be a number"); +}) + +test('salt_minor_a', () => { + const salt = bcrypt.genSaltSync(10, 'a'); + const [_, version, rounds] = salt.split('$'); + expect(version).toStrictEqual('2a') + expect(rounds).toStrictEqual('10') +}) + +test('salt_minor_b', () => { + const salt = bcrypt.genSaltSync(10, 'b'); + const [_, version, rounds] = salt.split('$'); + expect(version).toStrictEqual('2b') + expect(rounds).toStrictEqual('10') +}) + +test('hash', () => { + expect(() => bcrypt.hashSync('password', bcrypt.genSaltSync(10))).not.toThrow() +}) + +test('hash_rounds', () => { + const hash = bcrypt.hashSync('password', 8); + expect(bcrypt.getRounds(hash)).toStrictEqual(8) +}) + +test('hash_empty_string', () => { + expect(() => bcrypt.hashSync('', bcrypt.genSaltSync(10))).not.toThrow(); + expect(() => bcrypt.hashSync('password', '')).toThrowError('Invalid salt. Salt must be in the form of: $Vers$log2(NumRounds)$saltvalue'); + expect(() => bcrypt.hashSync('', '')).toThrowError('Invalid salt. Salt must be in the form of: $Vers$log2(NumRounds)$saltvalue'); +}) + +test('hash_pw_no_params', () => { + expect(() => bcrypt.hashSync()).toThrow('data and salt arguments required'); +}) + +test('hash_pw_one_param', () => { + expect(() => bcrypt.hashSync('password')).toThrow('data and salt arguments required'); +}) + +test('hash_pw_not_hash_str', () => { + expect(() => bcrypt.hashSync('password', {})).toThrow("data must be a string or Buffer and salt must either be a salt string or a number of rounds") +}) + +test('hash_salt_validity', () => { + expect(2); + expect(bcrypt.hashSync('password', '$2a$10$somesaltyvaluertsetrse')).toBeDefined() + expect(() => bcrypt.hashSync('password', 'some$value')).toThrow('Invalid salt. 
Salt must be in the form of: $Vers$log2(NumRounds)$saltvalue') +}) + +test('verify_salt', () => { + const salt = bcrypt.genSaltSync(10); + const split_salt = salt.split('$'); + expect(split_salt[1]).toStrictEqual('2b') + expect(split_salt[2]).toStrictEqual('10') +}) + +test('verify_salt_min_rounds', () => { + const salt = bcrypt.genSaltSync(1); + const split_salt = salt.split('$'); + expect(split_salt[1]).toStrictEqual('2b') + expect(split_salt[2]).toStrictEqual('04') +}) + +test('verify_salt_max_rounds', () => { + const salt = bcrypt.genSaltSync(100); + const split_salt = salt.split('$'); + expect(split_salt[1]).toStrictEqual('2b') + expect(split_salt[2]).toStrictEqual('31') +}) + +test('hash_compare', () => { + const salt = bcrypt.genSaltSync(10); + expect(29).toStrictEqual(salt.length) + const hash = bcrypt.hashSync("test", salt); + expect(bcrypt.compareSync("test", hash)).toBeDefined() + expect(!(bcrypt.compareSync("blah", hash))).toBeDefined() +}) + +test('hash_compare_empty_strings', () => { + expect(!(bcrypt.compareSync("", "password"))).toBeDefined() + expect(!(bcrypt.compareSync("", ""))).toBeDefined() + expect(!(bcrypt.compareSync("password", ""))).toBeDefined() +}) + +test('hash_compare_invalid_strings', () => { + const fullString = 'envy1362987212538'; + const hash = '$2a$10$XOPbrlUPQdwdJUpSrIF6X.LbE14qsMmKGhM1A8W9iqaG3vv1BD7WC'; + const wut = ':'; + expect(bcrypt.compareSync(fullString, hash)).toBe(true); + expect(bcrypt.compareSync(fullString, wut)).toBe(false); +}) + +test('getRounds', () => { + const hash = bcrypt.hashSync("test", bcrypt.genSaltSync(9)); + expect(9).toStrictEqual(bcrypt.getRounds(hash)) +}) + +test('getRounds', () => { + const hash = bcrypt.hashSync("test", bcrypt.genSaltSync(9)); + expect(9).toStrictEqual(bcrypt.getRounds(hash)) + expect(() => bcrypt.getRounds('')).toThrow("invalid hash provided"); +}); diff --git a/node_modules/brace-expansion/LICENSE b/node_modules/brace-expansion/LICENSE new file mode 100644 index 
0000000..de32266 --- /dev/null +++ b/node_modules/brace-expansion/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2013 Julian Gruber + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/README.md b/node_modules/brace-expansion/README.md new file mode 100644 index 0000000..6b4e0e1 --- /dev/null +++ b/node_modules/brace-expansion/README.md @@ -0,0 +1,129 @@ +# brace-expansion + +[Brace expansion](https://www.gnu.org/software/bash/manual/html_node/Brace-Expansion.html), +as known from sh/bash, in JavaScript. 
+ +[![build status](https://secure.travis-ci.org/juliangruber/brace-expansion.svg)](http://travis-ci.org/juliangruber/brace-expansion) +[![downloads](https://img.shields.io/npm/dm/brace-expansion.svg)](https://www.npmjs.org/package/brace-expansion) +[![Greenkeeper badge](https://badges.greenkeeper.io/juliangruber/brace-expansion.svg)](https://greenkeeper.io/) + +[![testling badge](https://ci.testling.com/juliangruber/brace-expansion.png)](https://ci.testling.com/juliangruber/brace-expansion) + +## Example + +```js +var expand = require('brace-expansion'); + +expand('file-{a,b,c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('-v{,,}') +// => ['-v', '-v', '-v'] + +expand('file{0..2}.jpg') +// => ['file0.jpg', 'file1.jpg', 'file2.jpg'] + +expand('file-{a..c}.jpg') +// => ['file-a.jpg', 'file-b.jpg', 'file-c.jpg'] + +expand('file{2..0}.jpg') +// => ['file2.jpg', 'file1.jpg', 'file0.jpg'] + +expand('file{0..4..2}.jpg') +// => ['file0.jpg', 'file2.jpg', 'file4.jpg'] + +expand('file-{a..e..2}.jpg') +// => ['file-a.jpg', 'file-c.jpg', 'file-e.jpg'] + +expand('file{00..10..5}.jpg') +// => ['file00.jpg', 'file05.jpg', 'file10.jpg'] + +expand('{{A..C},{a..c}}') +// => ['A', 'B', 'C', 'a', 'b', 'c'] + +expand('ppp{,config,oe{,conf}}') +// => ['ppp', 'pppconfig', 'pppoe', 'pppoeconf'] +``` + +## API + +```js +var expand = require('brace-expansion'); +``` + +### var expanded = expand(str) + +Return an array of all possible and valid expansions of `str`. If none are +found, `[str]` is returned. + +Valid expansions are: + +```js +/^(.*,)+(.+)?$/ +// {a,b,...} +``` + +A comma separated list of options, like `{a,b}` or `{a,{b,c}}` or `{,a,}`. + +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +A numeric sequence from `x` to `y` inclusive, with optional increment. +If `x` or `y` start with a leading `0`, all the numbers will be padded +to have equal length. Negative numbers and backwards iteration work too. 
+ +```js +/^-?\d+\.\.-?\d+(\.\.-?\d+)?$/ +// {x..y[..incr]} +``` + +An alphabetic sequence from `x` to `y` inclusive, with optional increment. +`x` and `y` must be exactly one character, and if given, `incr` must be a +number. + +For compatibility reasons, the string `${` is not eligible for brace expansion. + +## Installation + +With [npm](https://npmjs.org) do: + +```bash +npm install brace-expansion +``` + +## Contributors + +- [Julian Gruber](https://github.com/juliangruber) +- [Isaac Z. Schlueter](https://github.com/isaacs) + +## Sponsors + +This module is proudly supported by my [Sponsors](https://github.com/juliangruber/sponsors)! + +Do you want to support modules like this to improve their quality, stability and weigh in on new features? Then please consider donating to my [Patreon](https://www.patreon.com/juliangruber). Not sure how much of my modules you're using? Try [feross/thanks](https://github.com/feross/thanks)! + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/brace-expansion/index.js b/node_modules/brace-expansion/index.js new file mode 100644 index 0000000..bd19fe6 --- /dev/null +++ b/node_modules/brace-expansion/index.js @@ -0,0 +1,201 @@ +var concatMap = require('concat-map'); +var balanced = require('balanced-match'); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen = '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if 
(!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,(?!,).*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); + } + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? 
expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } + } + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + diff --git a/node_modules/brace-expansion/package.json b/node_modules/brace-expansion/package.json new file mode 100644 index 0000000..3447888 --- /dev/null +++ b/node_modules/brace-expansion/package.json @@ -0,0 +1,50 @@ +{ + "name": "brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "1.1.12", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/brace-expansion.git" + }, + "homepage": "https://github.com/juliangruber/brace-expansion", + "main": "index.js", + "scripts": { + "test": "tape test/*.js", + "gentest": "bash test/generate.sh", + "bench": "matcha test/perf/bench.js" + }, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + }, + "devDependencies": { + "matcha": "^0.7.0", + "tape": "^4.6.0" + }, + "keywords": [], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + 
"files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/20..latest", + "firefox/nightly", + "chrome/25..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + }, + "publishConfig": { + "tag": "1.x" + } +} diff --git a/node_modules/chownr/LICENSE b/node_modules/chownr/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/chownr/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/chownr/README.md b/node_modules/chownr/README.md new file mode 100644 index 0000000..70e9a54 --- /dev/null +++ b/node_modules/chownr/README.md @@ -0,0 +1,3 @@ +Like `chown -R`. + +Takes the same arguments as `fs.chown()` diff --git a/node_modules/chownr/chownr.js b/node_modules/chownr/chownr.js new file mode 100644 index 0000000..0d40932 --- /dev/null +++ b/node_modules/chownr/chownr.js @@ -0,0 +1,167 @@ +'use strict' +const fs = require('fs') +const path = require('path') + +/* istanbul ignore next */ +const LCHOWN = fs.lchown ? 'lchown' : 'chown' +/* istanbul ignore next */ +const LCHOWNSYNC = fs.lchownSync ? 
'lchownSync' : 'chownSync' + +/* istanbul ignore next */ +const needEISDIRHandled = fs.lchown && + !process.version.match(/v1[1-9]+\./) && + !process.version.match(/v10\.[6-9]/) + +const lchownSync = (path, uid, gid) => { + try { + return fs[LCHOWNSYNC](path, uid, gid) + } catch (er) { + if (er.code !== 'ENOENT') + throw er + } +} + +/* istanbul ignore next */ +const chownSync = (path, uid, gid) => { + try { + return fs.chownSync(path, uid, gid) + } catch (er) { + if (er.code !== 'ENOENT') + throw er + } +} + +/* istanbul ignore next */ +const handleEISDIR = + needEISDIRHandled ? (path, uid, gid, cb) => er => { + // Node prior to v10 had a very questionable implementation of + // fs.lchown, which would always try to call fs.open on a directory + // Fall back to fs.chown in those cases. + if (!er || er.code !== 'EISDIR') + cb(er) + else + fs.chown(path, uid, gid, cb) + } + : (_, __, ___, cb) => cb + +/* istanbul ignore next */ +const handleEISDirSync = + needEISDIRHandled ? (path, uid, gid) => { + try { + return lchownSync(path, uid, gid) + } catch (er) { + if (er.code !== 'EISDIR') + throw er + chownSync(path, uid, gid) + } + } + : (path, uid, gid) => lchownSync(path, uid, gid) + +// fs.readdir could only accept an options object as of node v6 +const nodeVersion = process.version +let readdir = (path, options, cb) => fs.readdir(path, options, cb) +let readdirSync = (path, options) => fs.readdirSync(path, options) +/* istanbul ignore next */ +if (/^v4\./.test(nodeVersion)) + readdir = (path, options, cb) => fs.readdir(path, cb) + +const chown = (cpath, uid, gid, cb) => { + fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => { + // Skip ENOENT error + cb(er && er.code !== 'ENOENT' ? er : null) + })) +} + +const chownrKid = (p, child, uid, gid, cb) => { + if (typeof child === 'string') + return fs.lstat(path.resolve(p, child), (er, stats) => { + // Skip ENOENT error + if (er) + return cb(er.code !== 'ENOENT' ? 
er : null) + stats.name = child + chownrKid(p, stats, uid, gid, cb) + }) + + if (child.isDirectory()) { + chownr(path.resolve(p, child.name), uid, gid, er => { + if (er) + return cb(er) + const cpath = path.resolve(p, child.name) + chown(cpath, uid, gid, cb) + }) + } else { + const cpath = path.resolve(p, child.name) + chown(cpath, uid, gid, cb) + } +} + + +const chownr = (p, uid, gid, cb) => { + readdir(p, { withFileTypes: true }, (er, children) => { + // any error other than ENOTDIR or ENOTSUP means it's not readable, + // or doesn't exist. give up. + if (er) { + if (er.code === 'ENOENT') + return cb() + else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP') + return cb(er) + } + if (er || !children.length) + return chown(p, uid, gid, cb) + + let len = children.length + let errState = null + const then = er => { + if (errState) + return + if (er) + return cb(errState = er) + if (-- len === 0) + return chown(p, uid, gid, cb) + } + + children.forEach(child => chownrKid(p, child, uid, gid, then)) + }) +} + +const chownrKidSync = (p, child, uid, gid) => { + if (typeof child === 'string') { + try { + const stats = fs.lstatSync(path.resolve(p, child)) + stats.name = child + child = stats + } catch (er) { + if (er.code === 'ENOENT') + return + else + throw er + } + } + + if (child.isDirectory()) + chownrSync(path.resolve(p, child.name), uid, gid) + + handleEISDirSync(path.resolve(p, child.name), uid, gid) +} + +const chownrSync = (p, uid, gid) => { + let children + try { + children = readdirSync(p, { withFileTypes: true }) + } catch (er) { + if (er.code === 'ENOENT') + return + else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP') + return handleEISDirSync(p, uid, gid) + else + throw er + } + + if (children && children.length) + children.forEach(child => chownrKidSync(p, child, uid, gid)) + + return handleEISDirSync(p, uid, gid) +} + +module.exports = chownr +chownr.sync = chownrSync diff --git a/node_modules/chownr/package.json b/node_modules/chownr/package.json 
new file mode 100644 index 0000000..5b0214c --- /dev/null +++ b/node_modules/chownr/package.json @@ -0,0 +1,32 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "chownr", + "description": "like `chown -R`", + "version": "2.0.0", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/chownr.git" + }, + "main": "chownr.js", + "files": [ + "chownr.js" + ], + "devDependencies": { + "mkdirp": "0.3", + "rimraf": "^2.7.1", + "tap": "^14.10.6" + }, + "tap": { + "check-coverage": true + }, + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "license": "ISC", + "engines": { + "node": ">=10" + } +} diff --git a/node_modules/cluster-key-slot/.eslintrc b/node_modules/cluster-key-slot/.eslintrc new file mode 100644 index 0000000..3ee8296 --- /dev/null +++ b/node_modules/cluster-key-slot/.eslintrc @@ -0,0 +1,16 @@ +{ + "extends": "airbnb-base/legacy", + "parserOptions":{ + "ecmaFeatures": { + "experimentalObjectRestSpread": true + } + }, + "rules": { + "max-len": 0, + "no-plusplus": 0, + "no-bitwise": 0, + "no-param-reassign": 0, + "no-undef": 0 + }, + "globals": {} +} diff --git a/node_modules/cluster-key-slot/LICENSE b/node_modules/cluster-key-slot/LICENSE new file mode 100644 index 0000000..fd22a2d --- /dev/null +++ b/node_modules/cluster-key-slot/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2018 Mike Diarmid (Salakar) + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this library except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/node_modules/cluster-key-slot/README.md b/node_modules/cluster-key-slot/README.md new file mode 100644 index 0000000..440f7b1 --- /dev/null +++ b/node_modules/cluster-key-slot/README.md @@ -0,0 +1,61 @@ +[![Coverage Status](https://coveralls.io/repos/github/Salakar/cluster-key-slot/badge.svg?branch=master)](https://coveralls.io/github/Salakar/cluster-key-slot?branch=master) +![Downloads](https://img.shields.io/npm/dt/cluster-key-slot.svg) +[![npm version](https://img.shields.io/npm/v/cluster-key-slot.svg)](https://www.npmjs.com/package/cluster-key-slot) +[![dependencies](https://img.shields.io/david/Salakar/cluster-key-slot.svg)](https://david-dm.org/Salakar/cluster-key-slot) +[![License](https://img.shields.io/npm/l/cluster-key-slot.svg)](/LICENSE) +Follow on Twitter + +# Redis Key Slot Calculator + +A high performance redis cluster key slot calculator for node redis clients e.g. [node_redis](https://github.com/NodeRedis/node_redis), [ioredis](https://github.com/luin/ioredis) and [redis-clustr](https://github.com/gosquared/redis-clustr/). + +This also handles key tags such as `somekey{actualTag}`. + +## Install + +Install with [NPM](https://npmjs.org/): + +``` +npm install cluster-key-slot --save +``` + +## Usage + +```js +const calculateSlot = require('cluster-key-slot'); +const calculateMultipleSlots = require('cluster-key-slot').generateMulti; + +// ... + +// a single slot number +const slot = calculateSlot('test:key:{butOnlyThis}redis'); +// Buffer is also supported +const anotherSlot = calculateSlot(Buffer.from([0x7b, 0x7d, 0x2a])); + +// multiple keys - multi returns a single key slot number, returns -1 if any +// of the keys does not match the base slot number (base is defaulted to first keys slot) +// This is useful to quickly determine a singe slot for multi keys operations. 
+const slotForRedisMulti = calculateMultipleSlots([ + 'test:key:{butOnlyThis}redis', + 'something:key45:{butOnlyThis}hello', + 'example:key46:{butOnlyThis}foobar', +]); +``` + +## Benchmarks + +`OLD` in these benchmarks refers to the `ioredis` crc calc and many of the other calculators that use `Buffer`. + +```text +node -v  ✔  16.38G RAM  10:29:07 +v10.15.3 + +NEW tags x 721,445 ops/sec ±0.44% (90 runs sampled) +OLD tags x 566,777 ops/sec ±0.97% (96 runs sampled) +NEW without tags x 2,054,845 ops/sec ±1.77% (92 runs sampled) +OLD without tags x 865,839 ops/sec ±0.43% (96 runs sampled) +NEW without tags singular x 6,354,097 ops/sec ±1.25% (94 runs sampled) +OLD without tags singular x 4,012,250 ops/sec ±0.96% (94 runs sampled) +NEW tags (Buffer) x 552,346 ops/sec ±1.35% (92 runs sampled) +``` + diff --git a/node_modules/cluster-key-slot/index.d.ts b/node_modules/cluster-key-slot/index.d.ts new file mode 100644 index 0000000..1713b44 --- /dev/null +++ b/node_modules/cluster-key-slot/index.d.ts @@ -0,0 +1,10 @@ +declare module 'cluster-key-slot' { + // Convert a string or Buffer into a redis slot hash. + function calculate(value: string | Buffer): number; + + // Convert an array of multiple strings or Buffers into a redis slot hash. + // Returns -1 if one of the keys is not for the same slot as the others + export function generateMulti(values: Array): number; + + export = calculate; +} \ No newline at end of file diff --git a/node_modules/cluster-key-slot/lib/index.js b/node_modules/cluster-key-slot/lib/index.js new file mode 100644 index 0000000..7928c77 --- /dev/null +++ b/node_modules/cluster-key-slot/lib/index.js @@ -0,0 +1,166 @@ +/* + * Copyright 2001-2010 Georges Menie (www.menie.org) + * Copyright 2010 Salvatore Sanfilippo (adapted to Redis coding style) + * Copyright 2015 Zihua Li (http://zihua.li) (ported to JavaScript) + * Copyright 2016 Mike Diarmid (http://github.com/salakar) (re-write for performance, ~700% perf inc) + * All rights reserved. 
+ * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * * Neither the name of the University of California, Berkeley nor the + * names of its contributors may be used to endorse or promote products + * derived from this software without specific prior written permission. + * + * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY + * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY + * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; + * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +/* CRC16 implementation according to CCITT standards. 
+ * + * Note by @antirez: this is actually the XMODEM CRC 16 algorithm, using the + * following parameters: + * + * Name : "XMODEM", also known as "ZMODEM", "CRC-16/ACORN" + * Width : 16 bit + * Poly : 1021 (That is actually x^16 + x^12 + x^5 + 1) + * Initialization : 0000 + * Reflect Input byte : False + * Reflect Output CRC : False + * Xor constant to output CRC : 0000 + * Output for "123456789" : 31C3 + */ + +var lookup = [ + 0x0000, 0x1021, 0x2042, 0x3063, 0x4084, 0x50a5, 0x60c6, 0x70e7, + 0x8108, 0x9129, 0xa14a, 0xb16b, 0xc18c, 0xd1ad, 0xe1ce, 0xf1ef, + 0x1231, 0x0210, 0x3273, 0x2252, 0x52b5, 0x4294, 0x72f7, 0x62d6, + 0x9339, 0x8318, 0xb37b, 0xa35a, 0xd3bd, 0xc39c, 0xf3ff, 0xe3de, + 0x2462, 0x3443, 0x0420, 0x1401, 0x64e6, 0x74c7, 0x44a4, 0x5485, + 0xa56a, 0xb54b, 0x8528, 0x9509, 0xe5ee, 0xf5cf, 0xc5ac, 0xd58d, + 0x3653, 0x2672, 0x1611, 0x0630, 0x76d7, 0x66f6, 0x5695, 0x46b4, + 0xb75b, 0xa77a, 0x9719, 0x8738, 0xf7df, 0xe7fe, 0xd79d, 0xc7bc, + 0x48c4, 0x58e5, 0x6886, 0x78a7, 0x0840, 0x1861, 0x2802, 0x3823, + 0xc9cc, 0xd9ed, 0xe98e, 0xf9af, 0x8948, 0x9969, 0xa90a, 0xb92b, + 0x5af5, 0x4ad4, 0x7ab7, 0x6a96, 0x1a71, 0x0a50, 0x3a33, 0x2a12, + 0xdbfd, 0xcbdc, 0xfbbf, 0xeb9e, 0x9b79, 0x8b58, 0xbb3b, 0xab1a, + 0x6ca6, 0x7c87, 0x4ce4, 0x5cc5, 0x2c22, 0x3c03, 0x0c60, 0x1c41, + 0xedae, 0xfd8f, 0xcdec, 0xddcd, 0xad2a, 0xbd0b, 0x8d68, 0x9d49, + 0x7e97, 0x6eb6, 0x5ed5, 0x4ef4, 0x3e13, 0x2e32, 0x1e51, 0x0e70, + 0xff9f, 0xefbe, 0xdfdd, 0xcffc, 0xbf1b, 0xaf3a, 0x9f59, 0x8f78, + 0x9188, 0x81a9, 0xb1ca, 0xa1eb, 0xd10c, 0xc12d, 0xf14e, 0xe16f, + 0x1080, 0x00a1, 0x30c2, 0x20e3, 0x5004, 0x4025, 0x7046, 0x6067, + 0x83b9, 0x9398, 0xa3fb, 0xb3da, 0xc33d, 0xd31c, 0xe37f, 0xf35e, + 0x02b1, 0x1290, 0x22f3, 0x32d2, 0x4235, 0x5214, 0x6277, 0x7256, + 0xb5ea, 0xa5cb, 0x95a8, 0x8589, 0xf56e, 0xe54f, 0xd52c, 0xc50d, + 0x34e2, 0x24c3, 0x14a0, 0x0481, 0x7466, 0x6447, 0x5424, 0x4405, + 0xa7db, 0xb7fa, 0x8799, 0x97b8, 0xe75f, 0xf77e, 0xc71d, 0xd73c, + 0x26d3, 0x36f2, 0x0691, 0x16b0, 0x6657, 0x7676, 
0x4615, 0x5634, + 0xd94c, 0xc96d, 0xf90e, 0xe92f, 0x99c8, 0x89e9, 0xb98a, 0xa9ab, + 0x5844, 0x4865, 0x7806, 0x6827, 0x18c0, 0x08e1, 0x3882, 0x28a3, + 0xcb7d, 0xdb5c, 0xeb3f, 0xfb1e, 0x8bf9, 0x9bd8, 0xabbb, 0xbb9a, + 0x4a75, 0x5a54, 0x6a37, 0x7a16, 0x0af1, 0x1ad0, 0x2ab3, 0x3a92, + 0xfd2e, 0xed0f, 0xdd6c, 0xcd4d, 0xbdaa, 0xad8b, 0x9de8, 0x8dc9, + 0x7c26, 0x6c07, 0x5c64, 0x4c45, 0x3ca2, 0x2c83, 0x1ce0, 0x0cc1, + 0xef1f, 0xff3e, 0xcf5d, 0xdf7c, 0xaf9b, 0xbfba, 0x8fd9, 0x9ff8, + 0x6e17, 0x7e36, 0x4e55, 0x5e74, 0x2e93, 0x3eb2, 0x0ed1, 0x1ef0 +]; + +/** + * Convert a string to a UTF8 array - faster than via buffer + * @param str + * @returns {Array} + */ +var toUTF8Array = function toUTF8Array(str) { + var char; + var i = 0; + var p = 0; + var utf8 = []; + var len = str.length; + + for (; i < len; i++) { + char = str.charCodeAt(i); + if (char < 128) { + utf8[p++] = char; + } else if (char < 2048) { + utf8[p++] = (char >> 6) | 192; + utf8[p++] = (char & 63) | 128; + } else if ( + ((char & 0xFC00) === 0xD800) && (i + 1) < str.length && + ((str.charCodeAt(i + 1) & 0xFC00) === 0xDC00)) { + char = 0x10000 + ((char & 0x03FF) << 10) + (str.charCodeAt(++i) & 0x03FF); + utf8[p++] = (char >> 18) | 240; + utf8[p++] = ((char >> 12) & 63) | 128; + utf8[p++] = ((char >> 6) & 63) | 128; + utf8[p++] = (char & 63) | 128; + } else { + utf8[p++] = (char >> 12) | 224; + utf8[p++] = ((char >> 6) & 63) | 128; + utf8[p++] = (char & 63) | 128; + } + } + + return utf8; +}; + +/** + * Convert a string into a redis slot hash. + * @param str + * @returns {number} + */ +var generate = module.exports = function generate(str) { + var char; + var i = 0; + var start = -1; + var result = 0; + var resultHash = 0; + var utf8 = typeof str === 'string' ? 
toUTF8Array(str) : str; + var len = utf8.length; + + while (i < len) { + char = utf8[i++]; + if (start === -1) { + if (char === 0x7B) { + start = i; + } + } else if (char !== 0x7D) { + resultHash = lookup[(char ^ (resultHash >> 8)) & 0xFF] ^ (resultHash << 8); + } else if (i - 1 !== start) { + return resultHash & 0x3FFF; + } + + result = lookup[(char ^ (result >> 8)) & 0xFF] ^ (result << 8); + } + + return result & 0x3FFF; +}; + +/** + * Convert an array of multiple strings into a redis slot hash. + * Returns -1 if one of the keys is not for the same slot as the others + * @param keys + * @returns {number} + */ +module.exports.generateMulti = function generateMulti(keys) { + var i = 1; + var len = keys.length; + var base = generate(keys[0]); + + while (i < len) { + if (generate(keys[i++]) !== base) return -1; + } + + return base; +}; diff --git a/node_modules/cluster-key-slot/package.json b/node_modules/cluster-key-slot/package.json new file mode 100644 index 0000000..f75d3d6 --- /dev/null +++ b/node_modules/cluster-key-slot/package.json @@ -0,0 +1,56 @@ +{ + "name": "cluster-key-slot", + "version": "1.1.2", + "description": "Generates CRC hashes for strings - for use by node redis clients to determine key slots.", + "main": "lib/index.js", + "types": "index.d.ts", + "scripts": { + "benchmark": "node ./benchmark", + "posttest": "eslint ./lib && npm run coveralls", + "coveralls": "cat ./coverage/lcov.info | coveralls", + "test": "node ./node_modules/istanbul/lib/cli.js cover --preserve-comments ./node_modules/mocha/bin/_mocha -- -R spec", + "coverage:check": "node ./node_modules/istanbul/lib/cli.js check-coverage --branch 100 --statement 100" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/Salakar/cluster-key-slot.git" + }, + "keywords": [ + "redis", + "hash", + "crc", + "slot", + "calc", + "javascript", + "node", + "node_redis", + "ioredis" + ], + "engines": { + "node": ">=0.10.0" + }, + "devDependencies": { + "benchmark": "^2.1.0", + 
"codeclimate-test-reporter": "^0.3.1", + "coveralls": "^2.11.9", + "eslint": "^3.5.0", + "eslint-config-airbnb-base": "^7.1.0", + "eslint-plugin-import": "^1.8.0", + "istanbul": "^0.4.0", + "mocha": "^3.0.2" + }, + "author": { + "name": "Mike Diarmid", + "email": "mike.diarmid@gmail.com", + "url": "http://github.com/Salakar/" + }, + "license": "Apache-2.0", + "bugs": { + "url": "https://github.com/Salakar/cluster-key-slot/issues" + }, + "homepage": "https://github.com/Salakar/cluster-key-slot#readme", + "directories": { + "test": "test", + "lib": "lib" + } +} diff --git a/node_modules/color-support/LICENSE b/node_modules/color-support/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/color-support/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/color-support/README.md b/node_modules/color-support/README.md new file mode 100644 index 0000000..f89aa17 --- /dev/null +++ b/node_modules/color-support/README.md @@ -0,0 +1,129 @@ +# color-support + +A module which will endeavor to guess your terminal's level of color +support. 
+ +[![Build Status](https://travis-ci.org/isaacs/color-support.svg?branch=master)](https://travis-ci.org/isaacs/color-support) [![Coverage Status](https://coveralls.io/repos/github/isaacs/color-support/badge.svg?branch=master)](https://coveralls.io/github/isaacs/color-support?branch=master) + +This is similar to `supports-color`, but it does not read +`process.argv`. + +1. If not in a node environment, not supported. + +2. If stdout is not a TTY, not supported, unless the `ignoreTTY` + option is set. + +3. If the `TERM` environ is `dumb`, not supported, unless the + `ignoreDumb` option is set. + +4. If on Windows, then support 16 colors. + +5. If using Tmux, then support 256 colors. + +7. Handle continuous-integration servers. If `CI` or + `TEAMCITY_VERSION` are set in the environment, and `TRAVIS` is not + set, then color is not supported, unless `ignoreCI` option is set. + +6. Guess based on the `TERM_PROGRAM` environ. These terminals support + 16m colors: + + - `iTerm.app` version 3.x supports 16m colors, below support 256 + - `MacTerm` supports 16m colors + - `Apple_Terminal` supports 256 colors + - Have more things that belong on this list? Send a PR! + +8. Make a guess based on the `TERM` environment variable. Any + `xterm-256color` will get 256 colors. Any screen, xterm, vt100, + color, ansi, cygwin, or linux `TERM` will get 16 colors. + +9. If `COLORTERM` environment variable is set, then support 16 colors. + +10. At this point, we assume that color is not supported. 
+ +## USAGE + +```javascript +var testColorSupport = require('color-support') +var colorSupport = testColorSupport(/* options object */) + +if (!colorSupport) { + console.log('color is not supported') +} else if (colorSupport.has16m) { + console.log('\x1b[38;2;102;194;255m16m colors\x1b[0m') +} else if (colorSupport.has256) { + console.log('\x1b[38;5;119m256 colors\x1b[0m') +} else if (colorSupport.hasBasic) { + console.log('\x1b[31mbasic colors\x1b[0m') +} else { + console.log('this is impossible, but colors are not supported') +} +``` + +If you don't have any options to set, you can also just look at the +flags which will all be set on the test function itself. (Of course, +this doesn't return a falsey value when colors aren't supported, and +doesn't allow you to set options.) + +```javascript +var colorSupport = require('color-support') + +if (colorSupport.has16m) { + console.log('\x1b[38;2;102;194;255m16m colors\x1b[0m') +} else if (colorSupport.has256) { + console.log('\x1b[38;5;119m256 colors\x1b[0m') +} else if (colorSupport.hasBasic) { + console.log('\x1b[31mbasic colors\x1b[0m') +} else { + console.log('colors are not supported') +} +``` + +## Options + +You can pass in the following options. + +* ignoreTTY - default false. Ignore the `isTTY` check. +* ignoreDumb - default false. Ignore `TERM=dumb` environ check. +* ignoreCI - default false. Ignore `CI` environ check. +* env - Object for environment vars. Defaults to `process.env`. +* stream - Stream for `isTTY` check. Defaults to `process.stdout`. +* term - String for `TERM` checking. Defaults to `env.TERM`. +* alwaysReturn - default false. Return an object when colors aren't + supported (instead of returning `false`). +* level - A number from 0 to 3. This will return a result for the + specified level. This is useful if you want to be able to set the + color support level explicitly as a number in an environment + variable or config, but then use the object flags in your program. 
+ Except for `alwaysReturn` to return an object for level 0, all other + options are ignored, since no checking is done if a level is + explicitly set. + +## Return Value + +If no color support is available, then `false` is returned by default, +unless the `alwaysReturn` flag is set to `true`. This is so that the +simple question of "can I use colors or not" can treat any truthy +return as "yes". + +Otherwise, the return object has the following fields: + +* `level` - A number from 0 to 3 + * `0` - No color support + * `1` - Basic (16) color support + * `2` - 256 color support + * `3` - 16 million (true) color support +* `hasBasic` - Boolean +* `has256` - Boolean +* `has16m` - Boolean + +## CLI + +You can run the `color-support` bin from the command line which will +just dump the values as this module calculates them in whatever env +it's run. It takes no command line arguments. + +## Credits + +This is a spiritual, if not actual, fork of +[supports-color](http://npm.im/supports-color) by the ever prolific +[Sindre Sorhus](http://npm.im/~sindresorhus). 
diff --git a/node_modules/color-support/bin.js b/node_modules/color-support/bin.js new file mode 100644 index 0000000..3c0a967 --- /dev/null +++ b/node_modules/color-support/bin.js @@ -0,0 +1,3 @@ +#!/usr/bin/env node +var colorSupport = require('./')({alwaysReturn: true }) +console.log(JSON.stringify(colorSupport, null, 2)) diff --git a/node_modules/color-support/browser.js b/node_modules/color-support/browser.js new file mode 100644 index 0000000..ab5c663 --- /dev/null +++ b/node_modules/color-support/browser.js @@ -0,0 +1,14 @@ +module.exports = colorSupport({ alwaysReturn: true }, colorSupport) + +function colorSupport(options, obj) { + obj = obj || {} + options = options || {} + obj.level = 0 + obj.hasBasic = false + obj.has256 = false + obj.has16m = false + if (!options.alwaysReturn) { + return false + } + return obj +} diff --git a/node_modules/color-support/index.js b/node_modules/color-support/index.js new file mode 100644 index 0000000..6b6f3b2 --- /dev/null +++ b/node_modules/color-support/index.js @@ -0,0 +1,134 @@ +// call it on itself so we can test the export val for basic stuff +module.exports = colorSupport({ alwaysReturn: true }, colorSupport) + +function hasNone (obj, options) { + obj.level = 0 + obj.hasBasic = false + obj.has256 = false + obj.has16m = false + if (!options.alwaysReturn) { + return false + } + return obj +} + +function hasBasic (obj) { + obj.hasBasic = true + obj.has256 = false + obj.has16m = false + obj.level = 1 + return obj +} + +function has256 (obj) { + obj.hasBasic = true + obj.has256 = true + obj.has16m = false + obj.level = 2 + return obj +} + +function has16m (obj) { + obj.hasBasic = true + obj.has256 = true + obj.has16m = true + obj.level = 3 + return obj +} + +function colorSupport (options, obj) { + options = options || {} + + obj = obj || {} + + // if just requesting a specific level, then return that. 
+ if (typeof options.level === 'number') { + switch (options.level) { + case 0: + return hasNone(obj, options) + case 1: + return hasBasic(obj) + case 2: + return has256(obj) + case 3: + return has16m(obj) + } + } + + obj.level = 0 + obj.hasBasic = false + obj.has256 = false + obj.has16m = false + + if (typeof process === 'undefined' || + !process || + !process.stdout || + !process.env || + !process.platform) { + return hasNone(obj, options) + } + + var env = options.env || process.env + var stream = options.stream || process.stdout + var term = options.term || env.TERM || '' + var platform = options.platform || process.platform + + if (!options.ignoreTTY && !stream.isTTY) { + return hasNone(obj, options) + } + + if (!options.ignoreDumb && term === 'dumb' && !env.COLORTERM) { + return hasNone(obj, options) + } + + if (platform === 'win32') { + return hasBasic(obj) + } + + if (env.TMUX) { + return has256(obj) + } + + if (!options.ignoreCI && (env.CI || env.TEAMCITY_VERSION)) { + if (env.TRAVIS) { + return has256(obj) + } else { + return hasNone(obj, options) + } + } + + // TODO: add more term programs + switch (env.TERM_PROGRAM) { + case 'iTerm.app': + var ver = env.TERM_PROGRAM_VERSION || '0.' 
+ if (/^[0-2]\./.test(ver)) { + return has256(obj) + } else { + return has16m(obj) + } + + case 'HyperTerm': + case 'Hyper': + return has16m(obj) + + case 'MacTerm': + return has16m(obj) + + case 'Apple_Terminal': + return has256(obj) + } + + if (/^xterm-256/.test(term)) { + return has256(obj) + } + + if (/^screen|^xterm|^vt100|color|ansi|cygwin|linux/i.test(term)) { + return hasBasic(obj) + } + + if (env.COLORTERM) { + return hasBasic(obj) + } + + return hasNone(obj, options) +} diff --git a/node_modules/color-support/package.json b/node_modules/color-support/package.json new file mode 100644 index 0000000..f3e3b77 --- /dev/null +++ b/node_modules/color-support/package.json @@ -0,0 +1,36 @@ +{ + "name": "color-support", + "version": "1.1.3", + "description": "A module which will endeavor to guess your terminal's level of color support.", + "main": "index.js", + "browser": "browser.js", + "bin": "bin.js", + "devDependencies": { + "tap": "^10.3.3" + }, + "scripts": { + "test": "tap test/*.js --100 -J", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/color-support.git" + }, + "keywords": [ + "terminal", + "color", + "support", + "xterm", + "truecolor", + "256" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "browser.js", + "index.js", + "bin.js" + ] +} diff --git a/node_modules/concat-map/.travis.yml b/node_modules/concat-map/.travis.yml new file mode 100644 index 0000000..f1d0f13 --- /dev/null +++ b/node_modules/concat-map/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - 0.4 + - 0.6 diff --git a/node_modules/concat-map/LICENSE b/node_modules/concat-map/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/concat-map/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/concat-map/README.markdown b/node_modules/concat-map/README.markdown new file mode 100644 index 0000000..408f70a --- /dev/null +++ b/node_modules/concat-map/README.markdown @@ -0,0 +1,62 @@ +concat-map +========== + +Concatenative mapdashery. 
+ +[![browser support](http://ci.testling.com/substack/node-concat-map.png)](http://ci.testling.com/substack/node-concat-map) + +[![build status](https://secure.travis-ci.org/substack/node-concat-map.png)](http://travis-ci.org/substack/node-concat-map) + +example +======= + +``` js +var concatMap = require('concat-map'); +var xs = [ 1, 2, 3, 4, 5, 6 ]; +var ys = concatMap(xs, function (x) { + return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; +}); +console.dir(ys); +``` + +*** + +``` +[ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ] +``` + +methods +======= + +``` js +var concatMap = require('concat-map') +``` + +concatMap(xs, fn) +----------------- + +Return an array of concatenated elements by calling `fn(x, i)` for each element +`x` and each index `i` in the array `xs`. + +When `fn(x, i)` returns an array, its result will be concatenated with the +result array. If `fn(x, i)` returns anything else, that value will be pushed +onto the end of the result array. + +install +======= + +With [npm](http://npmjs.org) do: + +``` +npm install concat-map +``` + +license +======= + +MIT + +notes +===== + +This module was written while sitting high above the ground in a tree. diff --git a/node_modules/concat-map/example/map.js b/node_modules/concat-map/example/map.js new file mode 100644 index 0000000..3365621 --- /dev/null +++ b/node_modules/concat-map/example/map.js @@ -0,0 +1,6 @@ +var concatMap = require('../'); +var xs = [ 1, 2, 3, 4, 5, 6 ]; +var ys = concatMap(xs, function (x) { + return x % 2 ? 
[ x - 0.1, x, x + 0.1 ] : []; +}); +console.dir(ys); diff --git a/node_modules/concat-map/index.js b/node_modules/concat-map/index.js new file mode 100644 index 0000000..b29a781 --- /dev/null +++ b/node_modules/concat-map/index.js @@ -0,0 +1,13 @@ +module.exports = function (xs, fn) { + var res = []; + for (var i = 0; i < xs.length; i++) { + var x = fn(xs[i], i); + if (isArray(x)) res.push.apply(res, x); + else res.push(x); + } + return res; +}; + +var isArray = Array.isArray || function (xs) { + return Object.prototype.toString.call(xs) === '[object Array]'; +}; diff --git a/node_modules/concat-map/package.json b/node_modules/concat-map/package.json new file mode 100644 index 0000000..d3640e6 --- /dev/null +++ b/node_modules/concat-map/package.json @@ -0,0 +1,43 @@ +{ + "name" : "concat-map", + "description" : "concatenative mapdashery", + "version" : "0.0.1", + "repository" : { + "type" : "git", + "url" : "git://github.com/substack/node-concat-map.git" + }, + "main" : "index.js", + "keywords" : [ + "concat", + "concatMap", + "map", + "functional", + "higher-order" + ], + "directories" : { + "example" : "example", + "test" : "test" + }, + "scripts" : { + "test" : "tape test/*.js" + }, + "devDependencies" : { + "tape" : "~2.4.0" + }, + "license" : "MIT", + "author" : { + "name" : "James Halliday", + "email" : "mail@substack.net", + "url" : "http://substack.net" + }, + "testling" : { + "files" : "test/*.js", + "browsers" : { + "ie" : [ 6, 7, 8, 9 ], + "ff" : [ 3.5, 10, 15.0 ], + "chrome" : [ 10, 22 ], + "safari" : [ 5.1 ], + "opera" : [ 12 ] + } + } +} diff --git a/node_modules/concat-map/test/map.js b/node_modules/concat-map/test/map.js new file mode 100644 index 0000000..fdbd702 --- /dev/null +++ b/node_modules/concat-map/test/map.js @@ -0,0 +1,39 @@ +var concatMap = require('../'); +var test = require('tape'); + +test('empty or not', function (t) { + var xs = [ 1, 2, 3, 4, 5, 6 ]; + var ixes = []; + var ys = concatMap(xs, function (x, ix) { + ixes.push(ix); + 
return x % 2 ? [ x - 0.1, x, x + 0.1 ] : []; + }); + t.same(ys, [ 0.9, 1, 1.1, 2.9, 3, 3.1, 4.9, 5, 5.1 ]); + t.same(ixes, [ 0, 1, 2, 3, 4, 5 ]); + t.end(); +}); + +test('always something', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function (x) { + return x === 'b' ? [ 'B', 'B', 'B' ] : [ x ]; + }); + t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]); + t.end(); +}); + +test('scalars', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function (x) { + return x === 'b' ? [ 'B', 'B', 'B' ] : x; + }); + t.same(ys, [ 'a', 'B', 'B', 'B', 'c', 'd' ]); + t.end(); +}); + +test('undefs', function (t) { + var xs = [ 'a', 'b', 'c', 'd' ]; + var ys = concatMap(xs, function () {}); + t.same(ys, [ undefined, undefined, undefined, undefined ]); + t.end(); +}); diff --git a/node_modules/connect-redis/.eslintrc b/node_modules/connect-redis/.eslintrc new file mode 100644 index 0000000..a9af0c2 --- /dev/null +++ b/node_modules/connect-redis/.eslintrc @@ -0,0 +1,26 @@ +{ + "root": true, + "parser": "@typescript-eslint/parser", + "plugins": ["@typescript-eslint"], + "extends": [ + "eslint:recommended", + "plugin:@typescript-eslint/recommended", + "prettier" + ], + "env": { + "node": true, + "es6": true + }, + "parserOptions": { + "sourceType": "module", + "project": "./tsconfig.json", + "ecmaVersion": 2020 + }, + "rules": { + "prefer-const": 0, + "@typescript-eslint/no-explicit-any": 0, + "@typescript-eslint/no-empty-function": 0, + "@typescript-eslint/explicit-function-return-type": 0, + "@typescript-eslint/no-unused-vars": [2, {"argsIgnorePattern": "^_"}] + } +} diff --git a/node_modules/connect-redis/.github/release-drafter.yml b/node_modules/connect-redis/.github/release-drafter.yml new file mode 100644 index 0000000..a7a6826 --- /dev/null +++ b/node_modules/connect-redis/.github/release-drafter.yml @@ -0,0 +1,30 @@ +name-template: "v$RESOLVED_VERSION" +tag-template: "v$RESOLVED_VERSION" +template: | + $CHANGES 
+category-template: "#### $TITLE" +change-template: "* #$NUMBER - $TITLE (@$AUTHOR)" +categories: + - title: "Breaking changes" + label: "breaking" + - title: "Enhancements" + label: "enhancement" + - title: "Bug fixes" + label: "bug" + - title: "Maintenance" + label: "chore" + +version-resolver: + major: + labels: + - "breaking" + minor: + labels: + - "enhancement" + patch: + labels: + - "bug" + - "chore" + +exclude-labels: + - "skip-changelog" diff --git a/node_modules/connect-redis/.github/workflows/build.yml b/node_modules/connect-redis/.github/workflows/build.yml new file mode 100644 index 0000000..45c1925 --- /dev/null +++ b/node_modules/connect-redis/.github/workflows/build.yml @@ -0,0 +1,23 @@ +name: build +on: + push: + branches: + - master + pull_request: +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + node: [16, 18, 20] + name: Node v${{ matrix.node }} + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node }} + - run: sudo apt-get install -y redis-server + - run: npm install + - run: npm run fmt-check + - run: npm run lint + - run: npm test diff --git a/node_modules/connect-redis/.github/workflows/release.yml b/node_modules/connect-redis/.github/workflows/release.yml new file mode 100644 index 0000000..4aba80b --- /dev/null +++ b/node_modules/connect-redis/.github/workflows/release.yml @@ -0,0 +1,13 @@ +name: release-draft +on: + workflow_dispatch: + push: + branches: + - master +jobs: + update_release_draft: + runs-on: ubuntu-latest + steps: + - uses: release-drafter/release-drafter@master + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/node_modules/connect-redis/.github/workflows/stale.yml b/node_modules/connect-redis/.github/workflows/stale.yml new file mode 100644 index 0000000..4b66744 --- /dev/null +++ b/node_modules/connect-redis/.github/workflows/stale.yml @@ -0,0 +1,17 @@ +name: stale-issues-prs +on: + workflow_dispatch: + schedule: + - cron: "0 0 * * *" 
+jobs: + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v3 + with: + stale-issue-message: "This issue is stale because it has been open 30 days with no activity. Remove stale label or comment or this will be closed in 5 days." + stale-pr-message: "This PR is stale because it has been open 30 days with no activity. Remove stale label or comment or this will be closed in 5 days." + close-issue-message: "This issue was closed because it has been stalled for 5 days with no activity." + days-before-stale: 30 + days-before-close: 5 + stale-issue-label: stale diff --git a/node_modules/connect-redis/.prettierrc b/node_modules/connect-redis/.prettierrc new file mode 100644 index 0000000..841edad --- /dev/null +++ b/node_modules/connect-redis/.prettierrc @@ -0,0 +1,5 @@ +{ + "semi": false, + "bracketSpacing": false, + "plugins": ["prettier-plugin-organize-imports"] +} diff --git a/node_modules/connect-redis/dist/cjs/index.d.ts b/node_modules/connect-redis/dist/cjs/index.d.ts new file mode 100644 index 0000000..4302c41 --- /dev/null +++ b/node_modules/connect-redis/dist/cjs/index.d.ts @@ -0,0 +1,48 @@ +import { SessionData, Store } from "express-session"; +interface NormalizedRedisClient { + get(key: string): Promise; + set(key: string, value: string, ttl?: number): Promise; + expire(key: string, ttl: number): Promise; + scanIterator(match: string, count: number): AsyncIterable; + del(key: string[]): Promise; + mget(key: string[]): Promise<(string | null)[]>; +} +interface Serializer { + parse(s: string): SessionData | Promise; + stringify(s: SessionData): string; +} +interface RedisStoreOptions { + client: any; + prefix?: string; + scanCount?: number; + serializer?: Serializer; + ttl?: number | { + (sess: SessionData): number; + }; + disableTTL?: boolean; + disableTouch?: boolean; +} +declare class RedisStore extends Store { + client: NormalizedRedisClient; + prefix: string; + scanCount: number; + serializer: Serializer; + ttl: number | { + (sess: 
SessionData): number; + }; + disableTTL: boolean; + disableTouch: boolean; + constructor(opts: RedisStoreOptions); + private normalizeClient; + get(sid: string, cb?: (_err?: unknown, _data?: any) => void): Promise; + set(sid: string, sess: SessionData, cb?: (_err?: unknown, _data?: any) => void): Promise; + touch(sid: string, sess: SessionData, cb?: (_err?: unknown, _data?: any) => void): Promise; + destroy(sid: string, cb?: (_err?: unknown, _data?: any) => void): Promise; + clear(cb?: (_err?: unknown, _data?: any) => void): Promise; + length(cb?: (_err?: unknown, _data?: any) => void): Promise; + ids(cb?: (_err?: unknown, _data?: any) => void): Promise; + all(cb?: (_err?: unknown, _data?: any) => void): Promise; + private _getTTL; + private _getAllKeys; +} +export default RedisStore; diff --git a/node_modules/connect-redis/dist/cjs/index.js b/node_modules/connect-redis/dist/cjs/index.js new file mode 100644 index 0000000..7155b33 --- /dev/null +++ b/node_modules/connect-redis/dist/cjs/index.js @@ -0,0 +1,179 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const express_session_1 = require("express-session"); +const noop = (_err, _data) => { }; +class RedisStore extends express_session_1.Store { + constructor(opts) { + super(); + this.prefix = opts.prefix == null ? "sess:" : opts.prefix; + this.scanCount = opts.scanCount || 100; + this.serializer = opts.serializer || JSON; + this.ttl = opts.ttl || 86400; // One day in seconds. + this.disableTTL = opts.disableTTL || false; + this.disableTouch = opts.disableTouch || false; + this.client = this.normalizeClient(opts.client); + } + // Create a redis and ioredis compatible client + normalizeClient(client) { + let isRedis = "scanIterator" in client; + return { + get: (key) => client.get(key), + set: (key, val, ttl) => { + if (ttl) { + return isRedis + ? 
client.set(key, val, { EX: ttl }) + : client.set(key, val, "EX", ttl); + } + return client.set(key, val); + }, + del: (key) => client.del(key), + expire: (key, ttl) => client.expire(key, ttl), + mget: (keys) => (isRedis ? client.mGet(keys) : client.mget(keys)), + scanIterator: (match, count) => { + if (isRedis) + return client.scanIterator({ MATCH: match, COUNT: count }); + // ioredis impl. + return (async function* () { + let [c, xs] = await client.scan("0", "MATCH", match, "COUNT", count); + for (let key of xs) + yield key; + while (c !== "0") { + ; + [c, xs] = await client.scan(c, "MATCH", match, "COUNT", count); + for (let key of xs) + yield key; + } + })(); + }, + }; + } + async get(sid, cb = noop) { + let key = this.prefix + sid; + try { + let data = await this.client.get(key); + if (!data) + return cb(); + return cb(null, await this.serializer.parse(data)); + } + catch (err) { + return cb(err); + } + } + async set(sid, sess, cb = noop) { + let key = this.prefix + sid; + let ttl = this._getTTL(sess); + try { + let val = this.serializer.stringify(sess); + if (ttl > 0) { + if (this.disableTTL) + await this.client.set(key, val); + else + await this.client.set(key, val, ttl); + return cb(); + } + else { + return this.destroy(sid, cb); + } + } + catch (err) { + return cb(err); + } + } + async touch(sid, sess, cb = noop) { + let key = this.prefix + sid; + if (this.disableTouch || this.disableTTL) + return cb(); + try { + await this.client.expire(key, this._getTTL(sess)); + return cb(); + } + catch (err) { + return cb(err); + } + } + async destroy(sid, cb = noop) { + let key = this.prefix + sid; + try { + await this.client.del([key]); + return cb(); + } + catch (err) { + return cb(err); + } + } + async clear(cb = noop) { + try { + let keys = await this._getAllKeys(); + if (!keys.length) + return cb(); + await this.client.del(keys); + return cb(); + } + catch (err) { + return cb(err); + } + } + async length(cb = noop) { + try { + let keys = await this._getAllKeys(); 
+ return cb(null, keys.length); + } + catch (err) { + return cb(err); + } + } + async ids(cb = noop) { + let len = this.prefix.length; + try { + let keys = await this._getAllKeys(); + return cb(null, keys.map((k) => k.substring(len))); + } + catch (err) { + return cb(err); + } + } + async all(cb = noop) { + let len = this.prefix.length; + try { + let keys = await this._getAllKeys(); + if (keys.length === 0) + return cb(null, []); + let data = await this.client.mget(keys); + let results = data.reduce((acc, raw, idx) => { + if (!raw) + return acc; + let sess = this.serializer.parse(raw); + sess.id = keys[idx].substring(len); + acc.push(sess); + return acc; + }, []); + return cb(null, results); + } + catch (err) { + return cb(err); + } + } + _getTTL(sess) { + if (typeof this.ttl === "function") { + return this.ttl(sess); + } + let ttl; + if (sess && sess.cookie && sess.cookie.expires) { + let ms = Number(new Date(sess.cookie.expires)) - Date.now(); + ttl = Math.ceil(ms / 1000); + } + else { + ttl = this.ttl; + } + return ttl; + } + async _getAllKeys() { + let pattern = this.prefix + "*"; + let keys = []; + for await (let key of this.client.scanIterator(pattern, this.scanCount)) { + keys.push(key); + } + return keys; + } +} +exports.default = RedisStore; diff --git a/node_modules/connect-redis/dist/cjs/index_test.d.ts b/node_modules/connect-redis/dist/cjs/index_test.d.ts new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/connect-redis/dist/cjs/index_test.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/connect-redis/dist/cjs/index_test.js b/node_modules/connect-redis/dist/cjs/index_test.js new file mode 100644 index 0000000..4f4fdda --- /dev/null +++ b/node_modules/connect-redis/dist/cjs/index_test.js @@ -0,0 +1,124 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const blue_tape_1 = __importDefault(require("blue-tape")); +const express_session_1 = require("express-session"); +const ioredis_1 = require("ioredis"); +const node_util_1 = require("node:util"); +const redis_1 = require("redis"); +const _1 = __importDefault(require("./")); +const redisSrv = __importStar(require("./testdata/server")); +(0, blue_tape_1.default)("setup", async () => { + await redisSrv.connect(); +}); +(0, blue_tape_1.default)("defaults", async (t) => { + let client = (0, redis_1.createClient)({ url: `redis://localhost:${redisSrv.port}` }); + await client.connect(); + let store = new _1.default({ client }); + t.ok(store.client, "stores client"); + t.equal(store.prefix, "sess:", "defaults to sess:"); + t.equal(store.ttl, 86400, "defaults to one day"); + t.equal(store.scanCount, 100, "defaults SCAN count to 100"); + t.equal(store.serializer, JSON, "defaults to JSON serialization"); + t.equal(store.disableTouch, false, "defaults to having `touch` enabled"); + t.equal(store.disableTTL, false, "defaults to having `ttl` enabled"); + await client.disconnect(); +}); +(0, blue_tape_1.default)("redis", async (t) => { + let client = (0, redis_1.createClient)({ url: `redis://localhost:${redisSrv.port}` }); + await client.connect(); + let store = new _1.default({ client }); + await lifecycleTest(store, client, t); + await client.disconnect(); +}); +(0, blue_tape_1.default)("ioredis", async (t) => { + let client = new ioredis_1.Redis(`redis://localhost:${redisSrv.port}`); + let store = new _1.default({ client }); + await lifecycleTest(store, client, t); + client.disconnect(); +}); +(0, blue_tape_1.default)("teardown", redisSrv.disconnect); +async function lifecycleTest(store, client, t) { + const P = (f) => (0, node_util_1.promisify)(f).bind(store); + let res = await P(store.clear)(); + let sess = { foo: "bar" }; + await P(store.set)("123", sess); + res = await 
P(store.get)("123"); + t.same(res, sess, "store.get"); + let ttl = await client.ttl("sess:123"); + t.ok(ttl >= 86399, "check one day ttl"); + ttl = 60; + let expires = new Date(Date.now() + ttl * 1000).toISOString(); + await P(store.set)("456", { cookie: { expires } }); + ttl = await client.ttl("sess:456"); + t.ok(ttl <= 60, "check expires ttl"); + ttl = 90; + let expires2 = new Date(Date.now() + ttl * 1000).toISOString(); + await P(store.touch)("456", { cookie: { expires: expires2 } }); + ttl = await client.ttl("sess:456"); + t.ok(ttl > 60, "check expires ttl touch"); + res = await P(store.length)(); + t.equal(res, 2, "stored two keys length"); + res = await P(store.ids)(); + res.sort(); + t.same(res, ["123", "456"], "stored two keys ids"); + res = await P(store.all)(); + res.sort((a, b) => (a.id > b.id ? 1 : -1)); + t.same(res, [ + { id: "123", foo: "bar" }, + { id: "456", cookie: { expires } }, + ], "stored two keys data"); + await P(store.destroy)("456"); + res = await P(store.length)(); + t.equal(res, 1, "one key remains"); + res = await P(store.clear)(); + res = await P(store.length)(); + t.equal(res, 0, "no keys remain"); + let count = 1000; + await load(store, count); + res = await P(store.length)(); + t.equal(res, count, "bulk count"); + await P(store.clear)(); + res = await P(store.length)(); + t.equal(res, 0, "bulk clear"); + expires = new Date(Date.now() + ttl * 1000).toISOString(); // expires in the future + res = await P(store.set)("789", { cookie: { expires } }); + res = await P(store.length)(); + t.equal(res, 1, "one key exists (session 789)"); + expires = new Date(Date.now() - ttl * 1000).toISOString(); // expires in the past + await P(store.set)("789", { cookie: { expires } }); + res = await P(store.length)(); + t.equal(res, 0, "no key remains and that includes session 789"); +} +async function load(store, count) { + let cookie = new express_session_1.Cookie(); + for (let sid = 0; sid < count; sid++) { + cookie.expires = new Date(Date.now() + 
1000); + await store.set("s" + sid, { cookie }); + } +} diff --git a/node_modules/connect-redis/dist/esm/index.d.ts b/node_modules/connect-redis/dist/esm/index.d.ts new file mode 100644 index 0000000..4302c41 --- /dev/null +++ b/node_modules/connect-redis/dist/esm/index.d.ts @@ -0,0 +1,48 @@ +import { SessionData, Store } from "express-session"; +interface NormalizedRedisClient { + get(key: string): Promise; + set(key: string, value: string, ttl?: number): Promise; + expire(key: string, ttl: number): Promise; + scanIterator(match: string, count: number): AsyncIterable; + del(key: string[]): Promise; + mget(key: string[]): Promise<(string | null)[]>; +} +interface Serializer { + parse(s: string): SessionData | Promise; + stringify(s: SessionData): string; +} +interface RedisStoreOptions { + client: any; + prefix?: string; + scanCount?: number; + serializer?: Serializer; + ttl?: number | { + (sess: SessionData): number; + }; + disableTTL?: boolean; + disableTouch?: boolean; +} +declare class RedisStore extends Store { + client: NormalizedRedisClient; + prefix: string; + scanCount: number; + serializer: Serializer; + ttl: number | { + (sess: SessionData): number; + }; + disableTTL: boolean; + disableTouch: boolean; + constructor(opts: RedisStoreOptions); + private normalizeClient; + get(sid: string, cb?: (_err?: unknown, _data?: any) => void): Promise; + set(sid: string, sess: SessionData, cb?: (_err?: unknown, _data?: any) => void): Promise; + touch(sid: string, sess: SessionData, cb?: (_err?: unknown, _data?: any) => void): Promise; + destroy(sid: string, cb?: (_err?: unknown, _data?: any) => void): Promise; + clear(cb?: (_err?: unknown, _data?: any) => void): Promise; + length(cb?: (_err?: unknown, _data?: any) => void): Promise; + ids(cb?: (_err?: unknown, _data?: any) => void): Promise; + all(cb?: (_err?: unknown, _data?: any) => void): Promise; + private _getTTL; + private _getAllKeys; +} +export default RedisStore; diff --git 
a/node_modules/connect-redis/dist/esm/index.js b/node_modules/connect-redis/dist/esm/index.js new file mode 100644 index 0000000..6d92d63 --- /dev/null +++ b/node_modules/connect-redis/dist/esm/index.js @@ -0,0 +1,177 @@ +import { Store } from "express-session"; +const noop = (_err, _data) => { }; +class RedisStore extends Store { + constructor(opts) { + super(); + this.prefix = opts.prefix == null ? "sess:" : opts.prefix; + this.scanCount = opts.scanCount || 100; + this.serializer = opts.serializer || JSON; + this.ttl = opts.ttl || 86400; // One day in seconds. + this.disableTTL = opts.disableTTL || false; + this.disableTouch = opts.disableTouch || false; + this.client = this.normalizeClient(opts.client); + } + // Create a redis and ioredis compatible client + normalizeClient(client) { + let isRedis = "scanIterator" in client; + return { + get: (key) => client.get(key), + set: (key, val, ttl) => { + if (ttl) { + return isRedis + ? client.set(key, val, { EX: ttl }) + : client.set(key, val, "EX", ttl); + } + return client.set(key, val); + }, + del: (key) => client.del(key), + expire: (key, ttl) => client.expire(key, ttl), + mget: (keys) => (isRedis ? client.mGet(keys) : client.mget(keys)), + scanIterator: (match, count) => { + if (isRedis) + return client.scanIterator({ MATCH: match, COUNT: count }); + // ioredis impl. 
+ return (async function* () { + let [c, xs] = await client.scan("0", "MATCH", match, "COUNT", count); + for (let key of xs) + yield key; + while (c !== "0") { + ; + [c, xs] = await client.scan(c, "MATCH", match, "COUNT", count); + for (let key of xs) + yield key; + } + })(); + }, + }; + } + async get(sid, cb = noop) { + let key = this.prefix + sid; + try { + let data = await this.client.get(key); + if (!data) + return cb(); + return cb(null, await this.serializer.parse(data)); + } + catch (err) { + return cb(err); + } + } + async set(sid, sess, cb = noop) { + let key = this.prefix + sid; + let ttl = this._getTTL(sess); + try { + let val = this.serializer.stringify(sess); + if (ttl > 0) { + if (this.disableTTL) + await this.client.set(key, val); + else + await this.client.set(key, val, ttl); + return cb(); + } + else { + return this.destroy(sid, cb); + } + } + catch (err) { + return cb(err); + } + } + async touch(sid, sess, cb = noop) { + let key = this.prefix + sid; + if (this.disableTouch || this.disableTTL) + return cb(); + try { + await this.client.expire(key, this._getTTL(sess)); + return cb(); + } + catch (err) { + return cb(err); + } + } + async destroy(sid, cb = noop) { + let key = this.prefix + sid; + try { + await this.client.del([key]); + return cb(); + } + catch (err) { + return cb(err); + } + } + async clear(cb = noop) { + try { + let keys = await this._getAllKeys(); + if (!keys.length) + return cb(); + await this.client.del(keys); + return cb(); + } + catch (err) { + return cb(err); + } + } + async length(cb = noop) { + try { + let keys = await this._getAllKeys(); + return cb(null, keys.length); + } + catch (err) { + return cb(err); + } + } + async ids(cb = noop) { + let len = this.prefix.length; + try { + let keys = await this._getAllKeys(); + return cb(null, keys.map((k) => k.substring(len))); + } + catch (err) { + return cb(err); + } + } + async all(cb = noop) { + let len = this.prefix.length; + try { + let keys = await this._getAllKeys(); + if 
(keys.length === 0) + return cb(null, []); + let data = await this.client.mget(keys); + let results = data.reduce((acc, raw, idx) => { + if (!raw) + return acc; + let sess = this.serializer.parse(raw); + sess.id = keys[idx].substring(len); + acc.push(sess); + return acc; + }, []); + return cb(null, results); + } + catch (err) { + return cb(err); + } + } + _getTTL(sess) { + if (typeof this.ttl === "function") { + return this.ttl(sess); + } + let ttl; + if (sess && sess.cookie && sess.cookie.expires) { + let ms = Number(new Date(sess.cookie.expires)) - Date.now(); + ttl = Math.ceil(ms / 1000); + } + else { + ttl = this.ttl; + } + return ttl; + } + async _getAllKeys() { + let pattern = this.prefix + "*"; + let keys = []; + for await (let key of this.client.scanIterator(pattern, this.scanCount)) { + keys.push(key); + } + return keys; + } +} +export default RedisStore; diff --git a/node_modules/connect-redis/dist/esm/index_test.d.ts b/node_modules/connect-redis/dist/esm/index_test.d.ts new file mode 100644 index 0000000..cb0ff5c --- /dev/null +++ b/node_modules/connect-redis/dist/esm/index_test.d.ts @@ -0,0 +1 @@ +export {}; diff --git a/node_modules/connect-redis/dist/esm/index_test.js b/node_modules/connect-redis/dist/esm/index_test.js new file mode 100644 index 0000000..37cb773 --- /dev/null +++ b/node_modules/connect-redis/dist/esm/index_test.js @@ -0,0 +1,96 @@ +import test from "blue-tape"; +import { Cookie } from "express-session"; +import { Redis } from "ioredis"; +import { promisify } from "node:util"; +import { createClient } from "redis"; +import RedisStore from "./"; +import * as redisSrv from "./testdata/server"; +test("setup", async () => { + await redisSrv.connect(); +}); +test("defaults", async (t) => { + let client = createClient({ url: `redis://localhost:${redisSrv.port}` }); + await client.connect(); + let store = new RedisStore({ client }); + t.ok(store.client, "stores client"); + t.equal(store.prefix, "sess:", "defaults to sess:"); + 
t.equal(store.ttl, 86400, "defaults to one day"); + t.equal(store.scanCount, 100, "defaults SCAN count to 100"); + t.equal(store.serializer, JSON, "defaults to JSON serialization"); + t.equal(store.disableTouch, false, "defaults to having `touch` enabled"); + t.equal(store.disableTTL, false, "defaults to having `ttl` enabled"); + await client.disconnect(); +}); +test("redis", async (t) => { + let client = createClient({ url: `redis://localhost:${redisSrv.port}` }); + await client.connect(); + let store = new RedisStore({ client }); + await lifecycleTest(store, client, t); + await client.disconnect(); +}); +test("ioredis", async (t) => { + let client = new Redis(`redis://localhost:${redisSrv.port}`); + let store = new RedisStore({ client }); + await lifecycleTest(store, client, t); + client.disconnect(); +}); +test("teardown", redisSrv.disconnect); +async function lifecycleTest(store, client, t) { + const P = (f) => promisify(f).bind(store); + let res = await P(store.clear)(); + let sess = { foo: "bar" }; + await P(store.set)("123", sess); + res = await P(store.get)("123"); + t.same(res, sess, "store.get"); + let ttl = await client.ttl("sess:123"); + t.ok(ttl >= 86399, "check one day ttl"); + ttl = 60; + let expires = new Date(Date.now() + ttl * 1000).toISOString(); + await P(store.set)("456", { cookie: { expires } }); + ttl = await client.ttl("sess:456"); + t.ok(ttl <= 60, "check expires ttl"); + ttl = 90; + let expires2 = new Date(Date.now() + ttl * 1000).toISOString(); + await P(store.touch)("456", { cookie: { expires: expires2 } }); + ttl = await client.ttl("sess:456"); + t.ok(ttl > 60, "check expires ttl touch"); + res = await P(store.length)(); + t.equal(res, 2, "stored two keys length"); + res = await P(store.ids)(); + res.sort(); + t.same(res, ["123", "456"], "stored two keys ids"); + res = await P(store.all)(); + res.sort((a, b) => (a.id > b.id ? 
1 : -1)); + t.same(res, [ + { id: "123", foo: "bar" }, + { id: "456", cookie: { expires } }, + ], "stored two keys data"); + await P(store.destroy)("456"); + res = await P(store.length)(); + t.equal(res, 1, "one key remains"); + res = await P(store.clear)(); + res = await P(store.length)(); + t.equal(res, 0, "no keys remain"); + let count = 1000; + await load(store, count); + res = await P(store.length)(); + t.equal(res, count, "bulk count"); + await P(store.clear)(); + res = await P(store.length)(); + t.equal(res, 0, "bulk clear"); + expires = new Date(Date.now() + ttl * 1000).toISOString(); // expires in the future + res = await P(store.set)("789", { cookie: { expires } }); + res = await P(store.length)(); + t.equal(res, 1, "one key exists (session 789)"); + expires = new Date(Date.now() - ttl * 1000).toISOString(); // expires in the past + await P(store.set)("789", { cookie: { expires } }); + res = await P(store.length)(); + t.equal(res, 0, "no key remains and that includes session 789"); +} +async function load(store, count) { + let cookie = new Cookie(); + for (let sid = 0; sid < count; sid++) { + cookie.expires = new Date(Date.now() + 1000); + await store.set("s" + sid, { cookie }); + } +} diff --git a/node_modules/connect-redis/dist/esm/package.json b/node_modules/connect-redis/dist/esm/package.json new file mode 100644 index 0000000..089153b --- /dev/null +++ b/node_modules/connect-redis/dist/esm/package.json @@ -0,0 +1 @@ +{"type":"module"} diff --git a/node_modules/connect-redis/index.ts b/node_modules/connect-redis/index.ts new file mode 100644 index 0000000..4f0b15b --- /dev/null +++ b/node_modules/connect-redis/index.ts @@ -0,0 +1,208 @@ +import {SessionData, Store} from "express-session" + +const noop = (_err?: unknown, _data?: any) => {} + +interface NormalizedRedisClient { + get(key: string): Promise + set(key: string, value: string, ttl?: number): Promise + expire(key: string, ttl: number): Promise + scanIterator(match: string, count: number): 
AsyncIterable + del(key: string[]): Promise + mget(key: string[]): Promise<(string | null)[]> +} + +interface Serializer { + parse(s: string): SessionData | Promise + stringify(s: SessionData): string +} + +interface RedisStoreOptions { + client: any + prefix?: string + scanCount?: number + serializer?: Serializer + ttl?: number | {(sess: SessionData): number} + disableTTL?: boolean + disableTouch?: boolean +} + +class RedisStore extends Store { + client: NormalizedRedisClient + prefix: string + scanCount: number + serializer: Serializer + ttl: number | {(sess: SessionData): number} + disableTTL: boolean + disableTouch: boolean + + constructor(opts: RedisStoreOptions) { + super() + this.prefix = opts.prefix == null ? "sess:" : opts.prefix + this.scanCount = opts.scanCount || 100 + this.serializer = opts.serializer || JSON + this.ttl = opts.ttl || 86400 // One day in seconds. + this.disableTTL = opts.disableTTL || false + this.disableTouch = opts.disableTouch || false + this.client = this.normalizeClient(opts.client) + } + + // Create a redis and ioredis compatible client + private normalizeClient(client: any): NormalizedRedisClient { + let isRedis = "scanIterator" in client + return { + get: (key) => client.get(key), + set: (key, val, ttl) => { + if (ttl) { + return isRedis + ? client.set(key, val, {EX: ttl}) + : client.set(key, val, "EX", ttl) + } + return client.set(key, val) + }, + del: (key) => client.del(key), + expire: (key, ttl) => client.expire(key, ttl), + mget: (keys) => (isRedis ? client.mGet(keys) : client.mget(keys)), + scanIterator: (match, count) => { + if (isRedis) return client.scanIterator({MATCH: match, COUNT: count}) + + // ioredis impl. 
+ return (async function* () { + let [c, xs] = await client.scan("0", "MATCH", match, "COUNT", count) + for (let key of xs) yield key + while (c !== "0") { + ;[c, xs] = await client.scan(c, "MATCH", match, "COUNT", count) + for (let key of xs) yield key + } + })() + }, + } + } + + async get(sid: string, cb = noop) { + let key = this.prefix + sid + try { + let data = await this.client.get(key) + if (!data) return cb() + return cb(null, await this.serializer.parse(data)) + } catch (err) { + return cb(err) + } + } + + async set(sid: string, sess: SessionData, cb = noop) { + let key = this.prefix + sid + let ttl = this._getTTL(sess) + try { + let val = this.serializer.stringify(sess) + if (ttl > 0) { + if (this.disableTTL) await this.client.set(key, val) + else await this.client.set(key, val, ttl) + return cb() + } else { + return this.destroy(sid, cb) + } + } catch (err) { + return cb(err) + } + } + + async touch(sid: string, sess: SessionData, cb = noop) { + let key = this.prefix + sid + if (this.disableTouch || this.disableTTL) return cb() + try { + await this.client.expire(key, this._getTTL(sess)) + return cb() + } catch (err) { + return cb(err) + } + } + + async destroy(sid: string, cb = noop) { + let key = this.prefix + sid + try { + await this.client.del([key]) + return cb() + } catch (err) { + return cb(err) + } + } + + async clear(cb = noop) { + try { + let keys = await this._getAllKeys() + if (!keys.length) return cb() + await this.client.del(keys) + return cb() + } catch (err) { + return cb(err) + } + } + + async length(cb = noop) { + try { + let keys = await this._getAllKeys() + return cb(null, keys.length) + } catch (err) { + return cb(err) + } + } + + async ids(cb = noop) { + let len = this.prefix.length + try { + let keys = await this._getAllKeys() + return cb( + null, + keys.map((k) => k.substring(len)), + ) + } catch (err) { + return cb(err) + } + } + + async all(cb = noop) { + let len = this.prefix.length + try { + let keys = await this._getAllKeys() 
+ if (keys.length === 0) return cb(null, []) + + let data = await this.client.mget(keys) + let results = data.reduce((acc, raw, idx) => { + if (!raw) return acc + let sess = this.serializer.parse(raw) as any + sess.id = keys[idx].substring(len) + acc.push(sess) + return acc + }, [] as SessionData[]) + return cb(null, results) + } catch (err) { + return cb(err) + } + } + + private _getTTL(sess: SessionData) { + if (typeof this.ttl === "function") { + return this.ttl(sess) + } + + let ttl + if (sess && sess.cookie && sess.cookie.expires) { + let ms = Number(new Date(sess.cookie.expires)) - Date.now() + ttl = Math.ceil(ms / 1000) + } else { + ttl = this.ttl + } + return ttl + } + + private async _getAllKeys() { + let pattern = this.prefix + "*" + let keys = [] + for await (let key of this.client.scanIterator(pattern, this.scanCount)) { + keys.push(key) + } + return keys + } +} + +export default RedisStore diff --git a/node_modules/connect-redis/index_test.ts b/node_modules/connect-redis/index_test.ts new file mode 100644 index 0000000..9adc5f4 --- /dev/null +++ b/node_modules/connect-redis/index_test.ts @@ -0,0 +1,131 @@ +import test from "blue-tape" +import {Cookie} from "express-session" +import {Redis} from "ioredis" +import {promisify} from "node:util" +import {createClient} from "redis" +import RedisStore from "./" +import * as redisSrv from "./testdata/server" + +test("setup", async () => { + await redisSrv.connect() +}) + +test("defaults", async (t) => { + let client = createClient({url: `redis://localhost:${redisSrv.port}`}) + await client.connect() + + let store = new RedisStore({client}) + + t.ok(store.client, "stores client") + t.equal(store.prefix, "sess:", "defaults to sess:") + t.equal(store.ttl, 86400, "defaults to one day") + t.equal(store.scanCount, 100, "defaults SCAN count to 100") + t.equal(store.serializer, JSON, "defaults to JSON serialization") + t.equal(store.disableTouch, false, "defaults to having `touch` enabled") + t.equal(store.disableTTL, 
false, "defaults to having `ttl` enabled") + await client.disconnect() +}) + +test("redis", async (t) => { + let client = createClient({url: `redis://localhost:${redisSrv.port}`}) + await client.connect() + let store = new RedisStore({client}) + await lifecycleTest(store, client, t) + await client.disconnect() +}) + +test("ioredis", async (t) => { + let client = new Redis(`redis://localhost:${redisSrv.port}`) + let store = new RedisStore({client}) + await lifecycleTest(store, client, t) + client.disconnect() +}) + +test("teardown", redisSrv.disconnect) + +async function lifecycleTest( + store: RedisStore, + client: any, + t: test.Test, +): Promise { + const P = (f: any) => promisify(f).bind(store) + let res = await P(store.clear)() + + let sess = {foo: "bar"} + await P(store.set)("123", sess) + + res = await P(store.get)("123") + t.same(res, sess, "store.get") + + let ttl = await client.ttl("sess:123") + t.ok(ttl >= 86399, "check one day ttl") + + ttl = 60 + let expires = new Date(Date.now() + ttl * 1000).toISOString() + await P(store.set)("456", {cookie: {expires}}) + ttl = await client.ttl("sess:456") + t.ok(ttl <= 60, "check expires ttl") + + ttl = 90 + let expires2 = new Date(Date.now() + ttl * 1000).toISOString() + await P(store.touch)("456", {cookie: {expires: expires2}}) + ttl = await client.ttl("sess:456") + t.ok(ttl > 60, "check expires ttl touch") + + res = await P(store.length)() + t.equal(res, 2, "stored two keys length") + + res = await P(store.ids)() + res.sort() + t.same(res, ["123", "456"], "stored two keys ids") + + res = await P(store.all)() + res.sort((a: any, b: any) => (a.id > b.id ? 
1 : -1)) + t.same( + res, + [ + {id: "123", foo: "bar"}, + {id: "456", cookie: {expires}}, + ], + "stored two keys data", + ) + + await P(store.destroy)("456") + res = await P(store.length)() + t.equal(res, 1, "one key remains") + + res = await P(store.clear)() + + res = await P(store.length)() + t.equal(res, 0, "no keys remain") + + let count = 1000 + await load(store, count) + + res = await P(store.length)() + t.equal(res, count, "bulk count") + + await P(store.clear)() + res = await P(store.length)() + t.equal(res, 0, "bulk clear") + + expires = new Date(Date.now() + ttl * 1000).toISOString() // expires in the future + res = await P(store.set)("789", {cookie: {expires}}) + + res = await P(store.length)() + t.equal(res, 1, "one key exists (session 789)") + + expires = new Date(Date.now() - ttl * 1000).toISOString() // expires in the past + await P(store.set)("789", {cookie: {expires}}) + + res = await P(store.length)() + t.equal(res, 0, "no key remains and that includes session 789") +} + +async function load(store: RedisStore, count: number) { + let cookie = new Cookie() + for (let sid = 0; sid < count; sid++) { + cookie.expires = new Date(Date.now() + 1000) + await store.set("s" + sid, {cookie}) + } +} diff --git a/node_modules/connect-redis/license b/node_modules/connect-redis/license new file mode 100644 index 0000000..4a09d2d --- /dev/null +++ b/node_modules/connect-redis/license @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2010-2023 TJ Holowaychuk + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included 
in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/connect-redis/package.json b/node_modules/connect-redis/package.json new file mode 100644 index 0000000..fe91f4f --- /dev/null +++ b/node_modules/connect-redis/package.json @@ -0,0 +1,64 @@ +{ + "name": "connect-redis", + "description": "Redis session store for Connect", + "version": "7.1.1", + "author": "TJ Holowaychuk ", + "contributors": [ + "Marc Harter " + ], + "license": "MIT", + "main": "./dist/esm/index.js", + "exports": { + ".": { + "import": "./dist/esm/index.js", + "require": "./dist/cjs/index.js", + "default": "./dist/esm/index.js" + } + }, + "types": "./dist/esm/index.d.ts", + "scripts": { + "prepublishOnly": "rm -rf dist && tsc & tsc --project tsconfig.esm.json && echo '{\"type\":\"module\"}' > dist/esm/package.json", + "build": "npm run prepublishOnly", + "test": "nyc ts-node node_modules/blue-tape/bin/blue-tape \"**/*_test.ts\"", + "lint": "tsc --noemit && eslint --max-warnings 0 --ext ts testdata *.ts", + "fmt": "prettier --write .", + "fmt-check": "prettier --check ." 
+ }, + "repository": { + "type": "git", + "url": "git@github.com:tj/connect-redis.git" + }, + "devDependencies": { + "@types/blue-tape": "^0.1.36", + "@types/express-session": "^1.17.10", + "@types/node": "^20.11.5", + "@typescript-eslint/eslint-plugin": "^6.19.0", + "@typescript-eslint/parser": "^6.19.0", + "blue-tape": "^1.0.0", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "express-session": "^1.17.3", + "ioredis": "^5.3.2", + "nyc": "^15.1.0", + "prettier": "^3.2.4", + "prettier-plugin-organize-imports": "^3.2.4", + "redis": "^4.6.12", + "ts-node": "^10.9.2", + "typescript": "^5.3.3" + }, + "peerDependencies": { + "express-session": ">=1" + }, + "engines": { + "node": ">=16" + }, + "bugs": { + "url": "https://github.com/tj/connect-redis/issues" + }, + "keywords": [ + "connect", + "redis", + "session", + "express" + ] +} diff --git a/node_modules/connect-redis/readme.md b/node_modules/connect-redis/readme.md new file mode 100644 index 0000000..67953db --- /dev/null +++ b/node_modules/connect-redis/readme.md @@ -0,0 +1,139 @@ +![Build Status](https://github.com/tj/connect-redis/workflows/build/badge.svg?branch=master) [![npm](https://img.shields.io/npm/v/connect-redis.svg)](https://npmjs.com/package/connect-redis) [![code-style](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://gitter.im/jlongster/prettier) ![Downloads](https://img.shields.io/npm/dm/connect-redis.svg) + +**connect-redis** provides Redis session storage for Express. + +## Installation + +**connect-redis** requires `express-session` to installed and one of the following compatible Redis clients: + +- [`redis`][1] +- [`ioredis`][2] + +Install with `redis`: + +```sh +npm install redis connect-redis express-session +``` + +Install with `ioredis`: + +```sh +npm install ioredis connect-redis express-session +``` + +## Importing + +**connect-redis** supports both CommonJS (`require`) and ESM (`import`) modules. 
+ +Import using ESM/Typescript: + +```js +import RedisStore from "connect-redis" +``` + +Require using CommonJS: + +```js +const RedisStore = require("connect-redis").default +``` + +## API + +Full setup using [`redis`][1] package: + +```js +import RedisStore from "connect-redis" +import session from "express-session" +import {createClient} from "redis" + +// Initialize client. +let redisClient = createClient() +redisClient.connect().catch(console.error) + +// Initialize store. +let redisStore = new RedisStore({ + client: redisClient, + prefix: "myapp:", +}) + +// Initialize session storage. +app.use( + session({ + store: redisStore, + resave: false, // required: force lightweight session keep alive (touch) + saveUninitialized: false, // recommended: only save session when data exists + secret: "keyboard cat", + }), +) +``` + +### RedisStore(options) + +#### Options + +##### client + +An instance of [`redis`][1] or [`ioredis`][2]. + +##### prefix + +Key prefix in Redis (default: `sess:`). + +**Note**: This prefix appends to whatever prefix you may have set on the `client` itself. + +**Note**: You may need unique prefixes for different applications sharing the same Redis instance. This limits bulk commands exposed in `express-session` (like `length`, `all`, `keys`, and `clear`) to a single application's data. + +##### ttl + +If the session cookie has a `expires` date, `connect-redis` will use it as the TTL. + +Otherwise, it will expire the session using the `ttl` option (default: `86400` seconds or one day). + +```ts +interface RedisStoreOptions { + ... + ttl?: number | {(sess: SessionData): number} +} +``` + +`ttl` also has external callback support. You can use it for dynamic TTL generation. It has access to `session` data. + +**Note**: The TTL is reset every time a user interacts with the server. You can disable this behavior in _some_ instances by using `disableTouch`. 
+ +**Note**: `express-session` does not update `expires` until the end of the request life cycle. _Calling `session.save()` manually beforehand will have the previous value_. + +##### disableTouch + +Disables resetting the TTL when using `touch` (default: `false`) + +The `express-session` package uses `touch` to signal to the store that the user has interacted with the session but hasn't changed anything in its data. Typically, this helps keep the users session alive if session changes are infrequent but you may want to disable it to cut down the extra calls or to prevent users from keeping sessions open too long. Also consider enabling if you store a lot of data on the session. + +Ref: + +##### disableTTL + +Disables key expiration completely (default: `false`) + +This option disables key expiration requiring the user to manually manage key cleanup outside of `connect-redis`. Only use if you know what you are doing and have an exceptional case where you need to manage your own expiration in Redis. + +**Note**: This has no effect on `express-session` setting cookie expiration. + +##### serializer + +Provide a custom encoder/decoder to use when storing and retrieving session data from Redis (default: `JSON.parse` and `JSON.stringify`). + +Optionally `parse` method can be async if need be. + +```ts +interface Serializer { + parse(string): object | Promise + stringify(object): string +} +``` + +##### scanCount + +Value used for _count_ parameter in [Redis `SCAN` command](https://redis.io/commands/scan#the-count-option). Used for `ids()` and `all()` methods (default: `100`). 
+ +[1]: https://github.com/NodeRedis/node-redis +[2]: https://github.com/luin/ioredis diff --git a/node_modules/connect-redis/tsconfig.esm.json b/node_modules/connect-redis/tsconfig.esm.json new file mode 100644 index 0000000..f99918a --- /dev/null +++ b/node_modules/connect-redis/tsconfig.esm.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig.json", + "compilerOptions": { + "module": "es2020", + "outDir": "./dist/esm" + } +} diff --git a/node_modules/connect-redis/tsconfig.json b/node_modules/connect-redis/tsconfig.json new file mode 100644 index 0000000..525d29b --- /dev/null +++ b/node_modules/connect-redis/tsconfig.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "target": "es2020", + "module": "commonjs", + "isolatedModules": true, + "strict": true, + "noImplicitReturns": true, + "noUnusedLocals": true, + "skipLibCheck": true, + "forceConsistentCasingInFileNames": true, + "moduleResolution": "node", + "declaration": true, + "outDir": "./dist/cjs", + "esModuleInterop": true, + "resolveJsonModule": true, + }, + "exclude": ["node_modules", "dist"], +} diff --git a/node_modules/console-control-strings/LICENSE b/node_modules/console-control-strings/LICENSE new file mode 100644 index 0000000..e756052 --- /dev/null +++ b/node_modules/console-control-strings/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2014, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/console-control-strings/README.md b/node_modules/console-control-strings/README.md new file mode 100644 index 0000000..f58cc8d --- /dev/null +++ b/node_modules/console-control-strings/README.md @@ -0,0 +1,145 @@ +# Console Control Strings + +A library of cross-platform tested terminal/console command strings for +doing things like color and cursor positioning. This is a subset of both +ansi and vt100. All control codes included work on both Windows & Unix-like +OSes, except where noted. + +## Usage + +```js +var consoleControl = require('console-control-strings') + +console.log(consoleControl.color('blue','bgRed', 'bold') + 'hi there' + consoleControl.color('reset')) +process.stdout.write(consoleControl.goto(75, 10)) +``` + +## Why Another? + +There are tons of libraries similar to this one. I wanted one that was: + +1. Very clear about compatibility goals. +2. Could emit, for instance, a start color code without an end one. +3. Returned strings w/o writing to streams. +4. Was not weighed down with other unrelated baggage. + +## Functions + +### var code = consoleControl.up(_num = 1_) + +Returns the escape sequence to move _num_ lines up. + +### var code = consoleControl.down(_num = 1_) + +Returns the escape sequence to move _num_ lines down. + +### var code = consoleControl.forward(_num = 1_) + +Returns the escape sequence to move _num_ lines righ. + +### var code = consoleControl.back(_num = 1_) + +Returns the escape sequence to move _num_ lines left. + +### var code = consoleControl.nextLine(_num = 1_) + +Returns the escape sequence to move _num_ lines down and to the beginning of +the line. 
+ +### var code = consoleControl.previousLine(_num = 1_) + +Returns the escape sequence to move _num_ lines up and to the beginning of +the line. + +### var code = consoleControl.eraseData() + +Returns the escape sequence to erase everything from the current cursor +position to the bottom right of the screen. This is line based, so it +erases the remainder of the current line and all following lines. + +### var code = consoleControl.eraseLine() + +Returns the escape sequence to erase to the end of the current line. + +### var code = consoleControl.goto(_x_, _y_) + +Returns the escape sequence to move the cursor to the designated position. +Note that the origin is _1, 1_ not _0, 0_. + +### var code = consoleControl.gotoSOL() + +Returns the escape sequence to move the cursor to the beginning of the +current line. (That is, it returns a carriage return, `\r`.) + +### var code = consoleControl.beep() + +Returns the escape sequence to cause the termianl to beep. (That is, it +returns unicode character `\x0007`, a Control-G.) + +### var code = consoleControl.hideCursor() + +Returns the escape sequence to hide the cursor. + +### var code = consoleControl.showCursor() + +Returns the escape sequence to show the cursor. + +### var code = consoleControl.color(_colors = []_) + +### var code = consoleControl.color(_color1_, _color2_, _…_, _colorn_) + +Returns the escape sequence to set the current terminal display attributes +(mostly colors). Arguments can either be a list of attributes or an array +of attributes. The difference between passing in an array or list of colors +and calling `.color` separately for each one, is that in the former case a +single escape sequence will be produced where as in the latter each change +will have its own distinct escape sequence. Each attribute can be one of: + +* Reset: + * **reset** – Reset all attributes to the terminal default. +* Styles: + * **bold** – Display text as bold. 
In some terminals this means using a + bold font, in others this means changing the color. In some it means + both. + * **italic** – Display text as italic. This is not available in most Windows terminals. + * **underline** – Underline text. This is not available in most Windows Terminals. + * **inverse** – Invert the foreground and background colors. + * **stopBold** – Do not display text as bold. + * **stopItalic** – Do not display text as italic. + * **stopUnderline** – Do not underline text. + * **stopInverse** – Do not invert foreground and background. +* Colors: + * **white** + * **black** + * **blue** + * **cyan** + * **green** + * **magenta** + * **red** + * **yellow** + * **grey** / **brightBlack** + * **brightRed** + * **brightGreen** + * **brightYellow** + * **brightBlue** + * **brightMagenta** + * **brightCyan** + * **brightWhite** +* Background Colors: + * **bgWhite** + * **bgBlack** + * **bgBlue** + * **bgCyan** + * **bgGreen** + * **bgMagenta** + * **bgRed** + * **bgYellow** + * **bgGrey** / **bgBrightBlack** + * **bgBrightRed** + * **bgBrightGreen** + * **bgBrightYellow** + * **bgBrightBlue** + * **bgBrightMagenta** + * **bgBrightCyan** + * **bgBrightWhite** + diff --git a/node_modules/console-control-strings/README.md~ b/node_modules/console-control-strings/README.md~ new file mode 100644 index 0000000..6eb34e8 --- /dev/null +++ b/node_modules/console-control-strings/README.md~ @@ -0,0 +1,140 @@ +# Console Control Strings + +A library of cross-platform tested terminal/console command strings for +doing things like color and cursor positioning. This is a subset of both +ansi and vt100. All control codes included work on both Windows & Unix-like +OSes, except where noted. + +## Usage + +```js +var consoleControl = require('console-control-strings') + +console.log(consoleControl.color('blue','bgRed', 'bold') + 'hi there' + consoleControl.color('reset')) +process.stdout.write(consoleControl.goto(75, 10)) +``` + +## Why Another? 
+ +There are tons of libraries similar to this one. I wanted one that was: + +1. Very clear about compatibility goals. +2. Could emit, for instance, a start color code without an end one. +3. Returned strings w/o writing to streams. +4. Was not weighed down with other unrelated baggage. + +## Functions + +### var code = consoleControl.up(_num = 1_) + +Returns the escape sequence to move _num_ lines up. + +### var code = consoleControl.down(_num = 1_) + +Returns the escape sequence to move _num_ lines down. + +### var code = consoleControl.forward(_num = 1_) + +Returns the escape sequence to move _num_ lines righ. + +### var code = consoleControl.back(_num = 1_) + +Returns the escape sequence to move _num_ lines left. + +### var code = consoleControl.nextLine(_num = 1_) + +Returns the escape sequence to move _num_ lines down and to the beginning of +the line. + +### var code = consoleControl.previousLine(_num = 1_) + +Returns the escape sequence to move _num_ lines up and to the beginning of +the line. + +### var code = consoleControl.eraseData() + +Returns the escape sequence to erase everything from the current cursor +position to the bottom right of the screen. This is line based, so it +erases the remainder of the current line and all following lines. + +### var code = consoleControl.eraseLine() + +Returns the escape sequence to erase to the end of the current line. + +### var code = consoleControl.goto(_x_, _y_) + +Returns the escape sequence to move the cursor to the designated position. +Note that the origin is _1, 1_ not _0, 0_. + +### var code = consoleControl.gotoSOL() + +Returns the escape sequence to move the cursor to the beginning of the +current line. (That is, it returns a carriage return, `\r`.) + +### var code = consoleControl.hideCursor() + +Returns the escape sequence to hide the cursor. + +### var code = consoleControl.showCursor() + +Returns the escape sequence to show the cursor. 
+ +### var code = consoleControl.color(_colors = []_) + +### var code = consoleControl.color(_color1_, _color2_, _…_, _colorn_) + +Returns the escape sequence to set the current terminal display attributes +(mostly colors). Arguments can either be a list of attributes or an array +of attributes. The difference between passing in an array or list of colors +and calling `.color` separately for each one, is that in the former case a +single escape sequence will be produced where as in the latter each change +will have its own distinct escape sequence. Each attribute can be one of: + +* Reset: + * **reset** – Reset all attributes to the terminal default. +* Styles: + * **bold** – Display text as bold. In some terminals this means using a + bold font, in others this means changing the color. In some it means + both. + * **italic** – Display text as italic. This is not available in most Windows terminals. + * **underline** – Underline text. This is not available in most Windows Terminals. + * **inverse** – Invert the foreground and background colors. + * **stopBold** – Do not display text as bold. + * **stopItalic** – Do not display text as italic. + * **stopUnderline** – Do not underline text. + * **stopInverse** – Do not invert foreground and background. 
+* Colors: + * **white** + * **black** + * **blue** + * **cyan** + * **green** + * **magenta** + * **red** + * **yellow** + * **grey** / **brightBlack** + * **brightRed** + * **brightGreen** + * **brightYellow** + * **brightBlue** + * **brightMagenta** + * **brightCyan** + * **brightWhite** +* Background Colors: + * **bgWhite** + * **bgBlack** + * **bgBlue** + * **bgCyan** + * **bgGreen** + * **bgMagenta** + * **bgRed** + * **bgYellow** + * **bgGrey** / **bgBrightBlack** + * **bgBrightRed** + * **bgBrightGreen** + * **bgBrightYellow** + * **bgBrightBlue** + * **bgBrightMagenta** + * **bgBrightCyan** + * **bgBrightWhite** + diff --git a/node_modules/console-control-strings/index.js b/node_modules/console-control-strings/index.js new file mode 100644 index 0000000..bf89034 --- /dev/null +++ b/node_modules/console-control-strings/index.js @@ -0,0 +1,125 @@ +'use strict' + +// These tables borrowed from `ansi` + +var prefix = '\x1b[' + +exports.up = function up (num) { + return prefix + (num || '') + 'A' +} + +exports.down = function down (num) { + return prefix + (num || '') + 'B' +} + +exports.forward = function forward (num) { + return prefix + (num || '') + 'C' +} + +exports.back = function back (num) { + return prefix + (num || '') + 'D' +} + +exports.nextLine = function nextLine (num) { + return prefix + (num || '') + 'E' +} + +exports.previousLine = function previousLine (num) { + return prefix + (num || '') + 'F' +} + +exports.horizontalAbsolute = function horizontalAbsolute (num) { + if (num == null) throw new Error('horizontalAboslute requires a column to position to') + return prefix + num + 'G' +} + +exports.eraseData = function eraseData () { + return prefix + 'J' +} + +exports.eraseLine = function eraseLine () { + return prefix + 'K' +} + +exports.goto = function (x, y) { + return prefix + y + ';' + x + 'H' +} + +exports.gotoSOL = function () { + return '\r' +} + +exports.beep = function () { + return '\x07' +} + +exports.hideCursor = function hideCursor 
() { + return prefix + '?25l' +} + +exports.showCursor = function showCursor () { + return prefix + '?25h' +} + +var colors = { + reset: 0, +// styles + bold: 1, + italic: 3, + underline: 4, + inverse: 7, +// resets + stopBold: 22, + stopItalic: 23, + stopUnderline: 24, + stopInverse: 27, +// colors + white: 37, + black: 30, + blue: 34, + cyan: 36, + green: 32, + magenta: 35, + red: 31, + yellow: 33, + bgWhite: 47, + bgBlack: 40, + bgBlue: 44, + bgCyan: 46, + bgGreen: 42, + bgMagenta: 45, + bgRed: 41, + bgYellow: 43, + + grey: 90, + brightBlack: 90, + brightRed: 91, + brightGreen: 92, + brightYellow: 93, + brightBlue: 94, + brightMagenta: 95, + brightCyan: 96, + brightWhite: 97, + + bgGrey: 100, + bgBrightBlack: 100, + bgBrightRed: 101, + bgBrightGreen: 102, + bgBrightYellow: 103, + bgBrightBlue: 104, + bgBrightMagenta: 105, + bgBrightCyan: 106, + bgBrightWhite: 107 +} + +exports.color = function color (colorWith) { + if (arguments.length !== 1 || !Array.isArray(colorWith)) { + colorWith = Array.prototype.slice.call(arguments) + } + return prefix + colorWith.map(colorNameToCode).join(';') + 'm' +} + +function colorNameToCode (color) { + if (colors[color] != null) return colors[color] + throw new Error('Unknown color or style name: ' + color) +} diff --git a/node_modules/console-control-strings/package.json b/node_modules/console-control-strings/package.json new file mode 100644 index 0000000..eb6c62a --- /dev/null +++ b/node_modules/console-control-strings/package.json @@ -0,0 +1,27 @@ +{ + "name": "console-control-strings", + "version": "1.1.0", + "description": "A library of cross-platform tested terminal/console command strings for doing things like color and cursor positioning. This is a subset of both ansi and vt100. 
All control codes included work on both Windows & Unix-like OSes, except where noted.", + "main": "index.js", + "directories": { + "test": "test" + }, + "scripts": { + "test": "standard && tap test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/console-control-strings" + }, + "keywords": [], + "author": "Rebecca Turner (http://re-becca.org/)", + "license": "ISC", + "files": [ + "LICENSE", + "index.js" + ], + "devDependencies": { + "standard": "^7.1.2", + "tap": "^5.7.2" + } +} diff --git a/node_modules/delegates/.npmignore b/node_modules/delegates/.npmignore new file mode 100644 index 0000000..c2658d7 --- /dev/null +++ b/node_modules/delegates/.npmignore @@ -0,0 +1 @@ +node_modules/ diff --git a/node_modules/delegates/History.md b/node_modules/delegates/History.md new file mode 100644 index 0000000..25959ea --- /dev/null +++ b/node_modules/delegates/History.md @@ -0,0 +1,22 @@ + +1.0.0 / 2015-12-14 +================== + + * Merge pull request #12 from kasicka/master + * Add license text + +0.1.0 / 2014-10-17 +================== + + * adds `.fluent()` to api + +0.0.3 / 2014-01-13 +================== + + * fix receiver for .method() + +0.0.2 / 2014-01-13 +================== + + * Object.defineProperty() sucks + * Initial commit diff --git a/node_modules/delegates/License b/node_modules/delegates/License new file mode 100644 index 0000000..60de60a --- /dev/null +++ b/node_modules/delegates/License @@ -0,0 +1,20 @@ +Copyright (c) 2015 TJ Holowaychuk + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission 
notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/delegates/Makefile b/node_modules/delegates/Makefile new file mode 100644 index 0000000..a9dcfd5 --- /dev/null +++ b/node_modules/delegates/Makefile @@ -0,0 +1,8 @@ + +test: + @./node_modules/.bin/mocha \ + --require should \ + --reporter spec \ + --bail + +.PHONY: test \ No newline at end of file diff --git a/node_modules/delegates/Readme.md b/node_modules/delegates/Readme.md new file mode 100644 index 0000000..ab8cf4a --- /dev/null +++ b/node_modules/delegates/Readme.md @@ -0,0 +1,94 @@ + +# delegates + + Node method and accessor delegation utilty. + +## Installation + +``` +$ npm install delegates +``` + +## Example + +```js +var delegate = require('delegates'); + +... + +delegate(proto, 'request') + .method('acceptsLanguages') + .method('acceptsEncodings') + .method('acceptsCharsets') + .method('accepts') + .method('is') + .access('querystring') + .access('idempotent') + .access('socket') + .access('length') + .access('query') + .access('search') + .access('status') + .access('method') + .access('path') + .access('body') + .access('host') + .access('url') + .getter('subdomains') + .getter('protocol') + .getter('header') + .getter('stale') + .getter('fresh') + .getter('secure') + .getter('ips') + .getter('ip') +``` + +# API + +## Delegate(proto, prop) + +Creates a delegator instance used to configure using the `prop` on the given +`proto` object. 
(which is usually a prototype) + +## Delegate#method(name) + +Allows the given method `name` to be accessed on the host. + +## Delegate#getter(name) + +Creates a "getter" for the property with the given `name` on the delegated +object. + +## Delegate#setter(name) + +Creates a "setter" for the property with the given `name` on the delegated +object. + +## Delegate#access(name) + +Creates an "accessor" (ie: both getter *and* setter) for the property with the +given `name` on the delegated object. + +## Delegate#fluent(name) + +A unique type of "accessor" that works for a "fluent" API. When called as a +getter, the method returns the expected value. However, if the method is called +with a value, it will return itself so it can be chained. For example: + +```js +delegate(proto, 'request') + .fluent('query') + +// getter +var q = request.query(); + +// setter (chainable) +request + .query({ a: 1 }) + .query({ b: 2 }); +``` + +# License + + MIT diff --git a/node_modules/delegates/index.js b/node_modules/delegates/index.js new file mode 100644 index 0000000..17c222d --- /dev/null +++ b/node_modules/delegates/index.js @@ -0,0 +1,121 @@ + +/** + * Expose `Delegator`. + */ + +module.exports = Delegator; + +/** + * Initialize a delegator. + * + * @param {Object} proto + * @param {String} target + * @api public + */ + +function Delegator(proto, target) { + if (!(this instanceof Delegator)) return new Delegator(proto, target); + this.proto = proto; + this.target = target; + this.methods = []; + this.getters = []; + this.setters = []; + this.fluents = []; +} + +/** + * Delegate method `name`. + * + * @param {String} name + * @return {Delegator} self + * @api public + */ + +Delegator.prototype.method = function(name){ + var proto = this.proto; + var target = this.target; + this.methods.push(name); + + proto[name] = function(){ + return this[target][name].apply(this[target], arguments); + }; + + return this; +}; + +/** + * Delegator accessor `name`. 
+ * + * @param {String} name + * @return {Delegator} self + * @api public + */ + +Delegator.prototype.access = function(name){ + return this.getter(name).setter(name); +}; + +/** + * Delegator getter `name`. + * + * @param {String} name + * @return {Delegator} self + * @api public + */ + +Delegator.prototype.getter = function(name){ + var proto = this.proto; + var target = this.target; + this.getters.push(name); + + proto.__defineGetter__(name, function(){ + return this[target][name]; + }); + + return this; +}; + +/** + * Delegator setter `name`. + * + * @param {String} name + * @return {Delegator} self + * @api public + */ + +Delegator.prototype.setter = function(name){ + var proto = this.proto; + var target = this.target; + this.setters.push(name); + + proto.__defineSetter__(name, function(val){ + return this[target][name] = val; + }); + + return this; +}; + +/** + * Delegator fluent accessor + * + * @param {String} name + * @return {Delegator} self + * @api public + */ + +Delegator.prototype.fluent = function (name) { + var proto = this.proto; + var target = this.target; + this.fluents.push(name); + + proto[name] = function(val){ + if ('undefined' != typeof val) { + this[target][name] = val; + return this; + } else { + return this[target][name]; + } + }; + + return this; +}; diff --git a/node_modules/delegates/package.json b/node_modules/delegates/package.json new file mode 100644 index 0000000..1724038 --- /dev/null +++ b/node_modules/delegates/package.json @@ -0,0 +1,13 @@ +{ + "name": "delegates", + "version": "1.0.0", + "repository": "visionmedia/node-delegates", + "description": "delegate methods and accessors to another property", + "keywords": ["delegate", "delegation"], + "dependencies": {}, + "devDependencies": { + "mocha": "*", + "should": "*" + }, + "license": "MIT" +} diff --git a/node_modules/delegates/test/index.js b/node_modules/delegates/test/index.js new file mode 100644 index 0000000..7b6e3d4 --- /dev/null +++ 
b/node_modules/delegates/test/index.js @@ -0,0 +1,94 @@ + +var assert = require('assert'); +var delegate = require('..'); + +describe('.method(name)', function(){ + it('should delegate methods', function(){ + var obj = {}; + + obj.request = { + foo: function(bar){ + assert(this == obj.request); + return bar; + } + }; + + delegate(obj, 'request').method('foo'); + + obj.foo('something').should.equal('something'); + }) +}) + +describe('.getter(name)', function(){ + it('should delegate getters', function(){ + var obj = {}; + + obj.request = { + get type() { + return 'text/html'; + } + } + + delegate(obj, 'request').getter('type'); + + obj.type.should.equal('text/html'); + }) +}) + +describe('.setter(name)', function(){ + it('should delegate setters', function(){ + var obj = {}; + + obj.request = { + get type() { + return this._type.toUpperCase(); + }, + + set type(val) { + this._type = val; + } + } + + delegate(obj, 'request').setter('type'); + + obj.type = 'hey'; + obj.request.type.should.equal('HEY'); + }) +}) + +describe('.access(name)', function(){ + it('should delegate getters and setters', function(){ + var obj = {}; + + obj.request = { + get type() { + return this._type.toUpperCase(); + }, + + set type(val) { + this._type = val; + } + } + + delegate(obj, 'request').access('type'); + + obj.type = 'hey'; + obj.type.should.equal('HEY'); + }) +}) + +describe('.fluent(name)', function () { + it('should delegate in a fluent fashion', function () { + var obj = { + settings: { + env: 'development' + } + }; + + delegate(obj, 'settings').fluent('env'); + + obj.env().should.equal('development'); + obj.env('production').should.equal(obj); + obj.settings.env.should.equal('production'); + }) +}) diff --git a/node_modules/detect-libc/LICENSE b/node_modules/detect-libc/LICENSE new file mode 100644 index 0000000..8dada3e --- /dev/null +++ b/node_modules/detect-libc/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS 
AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright {yyyy} {name of copyright owner} + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/node_modules/detect-libc/README.md b/node_modules/detect-libc/README.md new file mode 100644 index 0000000..23212fd --- /dev/null +++ b/node_modules/detect-libc/README.md @@ -0,0 +1,163 @@ +# detect-libc + +Node.js module to detect details of the C standard library (libc) +implementation provided by a given Linux system. + +Currently supports detection of GNU glibc and MUSL libc. + +Provides asychronous and synchronous functions for the +family (e.g. `glibc`, `musl`) and version (e.g. `1.23`, `1.2.3`). + +The version numbers of libc implementations +are not guaranteed to be semver-compliant. + +For previous v1.x releases, please see the +[v1](https://github.com/lovell/detect-libc/tree/v1) branch. 
+ +## Install + +```sh +npm install detect-libc +``` + +## API + +### GLIBC + +```ts +const GLIBC: string = 'glibc'; +``` + +A String constant containing the value `glibc`. + +### MUSL + +```ts +const MUSL: string = 'musl'; +``` + +A String constant containing the value `musl`. + +### family + +```ts +function family(): Promise; +``` + +Resolves asychronously with: + +* `glibc` or `musl` when the libc family can be determined +* `null` when the libc family cannot be determined +* `null` when run on a non-Linux platform + +```js +const { family, GLIBC, MUSL } = require('detect-libc'); + +switch (await family()) { + case GLIBC: ... + case MUSL: ... + case null: ... +} +``` + +### familySync + +```ts +function familySync(): string | null; +``` + +Synchronous version of `family()`. + +```js +const { familySync, GLIBC, MUSL } = require('detect-libc'); + +switch (familySync()) { + case GLIBC: ... + case MUSL: ... + case null: ... +} +``` + +### version + +```ts +function version(): Promise; +``` + +Resolves asychronously with: + +* The version when it can be determined +* `null` when the libc family cannot be determined +* `null` when run on a non-Linux platform + +```js +const { version } = require('detect-libc'); + +const v = await version(); +if (v) { + const [major, minor, patch] = v.split('.'); +} +``` + +### versionSync + +```ts +function versionSync(): string | null; +``` + +Synchronous version of `version()`. + +```js +const { versionSync } = require('detect-libc'); + +const v = versionSync(); +if (v) { + const [major, minor, patch] = v.split('.'); +} +``` + +### isNonGlibcLinux + +```ts +function isNonGlibcLinux(): Promise; +``` + +Resolves asychronously with: + +* `false` when the libc family is `glibc` +* `true` when the libc family is not `glibc` +* `false` when run on a non-Linux platform + +```js +const { isNonGlibcLinux } = require('detect-libc'); + +if (await isNonGlibcLinux()) { ... 
} +``` + +### isNonGlibcLinuxSync + +```ts +function isNonGlibcLinuxSync(): boolean; +``` + +Synchronous version of `isNonGlibcLinux()`. + +```js +const { isNonGlibcLinuxSync } = require('detect-libc'); + +if (isNonGlibcLinuxSync()) { ... } +``` + +## Licensing + +Copyright 2017 Lovell Fuller and others. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0.html) + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/node_modules/detect-libc/index.d.ts b/node_modules/detect-libc/index.d.ts new file mode 100644 index 0000000..4c0fb2b --- /dev/null +++ b/node_modules/detect-libc/index.d.ts @@ -0,0 +1,14 @@ +// Copyright 2017 Lovell Fuller and others. +// SPDX-License-Identifier: Apache-2.0 + +export const GLIBC: 'glibc'; +export const MUSL: 'musl'; + +export function family(): Promise; +export function familySync(): string | null; + +export function isNonGlibcLinux(): Promise; +export function isNonGlibcLinuxSync(): boolean; + +export function version(): Promise; +export function versionSync(): string | null; diff --git a/node_modules/detect-libc/lib/detect-libc.js b/node_modules/detect-libc/lib/detect-libc.js new file mode 100644 index 0000000..01299b4 --- /dev/null +++ b/node_modules/detect-libc/lib/detect-libc.js @@ -0,0 +1,313 @@ +// Copyright 2017 Lovell Fuller and others. 
+// SPDX-License-Identifier: Apache-2.0 + +'use strict'; + +const childProcess = require('child_process'); +const { isLinux, getReport } = require('./process'); +const { LDD_PATH, SELF_PATH, readFile, readFileSync } = require('./filesystem'); +const { interpreterPath } = require('./elf'); + +let cachedFamilyInterpreter; +let cachedFamilyFilesystem; +let cachedVersionFilesystem; + +const command = 'getconf GNU_LIBC_VERSION 2>&1 || true; ldd --version 2>&1 || true'; +let commandOut = ''; + +const safeCommand = () => { + if (!commandOut) { + return new Promise((resolve) => { + childProcess.exec(command, (err, out) => { + commandOut = err ? ' ' : out; + resolve(commandOut); + }); + }); + } + return commandOut; +}; + +const safeCommandSync = () => { + if (!commandOut) { + try { + commandOut = childProcess.execSync(command, { encoding: 'utf8' }); + } catch (_err) { + commandOut = ' '; + } + } + return commandOut; +}; + +/** + * A String constant containing the value `glibc`. + * @type {string} + * @public + */ +const GLIBC = 'glibc'; + +/** + * A Regexp constant to get the GLIBC Version. + * @type {string} + */ +const RE_GLIBC_VERSION = /LIBC[a-z0-9 \-).]*?(\d+\.\d+)/i; + +/** + * A String constant containing the value `musl`. 
+ * @type {string} + * @public + */ +const MUSL = 'musl'; + +const isFileMusl = (f) => f.includes('libc.musl-') || f.includes('ld-musl-'); + +const familyFromReport = () => { + const report = getReport(); + if (report.header && report.header.glibcVersionRuntime) { + return GLIBC; + } + if (Array.isArray(report.sharedObjects)) { + if (report.sharedObjects.some(isFileMusl)) { + return MUSL; + } + } + return null; +}; + +const familyFromCommand = (out) => { + const [getconf, ldd1] = out.split(/[\r\n]+/); + if (getconf && getconf.includes(GLIBC)) { + return GLIBC; + } + if (ldd1 && ldd1.includes(MUSL)) { + return MUSL; + } + return null; +}; + +const familyFromInterpreterPath = (path) => { + if (path) { + if (path.includes('/ld-musl-')) { + return MUSL; + } else if (path.includes('/ld-linux-')) { + return GLIBC; + } + } + return null; +}; + +const getFamilyFromLddContent = (content) => { + content = content.toString(); + if (content.includes('musl')) { + return MUSL; + } + if (content.includes('GNU C Library')) { + return GLIBC; + } + return null; +}; + +const familyFromFilesystem = async () => { + if (cachedFamilyFilesystem !== undefined) { + return cachedFamilyFilesystem; + } + cachedFamilyFilesystem = null; + try { + const lddContent = await readFile(LDD_PATH); + cachedFamilyFilesystem = getFamilyFromLddContent(lddContent); + } catch (e) {} + return cachedFamilyFilesystem; +}; + +const familyFromFilesystemSync = () => { + if (cachedFamilyFilesystem !== undefined) { + return cachedFamilyFilesystem; + } + cachedFamilyFilesystem = null; + try { + const lddContent = readFileSync(LDD_PATH); + cachedFamilyFilesystem = getFamilyFromLddContent(lddContent); + } catch (e) {} + return cachedFamilyFilesystem; +}; + +const familyFromInterpreter = async () => { + if (cachedFamilyInterpreter !== undefined) { + return cachedFamilyInterpreter; + } + cachedFamilyInterpreter = null; + try { + const selfContent = await readFile(SELF_PATH); + const path = interpreterPath(selfContent); + 
cachedFamilyInterpreter = familyFromInterpreterPath(path); + } catch (e) {} + return cachedFamilyInterpreter; +}; + +const familyFromInterpreterSync = () => { + if (cachedFamilyInterpreter !== undefined) { + return cachedFamilyInterpreter; + } + cachedFamilyInterpreter = null; + try { + const selfContent = readFileSync(SELF_PATH); + const path = interpreterPath(selfContent); + cachedFamilyInterpreter = familyFromInterpreterPath(path); + } catch (e) {} + return cachedFamilyInterpreter; +}; + +/** + * Resolves with the libc family when it can be determined, `null` otherwise. + * @returns {Promise} + */ +const family = async () => { + let family = null; + if (isLinux()) { + family = await familyFromInterpreter(); + if (!family) { + family = await familyFromFilesystem(); + if (!family) { + family = familyFromReport(); + } + if (!family) { + const out = await safeCommand(); + family = familyFromCommand(out); + } + } + } + return family; +}; + +/** + * Returns the libc family when it can be determined, `null` otherwise. + * @returns {?string} + */ +const familySync = () => { + let family = null; + if (isLinux()) { + family = familyFromInterpreterSync(); + if (!family) { + family = familyFromFilesystemSync(); + if (!family) { + family = familyFromReport(); + } + if (!family) { + const out = safeCommandSync(); + family = familyFromCommand(out); + } + } + } + return family; +}; + +/** + * Resolves `true` only when the platform is Linux and the libc family is not `glibc`. + * @returns {Promise} + */ +const isNonGlibcLinux = async () => isLinux() && await family() !== GLIBC; + +/** + * Returns `true` only when the platform is Linux and the libc family is not `glibc`. 
+ * @returns {boolean} + */ +const isNonGlibcLinuxSync = () => isLinux() && familySync() !== GLIBC; + +const versionFromFilesystem = async () => { + if (cachedVersionFilesystem !== undefined) { + return cachedVersionFilesystem; + } + cachedVersionFilesystem = null; + try { + const lddContent = await readFile(LDD_PATH); + const versionMatch = lddContent.match(RE_GLIBC_VERSION); + if (versionMatch) { + cachedVersionFilesystem = versionMatch[1]; + } + } catch (e) {} + return cachedVersionFilesystem; +}; + +const versionFromFilesystemSync = () => { + if (cachedVersionFilesystem !== undefined) { + return cachedVersionFilesystem; + } + cachedVersionFilesystem = null; + try { + const lddContent = readFileSync(LDD_PATH); + const versionMatch = lddContent.match(RE_GLIBC_VERSION); + if (versionMatch) { + cachedVersionFilesystem = versionMatch[1]; + } + } catch (e) {} + return cachedVersionFilesystem; +}; + +const versionFromReport = () => { + const report = getReport(); + if (report.header && report.header.glibcVersionRuntime) { + return report.header.glibcVersionRuntime; + } + return null; +}; + +const versionSuffix = (s) => s.trim().split(/\s+/)[1]; + +const versionFromCommand = (out) => { + const [getconf, ldd1, ldd2] = out.split(/[\r\n]+/); + if (getconf && getconf.includes(GLIBC)) { + return versionSuffix(getconf); + } + if (ldd1 && ldd2 && ldd1.includes(MUSL)) { + return versionSuffix(ldd2); + } + return null; +}; + +/** + * Resolves with the libc version when it can be determined, `null` otherwise. + * @returns {Promise} + */ +const version = async () => { + let version = null; + if (isLinux()) { + version = await versionFromFilesystem(); + if (!version) { + version = versionFromReport(); + } + if (!version) { + const out = await safeCommand(); + version = versionFromCommand(out); + } + } + return version; +}; + +/** + * Returns the libc version when it can be determined, `null` otherwise. 
+ * @returns {?string} + */ +const versionSync = () => { + let version = null; + if (isLinux()) { + version = versionFromFilesystemSync(); + if (!version) { + version = versionFromReport(); + } + if (!version) { + const out = safeCommandSync(); + version = versionFromCommand(out); + } + } + return version; +}; + +module.exports = { + GLIBC, + MUSL, + family, + familySync, + isNonGlibcLinux, + isNonGlibcLinuxSync, + version, + versionSync +}; diff --git a/node_modules/detect-libc/lib/elf.js b/node_modules/detect-libc/lib/elf.js new file mode 100644 index 0000000..aa166aa --- /dev/null +++ b/node_modules/detect-libc/lib/elf.js @@ -0,0 +1,39 @@ +// Copyright 2017 Lovell Fuller and others. +// SPDX-License-Identifier: Apache-2.0 + +'use strict'; + +const interpreterPath = (elf) => { + if (elf.length < 64) { + return null; + } + if (elf.readUInt32BE(0) !== 0x7F454C46) { + // Unexpected magic bytes + return null; + } + if (elf.readUInt8(4) !== 2) { + // Not a 64-bit ELF + return null; + } + if (elf.readUInt8(5) !== 1) { + // Not little-endian + return null; + } + const offset = elf.readUInt32LE(32); + const size = elf.readUInt16LE(54); + const count = elf.readUInt16LE(56); + for (let i = 0; i < count; i++) { + const headerOffset = offset + (i * size); + const type = elf.readUInt32LE(headerOffset); + if (type === 3) { + const fileOffset = elf.readUInt32LE(headerOffset + 8); + const fileSize = elf.readUInt32LE(headerOffset + 32); + return elf.subarray(fileOffset, fileOffset + fileSize).toString().replace(/\0.*$/g, ''); + } + } + return null; +}; + +module.exports = { + interpreterPath +}; diff --git a/node_modules/detect-libc/lib/filesystem.js b/node_modules/detect-libc/lib/filesystem.js new file mode 100644 index 0000000..4c2443c --- /dev/null +++ b/node_modules/detect-libc/lib/filesystem.js @@ -0,0 +1,51 @@ +// Copyright 2017 Lovell Fuller and others. 
+// SPDX-License-Identifier: Apache-2.0 + +'use strict'; + +const fs = require('fs'); + +const LDD_PATH = '/usr/bin/ldd'; +const SELF_PATH = '/proc/self/exe'; +const MAX_LENGTH = 2048; + +/** + * Read the content of a file synchronous + * + * @param {string} path + * @returns {Buffer} + */ +const readFileSync = (path) => { + const fd = fs.openSync(path, 'r'); + const buffer = Buffer.alloc(MAX_LENGTH); + const bytesRead = fs.readSync(fd, buffer, 0, MAX_LENGTH, 0); + fs.close(fd, () => {}); + return buffer.subarray(0, bytesRead); +}; + +/** + * Read the content of a file + * + * @param {string} path + * @returns {Promise} + */ +const readFile = (path) => new Promise((resolve, reject) => { + fs.open(path, 'r', (err, fd) => { + if (err) { + reject(err); + } else { + const buffer = Buffer.alloc(MAX_LENGTH); + fs.read(fd, buffer, 0, MAX_LENGTH, 0, (_, bytesRead) => { + resolve(buffer.subarray(0, bytesRead)); + fs.close(fd, () => {}); + }); + } + }); +}); + +module.exports = { + LDD_PATH, + SELF_PATH, + readFileSync, + readFile +}; diff --git a/node_modules/detect-libc/lib/process.js b/node_modules/detect-libc/lib/process.js new file mode 100644 index 0000000..ee78ad2 --- /dev/null +++ b/node_modules/detect-libc/lib/process.js @@ -0,0 +1,24 @@ +// Copyright 2017 Lovell Fuller and others. 
+// SPDX-License-Identifier: Apache-2.0 + +'use strict'; + +const isLinux = () => process.platform === 'linux'; + +let report = null; +const getReport = () => { + if (!report) { + /* istanbul ignore next */ + if (isLinux() && process.report) { + const orig = process.report.excludeNetwork; + process.report.excludeNetwork = true; + report = process.report.getReport(); + process.report.excludeNetwork = orig; + } else { + report = {}; + } + } + return report; +}; + +module.exports = { isLinux, getReport }; diff --git a/node_modules/detect-libc/package.json b/node_modules/detect-libc/package.json new file mode 100644 index 0000000..36d0f2b --- /dev/null +++ b/node_modules/detect-libc/package.json @@ -0,0 +1,44 @@ +{ + "name": "detect-libc", + "version": "2.1.2", + "description": "Node.js module to detect the C standard library (libc) implementation family and version", + "main": "lib/detect-libc.js", + "files": [ + "lib/", + "index.d.ts" + ], + "scripts": { + "test": "semistandard && nyc --reporter=text --check-coverage --branches=100 ava test/unit.js", + "changelog": "conventional-changelog -i CHANGELOG.md -s", + "bench": "node benchmark/detect-libc", + "bench:calls": "node benchmark/call-familySync.js && sleep 1 && node benchmark/call-isNonGlibcLinuxSync.js && sleep 1 && node benchmark/call-versionSync.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/lovell/detect-libc.git" + }, + "keywords": [ + "libc", + "glibc", + "musl" + ], + "author": "Lovell Fuller ", + "contributors": [ + "Niklas Salmoukas ", + "Vinícius Lourenço " + ], + "license": "Apache-2.0", + "devDependencies": { + "ava": "^2.4.0", + "benchmark": "^2.1.4", + "conventional-changelog-cli": "^5.0.0", + "eslint-config-standard": "^13.0.1", + "nyc": "^15.1.0", + "proxyquire": "^2.1.3", + "semistandard": "^14.2.3" + }, + "engines": { + "node": ">=8" + }, + "types": "index.d.ts" +} diff --git a/node_modules/emoji-regex/LICENSE-MIT.txt b/node_modules/emoji-regex/LICENSE-MIT.txt new file 
mode 100644 index 0000000..a41e0a7 --- /dev/null +++ b/node_modules/emoji-regex/LICENSE-MIT.txt @@ -0,0 +1,20 @@ +Copyright Mathias Bynens + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/emoji-regex/README.md b/node_modules/emoji-regex/README.md new file mode 100644 index 0000000..f10e173 --- /dev/null +++ b/node_modules/emoji-regex/README.md @@ -0,0 +1,73 @@ +# emoji-regex [![Build status](https://travis-ci.org/mathiasbynens/emoji-regex.svg?branch=master)](https://travis-ci.org/mathiasbynens/emoji-regex) + +_emoji-regex_ offers a regular expression to match all emoji symbols (including textual representations of emoji) as per the Unicode Standard. + +This repository contains a script that generates this regular expression based on [the data from Unicode v12](https://github.com/mathiasbynens/unicode-12.0.0). Because of this, the regular expression can easily be updated whenever new emoji are added to the Unicode standard. 
+ +## Installation + +Via [npm](https://www.npmjs.com/): + +```bash +npm install emoji-regex +``` + +In [Node.js](https://nodejs.org/): + +```js +const emojiRegex = require('emoji-regex'); +// Note: because the regular expression has the global flag set, this module +// exports a function that returns the regex rather than exporting the regular +// expression itself, to make it impossible to (accidentally) mutate the +// original regular expression. + +const text = ` +\u{231A}: ⌚ default emoji presentation character (Emoji_Presentation) +\u{2194}\u{FE0F}: ↔️ default text presentation character rendered as emoji +\u{1F469}: 👩 emoji modifier base (Emoji_Modifier_Base) +\u{1F469}\u{1F3FF}: 👩🏿 emoji modifier base followed by a modifier +`; + +const regex = emojiRegex(); +let match; +while (match = regex.exec(text)) { + const emoji = match[0]; + console.log(`Matched sequence ${ emoji } — code points: ${ [...emoji].length }`); +} +``` + +Console output: + +``` +Matched sequence ⌚ — code points: 1 +Matched sequence ⌚ — code points: 1 +Matched sequence ↔️ — code points: 2 +Matched sequence ↔️ — code points: 2 +Matched sequence 👩 — code points: 1 +Matched sequence 👩 — code points: 1 +Matched sequence 👩🏿 — code points: 2 +Matched sequence 👩🏿 — code points: 2 +``` + +To match emoji in their textual representation as well (i.e. 
emoji that are not `Emoji_Presentation` symbols and that aren’t forced to render as emoji by a variation selector), `require` the other regex: + +```js +const emojiRegex = require('emoji-regex/text.js'); +``` + +Additionally, in environments which support ES2015 Unicode escapes, you may `require` ES2015-style versions of the regexes: + +```js +const emojiRegex = require('emoji-regex/es2015/index.js'); +const emojiRegexText = require('emoji-regex/es2015/text.js'); +``` + +## Author + +| [![twitter/mathias](https://gravatar.com/avatar/24e08a9ea84deb17ae121074d0f17125?s=70)](https://twitter.com/mathias "Follow @mathias on Twitter") | +|---| +| [Mathias Bynens](https://mathiasbynens.be/) | + +## License + +_emoji-regex_ is available under the [MIT](https://mths.be/mit) license. diff --git a/node_modules/emoji-regex/es2015/index.js b/node_modules/emoji-regex/es2015/index.js new file mode 100644 index 0000000..b4cf3dc --- /dev/null +++ b/node_modules/emoji-regex/es2015/index.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = () => { + // https://mths.be/emoji + return 
/\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0065}\u{E006E}\u{E0067}|\u{E0073}\u{E0063}\u{E0074}|\u{E0077}\u{E006C}\u{E0073})\u{E007F}|\u{1F468}(?:\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}\u{1F3FB}|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708])\uFE0F|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|[\u{1F3FB}-\u{1F3FF}])|(?:\u{1F9D1}\u{1F3FB}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F469})\u{1F3FB}|\u{1F9D1}(?:\u{1F3FF}\u200D\u{
1F91D}\u200D\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u200D\u{1F91D}\u200D\u{1F9D1})|(?:\u{1F9D1}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}\u{1F3FC}]|\u{1F469}(?:\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FB}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|(?:\u{1F9D1}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}-\u{1F3FD}]|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|(?:\u{
1F441}\uFE0F\u200D\u{1F5E8}|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}]\uFE0F|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}](?:[\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\u{1F3F4}\u200D\u2620)\uFE0F|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F415}\u200D\u{1F9BA}|\u{1F469}\u200D\u{1F466}|\u{1F469}\u200D\u{1F467}|\u{1F1FD}\u{1F1F0}|\u{1F1F4}\u{1F1F2}|\u{1F1F6}\u{1F1E6}|[#\*0-9]\uFE0F\u20E3|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F469}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-
\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270A-\u270D\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F470}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F4AA}\u{1F574}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F936}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}-\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191
}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F393}\u{1F3A0}-\u{1F3CA}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F4}\u{1F3F8}-\u{1F43E}\u{1F440}\u{1F442}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F57A}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5FB}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CC}\u{1F6D0}-\u{1F6D2}\u{1F6D5}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1
F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]\uFE0F|[\u261D\u26F9\u270A-\u270D\u{1F385}\u{1F3C2}-\u{1F3C4}\u{1F3C7}\u{1F3CA}-\u{1F3CC}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}-\u{1F478}\u{1F47C}\u{1F481}-\u{1F483}\u{1F485}-\u{1F487}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F574}\u{1F575}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F645}-\u{1F647}\u{1F64B}-\u{1F64F}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91F}\u{1F926}\u{1F930}-\u{1F939}\u{1F93C}-\u{1F93E}\u{1F9B5}\u{1F9B6}\u{1F9B8}\u{1F9B9}\u{1F9BB}\u{1F9CD}-\u{1F9CF}\u{1F9D1}-\u{1F9DD}]/gu; +}; diff --git a/node_modules/emoji-regex/es2015/text.js b/node_modules/emoji-regex/es2015/text.js new file mode 100644 index 0000000..780309d --- /dev/null +++ b/node_modules/emoji-regex/es2015/text.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = () => { + // https://mths.be/emoji + return 
/\u{1F3F4}\u{E0067}\u{E0062}(?:\u{E0065}\u{E006E}\u{E0067}|\u{E0073}\u{E0063}\u{E0074}|\u{E0077}\u{E006C}\u{E0073})\u{E007F}|\u{1F468}(?:\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}\u{1F3FB}|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FE}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D)?\u{1F468}|[\u{1F468}\u{1F469}]\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}]|[\u{1F468}\u{1F469}]\u200D[\u{1F466}\u{1F467}]|[\u2695\u2696\u2708]\uFE0F|[\u{1F466}\u{1F467}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|(?:\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708])\uFE0F|\u{1F3FB}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}]|[\u{1F3FB}-\u{1F3FF}])|(?:\u{1F9D1}\u{1F3FB}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F469})\u{1F3FB}|\u{1F9D1}(?:\u{1F3FF}\u200D\u{
1F91D}\u200D\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u200D\u{1F91D}\u200D\u{1F9D1})|(?:\u{1F9D1}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FF}\u200D\u{1F91D}\u200D[\u{1F468}\u{1F469}])[\u{1F3FB}-\u{1F3FE}]|(?:\u{1F9D1}\u{1F3FC}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}\u{1F3FC}]|\u{1F469}(?:\u{1F3FE}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}-\u{1F3FD}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FC}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FD}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FB}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FC}-\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FD}\u200D(?:\u{1F91D}\u200D\u{1F468}[\u{1F3FB}\u{1F3FC}\u{1F3FE}\u{1F3FF}]|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u200D(?:\u2764\uFE0F\u200D(?:\u{1F48B}\u200D[\u{1F468}\u{1F469}]|[\u{1F468}\u{1F469}])|[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F3FF}\u200D[\u{1F33E}\u{1F373}\u{1F393}\u{1F3A4}\u{1F3A8}\u{1F3EB}\u{1F3ED}\u{1F4BB}\u{1F4BC}\u{1F527}\u{1F52C}\u{1F680}\u{1F692}\u{1F9AF}-\u{1F9B3}\u{1F9BC}\u{1F9BD}])|\u{1F469}\u200D\u{1F469}\u200D(?:\u{1F466}\u200D\u{1F466}|\u{1F467}\u200D[\u{1F466}\u{1F467}])|(?:\u{1F9D1}\u{1F3FD}\u200D\u{1F91D}\u200D\u{1F9D1}|\u{1F469}\u{1F3FE}\u200D\u{1F91D}\u200D\u{1F469})[\u{1F3FB}-\u{1F3FD}]|\u{1F469}\u200D\u{1F466}\u200D\u{1F466}|\u{1F469}\u200D\u{1F469}\u200D[\u{1F466}\u{1F467}]|(?:\u{
1F441}\uFE0F\u200D\u{1F5E8}|\u{1F469}(?:\u{1F3FF}\u200D[\u2695\u2696\u2708]|\u{1F3FE}\u200D[\u2695\u2696\u2708]|\u{1F3FC}\u200D[\u2695\u2696\u2708]|\u{1F3FB}\u200D[\u2695\u2696\u2708]|\u{1F3FD}\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}]\uFE0F|[\u{1F46F}\u{1F93C}\u{1F9DE}\u{1F9DF}])\u200D[\u2640\u2642]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}](?:[\u{1F3FB}-\u{1F3FF}]\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\u{1F3F4}\u200D\u2620)\uFE0F|\u{1F469}\u200D\u{1F467}\u200D[\u{1F466}\u{1F467}]|\u{1F3F3}\uFE0F\u200D\u{1F308}|\u{1F415}\u200D\u{1F9BA}|\u{1F469}\u200D\u{1F466}|\u{1F469}\u200D\u{1F467}|\u{1F1FD}\u{1F1F0}|\u{1F1F4}\u{1F1F2}|\u{1F1F6}\u{1F1E6}|[#\*0-9]\uFE0F\u20E3|\u{1F1E7}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EF}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1F9}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1ED}\u{1F1EF}-\u{1F1F4}\u{1F1F7}\u{1F1F9}\u{1F1FB}\u{1F1FC}\u{1F1FF}]|\u{1F1EA}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1ED}\u{1F1F7}-\u{1F1FA}]|\u{1F9D1}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F7}[\u{1F1EA}\u{1F1F4}\u{1F1F8}\u{1F1FA}\u{1F1FC}]|\u{1F469}[\u{1F3FB}-\u{1F3FF}]|\u{1F1F2}[\u{1F1E6}\u{1F1E8}-\u{1F1ED}\u{1F1F0}-\u{1F1FF}]|\u{1F1E6}[\u{1F1E8}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F2}\u{1F1F4}\u{1F1F6}-\u{1F1FA}\u{1F1FC}\u{1F1FD}\u{1F1FF}]|\u{1F1F0}[\u{1F1EA}\u{1F1EC}-\u{1F1EE}\u{1F1F2}\u{1F1F3}\u{1F1F5}\u{1F1F7}\u{1F1FC}\u{1F1FE}\u{1F1FF}]|\u{1F1ED}[\u{1F1F0}\u{1F1F2}\u{1F1F3}\u{1F1F7}\u{1F1F9}\u{1F1FA}]|\u{1F1E9}[\u{1F1EA}\u{1F1EC}\u{1F1EF}\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1FF}]|\u{1F1FE}[\u{1F1EA}\u{1F1F9}]|\u{1F1EC}[\u{1F1E6}\u{1F1E7}\u{1F1E9}-\u{1F1EE}\u{1F1F1}-\u{1F1F3}\u{1F1F5}-
\u{1F1FA}\u{1F1FC}\u{1F1FE}]|\u{1F1F8}[\u{1F1E6}-\u{1F1EA}\u{1F1EC}-\u{1F1F4}\u{1F1F7}-\u{1F1F9}\u{1F1FB}\u{1F1FD}-\u{1F1FF}]|\u{1F1EB}[\u{1F1EE}-\u{1F1F0}\u{1F1F2}\u{1F1F4}\u{1F1F7}]|\u{1F1F5}[\u{1F1E6}\u{1F1EA}-\u{1F1ED}\u{1F1F0}-\u{1F1F3}\u{1F1F7}-\u{1F1F9}\u{1F1FC}\u{1F1FE}]|\u{1F1FB}[\u{1F1E6}\u{1F1E8}\u{1F1EA}\u{1F1EC}\u{1F1EE}\u{1F1F3}\u{1F1FA}]|\u{1F1F3}[\u{1F1E6}\u{1F1E8}\u{1F1EA}-\u{1F1EC}\u{1F1EE}\u{1F1F1}\u{1F1F4}\u{1F1F5}\u{1F1F7}\u{1F1FA}\u{1F1FF}]|\u{1F1E8}[\u{1F1E6}\u{1F1E8}\u{1F1E9}\u{1F1EB}-\u{1F1EE}\u{1F1F0}-\u{1F1F5}\u{1F1F7}\u{1F1FA}-\u{1F1FF}]|\u{1F1F1}[\u{1F1E6}-\u{1F1E8}\u{1F1EE}\u{1F1F0}\u{1F1F7}-\u{1F1FB}\u{1F1FE}]|\u{1F1FF}[\u{1F1E6}\u{1F1F2}\u{1F1FC}]|\u{1F1FC}[\u{1F1EB}\u{1F1F8}]|\u{1F1FA}[\u{1F1E6}\u{1F1EC}\u{1F1F2}\u{1F1F3}\u{1F1F8}\u{1F1FE}\u{1F1FF}]|\u{1F1EE}[\u{1F1E8}-\u{1F1EA}\u{1F1F1}-\u{1F1F4}\u{1F1F6}-\u{1F1F9}]|\u{1F1EF}[\u{1F1EA}\u{1F1F2}\u{1F1F4}\u{1F1F5}]|[\u{1F3C3}\u{1F3C4}\u{1F3CA}\u{1F46E}\u{1F471}\u{1F473}\u{1F477}\u{1F481}\u{1F482}\u{1F486}\u{1F487}\u{1F645}-\u{1F647}\u{1F64B}\u{1F64D}\u{1F64E}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F926}\u{1F937}-\u{1F939}\u{1F93D}\u{1F93E}\u{1F9B8}\u{1F9B9}\u{1F9CD}-\u{1F9CF}\u{1F9D6}-\u{1F9DD}][\u{1F3FB}-\u{1F3FF}]|[\u26F9\u{1F3CB}\u{1F3CC}\u{1F575}][\u{1F3FB}-\u{1F3FF}]|[\u261D\u270A-\u270D\u{1F385}\u{1F3C2}\u{1F3C7}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}\u{1F467}\u{1F46B}-\u{1F46D}\u{1F470}\u{1F472}\u{1F474}-\u{1F476}\u{1F478}\u{1F47C}\u{1F483}\u{1F485}\u{1F4AA}\u{1F574}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F64C}\u{1F64F}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91C}\u{1F91E}\u{1F91F}\u{1F930}-\u{1F936}\u{1F9B5}\u{1F9B6}\u{1F9BB}\u{1F9D2}-\u{1F9D5}][\u{1F3FB}-\u{1F3FF}]|[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55\u{1F004}\u{1F0CF}\u{1F18E}\u{1F191
}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F236}\u{1F238}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F320}\u{1F32D}-\u{1F335}\u{1F337}-\u{1F37C}\u{1F37E}-\u{1F393}\u{1F3A0}-\u{1F3CA}\u{1F3CF}-\u{1F3D3}\u{1F3E0}-\u{1F3F0}\u{1F3F4}\u{1F3F8}-\u{1F43E}\u{1F440}\u{1F442}-\u{1F4FC}\u{1F4FF}-\u{1F53D}\u{1F54B}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F57A}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5FB}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CC}\u{1F6D0}-\u{1F6D2}\u{1F6D5}\u{1F6EB}\u{1F6EC}\u{1F6F4}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]|[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299\u{1F004}\u{1F0CF}\u{1F170}\u{1F171}\u{1F17E}\u{1F17F}\u{1F18E}\u{1F191}-\u{1F19A}\u{1F1E6}-\u{1F1FF}\u{1F201}\u{1F202}\u{1F21A}\u{1F22F}\u{1F232}-\u{1F23A}\u{1F250}\u{1F251}\u{1F300}-\u{1F321}\u{1F324}-\u{1F393}\u{1F396}\u{1F397}\u{1F399}-\u{1F39B}\u{1F39E}-\u{1F3F0}\u{1F3F3}-\u{1F3F5}\u{1F3F7}-\u{1F4FD}\u{1F4FF}-\u{1F53D}\u{1F549}-\u{1F54E}\u{1F550}-\u{1F567}\u{1F56F}\u{1F570}\u{1F573}-\u{1F57A}\u{1F587}\u{1F58A}-\u{1F58D}\u{1F590}\u{1F595}\u{1F596}\u{1F5A4}\u{1F5A5}\u{1F5A8}\u{1F5B1}\u{1
F5B2}\u{1F5BC}\u{1F5C2}-\u{1F5C4}\u{1F5D1}-\u{1F5D3}\u{1F5DC}-\u{1F5DE}\u{1F5E1}\u{1F5E3}\u{1F5E8}\u{1F5EF}\u{1F5F3}\u{1F5FA}-\u{1F64F}\u{1F680}-\u{1F6C5}\u{1F6CB}-\u{1F6D2}\u{1F6D5}\u{1F6E0}-\u{1F6E5}\u{1F6E9}\u{1F6EB}\u{1F6EC}\u{1F6F0}\u{1F6F3}-\u{1F6FA}\u{1F7E0}-\u{1F7EB}\u{1F90D}-\u{1F93A}\u{1F93C}-\u{1F945}\u{1F947}-\u{1F971}\u{1F973}-\u{1F976}\u{1F97A}-\u{1F9A2}\u{1F9A5}-\u{1F9AA}\u{1F9AE}-\u{1F9CA}\u{1F9CD}-\u{1F9FF}\u{1FA70}-\u{1FA73}\u{1FA78}-\u{1FA7A}\u{1FA80}-\u{1FA82}\u{1FA90}-\u{1FA95}]\uFE0F?|[\u261D\u26F9\u270A-\u270D\u{1F385}\u{1F3C2}-\u{1F3C4}\u{1F3C7}\u{1F3CA}-\u{1F3CC}\u{1F442}\u{1F443}\u{1F446}-\u{1F450}\u{1F466}-\u{1F478}\u{1F47C}\u{1F481}-\u{1F483}\u{1F485}-\u{1F487}\u{1F48F}\u{1F491}\u{1F4AA}\u{1F574}\u{1F575}\u{1F57A}\u{1F590}\u{1F595}\u{1F596}\u{1F645}-\u{1F647}\u{1F64B}-\u{1F64F}\u{1F6A3}\u{1F6B4}-\u{1F6B6}\u{1F6C0}\u{1F6CC}\u{1F90F}\u{1F918}-\u{1F91F}\u{1F926}\u{1F930}-\u{1F939}\u{1F93C}-\u{1F93E}\u{1F9B5}\u{1F9B6}\u{1F9B8}\u{1F9B9}\u{1F9BB}\u{1F9CD}-\u{1F9CF}\u{1F9D1}-\u{1F9DD}]/gu; +}; diff --git a/node_modules/emoji-regex/index.d.ts b/node_modules/emoji-regex/index.d.ts new file mode 100644 index 0000000..1955b47 --- /dev/null +++ b/node_modules/emoji-regex/index.d.ts @@ -0,0 +1,23 @@ +declare module 'emoji-regex' { + function emojiRegex(): RegExp; + + export default emojiRegex; +} + +declare module 'emoji-regex/text' { + function emojiRegex(): RegExp; + + export default emojiRegex; +} + +declare module 'emoji-regex/es2015' { + function emojiRegex(): RegExp; + + export default emojiRegex; +} + +declare module 'emoji-regex/es2015/text' { + function emojiRegex(): RegExp; + + export default emojiRegex; +} diff --git a/node_modules/emoji-regex/index.js b/node_modules/emoji-regex/index.js new file mode 100644 index 0000000..d993a3a --- /dev/null +++ b/node_modules/emoji-regex/index.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = function () { + // https://mths.be/emoji + return 
/\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73)\uDB40\uDC7F|\uD83D\uDC68(?:\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68\uD83C\uDFFB|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|[\u2695\u2696\u2708]\uFE0F|\uD83D[\uDC66\uDC67]|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708])\uFE0F|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C[\uDFFB-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)\uD83C\uDFFB|\uD83E
\uDDD1(?:\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB\uDFFC])|\uD83D\uDC69(?:\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB-\uDFFD])|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD
83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)\uFE0F|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\uD83C\uDFF4\u200D\u2620)\uFE0F|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF4\uD83C\uDDF2|\uD83C\uDDF6\uD83C\uDDE6|[#\*0-9]\uFE0F\u20E3|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83D\uDC69(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDE9(?:\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\
uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270A-\u270D]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC70\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDCAA\uDD74\uDD7A\uDD90\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD36\uDDB5\uDDB6\uDDBB\uDDD2-\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF93\uDFA0-\uDFCA\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF4\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC3E\uDC
40\uDC42-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDD7A\uDD95\uDD96\uDDA4\uDDFB-\uDE4F\uDE80-\uDEC5\uDECC\uDED0-\uDED2\uDED5\uDEEB\uDEEC\uDEF4-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\uDD8D\uDD90\uDD95\uDD96\uDDA4\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])\uFE0F|(?:[\u261D\u26F9\u270A-\u270D]|\uD83C[\uDF85\uDFC2-\uDFC4\uDFC7\uDFCA-\uDFCC]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66-\uDC78\uDC7C\uDC81-\uDC83\uDC85-\uDC87\uDC8F\uDC91\uDCAA\uDD74\uDD75\u
DD7A\uDD90\uDD95\uDD96\uDE45-\uDE47\uDE4B-\uDE4F\uDEA3\uDEB4-\uDEB6\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1F\uDD26\uDD30-\uDD39\uDD3C-\uDD3E\uDDB5\uDDB6\uDDB8\uDDB9\uDDBB\uDDCD-\uDDCF\uDDD1-\uDDDD])/g; +}; diff --git a/node_modules/emoji-regex/package.json b/node_modules/emoji-regex/package.json new file mode 100644 index 0000000..6d32352 --- /dev/null +++ b/node_modules/emoji-regex/package.json @@ -0,0 +1,50 @@ +{ + "name": "emoji-regex", + "version": "8.0.0", + "description": "A regular expression to match all Emoji-only symbols as per the Unicode Standard.", + "homepage": "https://mths.be/emoji-regex", + "main": "index.js", + "types": "index.d.ts", + "keywords": [ + "unicode", + "regex", + "regexp", + "regular expressions", + "code points", + "symbols", + "characters", + "emoji" + ], + "license": "MIT", + "author": { + "name": "Mathias Bynens", + "url": "https://mathiasbynens.be/" + }, + "repository": { + "type": "git", + "url": "https://github.com/mathiasbynens/emoji-regex.git" + }, + "bugs": "https://github.com/mathiasbynens/emoji-regex/issues", + "files": [ + "LICENSE-MIT.txt", + "index.js", + "index.d.ts", + "text.js", + "es2015/index.js", + "es2015/text.js" + ], + "scripts": { + "build": "rm -rf -- es2015; babel src -d .; NODE_ENV=es2015 babel src -d ./es2015; node script/inject-sequences.js", + "test": "mocha", + "test:watch": "npm run test -- --watch" + }, + "devDependencies": { + "@babel/cli": "^7.2.3", + "@babel/core": "^7.3.4", + "@babel/plugin-proposal-unicode-property-regex": "^7.2.0", + "@babel/preset-env": "^7.3.4", + "mocha": "^6.0.2", + "regexgen": "^1.3.0", + "unicode-12.0.0": "^0.7.9" + } +} diff --git a/node_modules/emoji-regex/text.js b/node_modules/emoji-regex/text.js new file mode 100644 index 0000000..0a55ce2 --- /dev/null +++ b/node_modules/emoji-regex/text.js @@ -0,0 +1,6 @@ +"use strict"; + +module.exports = function () { + // https://mths.be/emoji + return 
/\uD83C\uDFF4\uDB40\uDC67\uDB40\uDC62(?:\uDB40\uDC65\uDB40\uDC6E\uDB40\uDC67|\uDB40\uDC73\uDB40\uDC63\uDB40\uDC74|\uDB40\uDC77\uDB40\uDC6C\uDB40\uDC73)\uDB40\uDC7F|\uD83D\uDC68(?:\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68\uD83C\uDFFB|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFE])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D)?\uD83D\uDC68|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D[\uDC68\uDC69])\u200D(?:\uD83D[\uDC66\uDC67])|[\u2695\u2696\u2708]\uFE0F|\uD83D[\uDC66\uDC67]|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|(?:\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708])\uFE0F|\uD83C\uDFFB\u200D(?:\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C[\uDFFB-\uDFFF])|(?:\uD83E\uDDD1\uD83C\uDFFB\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)\uD83C\uDFFB|\uD83E
\uDDD1(?:\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1)|(?:\uD83E\uDDD1\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFF\u200D\uD83E\uDD1D\u200D(?:\uD83D[\uDC68\uDC69]))(?:\uD83C[\uDFFB-\uDFFE])|(?:\uD83E\uDDD1\uD83C\uDFFC\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB\uDFFC])|\uD83D\uDC69(?:\uD83C\uDFFE\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB-\uDFFD\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFC\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFD-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFB\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFC-\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFD\u200D(?:\uD83E\uDD1D\u200D\uD83D\uDC68(?:\uD83C[\uDFFB\uDFFC\uDFFE\uDFFF])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\u200D(?:\u2764\uFE0F\u200D(?:\uD83D\uDC8B\u200D(?:\uD83D[\uDC68\uDC69])|\uD83D[\uDC68\uDC69])|\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD])|\uD83C\uDFFF\u200D(?:\uD83C[\uDF3E\uDF73\uDF93\uDFA4\uDFA8\uDFEB\uDFED]|\uD83D[\uDCBB\uDCBC\uDD27\uDD2C\uDE80\uDE92]|\uD83E[\uDDAF-\uDDB3\uDDBC\uDDBD]))|\uD83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D\uDC66\u200D\uD83D\uDC66|\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67]))|(?:\uD83E\uDDD1\uD83C\uDFFD\u200D\uD83E\uDD1D\u200D\uD83E\uDDD1|\uD83D\uDC69\uD83C\uDFFE\u200D\uD83E\uDD1D\u200D\uD83D\uDC69)(?:\uD83C[\uDFFB-\uDFFD])|\uD83D\uDC69\u200D\uD83D\uDC66\u200D\uD83D\uDC66|\uD
83D\uDC69\u200D\uD83D\uDC69\u200D(?:\uD83D[\uDC66\uDC67])|(?:\uD83D\uDC41\uFE0F\u200D\uD83D\uDDE8|\uD83D\uDC69(?:\uD83C\uDFFF\u200D[\u2695\u2696\u2708]|\uD83C\uDFFE\u200D[\u2695\u2696\u2708]|\uD83C\uDFFC\u200D[\u2695\u2696\u2708]|\uD83C\uDFFB\u200D[\u2695\u2696\u2708]|\uD83C\uDFFD\u200D[\u2695\u2696\u2708]|\u200D[\u2695\u2696\u2708])|(?:(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)\uFE0F|\uD83D\uDC6F|\uD83E[\uDD3C\uDDDE\uDDDF])\u200D[\u2640\u2642]|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:(?:\uD83C[\uDFFB-\uDFFF])\u200D[\u2640\u2642]|\u200D[\u2640\u2642])|\uD83C\uDFF4\u200D\u2620)\uFE0F|\uD83D\uDC69\u200D\uD83D\uDC67\u200D(?:\uD83D[\uDC66\uDC67])|\uD83C\uDFF3\uFE0F\u200D\uD83C\uDF08|\uD83D\uDC15\u200D\uD83E\uDDBA|\uD83D\uDC69\u200D\uD83D\uDC66|\uD83D\uDC69\u200D\uD83D\uDC67|\uD83C\uDDFD\uD83C\uDDF0|\uD83C\uDDF4\uD83C\uDDF2|\uD83C\uDDF6\uD83C\uDDE6|[#\*0-9]\uFE0F\u20E3|\uD83C\uDDE7(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEF\uDDF1-\uDDF4\uDDF6-\uDDF9\uDDFB\uDDFC\uDDFE\uDDFF])|\uD83C\uDDF9(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDED\uDDEF-\uDDF4\uDDF7\uDDF9\uDDFB\uDDFC\uDDFF])|\uD83C\uDDEA(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDED\uDDF7-\uDDFA])|\uD83E\uDDD1(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF7(?:\uD83C[\uDDEA\uDDF4\uDDF8\uDDFA\uDDFC])|\uD83D\uDC69(?:\uD83C[\uDFFB-\uDFFF])|\uD83C\uDDF2(?:\uD83C[\uDDE6\uDDE8-\uDDED\uDDF0-\uDDFF])|\uD83C\uDDE6(?:\uD83C[\uDDE8-\uDDEC\uDDEE\uDDF1\uDDF2\uDDF4\uDDF6-\uDDFA\uDDFC\uDDFD\uDDFF])|\uD83C\uDDF0(?:\uD83C[\uDDEA\uDDEC-\uDDEE\uDDF2\uDDF3\uDDF5\uDDF7\uDDFC\uDDFE\uDDFF])|\uD83C\uDDED(?:\uD83C[\uDDF0\uDDF2\uDDF3\uDDF7\uDDF9\uDDFA])|\uD83C\uDDE9(?:\uD83C[\uDDEA\uDDEC\uDDEF\uDDF0\uDDF2\uDDF4\uDDFF])|\uD83C\uDDFE(?:\uD83C[\uDDEA\uDDF9])|\uD83C\uDDEC(?:\uD83C[\uDDE6\uDDE7\uDDE9-\uDDEE\uDDF1-\
uDDF3\uDDF5-\uDDFA\uDDFC\uDDFE])|\uD83C\uDDF8(?:\uD83C[\uDDE6-\uDDEA\uDDEC-\uDDF4\uDDF7-\uDDF9\uDDFB\uDDFD-\uDDFF])|\uD83C\uDDEB(?:\uD83C[\uDDEE-\uDDF0\uDDF2\uDDF4\uDDF7])|\uD83C\uDDF5(?:\uD83C[\uDDE6\uDDEA-\uDDED\uDDF0-\uDDF3\uDDF7-\uDDF9\uDDFC\uDDFE])|\uD83C\uDDFB(?:\uD83C[\uDDE6\uDDE8\uDDEA\uDDEC\uDDEE\uDDF3\uDDFA])|\uD83C\uDDF3(?:\uD83C[\uDDE6\uDDE8\uDDEA-\uDDEC\uDDEE\uDDF1\uDDF4\uDDF5\uDDF7\uDDFA\uDDFF])|\uD83C\uDDE8(?:\uD83C[\uDDE6\uDDE8\uDDE9\uDDEB-\uDDEE\uDDF0-\uDDF5\uDDF7\uDDFA-\uDDFF])|\uD83C\uDDF1(?:\uD83C[\uDDE6-\uDDE8\uDDEE\uDDF0\uDDF7-\uDDFB\uDDFE])|\uD83C\uDDFF(?:\uD83C[\uDDE6\uDDF2\uDDFC])|\uD83C\uDDFC(?:\uD83C[\uDDEB\uDDF8])|\uD83C\uDDFA(?:\uD83C[\uDDE6\uDDEC\uDDF2\uDDF3\uDDF8\uDDFE\uDDFF])|\uD83C\uDDEE(?:\uD83C[\uDDE8-\uDDEA\uDDF1-\uDDF4\uDDF6-\uDDF9])|\uD83C\uDDEF(?:\uD83C[\uDDEA\uDDF2\uDDF4\uDDF5])|(?:\uD83C[\uDFC3\uDFC4\uDFCA]|\uD83D[\uDC6E\uDC71\uDC73\uDC77\uDC81\uDC82\uDC86\uDC87\uDE45-\uDE47\uDE4B\uDE4D\uDE4E\uDEA3\uDEB4-\uDEB6]|\uD83E[\uDD26\uDD37-\uDD39\uDD3D\uDD3E\uDDB8\uDDB9\uDDCD-\uDDCF\uDDD6-\uDDDD])(?:\uD83C[\uDFFB-\uDFFF])|(?:\u26F9|\uD83C[\uDFCB\uDFCC]|\uD83D\uDD75)(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u261D\u270A-\u270D]|\uD83C[\uDF85\uDFC2\uDFC7]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66\uDC67\uDC6B-\uDC6D\uDC70\uDC72\uDC74-\uDC76\uDC78\uDC7C\uDC83\uDC85\uDCAA\uDD74\uDD7A\uDD90\uDD95\uDD96\uDE4C\uDE4F\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1C\uDD1E\uDD1F\uDD30-\uDD36\uDDB5\uDDB6\uDDBB\uDDD2-\uDDD5])(?:\uD83C[\uDFFB-\uDFFF])|(?:[\u231A\u231B\u23E9-\u23EC\u23F0\u23F3\u25FD\u25FE\u2614\u2615\u2648-\u2653\u267F\u2693\u26A1\u26AA\u26AB\u26BD\u26BE\u26C4\u26C5\u26CE\u26D4\u26EA\u26F2\u26F3\u26F5\u26FA\u26FD\u2705\u270A\u270B\u2728\u274C\u274E\u2753-\u2755\u2757\u2795-\u2797\u27B0\u27BF\u2B1B\u2B1C\u2B50\u2B55]|\uD83C[\uDC04\uDCCF\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE1A\uDE2F\uDE32-\uDE36\uDE38-\uDE3A\uDE50\uDE51\uDF00-\uDF20\uDF2D-\uDF35\uDF37-\uDF7C\uDF7E-\uDF93\uDFA0-\uDFCA\uDFCF-\uDFD3\uDFE0-\uDFF0\uDFF4\uDFF8-\uDFFF]|\uD83D[\uDC00-\uDC3E\uDC
40\uDC42-\uDCFC\uDCFF-\uDD3D\uDD4B-\uDD4E\uDD50-\uDD67\uDD7A\uDD95\uDD96\uDDA4\uDDFB-\uDE4F\uDE80-\uDEC5\uDECC\uDED0-\uDED2\uDED5\uDEEB\uDEEC\uDEF4-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])|(?:[#\*0-9\xA9\xAE\u203C\u2049\u2122\u2139\u2194-\u2199\u21A9\u21AA\u231A\u231B\u2328\u23CF\u23E9-\u23F3\u23F8-\u23FA\u24C2\u25AA\u25AB\u25B6\u25C0\u25FB-\u25FE\u2600-\u2604\u260E\u2611\u2614\u2615\u2618\u261D\u2620\u2622\u2623\u2626\u262A\u262E\u262F\u2638-\u263A\u2640\u2642\u2648-\u2653\u265F\u2660\u2663\u2665\u2666\u2668\u267B\u267E\u267F\u2692-\u2697\u2699\u269B\u269C\u26A0\u26A1\u26AA\u26AB\u26B0\u26B1\u26BD\u26BE\u26C4\u26C5\u26C8\u26CE\u26CF\u26D1\u26D3\u26D4\u26E9\u26EA\u26F0-\u26F5\u26F7-\u26FA\u26FD\u2702\u2705\u2708-\u270D\u270F\u2712\u2714\u2716\u271D\u2721\u2728\u2733\u2734\u2744\u2747\u274C\u274E\u2753-\u2755\u2757\u2763\u2764\u2795-\u2797\u27A1\u27B0\u27BF\u2934\u2935\u2B05-\u2B07\u2B1B\u2B1C\u2B50\u2B55\u3030\u303D\u3297\u3299]|\uD83C[\uDC04\uDCCF\uDD70\uDD71\uDD7E\uDD7F\uDD8E\uDD91-\uDD9A\uDDE6-\uDDFF\uDE01\uDE02\uDE1A\uDE2F\uDE32-\uDE3A\uDE50\uDE51\uDF00-\uDF21\uDF24-\uDF93\uDF96\uDF97\uDF99-\uDF9B\uDF9E-\uDFF0\uDFF3-\uDFF5\uDFF7-\uDFFF]|\uD83D[\uDC00-\uDCFD\uDCFF-\uDD3D\uDD49-\uDD4E\uDD50-\uDD67\uDD6F\uDD70\uDD73-\uDD7A\uDD87\uDD8A-\uDD8D\uDD90\uDD95\uDD96\uDDA4\uDDA5\uDDA8\uDDB1\uDDB2\uDDBC\uDDC2-\uDDC4\uDDD1-\uDDD3\uDDDC-\uDDDE\uDDE1\uDDE3\uDDE8\uDDEF\uDDF3\uDDFA-\uDE4F\uDE80-\uDEC5\uDECB-\uDED2\uDED5\uDEE0-\uDEE5\uDEE9\uDEEB\uDEEC\uDEF0\uDEF3-\uDEFA\uDFE0-\uDFEB]|\uD83E[\uDD0D-\uDD3A\uDD3C-\uDD45\uDD47-\uDD71\uDD73-\uDD76\uDD7A-\uDDA2\uDDA5-\uDDAA\uDDAE-\uDDCA\uDDCD-\uDDFF\uDE70-\uDE73\uDE78-\uDE7A\uDE80-\uDE82\uDE90-\uDE95])\uFE0F?|(?:[\u261D\u26F9\u270A-\u270D]|\uD83C[\uDF85\uDFC2-\uDFC4\uDFC7\uDFCA-\uDFCC]|\uD83D[\uDC42\uDC43\uDC46-\uDC50\uDC66-\uDC78\uDC7C\uDC81-\uDC83\uDC85-\uDC87\uDC8F\uDC91\uDCAA\uDD74\uDD75\
uDD7A\uDD90\uDD95\uDD96\uDE45-\uDE47\uDE4B-\uDE4F\uDEA3\uDEB4-\uDEB6\uDEC0\uDECC]|\uD83E[\uDD0F\uDD18-\uDD1F\uDD26\uDD30-\uDD39\uDD3C-\uDD3E\uDDB5\uDDB6\uDDB8\uDDB9\uDDBB\uDDCD-\uDDCF\uDDD1-\uDDDD])/g; +}; diff --git a/node_modules/express-session/LICENSE b/node_modules/express-session/LICENSE new file mode 100644 index 0000000..9b59ff8 --- /dev/null +++ b/node_modules/express-session/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2010 Sencha Inc. +Copyright (c) 2011 TJ Holowaychuk +Copyright (c) 2014-2015 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/express-session/README.md b/node_modules/express-session/README.md new file mode 100644 index 0000000..292b202 --- /dev/null +++ b/node_modules/express-session/README.md @@ -0,0 +1,1061 @@ +# express-session + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][node-url] +[![Build Status][ci-image]][ci-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install express-session +``` + +## API + +```js +var session = require('express-session') +``` + +### session(options) + +Create a session middleware with the given `options`. + +**Note** Session data is _not_ saved in the cookie itself, just the session ID. +Session data is stored server-side. + +**Note** Since version 1.5.0, the [`cookie-parser` middleware](https://www.npmjs.com/package/cookie-parser) +no longer needs to be used for this module to work. This module now directly reads +and writes cookies on `req`/`res`. Using `cookie-parser` may result in issues +if the `secret` is not the same between this module and `cookie-parser`. + +**Warning** The default server-side session storage, `MemoryStore`, is _purposely_ +not designed for a production environment. It will leak memory under most +conditions, does not scale past a single process, and is meant for debugging and +developing. + +For a list of stores, see [compatible session stores](#compatible-session-stores). + +#### Options + +`express-session` accepts these properties in the options object. + +##### cookie + +Settings object for the session ID cookie. The default value is +`{ path: '/', httpOnly: true, secure: false, maxAge: null }`. 
+ +In addition to providing a static object, you can also pass a callback function to dynamically generate the cookie options for each request. The callback receives the `req` object as its argument and should return an object containing the cookie settings. + +```js +var app = express() +app.use(session({ + secret: 'keyboard cat', + resave: false, + saveUninitialized: true, + cookie: function(req) { + var match = req.url.match(/^\/([^/]+)/); + return { + path: match ? '/' + match[1] : '/', + httpOnly: true, + secure: req.secure || false, + maxAge: 60000 + } + } +})) +``` + +The following are options that can be set in this object. + +##### cookie.domain + +Specifies the value for the `Domain` `Set-Cookie` attribute. By default, no domain +is set, and most clients will consider the cookie to apply to only the current +domain. + +##### cookie.expires + +Specifies the `Date` object to be the value for the `Expires` `Set-Cookie` attribute. +By default, no expiration is set, and most clients will consider this a +"non-persistent cookie" and will delete it on a condition like exiting a web browser +application. + +**Note** If both `expires` and `maxAge` are set in the options, then the last one +defined in the object is what is used. + +**Note** The `expires` option should not be set directly; instead only use the `maxAge` +option. + +##### cookie.httpOnly + +Specifies the `boolean` value for the `HttpOnly` `Set-Cookie` attribute. When truthy, +the `HttpOnly` attribute is set, otherwise it is not. By default, the `HttpOnly` +attribute is set. + +**Note** be careful when setting this to `true`, as compliant clients will not allow +client-side JavaScript to see the cookie in `document.cookie`. + +##### cookie.maxAge + +Specifies the `number` (in milliseconds) to use when calculating the `Expires` +`Set-Cookie` attribute. This is done by taking the current server time and adding +`maxAge` milliseconds to the value to calculate an `Expires` datetime. 
By default, +no maximum age is set. + +**Note** If both `expires` and `maxAge` are set in the options, then the last one +defined in the object is what is used. + +##### cookie.partitioned + +Specifies the `boolean` value for the [`Partitioned` `Set-Cookie`](rfc-cutler-httpbis-partitioned-cookies) +attribute. When truthy, the `Partitioned` attribute is set, otherwise it is not. +By default, the `Partitioned` attribute is not set. + +**Note** This is an attribute that has not yet been fully standardized, and may +change in the future. This also means many clients may ignore this attribute until +they understand it. + +More information about can be found in [the proposal](https://github.com/privacycg/CHIPS). + +##### cookie.path + +Specifies the value for the `Path` `Set-Cookie`. By default, this is set to `'/'`, which +is the root path of the domain. + +##### cookie.priority + +Specifies the `string` to be the value for the [`Priority` `Set-Cookie` attribute][rfc-west-cookie-priority-00-4.1]. + + - `'low'` will set the `Priority` attribute to `Low`. + - `'medium'` will set the `Priority` attribute to `Medium`, the default priority when not set. + - `'high'` will set the `Priority` attribute to `High`. + +More information about the different priority levels can be found in +[the specification][rfc-west-cookie-priority-00-4.1]. + +**Note** This is an attribute that has not yet been fully standardized, and may change in the future. +This also means many clients may ignore this attribute until they understand it. + +##### cookie.sameSite + +Specifies the `boolean` or `string` to be the value for the `SameSite` `Set-Cookie` attribute. +By default, this is `false`. + + - `true` will set the `SameSite` attribute to `Strict` for strict same site enforcement. + - `false` will not set the `SameSite` attribute. + - `'lax'` will set the `SameSite` attribute to `Lax` for lax same site enforcement. 
+ - `'none'` will set the `SameSite` attribute to `None` for an explicit cross-site cookie. + - `'strict'` will set the `SameSite` attribute to `Strict` for strict same site enforcement. + - `'auto'` will set the `SameSite` attribute to `None` for secure connections and `Lax` for non-secure connections. + +More information about the different enforcement levels can be found in +[the specification][rfc-6265bis-03-4.1.2.7]. + +**Note** This is an attribute that has not yet been fully standardized, and may change in +the future. This also means many clients may ignore this attribute until they understand it. + +**Note** There is a [draft spec](https://tools.ietf.org/html/draft-west-cookie-incrementalism-01) +that requires that the `Secure` attribute be set to `true` when the `SameSite` attribute has been +set to `'none'`. Some web browsers or other clients may be adopting this specification. + +The `cookie.sameSite` option can also be set to the special value `'auto'` to have +this setting automatically match the determined security of the connection. When the connection +is secure (HTTPS), the `SameSite` attribute will be set to `None` to enable cross-site usage. +When the connection is not secure (HTTP), the `SameSite` attribute will be set to `Lax` for +better security while maintaining functionality. This is useful when the Express `"trust proxy"` +setting is properly setup to simplify development vs production configuration, particularly +for SAML authentication scenarios. + +##### cookie.secure + +Specifies the `boolean` value for the `Secure` `Set-Cookie` attribute. When truthy, +the `Secure` attribute is set, otherwise it is not. By default, the `Secure` +attribute is not set. + +**Note** be careful when setting this to `true`, as compliant clients will not send +the cookie back to the server in the future if the browser does not have an HTTPS +connection. + +Please note that `secure: true` is a **recommended** option. 
However, it requires +an https-enabled website, i.e., HTTPS is necessary for secure cookies. If `secure` +is set, and you access your site over HTTP, the cookie will not be set. If you +have your node.js behind a proxy and are using `secure: true`, you need to set +"trust proxy" in express: + +```js +var app = express() +app.set('trust proxy', 1) // trust first proxy +app.use(session({ + secret: 'keyboard cat', + resave: false, + saveUninitialized: true, + cookie: { secure: true } +})) +``` + +For using secure cookies in production, but allowing for testing in development, +the following is an example of enabling this setup based on `NODE_ENV` in express: + +```js +var app = express() +var sess = { + secret: 'keyboard cat', + cookie: {} +} + +if (app.get('env') === 'production') { + app.set('trust proxy', 1) // trust first proxy + sess.cookie.secure = true // serve secure cookies +} + +app.use(session(sess)) +``` + +The `cookie.secure` option can also be set to the special value `'auto'` to have +this setting automatically match the determined security of the connection. Be +careful when using this setting if the site is available both as HTTP and HTTPS, +as once the cookie is set on HTTPS, it will no longer be visible over HTTP. This +is useful when the Express `"trust proxy"` setting is properly setup to simplify +development vs production configuration. + +##### genid + +Function to call to generate a new session ID. Provide a function that returns +a string that will be used as a session ID. The function is given `req` as the +first argument if you want to use some value attached to `req` when generating +the ID. + +The default value is a function which uses the `uid-safe` library to generate IDs. + +**NOTE** be careful to generate unique IDs so your sessions do not conflict. 
+ +```js +app.use(session({ + genid: function(req) { + return genuuid() // use UUIDs for session IDs + }, + secret: 'keyboard cat' +})) +``` + +##### name + +The name of the session ID cookie to set in the response (and read from in the +request). + +The default value is `'connect.sid'`. + +**Note** if you have multiple apps running on the same hostname (this is just +the name, i.e. `localhost` or `127.0.0.1`; different schemes and ports do not +name a different hostname), then you need to separate the session cookies from +each other. The simplest method is to simply set different `name`s per app. + +##### proxy + +Trust the reverse proxy when setting secure cookies (via the "X-Forwarded-Proto" +header). + +The default value is `undefined`. + + - `true` The "X-Forwarded-Proto" header will be used. + - `false` All headers are ignored and the connection is considered secure only + if there is a direct TLS/SSL connection. + - `undefined` Uses the "trust proxy" setting from express + +##### resave + +Forces the session to be saved back to the session store, even if the session +was never modified during the request. Depending on your store this may be +necessary, but it can also create race conditions where a client makes two +parallel requests to your server and changes made to the session in one +request may get overwritten when the other request ends, even if it made no +changes (this behavior also depends on what store you're using). + +The default value is `true`, but using the default has been deprecated, +as the default will change in the future. Please research into this setting +and choose what is appropriate to your use-case. Typically, you'll want +`false`. + +How do I know if this is necessary for my store? The best way to know is to +check with your store if it implements the `touch` method. If it does, then +you can safely set `resave: false`. 
If it does not implement the `touch` +method and your store sets an expiration date on stored sessions, then you +likely need `resave: true`. + +##### rolling + +Force the session identifier cookie to be set on every response. The expiration +is reset to the original [`maxAge`](#cookiemaxage), resetting the expiration +countdown. + +The default value is `false`. + +With this enabled, the session identifier cookie will expire in +[`maxAge`](#cookiemaxage) since the last response was sent instead of in +[`maxAge`](#cookiemaxage) since the session was last modified by the server. + +This is typically used in conjunction with short, non-session-length +[`maxAge`](#cookiemaxage) values to provide a quick timeout of the session data +with reduced potential of it occurring during on going server interactions. + +**Note** When this option is set to `true` but the `saveUninitialized` option is +set to `false`, the cookie will not be set on a response with an uninitialized +session. This option only modifies the behavior when an existing session was +loaded for the request. + +##### saveUninitialized + +Forces a session that is "uninitialized" to be saved to the store. A session is +uninitialized when it is new but not modified. Choosing `false` is useful for +implementing login sessions, reducing server storage usage, or complying with +laws that require permission before setting a cookie. Choosing `false` will also +help with race conditions where a client makes multiple parallel requests +without a session. + +The default value is `true`, but using the default has been deprecated, as the +default will change in the future. Please research into this setting and +choose what is appropriate to your use-case. + +**Note** if you are using Session in conjunction with PassportJS, Passport +will add an empty Passport object to the session for use after a user is +authenticated, which will be treated as a modification to the session, causing +it to be saved. 
*This has been fixed in PassportJS 0.3.0* + +##### secret + +**Required option** + +This is the secret used to sign the session ID cookie. The secret can be any type +of value that is supported by Node.js `crypto.createHmac` (like a string or a +`Buffer`). This can be either a single secret, or an array of multiple secrets. If +an array of secrets is provided, only the first element will be used to sign the +session ID cookie, while all the elements will be considered when verifying the +signature in requests. The secret itself should be not easily parsed by a human and +would best be a random set of characters. A best practice may include: + + - The use of environment variables to store the secret, ensuring the secret itself + does not exist in your repository. + - Periodic updates of the secret, while ensuring the previous secret is in the + array. + +Using a secret that cannot be guessed will reduce the ability to hijack a session to +only guessing the session ID (as determined by the `genid` option). + +Changing the secret value will invalidate all existing sessions. In order to rotate +the secret without invalidating sessions, provide an array of secrets, with the new +secret as first element of the array, and including previous secrets as the later +elements. + +**Note** HMAC-256 is used to sign the session ID. For this reason, the secret should +contain at least 32 bytes of entropy. + +##### store + +The session store instance, defaults to a new `MemoryStore` instance. + +##### unset + +Control the result of unsetting `req.session` (through `delete`, setting to `null`, +etc.). + +The default value is `'keep'`. + + - `'destroy'` The session will be destroyed (deleted) when the response ends. + - `'keep'` The session in the store will be kept, but modifications made during + the request are ignored and not saved. 
+ +### req.session + +To store or access session data, simply use the request property `req.session`, +which is (generally) serialized as JSON by the store, so nested objects +are typically fine. For example below is a user-specific view counter: + +```js +// Use the session middleware +app.use(session({ secret: 'keyboard cat', cookie: { maxAge: 60000 }})) + +// Access the session as req.session +app.get('/', function(req, res, next) { + if (req.session.views) { + req.session.views++ + res.setHeader('Content-Type', 'text/html') + res.write('

<p>views: ' + req.session.views + '</p>') + res.write('<p>expires in: ' + (req.session.cookie.maxAge / 1000) + 's</p>

') + res.end() + } else { + req.session.views = 1 + res.end('welcome to the session demo. refresh!') + } +}) +``` + +#### Session.regenerate(callback) + +To regenerate the session simply invoke the method. Once complete, +a new SID and `Session` instance will be initialized at `req.session` +and the `callback` will be invoked. + +```js +req.session.regenerate(function(err) { + // will have a new session here +}) +``` + +#### Session.destroy(callback) + +Destroys the session and will unset the `req.session` property. +Once complete, the `callback` will be invoked. + +```js +req.session.destroy(function(err) { + // cannot access session here +}) +``` + +#### Session.reload(callback) + +Reloads the session data from the store and re-populates the +`req.session` object. Once complete, the `callback` will be invoked. + +```js +req.session.reload(function(err) { + // session updated +}) +``` + +#### Session.save(callback) + +Save the session back to the store, replacing the contents on the store with the +contents in memory (though a store may do something else--consult the store's +documentation for exact behavior). + +This method is automatically called at the end of the HTTP response if the +session data has been altered (though this behavior can be altered with various +options in the middleware constructor). Because of this, typically this method +does not need to be called. + +There are some cases where it is useful to call this method, for example, +redirects, long-lived requests or in WebSockets. + +```js +req.session.save(function(err) { + // session saved +}) +``` + +#### Session.touch() + +Updates the `.maxAge` property. Typically this is +not necessary to call, as the session middleware does this for you. + +### req.session.id + +Each session has a unique ID associated with it. This property is an +alias of [`req.sessionID`](#reqsessionid-1) and cannot be modified. +It has been added to make the session ID accessible from the `session` +object. 
+ +### req.session.cookie + +Each session has a unique cookie object accompany it. This allows +you to alter the session cookie per visitor. For example we can +set `req.session.cookie.expires` to `false` to enable the cookie +to remain for only the duration of the user-agent. + +#### Cookie.maxAge + +Alternatively `req.session.cookie.maxAge` will return the time +remaining in milliseconds, which we may also re-assign a new value +to adjust the `.expires` property appropriately. The following +are essentially equivalent + +```js +var hour = 3600000 +req.session.cookie.expires = new Date(Date.now() + hour) +req.session.cookie.maxAge = hour +``` + +For example when `maxAge` is set to `60000` (one minute), and 30 seconds +has elapsed it will return `30000` until the current request has completed, +at which time `req.session.touch()` is called to reset +`req.session.cookie.maxAge` to its original value. + +```js +req.session.cookie.maxAge // => 30000 +``` + +#### Cookie.originalMaxAge + +The `req.session.cookie.originalMaxAge` property returns the original +`maxAge` (time-to-live), in milliseconds, of the session cookie. + +### req.sessionID + +To get the ID of the loaded session, access the request property +`req.sessionID`. This is simply a read-only value set when a session +is loaded/created. + +## Session Store Implementation + +Every session store _must_ be an `EventEmitter` and implement specific +methods. The following methods are the list of **required**, **recommended**, +and **optional**. + + * Required methods are ones that this module will always call on the store. + * Recommended methods are ones that this module will call on the store if + available. + * Optional methods are ones this module does not call at all, but helps + present uniform stores to users. + +For an example implementation view the [connect-redis](http://github.com/visionmedia/connect-redis) repo. 
+ +### store.all(callback) + +**Optional** + +This optional method is used to get all sessions in the store as an array. The +`callback` should be called as `callback(error, sessions)`. + +### store.destroy(sid, callback) + +**Required** + +This required method is used to destroy/delete a session from the store given +a session ID (`sid`). The `callback` should be called as `callback(error)` once +the session is destroyed. + +### store.clear(callback) + +**Optional** + +This optional method is used to delete all sessions from the store. The +`callback` should be called as `callback(error)` once the store is cleared. + +### store.length(callback) + +**Optional** + +This optional method is used to get the count of all sessions in the store. +The `callback` should be called as `callback(error, len)`. + +### store.get(sid, callback) + +**Required** + +This required method is used to get a session from the store given a session +ID (`sid`). The `callback` should be called as `callback(error, session)`. + +The `session` argument should be a session if found, otherwise `null` or +`undefined` if the session was not found (and there was no error). A special +case is made when `error.code === 'ENOENT'` to act like `callback(null, null)`. + +### store.set(sid, session, callback) + +**Required** + +This required method is used to upsert a session into the store given a +session ID (`sid`) and session (`session`) object. The callback should be +called as `callback(error)` once the session has been set in the store. + +### store.touch(sid, session, callback) + +**Recommended** + +This recommended method is used to "touch" a given session given a +session ID (`sid`) and session (`session`) object. The `callback` should be +called as `callback(error)` once the session has been touched. + +This is primarily used when the store will automatically delete idle sessions +and this method is used to signal to the store the given session is active, +potentially resetting the idle timer. 
+ +## Compatible Session Stores + +The following modules implement a session store that is compatible with this +module. Please make a PR to add additional modules :) + +[![★][aerospike-session-store-image] aerospike-session-store][aerospike-session-store-url] A session store using [Aerospike](http://www.aerospike.com/). + +[aerospike-session-store-url]: https://www.npmjs.com/package/aerospike-session-store +[aerospike-session-store-image]: https://badgen.net/github/stars/aerospike/aerospike-session-store-expressjs?label=%E2%98%85 + +[![★][better-sqlite3-session-store-image] better-sqlite3-session-store][better-sqlite3-session-store-url] A session store based on [better-sqlite3](https://github.com/JoshuaWise/better-sqlite3). + +[better-sqlite3-session-store-url]: https://www.npmjs.com/package/better-sqlite3-session-store +[better-sqlite3-session-store-image]: https://badgen.net/github/stars/timdaub/better-sqlite3-session-store?label=%E2%98%85 + +[![★][cassandra-store-image] cassandra-store][cassandra-store-url] An Apache Cassandra-based session store. + +[cassandra-store-url]: https://www.npmjs.com/package/cassandra-store +[cassandra-store-image]: https://badgen.net/github/stars/webcc/cassandra-store?label=%E2%98%85 + +[![★][cluster-store-image] cluster-store][cluster-store-url] A wrapper for using in-process / embedded +stores - such as SQLite (via knex), leveldb, files, or memory - with node cluster (desirable for Raspberry Pi 2 +and other multi-core embedded devices). + +[cluster-store-url]: https://www.npmjs.com/package/cluster-store +[cluster-store-image]: https://badgen.net/github/stars/coolaj86/cluster-store?label=%E2%98%85 + +[![★][connect-arango-image] connect-arango][connect-arango-url] An ArangoDB-based session store. 
+ +[connect-arango-url]: https://www.npmjs.com/package/connect-arango +[connect-arango-image]: https://badgen.net/github/stars/AlexanderArvidsson/connect-arango?label=%E2%98%85 + +[![★][connect-azuretables-image] connect-azuretables][connect-azuretables-url] An [Azure Table Storage](https://azure.microsoft.com/en-gb/services/storage/tables/)-based session store. + +[connect-azuretables-url]: https://www.npmjs.com/package/connect-azuretables +[connect-azuretables-image]: https://badgen.net/github/stars/mike-goodwin/connect-azuretables?label=%E2%98%85 + +[![★][connect-cloudant-store-image] connect-cloudant-store][connect-cloudant-store-url] An [IBM Cloudant](https://cloudant.com/)-based session store. + +[connect-cloudant-store-url]: https://www.npmjs.com/package/connect-cloudant-store +[connect-cloudant-store-image]: https://badgen.net/github/stars/adriantanasa/connect-cloudant-store?label=%E2%98%85 + +[![★][connect-cosmosdb-image] connect-cosmosdb][connect-cosmosdb-url] An Azure [Cosmos DB](https://azure.microsoft.com/en-us/products/cosmos-db/)-based session store. + +[connect-cosmosdb-url]: https://www.npmjs.com/package/connect-cosmosdb +[connect-cosmosdb-image]: https://badgen.net/github/stars/thekillingspree/connect-cosmosdb?label=%E2%98%85 + +[![★][connect-couchbase-image] connect-couchbase][connect-couchbase-url] A [couchbase](http://www.couchbase.com/)-based session store. + +[connect-couchbase-url]: https://www.npmjs.com/package/connect-couchbase +[connect-couchbase-image]: https://badgen.net/github/stars/christophermina/connect-couchbase?label=%E2%98%85 + +[![★][connect-datacache-image] connect-datacache][connect-datacache-url] An [IBM Bluemix Data Cache](http://www.ibm.com/cloud-computing/bluemix/)-based session store. 
+ +[connect-datacache-url]: https://www.npmjs.com/package/connect-datacache +[connect-datacache-image]: https://badgen.net/github/stars/adriantanasa/connect-datacache?label=%E2%98%85 + +[![★][@google-cloud/connect-datastore-image] @google-cloud/connect-datastore][@google-cloud/connect-datastore-url] A [Google Cloud Datastore](https://cloud.google.com/datastore/docs/concepts/overview)-based session store. + +[@google-cloud/connect-datastore-url]: https://www.npmjs.com/package/@google-cloud/connect-datastore +[@google-cloud/connect-datastore-image]: https://badgen.net/github/stars/GoogleCloudPlatform/cloud-datastore-session-node?label=%E2%98%85 + +[![★][connect-db2-image] connect-db2][connect-db2-url] An IBM DB2-based session store built using [ibm_db](https://www.npmjs.com/package/ibm_db) module. + +[connect-db2-url]: https://www.npmjs.com/package/connect-db2 +[connect-db2-image]: https://badgen.net/github/stars/wallali/connect-db2?label=%E2%98%85 + +[![★][connect-dynamodb-image] connect-dynamodb][connect-dynamodb-url] A DynamoDB-based session store. + +[connect-dynamodb-url]: https://www.npmjs.com/package/connect-dynamodb +[connect-dynamodb-image]: https://badgen.net/github/stars/ca98am79/connect-dynamodb?label=%E2%98%85 + +[![★][@google-cloud/connect-firestore-image] @google-cloud/connect-firestore][@google-cloud/connect-firestore-url] A [Google Cloud Firestore](https://cloud.google.com/firestore/docs/overview)-based session store. + +[@google-cloud/connect-firestore-url]: https://www.npmjs.com/package/@google-cloud/connect-firestore +[@google-cloud/connect-firestore-image]: https://badgen.net/github/stars/googleapis/nodejs-firestore-session?label=%E2%98%85 + +[![★][connect-hazelcast-image] connect-hazelcast][connect-hazelcast-url] Hazelcast session store for Connect and Express. 
+ +[connect-hazelcast-url]: https://www.npmjs.com/package/connect-hazelcast +[connect-hazelcast-image]: https://badgen.net/github/stars/huseyinbabal/connect-hazelcast?label=%E2%98%85 + +[![★][connect-loki-image] connect-loki][connect-loki-url] A Loki.js-based session store. + +[connect-loki-url]: https://www.npmjs.com/package/connect-loki +[connect-loki-image]: https://badgen.net/github/stars/Requarks/connect-loki?label=%E2%98%85 + +[![★][connect-lowdb-image] connect-lowdb][connect-lowdb-url] A lowdb-based session store. + +[connect-lowdb-url]: https://www.npmjs.com/package/connect-lowdb +[connect-lowdb-image]: https://badgen.net/github/stars/travishorn/connect-lowdb?label=%E2%98%85 + +[![★][connect-memcached-image] connect-memcached][connect-memcached-url] A memcached-based session store. + +[connect-memcached-url]: https://www.npmjs.com/package/connect-memcached +[connect-memcached-image]: https://badgen.net/github/stars/balor/connect-memcached?label=%E2%98%85 + +[![★][connect-memjs-image] connect-memjs][connect-memjs-url] A memcached-based session store using +[memjs](https://www.npmjs.com/package/memjs) as the memcached client. + +[connect-memjs-url]: https://www.npmjs.com/package/connect-memjs +[connect-memjs-image]: https://badgen.net/github/stars/liamdon/connect-memjs?label=%E2%98%85 + +[![★][connect-ml-image] connect-ml][connect-ml-url] A MarkLogic Server-based session store. + +[connect-ml-url]: https://www.npmjs.com/package/connect-ml +[connect-ml-image]: https://badgen.net/github/stars/bluetorch/connect-ml?label=%E2%98%85 + +[![★][connect-monetdb-image] connect-monetdb][connect-monetdb-url] A MonetDB-based session store. + +[connect-monetdb-url]: https://www.npmjs.com/package/connect-monetdb +[connect-monetdb-image]: https://badgen.net/github/stars/MonetDB/npm-connect-monetdb?label=%E2%98%85 + +[![★][connect-mongo-image] connect-mongo][connect-mongo-url] A MongoDB-based session store. 
+ +[connect-mongo-url]: https://www.npmjs.com/package/connect-mongo +[connect-mongo-image]: https://badgen.net/github/stars/kcbanner/connect-mongo?label=%E2%98%85 + +[![★][connect-mongodb-session-image] connect-mongodb-session][connect-mongodb-session-url] Lightweight MongoDB-based session store built and maintained by MongoDB. + +[connect-mongodb-session-url]: https://www.npmjs.com/package/connect-mongodb-session +[connect-mongodb-session-image]: https://badgen.net/github/stars/mongodb-js/connect-mongodb-session?label=%E2%98%85 + +[![★][connect-mssql-v2-image] connect-mssql-v2][connect-mssql-v2-url] A Microsoft SQL Server-based session store based on [connect-mssql](https://www.npmjs.com/package/connect-mssql). + +[connect-mssql-v2-url]: https://www.npmjs.com/package/connect-mssql-v2 +[connect-mssql-v2-image]: https://badgen.net/github/stars/jluboff/connect-mssql-v2?label=%E2%98%85 + +[![★][connect-neo4j-image] connect-neo4j][connect-neo4j-url] A [Neo4j](https://neo4j.com)-based session store. + +[connect-neo4j-url]: https://www.npmjs.com/package/connect-neo4j +[connect-neo4j-image]: https://badgen.net/github/stars/MaxAndersson/connect-neo4j?label=%E2%98%85 + +[![★][connect-ottoman-image] connect-ottoman][connect-ottoman-url] A [couchbase ottoman](http://www.couchbase.com/)-based session store. + +[connect-ottoman-url]: https://www.npmjs.com/package/connect-ottoman +[connect-ottoman-image]: https://badgen.net/github/stars/noiissyboy/connect-ottoman?label=%E2%98%85 + +[![★][connect-pg-simple-image] connect-pg-simple][connect-pg-simple-url] A PostgreSQL-based session store. + +[connect-pg-simple-url]: https://www.npmjs.com/package/connect-pg-simple +[connect-pg-simple-image]: https://badgen.net/github/stars/voxpelli/node-connect-pg-simple?label=%E2%98%85 + +[![★][connect-redis-image] connect-redis][connect-redis-url] A Redis-based session store. 
+ +[connect-redis-url]: https://www.npmjs.com/package/connect-redis +[connect-redis-image]: https://badgen.net/github/stars/tj/connect-redis?label=%E2%98%85 + +[![★][connect-session-firebase-image] connect-session-firebase][connect-session-firebase-url] A session store based on the [Firebase Realtime Database](https://firebase.google.com/docs/database/) + +[connect-session-firebase-url]: https://www.npmjs.com/package/connect-session-firebase +[connect-session-firebase-image]: https://badgen.net/github/stars/benweier/connect-session-firebase?label=%E2%98%85 + +[![★][connect-session-knex-image] connect-session-knex][connect-session-knex-url] A session store using +[Knex.js](http://knexjs.org/), which is a SQL query builder for PostgreSQL, MySQL, MariaDB, SQLite3, and Oracle. + +[connect-session-knex-url]: https://www.npmjs.com/package/connect-session-knex +[connect-session-knex-image]: https://badgen.net/github/stars/llambda/connect-session-knex?label=%E2%98%85 + +[![★][connect-session-sequelize-image] connect-session-sequelize][connect-session-sequelize-url] A session store using +[Sequelize.js](http://sequelizejs.com/), which is a Node.js / io.js ORM for PostgreSQL, MySQL, SQLite and MSSQL. + +[connect-session-sequelize-url]: https://www.npmjs.com/package/connect-session-sequelize +[connect-session-sequelize-image]: https://badgen.net/github/stars/mweibel/connect-session-sequelize?label=%E2%98%85 + +[![★][connect-sqlite3-image] connect-sqlite3][connect-sqlite3-url] A [SQLite3](https://github.com/mapbox/node-sqlite3) session store modeled after the TJ's `connect-redis` store. + +[connect-sqlite3-url]: https://www.npmjs.com/package/connect-sqlite3 +[connect-sqlite3-image]: https://badgen.net/github/stars/rawberg/connect-sqlite3?label=%E2%98%85 + +[![★][connect-typeorm-image] connect-typeorm][connect-typeorm-url] A [TypeORM](https://github.com/typeorm/typeorm)-based session store. 
+ +[connect-typeorm-url]: https://www.npmjs.com/package/connect-typeorm +[connect-typeorm-image]: https://badgen.net/github/stars/makepost/connect-typeorm?label=%E2%98%85 + +[![★][couchdb-expression-image] couchdb-expression][couchdb-expression-url] A [CouchDB](https://couchdb.apache.org/)-based session store. + +[couchdb-expression-url]: https://www.npmjs.com/package/couchdb-expression +[couchdb-expression-image]: https://badgen.net/github/stars/tkshnwesper/couchdb-expression?label=%E2%98%85 + +[![★][dynamodb-store-image] dynamodb-store][dynamodb-store-url] A DynamoDB-based session store. + +[dynamodb-store-url]: https://www.npmjs.com/package/dynamodb-store +[dynamodb-store-image]: https://badgen.net/github/stars/rafaelrpinto/dynamodb-store?label=%E2%98%85 + +[![★][dynamodb-store-v3-image] dynamodb-store-v3][dynamodb-store-v3-url] Implementation of a session store using DynamoDB backed by the [AWS SDK for JavaScript v3](https://github.com/aws/aws-sdk-js-v3). + +[dynamodb-store-v3-url]: https://www.npmjs.com/package/dynamodb-store-v3 +[dynamodb-store-v3-image]: https://badgen.net/github/stars/FryDay/dynamodb-store-v3?label=%E2%98%85 + +[![★][express-etcd-image] express-etcd][express-etcd-url] An [etcd](https://github.com/stianeikeland/node-etcd) based session store. + +[express-etcd-url]: https://www.npmjs.com/package/express-etcd +[express-etcd-image]: https://badgen.net/github/stars/gildean/express-etcd?label=%E2%98%85 + +[![★][express-mysql-session-image] express-mysql-session][express-mysql-session-url] A session store using native +[MySQL](https://www.mysql.com/) via the [node-mysql](https://github.com/felixge/node-mysql) module. + +[express-mysql-session-url]: https://www.npmjs.com/package/express-mysql-session +[express-mysql-session-image]: https://badgen.net/github/stars/chill117/express-mysql-session?label=%E2%98%85 + +[![★][express-nedb-session-image] express-nedb-session][express-nedb-session-url] A NeDB-based session store. 
+ +[express-nedb-session-url]: https://www.npmjs.com/package/express-nedb-session +[express-nedb-session-image]: https://badgen.net/github/stars/louischatriot/express-nedb-session?label=%E2%98%85 + +[![★][express-oracle-session-image] express-oracle-session][express-oracle-session-url] A session store using native +[oracle](https://www.oracle.com/) via the [node-oracledb](https://www.npmjs.com/package/oracledb) module. + +[express-oracle-session-url]: https://www.npmjs.com/package/express-oracle-session +[express-oracle-session-image]: https://badgen.net/github/stars/slumber86/express-oracle-session?label=%E2%98%85 + +[![★][express-session-cache-manager-image] express-session-cache-manager][express-session-cache-manager-url] +A store that implements [cache-manager](https://www.npmjs.com/package/cache-manager), which supports +a [variety of storage types](https://www.npmjs.com/package/cache-manager#store-engines). + +[express-session-cache-manager-url]: https://www.npmjs.com/package/express-session-cache-manager +[express-session-cache-manager-image]: https://badgen.net/github/stars/theogravity/express-session-cache-manager?label=%E2%98%85 + +[![★][express-session-etcd3-image] express-session-etcd3][express-session-etcd3-url] An [etcd3](https://github.com/mixer/etcd3) based session store. + +[express-session-etcd3-url]: https://www.npmjs.com/package/express-session-etcd3 +[express-session-etcd3-image]: https://badgen.net/github/stars/willgm/express-session-etcd3?label=%E2%98%85 + +[![★][express-session-level-image] express-session-level][express-session-level-url] A [LevelDB](https://github.com/Level/levelup) based session store. 
+ +[express-session-level-url]: https://www.npmjs.com/package/express-session-level +[express-session-level-image]: https://badgen.net/github/stars/tgohn/express-session-level?label=%E2%98%85 + +[![★][express-session-rsdb-image] express-session-rsdb][express-session-rsdb-url] Session store based on Rocket-Store: A very simple, super fast and yet powerful, flat file database. + +[express-session-rsdb-url]: https://www.npmjs.com/package/express-session-rsdb +[express-session-rsdb-image]: https://badgen.net/github/stars/paragi/express-session-rsdb?label=%E2%98%85 + +[![★][express-sessions-image] express-sessions][express-sessions-url] A session store supporting both MongoDB and Redis. + +[express-sessions-url]: https://www.npmjs.com/package/express-sessions +[express-sessions-image]: https://badgen.net/github/stars/konteck/express-sessions?label=%E2%98%85 + +[![★][firestore-store-image] firestore-store][firestore-store-url] A [Firestore](https://github.com/hendrysadrak/firestore-store)-based session store. + +[firestore-store-url]: https://www.npmjs.com/package/firestore-store +[firestore-store-image]: https://badgen.net/github/stars/hendrysadrak/firestore-store?label=%E2%98%85 + +[![★][fortune-session-image] fortune-session][fortune-session-url] A [Fortune.js](https://github.com/fortunejs/fortune) +based session store. Supports all backends supported by Fortune (MongoDB, Redis, Postgres, NeDB). + +[fortune-session-url]: https://www.npmjs.com/package/fortune-session +[fortune-session-image]: https://badgen.net/github/stars/aliceklipper/fortune-session?label=%E2%98%85 + +[![★][hazelcast-store-image] hazelcast-store][hazelcast-store-url] A Hazelcast-based session store built on the [Hazelcast Node Client](https://www.npmjs.com/package/hazelcast-client). 
+ +[hazelcast-store-url]: https://www.npmjs.com/package/hazelcast-store +[hazelcast-store-image]: https://badgen.net/github/stars/jackspaniel/hazelcast-store?label=%E2%98%85 + +[![★][level-session-store-image] level-session-store][level-session-store-url] A LevelDB-based session store. + +[level-session-store-url]: https://www.npmjs.com/package/level-session-store +[level-session-store-image]: https://badgen.net/github/stars/toddself/level-session-store?label=%E2%98%85 + +[![★][lowdb-session-store-image] lowdb-session-store][lowdb-session-store-url] A [lowdb](https://www.npmjs.com/package/lowdb)-based session store. + +[lowdb-session-store-url]: https://www.npmjs.com/package/lowdb-session-store +[lowdb-session-store-image]: https://badgen.net/github/stars/fhellwig/lowdb-session-store?label=%E2%98%85 + +[![★][medea-session-store-image] medea-session-store][medea-session-store-url] A Medea-based session store. + +[medea-session-store-url]: https://www.npmjs.com/package/medea-session-store +[medea-session-store-image]: https://badgen.net/github/stars/BenjaminVadant/medea-session-store?label=%E2%98%85 + +[![★][memorystore-image] memorystore][memorystore-url] A memory session store made for production. + +[memorystore-url]: https://www.npmjs.com/package/memorystore +[memorystore-image]: https://badgen.net/github/stars/roccomuso/memorystore?label=%E2%98%85 + +[![★][mssql-session-store-image] mssql-session-store][mssql-session-store-url] A SQL Server-based session store. + +[mssql-session-store-url]: https://www.npmjs.com/package/mssql-session-store +[mssql-session-store-image]: https://badgen.net/github/stars/jwathen/mssql-session-store?label=%E2%98%85 + +[![★][nedb-session-store-image] nedb-session-store][nedb-session-store-url] An alternate NeDB-based (either in-memory or file-persisted) session store. 
+ +[nedb-session-store-url]: https://www.npmjs.com/package/nedb-session-store +[nedb-session-store-image]: https://badgen.net/github/stars/JamesMGreene/nedb-session-store?label=%E2%98%85 + +[![★][@quixo3/prisma-session-store-image] @quixo3/prisma-session-store][@quixo3/prisma-session-store-url] A session store for the [Prisma Framework](https://www.prisma.io). + +[@quixo3/prisma-session-store-url]: https://www.npmjs.com/package/@quixo3/prisma-session-store +[@quixo3/prisma-session-store-image]: https://badgen.net/github/stars/kleydon/prisma-session-store?label=%E2%98%85 + +[![★][restsession-image] restsession][restsession-url] Store sessions utilizing a RESTful API + +[restsession-url]: https://www.npmjs.com/package/restsession +[restsession-image]: https://badgen.net/github/stars/jankal/restsession?label=%E2%98%85 + +[![★][sequelstore-connect-image] sequelstore-connect][sequelstore-connect-url] A session store using [Sequelize.js](http://sequelizejs.com/). + +[sequelstore-connect-url]: https://www.npmjs.com/package/sequelstore-connect +[sequelstore-connect-image]: https://badgen.net/github/stars/MattMcFarland/sequelstore-connect?label=%E2%98%85 + +[![★][session-file-store-image] session-file-store][session-file-store-url] A file system-based session store. + +[session-file-store-url]: https://www.npmjs.com/package/session-file-store +[session-file-store-image]: https://badgen.net/github/stars/valery-barysok/session-file-store?label=%E2%98%85 + +[![★][session-pouchdb-store-image] session-pouchdb-store][session-pouchdb-store-url] Session store for PouchDB / CouchDB. Accepts embedded, custom, or remote PouchDB instance and realtime synchronization. 
+ +[session-pouchdb-store-url]: https://www.npmjs.com/package/session-pouchdb-store +[session-pouchdb-store-image]: https://badgen.net/github/stars/solzimer/session-pouchdb-store?label=%E2%98%85 + +[![★][@cyclic.sh/session-store-image] @cyclic.sh/session-store][@cyclic.sh/session-store-url] A DynamoDB-based session store for [Cyclic.sh](https://www.cyclic.sh/) apps. + +[@cyclic.sh/session-store-url]: https://www.npmjs.com/package/@cyclic.sh/session-store +[@cyclic.sh/session-store-image]: https://badgen.net/github/stars/cyclic-software/session-store?label=%E2%98%85 + +[![★][@databunker/session-store-image] @databunker/session-store][@databunker/session-store-url] A [Databunker](https://databunker.org/)-based encrypted session store. + +[@databunker/session-store-url]: https://www.npmjs.com/package/@databunker/session-store +[@databunker/session-store-image]: https://badgen.net/github/stars/securitybunker/databunker-session-store?label=%E2%98%85 + +[![★][sessionstore-image] sessionstore][sessionstore-url] A session store that works with various databases. + +[sessionstore-url]: https://www.npmjs.com/package/sessionstore +[sessionstore-image]: https://badgen.net/github/stars/adrai/sessionstore?label=%E2%98%85 + +[![★][tch-nedb-session-image] tch-nedb-session][tch-nedb-session-url] A file system session store based on NeDB. + +[tch-nedb-session-url]: https://www.npmjs.com/package/tch-nedb-session +[tch-nedb-session-image]: https://badgen.net/github/stars/tomaschyly/NeDBSession?label=%E2%98%85 + +## Examples + +### View counter + +A simple example using `express-session` to store page views for a user. 
+ +```js +var express = require('express') +var parseurl = require('parseurl') +var session = require('express-session') + +var app = express() + +app.use(session({ + secret: 'keyboard cat', + resave: false, + saveUninitialized: true +})) + +app.use(function (req, res, next) { + if (!req.session.views) { + req.session.views = {} + } + + // get the url pathname + var pathname = parseurl(req).pathname + + // count the views + req.session.views[pathname] = (req.session.views[pathname] || 0) + 1 + + next() +}) + +app.get('/foo', function (req, res, next) { + res.send('you viewed this page ' + req.session.views['/foo'] + ' times') +}) + +app.get('/bar', function (req, res, next) { + res.send('you viewed this page ' + req.session.views['/bar'] + ' times') +}) + +app.listen(3000) +``` + +### User login + +A simple example using `express-session` to keep a user log in session. + +```js +var escapeHtml = require('escape-html') +var express = require('express') +var session = require('express-session') + +var app = express() + +app.use(session({ + secret: 'keyboard cat', + resave: false, + saveUninitialized: true +})) + +// middleware to test if authenticated +function isAuthenticated (req, res, next) { + if (req.session.user) next() + else next('route') +} + +app.get('/', isAuthenticated, function (req, res) { + // this is only called when there is an authentication user due to isAuthenticated + res.send('hello, ' + escapeHtml(req.session.user) + '!' + + ' Logout') +}) + +app.get('/', function (req, res) { + res.send('
' + + 'Username:
' + + 'Password:
' + + '
') +}) + +app.post('/login', express.urlencoded({ extended: false }), function (req, res) { + // login logic to validate req.body.user and req.body.pass + // would be implemented here. for this example any combo works + + // regenerate the session, which is good practice to help + // guard against forms of session fixation + req.session.regenerate(function (err) { + if (err) next(err) + + // store user information in session, typically a user id + req.session.user = req.body.user + + // save the session before redirection to ensure page + // load does not happen before session is saved + req.session.save(function (err) { + if (err) return next(err) + res.redirect('/') + }) + }) +}) + +app.get('/logout', function (req, res, next) { + // logout logic + + // clear the user from the session object and save. + // this will ensure that re-using the old session id + // does not have a logged in user + req.session.user = null + req.session.save(function (err) { + if (err) next(err) + + // regenerate the session, which is good practice to help + // guard against forms of session fixation + req.session.regenerate(function (err) { + if (err) next(err) + res.redirect('/') + }) + }) +}) + +app.listen(3000) +``` + +## Debugging + +This module uses the [debug](https://www.npmjs.com/package/debug) module +internally to log information about session operations. 
+ +To see all the internal logs, set the `DEBUG` environment variable to +`express-session` when launching your app (`npm start`, in this example): + +```sh +$ DEBUG=express-session npm start +``` + +On Windows, use the corresponding command; + +```sh +> set DEBUG=express-session & npm start +``` + +## License + +[MIT](LICENSE) + +[rfc-6265bis-03-4.1.2.7]: https://tools.ietf.org/html/draft-ietf-httpbis-rfc6265bis-03#section-4.1.2.7 +[rfc-cutler-httpbis-partitioned-cookies]: https://tools.ietf.org/html/draft-cutler-httpbis-partitioned-cookies/ +[rfc-west-cookie-priority-00-4.1]: https://tools.ietf.org/html/draft-west-cookie-priority-00#section-4.1 +[ci-image]: https://badgen.net/github/checks/expressjs/session/master?label=ci +[ci-url]: https://github.com/expressjs/session/actions?query=workflow%3Aci +[coveralls-image]: https://badgen.net/coveralls/c/github/expressjs/session/master +[coveralls-url]: https://coveralls.io/r/expressjs/session?branch=master +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/express-session +[npm-url]: https://npmjs.org/package/express-session +[npm-version-image]: https://badgen.net/npm/v/express-session diff --git a/node_modules/express-session/index.js b/node_modules/express-session/index.js new file mode 100644 index 0000000..1a50931 --- /dev/null +++ b/node_modules/express-session/index.js @@ -0,0 +1,700 @@ +/*! + * express-session + * Copyright(c) 2010 Sencha Inc. + * Copyright(c) 2011 TJ Holowaychuk + * Copyright(c) 2014-2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. 
+ * @private + */ + +var Buffer = require('safe-buffer').Buffer +var cookie = require('cookie'); +var crypto = require('crypto') +var debug = require('debug')('express-session'); +var deprecate = require('depd')('express-session'); +var onHeaders = require('on-headers') +var parseUrl = require('parseurl'); +var signature = require('cookie-signature') +var uid = require('uid-safe').sync + +var Cookie = require('./session/cookie') +var MemoryStore = require('./session/memory') +var Session = require('./session/session') +var Store = require('./session/store') + +// environment + +var env = process.env.NODE_ENV; + +/** + * Expose the middleware. + */ + +exports = module.exports = session; + +/** + * Expose constructors. + */ + +exports.Store = Store; +exports.Cookie = Cookie; +exports.Session = Session; +exports.MemoryStore = MemoryStore; + +/** + * Warning message for `MemoryStore` usage in production. + * @private + */ + +var warning = 'Warning: connect.session() MemoryStore is not\n' + + 'designed for a production environment, as it will leak\n' + + 'memory, and will not scale past a single process.'; + +/** + * Node.js 0.8+ async implementation. + * @private + */ + +/* istanbul ignore next */ +var defer = typeof setImmediate === 'function' + ? setImmediate + : function(fn){ process.nextTick(fn.bind.apply(fn, arguments)) } + +/** + * Setup session store with the given `options`. 
+ * + * @param {Object} [options] + * @param {Object|Function} [options.cookie] Options for cookie + * @param {Function} [options.genid] + * @param {String} [options.name=connect.sid] Session ID cookie name + * @param {Boolean} [options.proxy] + * @param {Boolean} [options.resave] Resave unmodified sessions back to the store + * @param {Boolean} [options.rolling] Enable/disable rolling session expiration + * @param {Boolean} [options.saveUninitialized] Save uninitialized sessions to the store + * @param {String|Array} [options.secret] Secret for signing session ID + * @param {Object} [options.store=MemoryStore] Session store + * @param {String} [options.unset] + * @return {Function} middleware + * @public + */ + +function session(options) { + var opts = options || {} + + // get the cookie options + var cookieOptions = opts.cookie || {} + + // get the session id generate function + var generateId = opts.genid || generateSessionId + + // get the session cookie name + var name = opts.name || opts.key || 'connect.sid' + + // get the session store + var store = opts.store || new MemoryStore() + + // get the trust proxy setting + var trustProxy = opts.proxy + + // get the resave session option + var resaveSession = opts.resave; + + // get the rolling session option + var rollingSessions = Boolean(opts.rolling) + + // get the save uninitialized session option + var saveUninitializedSession = opts.saveUninitialized + + // get the cookie signing secret + var secret = opts.secret + + if (typeof generateId !== 'function') { + throw new TypeError('genid option must be a function'); + } + + if (resaveSession === undefined) { + deprecate('undefined resave option; provide resave option'); + resaveSession = true; + } + + if (saveUninitializedSession === undefined) { + deprecate('undefined saveUninitialized option; provide saveUninitialized option'); + saveUninitializedSession = true; + } + + if (opts.unset && opts.unset !== 'destroy' && opts.unset !== 'keep') { + throw new 
TypeError('unset option must be "destroy" or "keep"'); + } + + // TODO: switch to "destroy" on next major + var unsetDestroy = opts.unset === 'destroy' + + if (Array.isArray(secret) && secret.length === 0) { + throw new TypeError('secret option array must contain one or more strings'); + } + + if (secret && !Array.isArray(secret)) { + secret = [secret]; + } + + if (!secret) { + deprecate('req.secret; provide secret option'); + } + + // notify user that this store is not + // meant for a production environment + /* istanbul ignore next: not tested */ + if (env === 'production' && store instanceof MemoryStore) { + console.warn(warning); + } + + // generates the new session + store.generate = function(req){ + req.sessionID = generateId(req); + req.session = new Session(req); + req.session.cookie = new Cookie(typeof cookieOptions === 'function' ? cookieOptions(req) : cookieOptions); + + var isSecure = issecure(req, trustProxy); + + if (cookieOptions.secure === 'auto') { + req.session.cookie.secure = isSecure; + } + + if (cookieOptions.sameSite === 'auto') { + req.session.cookie.sameSite = isSecure ? 'none' : 'lax'; + } + }; + + var storeImplementsTouch = typeof store.touch === 'function'; + + // register event listeners for the store to track readiness + var storeReady = true + store.on('disconnect', function ondisconnect() { + storeReady = false + }) + store.on('connect', function onconnect() { + storeReady = true + }) + + return function session(req, res, next) { + // self-awareness + if (req.session) { + next() + return + } + + // Handle connection as if there is no session if + // the store has temporarily disconnected etc + if (!storeReady) { + debug('store is disconnected') + next() + return + } + + // pathname mismatch + var originalPath = parseUrl.original(req).pathname || '/' + var resolvedCookieOptions = typeof cookieOptions === 'function' ? 
cookieOptions(req) : cookieOptions + if (originalPath.indexOf(resolvedCookieOptions.path || '/') !== 0) { + debug('pathname mismatch') + next() + return + } + + // ensure a secret is available or bail + if (!secret && !req.secret) { + next(new Error('secret option required for sessions')); + return; + } + + // backwards compatibility for signed cookies + // req.secret is passed from the cookie parser middleware + var secrets = secret || [req.secret]; + + var originalHash; + var originalId; + var savedHash; + var touched = false + + // expose store + req.sessionStore = store; + + // get the session ID from the cookie + var cookieId = req.sessionID = getcookie(req, name, secrets); + + // set-cookie + onHeaders(res, function(){ + if (!req.session) { + debug('no session'); + return; + } + + if (!shouldSetCookie(req)) { + return; + } + + // only send secure cookies via https + if (req.session.cookie.secure && !issecure(req, trustProxy)) { + debug('not secured'); + return; + } + + if (!touched) { + // touch session + req.session.touch() + touched = true + } + + // set cookie + try { + setcookie(res, name, req.sessionID, secrets[0], req.session.cookie.data) + } catch (err) { + defer(next, err) + } + }); + + // proxy end() to commit the session + var _end = res.end; + var _write = res.write; + var ended = false; + res.end = function end(chunk, encoding) { + if (ended) { + return false; + } + + ended = true; + + var ret; + var sync = true; + + function writeend() { + if (sync) { + ret = _end.call(res, chunk, encoding); + sync = false; + return; + } + + _end.call(res); + } + + function writetop() { + if (!sync) { + return ret; + } + + if (!res._header) { + res._implicitHeader() + } + + if (chunk == null) { + ret = true; + return ret; + } + + var contentLength = Number(res.getHeader('Content-Length')); + + if (!isNaN(contentLength) && contentLength > 0) { + // measure chunk + chunk = !Buffer.isBuffer(chunk) + ? 
Buffer.from(chunk, encoding) + : chunk; + encoding = undefined; + + if (chunk.length !== 0) { + debug('split response'); + ret = _write.call(res, chunk.slice(0, chunk.length - 1)); + chunk = chunk.slice(chunk.length - 1, chunk.length); + return ret; + } + } + + ret = _write.call(res, chunk, encoding); + sync = false; + + return ret; + } + + if (shouldDestroy(req)) { + // destroy session + debug('destroying'); + store.destroy(req.sessionID, function ondestroy(err) { + if (err) { + defer(next, err); + } + + debug('destroyed'); + writeend(); + }); + + return writetop(); + } + + // no session to save + if (!req.session) { + debug('no session'); + return _end.call(res, chunk, encoding); + } + + if (!touched) { + // touch session + req.session.touch() + touched = true + } + + if (shouldSave(req)) { + req.session.save(function onsave(err) { + if (err) { + defer(next, err); + } + + writeend(); + }); + + return writetop(); + } else if (storeImplementsTouch && shouldTouch(req)) { + // store implements touch method + debug('touching'); + store.touch(req.sessionID, req.session, function ontouch(err) { + if (err) { + defer(next, err); + } + + debug('touched'); + writeend(); + }); + + return writetop(); + } + + return _end.call(res, chunk, encoding); + }; + + // generate the session + function generate() { + store.generate(req); + originalId = req.sessionID; + originalHash = hash(req.session); + wrapmethods(req.session); + } + + // inflate the session + function inflate (req, sess) { + store.createSession(req, sess) + originalId = req.sessionID + originalHash = hash(sess) + + if (!resaveSession) { + savedHash = originalHash + } + + wrapmethods(req.session) + } + + function rewrapmethods (sess, callback) { + return function () { + if (req.session !== sess) { + wrapmethods(req.session) + } + + callback.apply(this, arguments) + } + } + + // wrap session methods + function wrapmethods(sess) { + var _reload = sess.reload + var _save = sess.save; + + function reload(callback) { + 
debug('reloading %s', this.id) + _reload.call(this, rewrapmethods(this, callback)) + } + + function save() { + debug('saving %s', this.id); + savedHash = hash(this); + _save.apply(this, arguments); + } + + Object.defineProperty(sess, 'reload', { + configurable: true, + enumerable: false, + value: reload, + writable: true + }) + + Object.defineProperty(sess, 'save', { + configurable: true, + enumerable: false, + value: save, + writable: true + }); + } + + // check if session has been modified + function isModified(sess) { + return originalId !== sess.id || originalHash !== hash(sess); + } + + // check if session has been saved + function isSaved(sess) { + return originalId === sess.id && savedHash === hash(sess); + } + + // determine if session should be destroyed + function shouldDestroy(req) { + return req.sessionID && unsetDestroy && req.session == null; + } + + // determine if session should be saved to store + function shouldSave(req) { + // cannot set cookie without a session ID + if (typeof req.sessionID !== 'string') { + debug('session ignored because of bogus req.sessionID %o', req.sessionID); + return false; + } + + return !saveUninitializedSession && !savedHash && cookieId !== req.sessionID + ? isModified(req.session) + : !isSaved(req.session) + } + + // determine if session should be touched + function shouldTouch(req) { + // cannot set cookie without a session ID + if (typeof req.sessionID !== 'string') { + debug('session ignored because of bogus req.sessionID %o', req.sessionID); + return false; + } + + return cookieId === req.sessionID && !shouldSave(req); + } + + // determine if cookie should be set on response + function shouldSetCookie(req) { + // cannot set cookie without a session ID + if (typeof req.sessionID !== 'string') { + return false; + } + + return cookieId !== req.sessionID + ? 
saveUninitializedSession || isModified(req.session) + : rollingSessions || req.session.cookie.expires != null && isModified(req.session); + } + + // generate a session if the browser doesn't send a sessionID + if (!req.sessionID) { + debug('no SID sent, generating session'); + generate(); + next(); + return; + } + + // generate the session object + debug('fetching %s', req.sessionID); + store.get(req.sessionID, function(err, sess){ + // error handling + if (err && err.code !== 'ENOENT') { + debug('error %j', err); + next(err) + return + } + + try { + if (err || !sess) { + debug('no session found') + generate() + } else { + debug('session found') + inflate(req, sess) + } + } catch (e) { + next(e) + return + } + + next() + }); + }; +}; + +/** + * Generate a session ID for a new session. + * + * @return {String} + * @private + */ + +function generateSessionId() { + return uid(24); +} + +/** + * Get the session ID cookie from request. + * + * @return {string} + * @private + */ + +function getcookie(req, name, secrets) { + var header = req.headers.cookie; + var raw; + var val; + + // read from cookie header + if (header) { + var cookies = cookie.parse(header); + + raw = cookies[name]; + + if (raw) { + if (raw.substr(0, 2) === 's:') { + val = unsigncookie(raw.slice(2), secrets); + + if (val === false) { + debug('cookie signature invalid'); + val = undefined; + } + } else { + debug('cookie unsigned') + } + } + } + + // back-compat read from cookieParser() signedCookies data + if (!val && req.signedCookies) { + val = req.signedCookies[name]; + + if (val) { + deprecate('cookie should be available in req.headers.cookie'); + } + } + + // back-compat read from cookieParser() cookies data + if (!val && req.cookies) { + raw = req.cookies[name]; + + if (raw) { + if (raw.substr(0, 2) === 's:') { + val = unsigncookie(raw.slice(2), secrets); + + if (val) { + deprecate('cookie should be available in req.headers.cookie'); + } + + if (val === false) { + debug('cookie signature 
invalid'); + val = undefined; + } + } else { + debug('cookie unsigned') + } + } + } + + return val; +} + +/** + * Hash the given `sess` object omitting changes to `.cookie`. + * + * @param {Object} sess + * @return {String} + * @private + */ + +function hash(sess) { + // serialize + var str = JSON.stringify(sess, function (key, val) { + // ignore sess.cookie property + if (this === sess && key === 'cookie') { + return + } + + return val + }) + + // hash + return crypto + .createHash('sha1') + .update(str, 'utf8') + .digest('hex') +} + +/** + * Determine if request is secure. + * + * @param {Object} req + * @param {Boolean} [trustProxy] + * @return {Boolean} + * @private + */ + +function issecure(req, trustProxy) { + // socket is https server + if (req.connection && req.connection.encrypted) { + return true; + } + + // do not trust proxy + if (trustProxy === false) { + return false; + } + + // no explicit trust; try req.secure from express + if (trustProxy !== true) { + return req.secure === true + } + + // read the proto from x-forwarded-proto header + var header = req.headers['x-forwarded-proto'] || ''; + var index = header.indexOf(','); + var proto = index !== -1 + ? header.substr(0, index).toLowerCase().trim() + : header.toLowerCase().trim() + + return proto === 'https'; +} + +/** + * Set cookie on response. + * + * @private + */ + +function setcookie(res, name, val, secret, options) { + var signed = 's:' + signature.sign(val, secret); + var data = cookie.serialize(name, signed, options); + + debug('set-cookie %s', data); + + var prev = res.getHeader('Set-Cookie') || [] + var header = Array.isArray(prev) ? prev.concat(data) : [prev, data]; + + res.setHeader('Set-Cookie', header) +} + +/** + * Verify and decode the given `val` with `secrets`. 
+ * + * @param {String} val + * @param {Array} secrets + * @returns {String|Boolean} + * @private + */ +function unsigncookie(val, secrets) { + for (var i = 0; i < secrets.length; i++) { + var result = signature.unsign(val, secrets[i]); + + if (result !== false) { + return result; + } + } + + return false; +} diff --git a/node_modules/express-session/package.json b/node_modules/express-session/package.json new file mode 100644 index 0000000..3638779 --- /dev/null +++ b/node_modules/express-session/package.json @@ -0,0 +1,50 @@ +{ + "name": "express-session", + "version": "1.19.0", + "description": "Simple session middleware for Express", + "author": "TJ Holowaychuk (http://tjholowaychuk.com)", + "contributors": [ + "Douglas Christopher Wilson ", + "Joe Wagner " + ], + "repository": "expressjs/session", + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + }, + "license": "MIT", + "dependencies": { + "cookie": "~0.7.2", + "cookie-signature": "~1.0.7", + "debug": "~2.6.9", + "depd": "~2.0.0", + "on-headers": "~1.1.0", + "parseurl": "~1.3.3", + "safe-buffer": "~5.2.1", + "uid-safe": "~2.1.5" + }, + "devDependencies": { + "after": "0.8.2", + "cookie-parser": "1.4.6", + "eslint": "8.56.0", + "eslint-plugin-markdown": "3.0.1", + "express": "4.17.3", + "mocha": "10.8.2", + "nyc": "15.1.0", + "supertest": "6.3.4" + }, + "files": [ + "session/", + "index.js" + ], + "engines": { + "node": ">= 0.8.0" + }, + "scripts": { + "lint": "eslint . 
&& node ./scripts/lint-readme.js", + "test": "./test/support/gencert.sh && mocha --require test/support/env --check-leaks --no-exit --reporter spec test/", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc npm test", + "version": "node scripts/version-history.js && git add HISTORY.md" + } +} diff --git a/node_modules/express-session/session/cookie.js b/node_modules/express-session/session/cookie.js new file mode 100644 index 0000000..8bb5907 --- /dev/null +++ b/node_modules/express-session/session/cookie.js @@ -0,0 +1,152 @@ +/*! + * Connect - session - Cookie + * Copyright(c) 2010 Sencha Inc. + * Copyright(c) 2011 TJ Holowaychuk + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. + */ + +var cookie = require('cookie') +var deprecate = require('depd')('express-session') + +/** + * Initialize a new `Cookie` with the given `options`. + * + * @param {IncomingMessage} req + * @param {Object} options + * @api private + */ + +var Cookie = module.exports = function Cookie(options) { + this.path = '/'; + this.maxAge = null; + this.httpOnly = true; + + if (options) { + if (typeof options !== 'object') { + throw new TypeError('argument options must be a object') + } + + for (var key in options) { + if (key !== 'data') { + this[key] = options[key] + } + } + } + + if (this.originalMaxAge === undefined || this.originalMaxAge === null) { + this.originalMaxAge = this.maxAge + } +}; + +/*! + * Prototype. + */ + +Cookie.prototype = { + + /** + * Set expires `date`. + * + * @param {Date} date + * @api public + */ + + set expires(date) { + this._expires = date; + this.originalMaxAge = this.maxAge; + }, + + /** + * Get expires `date`. + * + * @return {Date} + * @api public + */ + + get expires() { + return this._expires; + }, + + /** + * Set expires via max-age in `ms`. 
+ * + * @param {Number} ms + * @api public + */ + + set maxAge(ms) { + if (ms && typeof ms !== 'number' && !(ms instanceof Date)) { + throw new TypeError('maxAge must be a number or Date') + } + + if (ms instanceof Date) { + deprecate('maxAge as Date; pass number of milliseconds instead') + } + + this.expires = typeof ms === 'number' + ? new Date(Date.now() + ms) + : ms; + }, + + /** + * Get expires max-age in `ms`. + * + * @return {Number} + * @api public + */ + + get maxAge() { + return this.expires instanceof Date + ? this.expires.valueOf() - Date.now() + : this.expires; + }, + + /** + * Return cookie data object. + * + * @return {Object} + * @api private + */ + + get data() { + return { + originalMaxAge: this.originalMaxAge, + partitioned: this.partitioned, + priority: this.priority + , expires: this._expires + , secure: this.secure + , httpOnly: this.httpOnly + , domain: this.domain + , path: this.path + , sameSite: this.sameSite + } + }, + + /** + * Return a serialized cookie string. + * + * @return {String} + * @api public + */ + + serialize: function(name, val){ + return cookie.serialize(name, val, this.data); + }, + + /** + * Return JSON representation of this cookie. + * + * @return {Object} + * @api private + */ + + toJSON: function(){ + return this.data; + } +}; diff --git a/node_modules/express-session/session/memory.js b/node_modules/express-session/session/memory.js new file mode 100644 index 0000000..11ed686 --- /dev/null +++ b/node_modules/express-session/session/memory.js @@ -0,0 +1,187 @@ +/*! + * express-session + * Copyright(c) 2010 Sencha Inc. + * Copyright(c) 2011 TJ Holowaychuk + * Copyright(c) 2015 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. + * @private + */ + +var Store = require('./store') +var util = require('util') + +/** + * Shim setImmediate for node.js < 0.10 + * @private + */ + +/* istanbul ignore next */ +var defer = typeof setImmediate === 'function' + ? 
setImmediate + : function(fn){ process.nextTick(fn.bind.apply(fn, arguments)) } + +/** + * Module exports. + */ + +module.exports = MemoryStore + +/** + * A session store in memory. + * @public + */ + +function MemoryStore() { + Store.call(this) + this.sessions = Object.create(null) +} + +/** + * Inherit from Store. + */ + +util.inherits(MemoryStore, Store) + +/** + * Get all active sessions. + * + * @param {function} callback + * @public + */ + +MemoryStore.prototype.all = function all(callback) { + var sessionIds = Object.keys(this.sessions) + var sessions = Object.create(null) + + for (var i = 0; i < sessionIds.length; i++) { + var sessionId = sessionIds[i] + var session = getSession.call(this, sessionId) + + if (session) { + sessions[sessionId] = session; + } + } + + callback && defer(callback, null, sessions) +} + +/** + * Clear all sessions. + * + * @param {function} callback + * @public + */ + +MemoryStore.prototype.clear = function clear(callback) { + this.sessions = Object.create(null) + callback && defer(callback) +} + +/** + * Destroy the session associated with the given session ID. + * + * @param {string} sessionId + * @public + */ + +MemoryStore.prototype.destroy = function destroy(sessionId, callback) { + delete this.sessions[sessionId] + callback && defer(callback) +} + +/** + * Fetch session by the given session ID. + * + * @param {string} sessionId + * @param {function} callback + * @public + */ + +MemoryStore.prototype.get = function get(sessionId, callback) { + defer(callback, null, getSession.call(this, sessionId)) +} + +/** + * Commit the given session associated with the given sessionId to the store. + * + * @param {string} sessionId + * @param {object} session + * @param {function} callback + * @public + */ + +MemoryStore.prototype.set = function set(sessionId, session, callback) { + this.sessions[sessionId] = JSON.stringify(session) + callback && defer(callback) +} + +/** + * Get number of active sessions. 
+ * + * @param {function} callback + * @public + */ + +MemoryStore.prototype.length = function length(callback) { + this.all(function (err, sessions) { + if (err) return callback(err) + callback(null, Object.keys(sessions).length) + }) +} + +/** + * Touch the given session object associated with the given session ID. + * + * @param {string} sessionId + * @param {object} session + * @param {function} callback + * @public + */ + +MemoryStore.prototype.touch = function touch(sessionId, session, callback) { + var currentSession = getSession.call(this, sessionId) + + if (currentSession) { + // update expiration + currentSession.cookie = session.cookie + this.sessions[sessionId] = JSON.stringify(currentSession) + } + + callback && defer(callback) +} + +/** + * Get session from the store. + * @private + */ + +function getSession(sessionId) { + var sess = this.sessions[sessionId] + + if (!sess) { + return + } + + // parse + sess = JSON.parse(sess) + + if (sess.cookie) { + var expires = typeof sess.cookie.expires === 'string' + ? new Date(sess.cookie.expires) + : sess.cookie.expires + + // destroy expired session + if (expires && expires <= Date.now()) { + delete this.sessions[sessionId] + return + } + } + + return sess +} diff --git a/node_modules/express-session/session/session.js b/node_modules/express-session/session/session.js new file mode 100644 index 0000000..fee7608 --- /dev/null +++ b/node_modules/express-session/session/session.js @@ -0,0 +1,143 @@ +/*! + * Connect - session - Session + * Copyright(c) 2010 Sencha Inc. + * Copyright(c) 2011 TJ Holowaychuk + * MIT Licensed + */ + +'use strict'; + +/** + * Expose Session. + */ + +module.exports = Session; + +/** + * Create a new `Session` with the given request and `data`. 
+ * + * @param {IncomingRequest} req + * @param {Object} data + * @api private + */ + +function Session(req, data) { + Object.defineProperty(this, 'req', { value: req }); + Object.defineProperty(this, 'id', { value: req.sessionID }); + + if (typeof data === 'object' && data !== null) { + // merge data into this, ignoring prototype properties + for (var prop in data) { + if (!(prop in this)) { + this[prop] = data[prop] + } + } + } +} + +/** + * Update reset `.cookie.maxAge` to prevent + * the cookie from expiring when the + * session is still active. + * + * @return {Session} for chaining + * @api public + */ + +defineMethod(Session.prototype, 'touch', function touch() { + return this.resetMaxAge(); +}); + +/** + * Reset `.maxAge` to `.originalMaxAge`. + * + * @return {Session} for chaining + * @api public + */ + +defineMethod(Session.prototype, 'resetMaxAge', function resetMaxAge() { + this.cookie.maxAge = this.cookie.originalMaxAge; + return this; +}); + +/** + * Save the session data with optional callback `fn(err)`. + * + * @param {Function} fn + * @return {Session} for chaining + * @api public + */ + +defineMethod(Session.prototype, 'save', function save(fn) { + this.req.sessionStore.set(this.id, this, fn || function(){}); + return this; +}); + +/** + * Re-loads the session data _without_ altering + * the maxAge properties. Invokes the callback `fn(err)`, + * after which time if no exception has occurred the + * `req.session` property will be a new `Session` object, + * although representing the same session. + * + * @param {Function} fn + * @return {Session} for chaining + * @api public + */ + +defineMethod(Session.prototype, 'reload', function reload(fn) { + var req = this.req + var store = this.req.sessionStore + + store.get(this.id, function(err, sess){ + if (err) return fn(err); + if (!sess) return fn(new Error('failed to load session')); + store.createSession(req, sess); + fn(); + }); + return this; +}); + +/** + * Destroy `this` session. 
+ * + * @param {Function} fn + * @return {Session} for chaining + * @api public + */ + +defineMethod(Session.prototype, 'destroy', function destroy(fn) { + delete this.req.session; + this.req.sessionStore.destroy(this.id, fn); + return this; +}); + +/** + * Regenerate this request's session. + * + * @param {Function} fn + * @return {Session} for chaining + * @api public + */ + +defineMethod(Session.prototype, 'regenerate', function regenerate(fn) { + this.req.sessionStore.regenerate(this.req, fn); + return this; +}); + +/** + * Helper function for creating a method on a prototype. + * + * @param {Object} obj + * @param {String} name + * @param {Function} fn + * @private + */ +function defineMethod(obj, name, fn) { + Object.defineProperty(obj, name, { + configurable: true, + enumerable: false, + value: fn, + writable: true + }); +}; diff --git a/node_modules/express-session/session/store.js b/node_modules/express-session/session/store.js new file mode 100644 index 0000000..3793877 --- /dev/null +++ b/node_modules/express-session/session/store.js @@ -0,0 +1,102 @@ +/*! + * Connect - session - Store + * Copyright(c) 2010 Sencha Inc. + * Copyright(c) 2011 TJ Holowaychuk + * MIT Licensed + */ + +'use strict'; + +/** + * Module dependencies. + * @private + */ + +var Cookie = require('./cookie') +var EventEmitter = require('events').EventEmitter +var Session = require('./session') +var util = require('util') + +/** + * Module exports. + * @public + */ + +module.exports = Store + +/** + * Abstract base class for session stores. + * @public + */ + +function Store () { + EventEmitter.call(this) +} + +/** + * Inherit from EventEmitter. + */ + +util.inherits(Store, EventEmitter) + +/** + * Re-generate the given requests's session. 
+ * + * @param {IncomingRequest} req + * @return {Function} fn + * @api public + */ + +Store.prototype.regenerate = function(req, fn){ + var self = this; + this.destroy(req.sessionID, function(err){ + self.generate(req); + fn(err); + }); +}; + +/** + * Load a `Session` instance via the given `sid` + * and invoke the callback `fn(err, sess)`. + * + * @param {String} sid + * @param {Function} fn + * @api public + */ + +Store.prototype.load = function(sid, fn){ + var self = this; + this.get(sid, function(err, sess){ + if (err) return fn(err); + if (!sess) return fn(); + var req = { sessionID: sid, sessionStore: self }; + fn(null, self.createSession(req, sess)) + }); +}; + +/** + * Create session from JSON `sess` data. + * + * @param {IncomingRequest} req + * @param {Object} sess + * @return {Session} + * @api private + */ + +Store.prototype.createSession = function(req, sess){ + var expires = sess.cookie.expires + var originalMaxAge = sess.cookie.originalMaxAge + + sess.cookie = new Cookie(sess.cookie); + + if (typeof expires === 'string') { + // convert expires to a Date object + sess.cookie.expires = new Date(expires) + } + + // keep originalMaxAge intact + sess.cookie.originalMaxAge = originalMaxAge + + req.session = new Session(req, sess); + return req.session; +}; diff --git a/node_modules/fs-minipass/LICENSE b/node_modules/fs-minipass/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/fs-minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fs-minipass/README.md b/node_modules/fs-minipass/README.md new file mode 100644 index 0000000..1e61241 --- /dev/null +++ b/node_modules/fs-minipass/README.md @@ -0,0 +1,70 @@ +# fs-minipass + +Filesystem streams based on [minipass](http://npm.im/minipass). + +4 classes are exported: + +- ReadStream +- ReadStreamSync +- WriteStream +- WriteStreamSync + +When using `ReadStreamSync`, all of the data is made available +immediately upon consuming the stream. Nothing is buffered in memory +when the stream is constructed. If the stream is piped to a writer, +then it will synchronously `read()` and emit data into the writer as +fast as the writer can consume it. (That is, it will respect +backpressure.) If you call `stream.read()` then it will read the +entire file and return the contents. + +When using `WriteStreamSync`, every write is flushed to the file +synchronously. If your writes all come in a single tick, then it'll +write it all out in a single tick. It's as synchronous as you are. + +The async versions work much like their node builtin counterparts, +with the exception of introducing significantly less Stream machinery +overhead. + +## USAGE + +It's just streams, you pipe them or read() them or write() to them. + +```js +const fsm = require('fs-minipass') +const readStream = new fsm.ReadStream('file.txt') +const writeStream = new fsm.WriteStream('output.txt') +writeStream.write('some file header or whatever\n') +readStream.pipe(writeStream) +``` + +## ReadStream(path, options) + +Path string is required, but somewhat irrelevant if an open file +descriptor is passed in as an option. 
+ +Options: + +- `fd` Pass in a numeric file descriptor, if the file is already open. +- `readSize` The size of reads to do, defaults to 16MB +- `size` The size of the file, if known. Prevents zero-byte read() + call at the end. +- `autoClose` Set to `false` to prevent the file descriptor from being + closed when the file is done being read. + +## WriteStream(path, options) + +Path string is required, but somewhat irrelevant if an open file +descriptor is passed in as an option. + +Options: + +- `fd` Pass in a numeric file descriptor, if the file is already open. +- `mode` The mode to create the file with. Defaults to `0o666`. +- `start` The position in the file to start reading. If not + specified, then the file will start writing at position zero, and be + truncated by default. +- `autoClose` Set to `false` to prevent the file descriptor from being + closed when the stream is ended. +- `flags` Flags to use when opening the file. Irrelevant if `fd` is + passed in, since file won't be opened in that case. Defaults to + `'a'` if a `pos` is specified, or `'w'` otherwise. diff --git a/node_modules/fs-minipass/index.js b/node_modules/fs-minipass/index.js new file mode 100644 index 0000000..9b0779c --- /dev/null +++ b/node_modules/fs-minipass/index.js @@ -0,0 +1,422 @@ +'use strict' +const MiniPass = require('minipass') +const EE = require('events').EventEmitter +const fs = require('fs') + +let writev = fs.writev +/* istanbul ignore next */ +if (!writev) { + // This entire block can be removed if support for earlier than Node.js + // 12.9.0 is not needed. 
+ const binding = process.binding('fs') + const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback + + writev = (fd, iovec, pos, cb) => { + const done = (er, bw) => cb(er, bw, iovec) + const req = new FSReqWrap() + req.oncomplete = done + binding.writeBuffers(fd, iovec, pos, req) + } +} + +const _autoClose = Symbol('_autoClose') +const _close = Symbol('_close') +const _ended = Symbol('_ended') +const _fd = Symbol('_fd') +const _finished = Symbol('_finished') +const _flags = Symbol('_flags') +const _flush = Symbol('_flush') +const _handleChunk = Symbol('_handleChunk') +const _makeBuf = Symbol('_makeBuf') +const _mode = Symbol('_mode') +const _needDrain = Symbol('_needDrain') +const _onerror = Symbol('_onerror') +const _onopen = Symbol('_onopen') +const _onread = Symbol('_onread') +const _onwrite = Symbol('_onwrite') +const _open = Symbol('_open') +const _path = Symbol('_path') +const _pos = Symbol('_pos') +const _queue = Symbol('_queue') +const _read = Symbol('_read') +const _readSize = Symbol('_readSize') +const _reading = Symbol('_reading') +const _remain = Symbol('_remain') +const _size = Symbol('_size') +const _write = Symbol('_write') +const _writing = Symbol('_writing') +const _defaultFlag = Symbol('_defaultFlag') +const _errored = Symbol('_errored') + +class ReadStream extends MiniPass { + constructor (path, opt) { + opt = opt || {} + super(opt) + + this.readable = true + this.writable = false + + if (typeof path !== 'string') + throw new TypeError('path must be a string') + + this[_errored] = false + this[_fd] = typeof opt.fd === 'number' ? opt.fd : null + this[_path] = path + this[_readSize] = opt.readSize || 16*1024*1024 + this[_reading] = false + this[_size] = typeof opt.size === 'number' ? opt.size : Infinity + this[_remain] = this[_size] + this[_autoClose] = typeof opt.autoClose === 'boolean' ? 
+ opt.autoClose : true + + if (typeof this[_fd] === 'number') + this[_read]() + else + this[_open]() + } + + get fd () { return this[_fd] } + get path () { return this[_path] } + + write () { + throw new TypeError('this is a readable stream') + } + + end () { + throw new TypeError('this is a readable stream') + } + + [_open] () { + fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)) + } + + [_onopen] (er, fd) { + if (er) + this[_onerror](er) + else { + this[_fd] = fd + this.emit('open', fd) + this[_read]() + } + } + + [_makeBuf] () { + return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])) + } + + [_read] () { + if (!this[_reading]) { + this[_reading] = true + const buf = this[_makeBuf]() + /* istanbul ignore if */ + if (buf.length === 0) + return process.nextTick(() => this[_onread](null, 0, buf)) + fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) => + this[_onread](er, br, buf)) + } + } + + [_onread] (er, br, buf) { + this[_reading] = false + if (er) + this[_onerror](er) + else if (this[_handleChunk](br, buf)) + this[_read]() + } + + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) + } + } + + [_onerror] (er) { + this[_reading] = true + this[_close]() + this.emit('error', er) + } + + [_handleChunk] (br, buf) { + let ret = false + // no effect if infinite + this[_remain] -= br + if (br > 0) + ret = super.write(br < buf.length ? 
buf.slice(0, br) : buf) + + if (br === 0 || this[_remain] <= 0) { + ret = false + this[_close]() + super.end() + } + + return ret + } + + emit (ev, data) { + switch (ev) { + case 'prefinish': + case 'finish': + break + + case 'drain': + if (typeof this[_fd] === 'number') + this[_read]() + break + + case 'error': + if (this[_errored]) + return + this[_errored] = true + return super.emit(ev, data) + + default: + return super.emit(ev, data) + } + } +} + +class ReadStreamSync extends ReadStream { + [_open] () { + let threw = true + try { + this[_onopen](null, fs.openSync(this[_path], 'r')) + threw = false + } finally { + if (threw) + this[_close]() + } + } + + [_read] () { + let threw = true + try { + if (!this[_reading]) { + this[_reading] = true + do { + const buf = this[_makeBuf]() + /* istanbul ignore next */ + const br = buf.length === 0 ? 0 + : fs.readSync(this[_fd], buf, 0, buf.length, null) + if (!this[_handleChunk](br, buf)) + break + } while (true) + this[_reading] = false + } + threw = false + } finally { + if (threw) + this[_close]() + } + } + + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.closeSync(fd) + this.emit('close') + } + } +} + +class WriteStream extends EE { + constructor (path, opt) { + opt = opt || {} + super(opt) + this.readable = false + this.writable = true + this[_errored] = false + this[_writing] = false + this[_ended] = false + this[_needDrain] = false + this[_queue] = [] + this[_path] = path + this[_fd] = typeof opt.fd === 'number' ? opt.fd : null + this[_mode] = opt.mode === undefined ? 0o666 : opt.mode + this[_pos] = typeof opt.start === 'number' ? opt.start : null + this[_autoClose] = typeof opt.autoClose === 'boolean' ? + opt.autoClose : true + + // truncating makes no sense when writing into the middle + const defaultFlag = this[_pos] !== null ? 'r+' : 'w' + this[_defaultFlag] = opt.flags === undefined + this[_flags] = this[_defaultFlag] ? 
defaultFlag : opt.flags + + if (this[_fd] === null) + this[_open]() + } + + emit (ev, data) { + if (ev === 'error') { + if (this[_errored]) + return + this[_errored] = true + } + return super.emit(ev, data) + } + + + get fd () { return this[_fd] } + get path () { return this[_path] } + + [_onerror] (er) { + this[_close]() + this[_writing] = true + this.emit('error', er) + } + + [_open] () { + fs.open(this[_path], this[_flags], this[_mode], + (er, fd) => this[_onopen](er, fd)) + } + + [_onopen] (er, fd) { + if (this[_defaultFlag] && + this[_flags] === 'r+' && + er && er.code === 'ENOENT') { + this[_flags] = 'w' + this[_open]() + } else if (er) + this[_onerror](er) + else { + this[_fd] = fd + this.emit('open', fd) + this[_flush]() + } + } + + end (buf, enc) { + if (buf) + this.write(buf, enc) + + this[_ended] = true + + // synthetic after-write logic, where drain/finish live + if (!this[_writing] && !this[_queue].length && + typeof this[_fd] === 'number') + this[_onwrite](null, 0) + return this + } + + write (buf, enc) { + if (typeof buf === 'string') + buf = Buffer.from(buf, enc) + + if (this[_ended]) { + this.emit('error', new Error('write() after end()')) + return false + } + + if (this[_fd] === null || this[_writing] || this[_queue].length) { + this[_queue].push(buf) + this[_needDrain] = true + return false + } + + this[_writing] = true + this[_write](buf) + return true + } + + [_write] (buf) { + fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => + this[_onwrite](er, bw)) + } + + [_onwrite] (er, bw) { + if (er) + this[_onerror](er) + else { + if (this[_pos] !== null) + this[_pos] += bw + if (this[_queue].length) + this[_flush]() + else { + this[_writing] = false + + if (this[_ended] && !this[_finished]) { + this[_finished] = true + this[_close]() + this.emit('finish') + } else if (this[_needDrain]) { + this[_needDrain] = false + this.emit('drain') + } + } + } + } + + [_flush] () { + if (this[_queue].length === 0) { + if (this[_ended]) + 
this[_onwrite](null, 0) + } else if (this[_queue].length === 1) + this[_write](this[_queue].pop()) + else { + const iovec = this[_queue] + this[_queue] = [] + writev(this[_fd], iovec, this[_pos], + (er, bw) => this[_onwrite](er, bw)) + } + } + + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) + } + } +} + +class WriteStreamSync extends WriteStream { + [_open] () { + let fd + // only wrap in a try{} block if we know we'll retry, to avoid + // the rethrow obscuring the error's source frame in most cases. + if (this[_defaultFlag] && this[_flags] === 'r+') { + try { + fd = fs.openSync(this[_path], this[_flags], this[_mode]) + } catch (er) { + if (er.code === 'ENOENT') { + this[_flags] = 'w' + return this[_open]() + } else + throw er + } + } else + fd = fs.openSync(this[_path], this[_flags], this[_mode]) + + this[_onopen](null, fd) + } + + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.closeSync(fd) + this.emit('close') + } + } + + [_write] (buf) { + // throw the original, but try to close if it fails + let threw = true + try { + this[_onwrite](null, + fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos])) + threw = false + } finally { + if (threw) + try { this[_close]() } catch (_) {} + } + } +} + +exports.ReadStream = ReadStream +exports.ReadStreamSync = ReadStreamSync + +exports.WriteStream = WriteStream +exports.WriteStreamSync = WriteStreamSync diff --git a/node_modules/fs-minipass/node_modules/minipass/LICENSE b/node_modules/fs-minipass/node_modules/minipass/LICENSE new file mode 100644 index 0000000..bf1dece --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2017-2022 npm, Inc., Isaac Z. 
Schlueter, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/fs-minipass/node_modules/minipass/README.md b/node_modules/fs-minipass/node_modules/minipass/README.md new file mode 100644 index 0000000..2cde46c --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/README.md @@ -0,0 +1,728 @@ +# minipass + +A _very_ minimal implementation of a [PassThrough +stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough) + +[It's very +fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0) +for objects, strings, and buffers. + +Supports `pipe()`ing (including multi-`pipe()` and backpressure transmission), +buffering data until either a `data` event handler or `pipe()` is added (so +you don't lose the first chunk), and most other cases where PassThrough is +a good idea. + +There is a `read()` method, but it's much more efficient to consume data +from this stream via `'data'` events or by calling `pipe()` into some other +stream. Calling `read()` requires the buffer to be flattened in some +cases, which requires copying memory. + +If you set `objectMode: true` in the options, then whatever is written will +be emitted. 
Otherwise, it'll do a minimal amount of Buffer copying to +ensure proper Streams semantics when `read(n)` is called. + +`objectMode` can also be set by doing `stream.objectMode = true`, or by +writing any non-string/non-buffer data. `objectMode` cannot be set to +false once it is set. + +This is not a `through` or `through2` stream. It doesn't transform the +data, it just passes it right through. If you want to transform the data, +extend the class, and override the `write()` method. Once you're done +transforming the data however you want, call `super.write()` with the +transform output. + +For some examples of streams that extend Minipass in various ways, check +out: + +- [minizlib](http://npm.im/minizlib) +- [fs-minipass](http://npm.im/fs-minipass) +- [tar](http://npm.im/tar) +- [minipass-collect](http://npm.im/minipass-collect) +- [minipass-flush](http://npm.im/minipass-flush) +- [minipass-pipeline](http://npm.im/minipass-pipeline) +- [tap](http://npm.im/tap) +- [tap-parser](http://npm.im/tap-parser) +- [treport](http://npm.im/treport) +- [minipass-fetch](http://npm.im/minipass-fetch) +- [pacote](http://npm.im/pacote) +- [make-fetch-happen](http://npm.im/make-fetch-happen) +- [cacache](http://npm.im/cacache) +- [ssri](http://npm.im/ssri) +- [npm-registry-fetch](http://npm.im/npm-registry-fetch) +- [minipass-json-stream](http://npm.im/minipass-json-stream) +- [minipass-sized](http://npm.im/minipass-sized) + +## Differences from Node.js Streams + +There are several things that make Minipass streams different from (and in +some ways superior to) Node.js core streams. + +Please read these caveats if you are familiar with node-core streams and +intend to use Minipass streams in your programs. + +You can avoid most of these differences entirely (for a very +small performance penalty) by setting `{async: true}` in the +constructor options. + +### Timing + +Minipass streams are designed to support synchronous use-cases. 
Thus, data +is emitted as soon as it is available, always. It is buffered until read, +but no longer. Another way to look at it is that Minipass streams are +exactly as synchronous as the logic that writes into them. + +This can be surprising if your code relies on `PassThrough.write()` always +providing data on the next tick rather than the current one, or being able +to call `resume()` and not have the entire buffer disappear immediately. + +However, without this synchronicity guarantee, there would be no way for +Minipass to achieve the speeds it does, or support the synchronous use +cases that it does. Simply put, waiting takes time. + +This non-deferring approach makes Minipass streams much easier to reason +about, especially in the context of Promises and other flow-control +mechanisms. + +Example: + +```js +const Minipass = require('minipass') +const stream = new Minipass({ async: true }) +stream.on('data', () => console.log('data event')) +console.log('before write') +stream.write('hello') +console.log('after write') +// output: +// before write +// data event +// after write +``` + +### Exception: Async Opt-In + +If you wish to have a Minipass stream with behavior that more +closely mimics Node.js core streams, you can set the stream in +async mode either by setting `async: true` in the constructor +options, or by setting `stream.async = true` later on. + +```js +const Minipass = require('minipass') +const asyncStream = new Minipass({ async: true }) +asyncStream.on('data', () => console.log('data event')) +console.log('before write') +asyncStream.write('hello') +console.log('after write') +// output: +// before write +// after write +// data event <-- this is deferred until the next tick +``` + +Switching _out_ of async mode is unsafe, as it could cause data +corruption, and so is not enabled. 
Example: + +```js +const Minipass = require('minipass') +const stream = new Minipass({ encoding: 'utf8' }) +stream.on('data', chunk => console.log(chunk)) +stream.async = true +console.log('before writes') +stream.write('hello') +setStreamSyncAgainSomehow(stream) // <-- this doesn't actually exist! +stream.write('world') +console.log('after writes') +// hypothetical output would be: +// before writes +// world +// after writes +// hello +// NOT GOOD! +``` + +To avoid this problem, once set into async mode, any attempt to +make the stream sync again will be ignored. + +```js +const Minipass = require('minipass') +const stream = new Minipass({ encoding: 'utf8' }) +stream.on('data', chunk => console.log(chunk)) +stream.async = true +console.log('before writes') +stream.write('hello') +stream.async = false // <-- no-op, stream already async +stream.write('world') +console.log('after writes') +// actual output: +// before writes +// after writes +// hello +// world +``` + +### No High/Low Water Marks + +Node.js core streams will optimistically fill up a buffer, returning `true` +on all writes until the limit is hit, even if the data has nowhere to go. +Then, they will not attempt to draw more data in until the buffer size dips +below a minimum value. + +Minipass streams are much simpler. The `write()` method will return `true` +if the data has somewhere to go (which is to say, given the timing +guarantees, that the data is already there by the time `write()` returns). + +If the data has nowhere to go, then `write()` returns false, and the data +sits in a buffer, to be drained out immediately as soon as anyone consumes +it. + +Since nothing is ever buffered unnecessarily, there is much less +copying data, and less bookkeeping about buffer capacity levels. 
+ +### Hazards of Buffering (or: Why Minipass Is So Fast) + +Since data written to a Minipass stream is immediately written all the way +through the pipeline, and `write()` always returns true/false based on +whether the data was fully flushed, backpressure is communicated +immediately to the upstream caller. This minimizes buffering. + +Consider this case: + +```js +const {PassThrough} = require('stream') +const p1 = new PassThrough({ highWaterMark: 1024 }) +const p2 = new PassThrough({ highWaterMark: 1024 }) +const p3 = new PassThrough({ highWaterMark: 1024 }) +const p4 = new PassThrough({ highWaterMark: 1024 }) + +p1.pipe(p2).pipe(p3).pipe(p4) +p4.on('data', () => console.log('made it through')) + +// this returns false and buffers, then writes to p2 on next tick (1) +// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2) +// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3) +// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain' +// on next tick (4) +// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and +// 'drain' on next tick (5) +// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6) +// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next +// tick (7) + +p1.write(Buffer.alloc(2048)) // returns false +``` + +Along the way, the data was buffered and deferred at each stage, and +multiple event deferrals happened, for an unblocked pipeline where it was +perfectly safe to write all the way through! + +Furthermore, setting a `highWaterMark` of `1024` might lead someone reading +the code to think an advisory maximum of 1KiB is being set for the +pipeline. However, the actual advisory buffering level is the _sum_ of +`highWaterMark` values, since each one has its own bucket. 
+ +Consider the Minipass case: + +```js +const m1 = new Minipass() +const m2 = new Minipass() +const m3 = new Minipass() +const m4 = new Minipass() + +m1.pipe(m2).pipe(m3).pipe(m4) +m4.on('data', () => console.log('made it through')) + +// m1 is flowing, so it writes the data to m2 immediately +// m2 is flowing, so it writes the data to m3 immediately +// m3 is flowing, so it writes the data to m4 immediately +// m4 is flowing, so it fires the 'data' event immediately, returns true +// m4's write returned true, so m3 is still flowing, returns true +// m3's write returned true, so m2 is still flowing, returns true +// m2's write returned true, so m1 is still flowing, returns true +// No event deferrals or buffering along the way! + +m1.write(Buffer.alloc(2048)) // returns true +``` + +It is extremely unlikely that you _don't_ want to buffer any data written, +or _ever_ buffer data that can be flushed all the way through. Neither +node-core streams nor Minipass ever fail to buffer written data, but +node-core streams do a lot of unnecessary buffering and pausing. + +As always, the faster implementation is the one that does less stuff and +waits less time to do it. + +### Immediately emit `end` for empty streams (when not paused) + +If a stream is not paused, and `end()` is called before writing any data +into it, then it will emit `end` immediately. + +If you have logic that occurs on the `end` event which you don't want to +potentially happen immediately (for example, closing file descriptors, +moving on to the next entry in an archive parse stream, etc.) then be sure +to call `stream.pause()` on creation, and then `stream.resume()` once you +are ready to respond to the `end` event. + +However, this is _usually_ not a problem because: + +### Emit `end` When Asked + +One hazard of immediately emitting `'end'` is that you may not yet have had +a chance to add a listener. 
In order to avoid this hazard, Minipass +streams safely re-emit the `'end'` event if a new listener is added after +`'end'` has been emitted. + +Ie, if you do `stream.on('end', someFunction)`, and the stream has already +emitted `end`, then it will call the handler right away. (You can think of +this somewhat like attaching a new `.then(fn)` to a previously-resolved +Promise.) + +To prevent calling handlers multiple times who would not expect multiple +ends to occur, all listeners are removed from the `'end'` event whenever it +is emitted. + +### Emit `error` When Asked + +The most recent error object passed to the `'error'` event is +stored on the stream. If a new `'error'` event handler is added, +and an error was previously emitted, then the event handler will +be called immediately (or on `process.nextTick` in the case of +async streams). + +This makes it much more difficult to end up trying to interact +with a broken stream, if the error handler is added after an +error was previously emitted. + +### Impact of "immediate flow" on Tee-streams + +A "tee stream" is a stream piping to multiple destinations: + +```js +const tee = new Minipass() +t.pipe(dest1) +t.pipe(dest2) +t.write('foo') // goes to both destinations +``` + +Since Minipass streams _immediately_ process any pending data through the +pipeline when a new pipe destination is added, this can have surprising +effects, especially when a stream comes in from some other function and may +or may not have data in its buffer. + +```js +// WARNING! WILL LOSE DATA! +const src = new Minipass() +src.write('foo') +src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone +src.pipe(dest2) // gets nothing! +``` + +One solution is to create a dedicated tee-stream junction that pipes to +both locations, and then pipe to _that_ instead. 
+ +```js +// Safe example: tee to both places +const src = new Minipass() +src.write('foo') +const tee = new Minipass() +tee.pipe(dest1) +tee.pipe(dest2) +src.pipe(tee) // tee gets 'foo', pipes to both locations +``` + +The same caveat applies to `on('data')` event listeners. The first one +added will _immediately_ receive all of the data, leaving nothing for the +second: + +```js +// WARNING! WILL LOSE DATA! +const src = new Minipass() +src.write('foo') +src.on('data', handler1) // receives 'foo' right away +src.on('data', handler2) // nothing to see here! +``` + +Using a dedicated tee-stream can be used in this case as well: + +```js +// Safe example: tee to both data handlers +const src = new Minipass() +src.write('foo') +const tee = new Minipass() +tee.on('data', handler1) +tee.on('data', handler2) +src.pipe(tee) +``` + +All of the hazards in this section are avoided by setting `{ +async: true }` in the Minipass constructor, or by setting +`stream.async = true` afterwards. Note that this does add some +overhead, so should only be done in cases where you are willing +to lose a bit of performance in order to avoid having to refactor +program logic. + +## USAGE + +It's a stream! Use it like a stream and it'll most likely do what you +want. + +```js +const Minipass = require('minipass') +const mp = new Minipass(options) // optional: { encoding, objectMode } +mp.write('foo') +mp.pipe(someOtherStream) +mp.end('bar') +``` + +### OPTIONS + +* `encoding` How would you like the data coming _out_ of the stream to be + encoded? Accepts any values that can be passed to `Buffer.toString()`. +* `objectMode` Emit data exactly as it comes in. This will be flipped on + by default if you write() something other than a string or Buffer at any + point. Setting `objectMode: true` will prevent setting any encoding + value. +* `async` Defaults to `false`. Set to `true` to defer data + emission until next tick. 
This reduces performance slightly, + but makes Minipass streams use timing behavior closer to Node + core streams. See [Timing](#timing) for more details. + +### API + +Implements the user-facing portions of Node.js's `Readable` and `Writable` +streams. + +### Methods + +* `write(chunk, [encoding], [callback])` - Put data in. (Note that, in the + base Minipass class, the same data will come out.) Returns `false` if + the stream will buffer the next write, or true if it's still in "flowing" + mode. +* `end([chunk, [encoding]], [callback])` - Signal that you have no more + data to write. This will queue an `end` event to be fired when all the + data has been consumed. +* `setEncoding(encoding)` - Set the encoding for data coming of the stream. + This can only be done once. +* `pause()` - No more data for a while, please. This also prevents `end` + from being emitted for empty streams until the stream is resumed. +* `resume()` - Resume the stream. If there's data in the buffer, it is all + discarded. Any buffered events are immediately emitted. +* `pipe(dest)` - Send all output to the stream provided. When + data is emitted, it is immediately written to any and all pipe + destinations. (Or written on next tick in `async` mode.) +* `unpipe(dest)` - Stop piping to the destination stream. This + is immediate, meaning that any asynchronously queued data will + _not_ make it to the destination when running in `async` mode. + * `options.end` - Boolean, end the destination stream when + the source stream ends. Default `true`. + * `options.proxyErrors` - Boolean, proxy `error` events from + the source stream to the destination stream. Note that + errors are _not_ proxied after the pipeline terminates, + either due to the source emitting `'end'` or manually + unpiping with `src.unpipe(dest)`. Default `false`. +* `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are EventEmitters. Some + events are given special treatment, however. (See below under "events".) 
+* `promise()` - Returns a Promise that resolves when the stream emits
+  `end`, or rejects if the stream emits `error`.
+* `collect()` - Return a Promise that resolves on `end` with an array
+  containing each chunk of data that was emitted, or rejects if the stream
+  emits `error`. Note that this consumes the stream data.
+* `concat()` - Same as `collect()`, but concatenates the data into a single
+  Buffer object. Will reject the returned promise if the stream is in
+  objectMode, or if it goes into objectMode by the end of the data.
+* `read(n)` - Consume `n` bytes of data out of the buffer. If `n` is not
+  provided, then consume all of it. If `n` bytes are not available, then
+  it returns null. **Note** consuming streams in this way is less
+  efficient, and can lead to unnecessary Buffer copying.
+* `destroy([er])` - Destroy the stream. If an error is provided, then an
+  `'error'` event is emitted. If the stream has a `close()` method, and
+  has not emitted a `'close'` event yet, then `stream.close()` will be
+  called. Any Promises returned by `.promise()`, `.collect()` or
+  `.concat()` will be rejected. After being destroyed, writing to the
+  stream will emit an error. No more data will be emitted if the stream is
+  destroyed, even if it was previously buffered.
+
+### Properties
+
+* `bufferLength` Read-only. Total number of bytes buffered, or in the case
+  of objectMode, the total number of objects.
+* `encoding` The encoding that has been set. (Setting this is equivalent
+  to calling `setEncoding(enc)` and has the same prohibition against
+  setting multiple times.)
+* `flowing` Read-only. Boolean indicating whether a chunk written to the
+  stream will be immediately emitted.
+* `emittedEnd` Read-only. Boolean indicating whether the end-ish events
+  (ie, `end`, `prefinish`, `finish`) have been emitted. Note that
+  listening on any end-ish event will immediately re-emit it if it has
+  already been emitted.
+* `writable` Whether the stream is writable. 
 Default `true`. Set to
+  `false` when `end()` is called.
+* `readable` Whether the stream is readable. Default `true`.
+* `buffer` An array of chunks written to the stream that have not yet
+  been emitted. (It's probably a bad idea to mess with this.)
+* `pipes` An array of pipe destinations that this stream is piping into.
+  (It's probably a bad idea to mess with this.)
+* `destroyed` A getter that indicates whether the stream was destroyed.
+* `paused` True if the stream has been explicitly paused, otherwise false.
+* `objectMode` Indicates whether the stream is in `objectMode`. Once set
+  to `true`, it cannot be set to `false`.
+
+### Events
+
+* `data` Emitted when there's data to read. Argument is the data to read.
+  This is never emitted while not flowing. If a listener is attached, that
+  will resume the stream.
+* `end` Emitted when there's no more data to read. This will be emitted
+  immediately for empty streams when `end()` is called. If a listener is
+  attached, and `end` was already emitted, then it will be emitted again.
+  All listeners are removed when `end` is emitted.
+* `prefinish` An end-ish event that follows the same logic as `end` and is
+  emitted in the same conditions where `end` is emitted. Emitted after
+  `'end'`.
+* `finish` An end-ish event that follows the same logic as `end` and is
+  emitted in the same conditions where `end` is emitted. Emitted after
+  `'prefinish'`.
+* `close` An indication that an underlying resource has been released.
+  Minipass does not emit this event, but will defer it until after `end`
+  has been emitted, since it throws off some stream libraries otherwise.
+* `drain` Emitted when the internal buffer empties, and it is again
+  suitable to `write()` into the stream.
+* `readable` Emitted when data is buffered and ready to be read by a
+  consumer.
+* `resume` Emitted when stream changes state from buffering to flowing
+  mode. 
(Ie, when `resume` is called, `pipe` is called, or a `data` event + listener is added.) + +### Static Methods + +* `Minipass.isStream(stream)` Returns `true` if the argument is a stream, + and false otherwise. To be considered a stream, the object must be + either an instance of Minipass, or an EventEmitter that has either a + `pipe()` method, or both `write()` and `end()` methods. (Pretty much any + stream in node-land will return `true` for this.) + +## EXAMPLES + +Here are some examples of things you can do with Minipass streams. + +### simple "are you done yet" promise + +```js +mp.promise().then(() => { + // stream is finished +}, er => { + // stream emitted an error +}) +``` + +### collecting + +```js +mp.collect().then(all => { + // all is an array of all the data emitted + // encoding is supported in this case, so + // so the result will be a collection of strings if + // an encoding is specified, or buffers/objects if not. + // + // In an async function, you may do + // const data = await stream.collect() +}) +``` + +### collecting into a single blob + +This is a bit slower because it concatenates the data into one chunk for +you, but if you're going to do it yourself anyway, it's convenient this +way: + +```js +mp.concat().then(onebigchunk => { + // onebigchunk is a string if the stream + // had an encoding set, or a buffer otherwise. +}) +``` + +### iteration + +You can iterate over streams synchronously or asynchronously in platforms +that support it. + +Synchronous iteration will end when the currently available data is +consumed, even if the `end` event has not been reached. In string and +buffer mode, the data is concatenated, so unless multiple writes are +occurring in the same tick as the `read()`, sync iteration loops will +generally only have a single iteration. + +To consume chunks in this way exactly as they have been written, with no +flattening, create the stream with the `{ objectMode: true }` option. 
+ +```js +const mp = new Minipass({ objectMode: true }) +mp.write('a') +mp.write('b') +for (let letter of mp) { + console.log(letter) // a, b +} +mp.write('c') +mp.write('d') +for (let letter of mp) { + console.log(letter) // c, d +} +mp.write('e') +mp.end() +for (let letter of mp) { + console.log(letter) // e +} +for (let letter of mp) { + console.log(letter) // nothing +} +``` + +Asynchronous iteration will continue until the end event is reached, +consuming all of the data. + +```js +const mp = new Minipass({ encoding: 'utf8' }) + +// some source of some data +let i = 5 +const inter = setInterval(() => { + if (i-- > 0) + mp.write(Buffer.from('foo\n', 'utf8')) + else { + mp.end() + clearInterval(inter) + } +}, 100) + +// consume the data with asynchronous iteration +async function consume () { + for await (let chunk of mp) { + console.log(chunk) + } + return 'ok' +} + +consume().then(res => console.log(res)) +// logs `foo\n` 5 times, and then `ok` +``` + +### subclass that `console.log()`s everything written into it + +```js +class Logger extends Minipass { + write (chunk, encoding, callback) { + console.log('WRITE', chunk, encoding) + return super.write(chunk, encoding, callback) + } + end (chunk, encoding, callback) { + console.log('END', chunk, encoding) + return super.end(chunk, encoding, callback) + } +} + +someSource.pipe(new Logger()).pipe(someDest) +``` + +### same thing, but using an inline anonymous class + +```js +// js classes are fun +someSource + .pipe(new (class extends Minipass { + emit (ev, ...data) { + // let's also log events, because debugging some weird thing + console.log('EMIT', ev) + return super.emit(ev, ...data) + } + write (chunk, encoding, callback) { + console.log('WRITE', chunk, encoding) + return super.write(chunk, encoding, callback) + } + end (chunk, encoding, callback) { + console.log('END', chunk, encoding) + return super.end(chunk, encoding, callback) + } + })) + .pipe(someDest) +``` + +### subclass that defers 'end' for some 
reason + +```js +class SlowEnd extends Minipass { + emit (ev, ...args) { + if (ev === 'end') { + console.log('going to end, hold on a sec') + setTimeout(() => { + console.log('ok, ready to end now') + super.emit('end', ...args) + }, 100) + } else { + return super.emit(ev, ...args) + } + } +} +``` + +### transform that creates newline-delimited JSON + +```js +class NDJSONEncode extends Minipass { + write (obj, cb) { + try { + // JSON.stringify can throw, emit an error on that + return super.write(JSON.stringify(obj) + '\n', 'utf8', cb) + } catch (er) { + this.emit('error', er) + } + } + end (obj, cb) { + if (typeof obj === 'function') { + cb = obj + obj = undefined + } + if (obj !== undefined) { + this.write(obj) + } + return super.end(cb) + } +} +``` + +### transform that parses newline-delimited JSON + +```js +class NDJSONDecode extends Minipass { + constructor (options) { + // always be in object mode, as far as Minipass is concerned + super({ objectMode: true }) + this._jsonBuffer = '' + } + write (chunk, encoding, cb) { + if (typeof chunk === 'string' && + typeof encoding === 'string' && + encoding !== 'utf8') { + chunk = Buffer.from(chunk, encoding).toString() + } else if (Buffer.isBuffer(chunk)) + chunk = chunk.toString() + } + if (typeof encoding === 'function') { + cb = encoding + } + const jsonData = (this._jsonBuffer + chunk).split('\n') + this._jsonBuffer = jsonData.pop() + for (let i = 0; i < jsonData.length; i++) { + try { + // JSON.parse can throw, emit an error on that + super.write(JSON.parse(jsonData[i])) + } catch (er) { + this.emit('error', er) + continue + } + } + if (cb) + cb() + } +} +``` diff --git a/node_modules/fs-minipass/node_modules/minipass/index.d.ts b/node_modules/fs-minipass/node_modules/minipass/index.d.ts new file mode 100644 index 0000000..65faf63 --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/index.d.ts @@ -0,0 +1,155 @@ +/// +import { EventEmitter } from 'events' +import { Stream } from 'stream' + +declare 
namespace Minipass { + type Encoding = BufferEncoding | 'buffer' | null + + interface Writable extends EventEmitter { + end(): any + write(chunk: any, ...args: any[]): any + } + + interface Readable extends EventEmitter { + pause(): any + resume(): any + pipe(): any + } + + interface Pipe { + src: Minipass + dest: Writable + opts: PipeOptions + } + + type DualIterable = Iterable & AsyncIterable + + type ContiguousData = Buffer | ArrayBufferLike | ArrayBufferView | string + + type BufferOrString = Buffer | string + + interface StringOptions { + encoding: BufferEncoding + objectMode?: boolean + async?: boolean + } + + interface BufferOptions { + encoding?: null | 'buffer' + objectMode?: boolean + async?: boolean + } + + interface ObjectModeOptions { + objectMode: true + async?: boolean + } + + interface PipeOptions { + end?: boolean + proxyErrors?: boolean + } + + type Options = T extends string + ? StringOptions + : T extends Buffer + ? BufferOptions + : ObjectModeOptions +} + +declare class Minipass< + RType extends any = Buffer, + WType extends any = RType extends Minipass.BufferOrString + ? Minipass.ContiguousData + : RType + > + extends Stream + implements Minipass.DualIterable +{ + static isStream(stream: any): stream is Minipass.Readable | Minipass.Writable + + readonly bufferLength: number + readonly flowing: boolean + readonly writable: boolean + readonly readable: boolean + readonly paused: boolean + readonly emittedEnd: boolean + readonly destroyed: boolean + + /** + * Not technically private or readonly, but not safe to mutate. + */ + private readonly buffer: RType[] + private readonly pipes: Minipass.Pipe[] + + /** + * Technically writable, but mutating it can change the type, + * so is not safe to do in TypeScript. + */ + readonly objectMode: boolean + async: boolean + + /** + * Note: encoding is not actually read-only, and setEncoding(enc) + * exists. 
However, this type definition will insist that TypeScript + * programs declare the type of a Minipass stream up front, and if + * that type is string, then an encoding MUST be set in the ctor. If + * the type is Buffer, then the encoding must be missing, or set to + * 'buffer' or null. If the type is anything else, then objectMode + * must be set in the constructor options. So there is effectively + * no allowed way that a TS program can set the encoding after + * construction, as doing so will destroy any hope of type safety. + * TypeScript does not provide many options for changing the type of + * an object at run-time, which is what changing the encoding does. + */ + readonly encoding: Minipass.Encoding + // setEncoding(encoding: Encoding): void + + // Options required if not reading buffers + constructor( + ...args: RType extends Buffer + ? [] | [Minipass.Options] + : [Minipass.Options] + ) + + write(chunk: WType, cb?: () => void): boolean + write(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): boolean + read(size?: number): RType + end(cb?: () => void): this + end(chunk: any, cb?: () => void): this + end(chunk: any, encoding?: Minipass.Encoding, cb?: () => void): this + pause(): void + resume(): void + promise(): Promise + collect(): Promise + + concat(): RType extends Minipass.BufferOrString ? 
Promise : never + destroy(er?: any): void + pipe(dest: W, opts?: Minipass.PipeOptions): W + unpipe(dest: W): void + + /** + * alias for on() + */ + addEventHandler(event: string, listener: (...args: any[]) => any): this + + on(event: string, listener: (...args: any[]) => any): this + on(event: 'data', listener: (chunk: RType) => any): this + on(event: 'error', listener: (error: any) => any): this + on( + event: + | 'readable' + | 'drain' + | 'resume' + | 'end' + | 'prefinish' + | 'finish' + | 'close', + listener: () => any + ): this + + [Symbol.iterator](): Iterator + [Symbol.asyncIterator](): AsyncIterator +} + +export = Minipass diff --git a/node_modules/fs-minipass/node_modules/minipass/index.js b/node_modules/fs-minipass/node_modules/minipass/index.js new file mode 100644 index 0000000..e8797aa --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/index.js @@ -0,0 +1,649 @@ +'use strict' +const proc = typeof process === 'object' && process ? process : { + stdout: null, + stderr: null, +} +const EE = require('events') +const Stream = require('stream') +const SD = require('string_decoder').StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +const DESTROYED = Symbol('destroyed') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') 
+ +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = doIter && Symbol.asyncIterator + || Symbol('asyncIterator not implemented') +const ITERATOR = doIter && Symbol.iterator + || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => + ev === 'end' || + ev === 'finish' || + ev === 'prefinish' + +const isArrayBuffer = b => b instanceof ArrayBuffer || + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0 + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor (src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe () { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors () {} + end () { + this.unpipe() + if (this.opts.end) + this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe () { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor (src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +module.exports = class Minipass extends Stream { + constructor (options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this.pipes = [] + this.buffer = [] + this[OBJECTMODE] = options && options.objectMode || false + if (this[OBJECTMODE]) + this[ENCODING] = null + else + this[ENCODING] = options && options.encoding || null + if (this[ENCODING] === 'buffer') + this[ENCODING] = null + this[ASYNC] = options && !!options.async || false + this[DECODER] = this[ENCODING] 
? new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + } + + get bufferLength () { return this[BUFFERLENGTH] } + + get encoding () { return this[ENCODING] } + set encoding (enc) { + if (this[OBJECTMODE]) + throw new Error('cannot set encoding in objectMode') + + if (this[ENCODING] && enc !== this[ENCODING] && + (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? new SD(enc) : null + if (this.buffer.length) + this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding (enc) { + this.encoding = enc + } + + get objectMode () { return this[OBJECTMODE] } + set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om } + + get ['async'] () { return this[ASYNC] } + set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a } + + write (chunk, encoding, cb) { + if (this[EOF]) + throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit('error', Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + )) + return true + } + + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + + if (!encoding) + encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! 
+ // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) + chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? */ + if (this.flowing && this[BUFFERLENGTH] !== 0) + this[FLUSH](true) + + if (this.flowing) + this.emit('data', chunk) + else + this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) + this.emit('readable') + + if (cb) + fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable') + if (cb) + fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) + this[FLUSH](true) + + if (this.flowing) + this.emit('data', chunk) + else + this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) + this.emit('readable') + + if (cb) + fn(cb) + + return this.flowing + } + + read (n) { + if (this[DESTROYED]) + return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + 
+ if (this[OBJECTMODE]) + n = null + + if (this.buffer.length > 1 && !this[OBJECTMODE]) { + if (this.encoding) + this.buffer = [this.buffer.join('')] + else + this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this.buffer[0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ] (n, chunk) { + if (n === chunk.length || n === null) + this[BUFFERSHIFT]() + else { + this.buffer[0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this.buffer.length && !this[EOF]) + this.emit('drain') + + return chunk + } + + end (chunk, encoding, cb) { + if (typeof chunk === 'function') + cb = chunk, chunk = null + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + if (chunk) + this.write(chunk, encoding) + if (cb) + this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) + this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME] () { + if (this[DESTROYED]) + return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this.buffer.length) + this[FLUSH]() + else if (this[EOF]) + this[MAYBE_EMIT_END]() + else + this.emit('drain') + } + + resume () { + return this[RESUME]() + } + + pause () { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed () { + return this[DESTROYED] + } + + get flowing () { + return this[FLOWING] + } + + get paused () { + return this[PAUSED] + } + + [BUFFERPUSH] (chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1 + else + this[BUFFERLENGTH] += chunk.length + this.buffer.push(chunk) + } + + [BUFFERSHIFT] () { + if (this.buffer.length) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1 + else + this[BUFFERLENGTH] -= this.buffer[0].length + } + return this.buffer.shift() + } + + [FLUSH] (noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) + + if (!noDrain && !this.buffer.length && !this[EOF]) + this.emit('drain') + } + + [FLUSHCHUNK] (chunk) { + return chunk ? (this.emit('data', chunk), this.flowing) : false + } + + pipe (dest, opts) { + if (this[DESTROYED]) + return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false + else + opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end() + } else { + this.pipes.push(!opts.proxyErrors ? 
new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)) + if (this[ASYNC]) + defer(() => this[RESUME]()) + else + this[RESUME]() + } + + return dest + } + + unpipe (dest) { + const p = this.pipes.find(p => p.dest === dest) + if (p) { + this.pipes.splice(this.pipes.indexOf(p), 1) + p.unpipe() + } + } + + addListener (ev, fn) { + return this.on(ev, fn) + } + + on (ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this.pipes.length && !this.flowing) + this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) + defer(() => fn.call(this, this[EMITTED_ERROR])) + else + fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd () { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END] () { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this.buffer.length === 0 && + this[EOF]) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) + this.emit('close') + this[EMITTING_END] = false + } + } + + emit (ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !data ? false + : this[ASYNC] ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + const ret = super.emit('error', data) + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA] (data) { + for (const p of this.pipes) { + if (p.dest.write(data) === false) + this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND] () { + if (this[EMITTED_END]) + return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) + defer(() => this[EMITEND2]()) + else + this[EMITEND2]() + } + + [EMITEND2] () { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this.pipes) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this.pipes) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect () { + const buf = [] + if (!this[OBJECTMODE]) + buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) + buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat () { + return this[OBJECTMODE] + ? 
Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength)) + } + + // stream.promise().then(() => done, er => emitted error) + promise () { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR] () { + const next = () => { + const res = this.read() + if (res !== null) + return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) + return Promise.resolve({ done: true }) + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { next } + } + + // for (let chunk of stream) + [ITERATOR] () { + const next = () => { + const value = this.read() + const done = value === null + return { value, done } + } + return { next } + } + + destroy (er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er) + else + this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this.buffer.length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) + 
this.close() + + if (er) + this.emit('error', er) + else // if no error to emit, still reject pending promises + this.emit(DESTROYED) + + return this + } + + static isStream (s) { + return !!s && (s instanceof Minipass || s instanceof Stream || + s instanceof EE && ( + typeof s.pipe === 'function' || // readable + (typeof s.write === 'function' && typeof s.end === 'function') // writable + )) + } +} diff --git a/node_modules/fs-minipass/node_modules/minipass/package.json b/node_modules/fs-minipass/node_modules/minipass/package.json new file mode 100644 index 0000000..548d03f --- /dev/null +++ b/node_modules/fs-minipass/node_modules/minipass/package.json @@ -0,0 +1,56 @@ +{ + "name": "minipass", + "version": "3.3.6", + "description": "minimal implementation of a PassThrough stream", + "main": "index.js", + "types": "index.d.ts", + "dependencies": { + "yallist": "^4.0.0" + }, + "devDependencies": { + "@types/node": "^17.0.41", + "end-of-stream": "^1.4.0", + "prettier": "^2.6.2", + "tap": "^16.2.0", + "through2": "^2.0.3", + "ts-node": "^10.8.1", + "typescript": "^4.7.3" + }, + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minipass.git" + }, + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "index.d.ts", + "index.js" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">=8" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/node_modules/fs-minipass/package.json b/node_modules/fs-minipass/package.json new file mode 100644 index 0000000..2f2436c --- /dev/null +++ b/node_modules/fs-minipass/package.json @@ -0,0 +1,39 @@ +{ + "name": "fs-minipass", + "version": "2.1.0", + "main": "index.js", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags" + }, + "keywords": [], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/fs-minipass.git" + }, + "bugs": { + "url": "https://github.com/npm/fs-minipass/issues" + }, + "homepage": "https://github.com/npm/fs-minipass#readme", + "description": "fs read and write streams based on minipass", + "dependencies": { + "minipass": "^3.0.0" + }, + "devDependencies": { + "mutate-fs": "^2.0.1", + "tap": "^14.6.4" + }, + "files": [ + "index.js" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">= 8" + } +} diff --git a/node_modules/fs.realpath/LICENSE b/node_modules/fs.realpath/LICENSE new file mode 100644 index 0000000..5bd884c --- /dev/null +++ b/node_modules/fs.realpath/LICENSE @@ -0,0 +1,43 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. 
+ +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---- + +This library bundles a version of the `fs.realpath` and `fs.realpathSync` +methods from Node.js v0.10 under the terms of the Node.js MIT license. + +Node's license follows, also included at the header of `old.js` which contains +the licensed code: + + Copyright Joyent, Inc. and other Node contributors. + + Permission is hereby granted, free of charge, to any person obtaining a + copy of this software and associated documentation files (the "Software"), + to deal in the Software without restriction, including without limitation + the rights to use, copy, modify, merge, publish, distribute, sublicense, + and/or sell copies of the Software, and to permit persons to whom the + Software is furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/fs.realpath/README.md b/node_modules/fs.realpath/README.md new file mode 100644 index 0000000..a42ceac --- /dev/null +++ b/node_modules/fs.realpath/README.md @@ -0,0 +1,33 @@ +# fs.realpath + +A backwards-compatible fs.realpath for Node v6 and above + +In Node v6, the JavaScript implementation of fs.realpath was replaced +with a faster (but less resilient) native implementation. That raises +new and platform-specific errors and cannot handle long or excessively +symlink-looping paths. + +This module handles those cases by detecting the new errors and +falling back to the JavaScript implementation. On versions of Node +prior to v6, it has no effect. + +## USAGE + +```js +var rp = require('fs.realpath') + +// async version +rp.realpath(someLongAndLoopingPath, function (er, real) { + // the ELOOP was handled, but it was a bit slower +}) + +// sync version +var real = rp.realpathSync(someLongAndLoopingPath) + +// monkeypatch at your own risk! +// This replaces the fs.realpath/fs.realpathSync builtins +rp.monkeypatch() + +// un-do the monkeypatching +rp.unmonkeypatch() +``` diff --git a/node_modules/fs.realpath/index.js b/node_modules/fs.realpath/index.js new file mode 100644 index 0000000..b09c7c7 --- /dev/null +++ b/node_modules/fs.realpath/index.js @@ -0,0 +1,66 @@ +module.exports = realpath +realpath.realpath = realpath +realpath.sync = realpathSync +realpath.realpathSync = realpathSync +realpath.monkeypatch = monkeypatch +realpath.unmonkeypatch = unmonkeypatch + +var fs = require('fs') +var origRealpath = fs.realpath +var origRealpathSync = fs.realpathSync + +var version = process.version +var ok = /^v[0-5]\./.test(version) +var old = require('./old.js') + +function newError (er) { + return er && er.syscall === 'realpath' && ( + er.code === 'ELOOP' || + er.code === 'ENOMEM' || + er.code === 'ENAMETOOLONG' + ) +} + +function realpath (p, cache, cb) { + if (ok) { + return origRealpath(p, cache, cb) + } + + if (typeof cache === 'function') { 
+ cb = cache + cache = null + } + origRealpath(p, cache, function (er, result) { + if (newError(er)) { + old.realpath(p, cache, cb) + } else { + cb(er, result) + } + }) +} + +function realpathSync (p, cache) { + if (ok) { + return origRealpathSync(p, cache) + } + + try { + return origRealpathSync(p, cache) + } catch (er) { + if (newError(er)) { + return old.realpathSync(p, cache) + } else { + throw er + } + } +} + +function monkeypatch () { + fs.realpath = realpath + fs.realpathSync = realpathSync +} + +function unmonkeypatch () { + fs.realpath = origRealpath + fs.realpathSync = origRealpathSync +} diff --git a/node_modules/fs.realpath/old.js b/node_modules/fs.realpath/old.js new file mode 100644 index 0000000..b40305e --- /dev/null +++ b/node_modules/fs.realpath/old.js @@ -0,0 +1,303 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +var pathModule = require('path'); +var isWindows = process.platform === 'win32'; +var fs = require('fs'); + +// JavaScript implementation of realpath, ported from node pre-v6 + +var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); + +function rethrow() { + // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and + // is fairly slow to generate. + var callback; + if (DEBUG) { + var backtrace = new Error; + callback = debugCallback; + } else + callback = missingCallback; + + return callback; + + function debugCallback(err) { + if (err) { + backtrace.message = err.message; + err = backtrace; + missingCallback(err); + } + } + + function missingCallback(err) { + if (err) { + if (process.throwDeprecation) + throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs + else if (!process.noDeprecation) { + var msg = 'fs: missing callback ' + (err.stack || err.message); + if (process.traceDeprecation) + console.trace(msg); + else + console.error(msg); + } + } + } +} + +function maybeCallback(cb) { + return typeof cb === 'function' ? cb : rethrow(); +} + +var normalize = pathModule.normalize; + +// Regexp that finds the next partion of a (partial) path +// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] +if (isWindows) { + var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; +} else { + var nextPartRe = /(.*?)(?:[\/]+|$)/g; +} + +// Regex to find the device root, including trailing slash. E.g. 'c:\\'. 
+if (isWindows) { + var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; +} else { + var splitRootRe = /^[\/]*/; +} + +exports.realpathSync = function realpathSync(p, cache) { + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return cache[p]; + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstatSync(base); + knownHard[base] = true; + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + // NB: p.length changes. + while (pos < p.length) { + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + continue; + } + + var resolvedLink; + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // some known symbolic link. no need to stat again. + resolvedLink = cache[base]; + } else { + var stat = fs.lstatSync(base); + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + continue; + } + + // read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. 
+ var linkTarget = null; + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + linkTarget = seenLinks[id]; + } + } + if (linkTarget === null) { + fs.statSync(base); + linkTarget = fs.readlinkSync(base); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + // track this, if given a cache. + if (cache) cache[base] = resolvedLink; + if (!isWindows) seenLinks[id] = linkTarget; + } + + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } + + if (cache) cache[original] = p; + + return p; +}; + + +exports.realpath = function realpath(p, cache, cb) { + if (typeof cb !== 'function') { + cb = maybeCallback(cache); + cache = null; + } + + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return process.nextTick(cb.bind(null, null, cache[p])); + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; + + // On windows, check that the root exists. On unix there is no need. 
+ if (isWindows && !knownHard[base]) { + fs.lstat(base, function(err) { + if (err) return cb(err); + knownHard[base] = true; + LOOP(); + }); + } else { + process.nextTick(LOOP); + } + } + + // walk down the path, swapping out linked pathparts for their real + // values + function LOOP() { + // stop if scanned past end of path + if (pos >= p.length) { + if (cache) cache[original] = p; + return cb(null, p); + } + + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; + + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + return process.nextTick(LOOP); + } + + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // known symbolic link. no need to stat again. + return gotResolvedLink(cache[base]); + } + + return fs.lstat(base, gotStat); + } + + function gotStat(err, stat) { + if (err) return cb(err); + + // if not a symlink, skip to the next path part + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + return process.nextTick(LOOP); + } + + // stat & read the link if not read before + // call gotTarget as soon as the link target is known + // dev/ino always return 0 on windows, so skip the check. 
+ if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + return gotTarget(null, seenLinks[id], base); + } + } + fs.stat(base, function(err) { + if (err) return cb(err); + + fs.readlink(base, function(err, target) { + if (!isWindows) seenLinks[id] = target; + gotTarget(err, target); + }); + }); + } + + function gotTarget(err, target, base) { + if (err) return cb(err); + + var resolvedLink = pathModule.resolve(previous, target); + if (cache) cache[base] = resolvedLink; + gotResolvedLink(resolvedLink); + } + + function gotResolvedLink(resolvedLink) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } +}; diff --git a/node_modules/fs.realpath/package.json b/node_modules/fs.realpath/package.json new file mode 100644 index 0000000..3edc57d --- /dev/null +++ b/node_modules/fs.realpath/package.json @@ -0,0 +1,26 @@ +{ + "name": "fs.realpath", + "version": "1.0.0", + "description": "Use node's fs.realpath, but fall back to the JS implementation if the native one fails", + "main": "index.js", + "dependencies": {}, + "devDependencies": {}, + "scripts": { + "test": "tap test/*.js --cov" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/fs.realpath.git" + }, + "keywords": [ + "realpath", + "fs", + "polyfill" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "old.js", + "index.js" + ] +} diff --git a/node_modules/gauge/CHANGELOG.md b/node_modules/gauge/CHANGELOG.md new file mode 100644 index 0000000..3c5cc91 --- /dev/null +++ b/node_modules/gauge/CHANGELOG.md @@ -0,0 +1,163 @@ +### v3.0.0 +* Drops support for Node v4, v6, v7 and v8 + +### v2.7.4 + +* Reset colors prior to ending a line, to eliminate flicker when a line + is trucated between start and end color sequences. + +### v2.7.3 + +* Only create our onExit handler when we're enabled and remove it when we're + disabled. 
This stops us from creating multiple onExit handlers when + multiple gauge objects are being used. +* Fix bug where if a theme name were given instead of a theme object, it + would crash. +* Remove supports-color because it's not actually used. Uhm. Yes, I just + updated it. >.> + +### v2.7.2 + +* Use supports-color instead of has-color (as the module has been renamed) + +### v2.7.1 + +* Bug fix: Calls to show/pulse while the progress bar is disabled should still + update our internal representation of what would be shown should it be enabled. + +### v2.7.0 + +* New feature: Add new `isEnabled` method to allow introspection of the gauge's + "enabledness" as controlled by `.enable()` and `.disable()`. + +### v2.6.0 + +* Bug fix: Don't run the code associated with `enable`/`disable` if the gauge + is already enabled or disabled respectively. This prevents leaking event + listeners, amongst other weirdness. +* New feature: Template items can have default values that will be used if no + value was otherwise passed in. + +### v2.5.3 + +* Default to `enabled` only if we have a tty. Users can always override + this by passing in the `enabled` option explicitly or by calling calling + `gauge.enable()`. + +### v2.5.2 + +* Externalized `./console-strings.js` into `console-control-strings`. + +### v2.5.1 + +* Update to `signal-exit@3.0.0`, which fixes a compatibility bug with the + node profiler. +* [#39](https://github.com/iarna/gauge/pull/39) Fix tests on 0.10 and add + a missing devDependency. ([@helloyou2012](https://github.com/helloyou2012)) + +### v2.5.0 + +* Add way to programmatically fetch a list of theme names in a themeset + (`Themeset.getThemeNames`). + +### v2.4.0 + +* Add support for setting themesets on existing gauge objects. +* Add post-IO callback to `gauge.hide()` as it is somtetimes necessary when + your terminal is interleaving output from multiple filehandles (ie, stdout + & stderr). 
+ +### v2.3.1 + +* Fix a refactor bug in setTheme where it wasn't accepting the various types + of args it should. + +### v2.3.0 + +#### FEATURES + +* Add setTemplate & setTheme back in. +* Add support for named themes, you can now ask for things like 'colorASCII' + and 'brailleSpinner'. Of course, you can still pass in theme objects. + Additionally you can now pass in an object with `hasUnicode`, `hasColor` and + `platform` keys in order to override our guesses as to those values when + selecting a default theme from the themeset. +* Make the output stream optional (it defaults to `process.stderr` now). +* Add `setWriteTo(stream[, tty])` to change the output stream and, + optionally, tty. + +#### BUG FIXES & REFACTORING + +* Abort the display phase early if we're supposed to be hidden and we are. +* Stop printing a bunch of spaces at the end of lines, since we're already + using an erase-to-end-of-line code anyway. +* The unicode themes were missing the subsection separator. + +### v2.2.1 + +* Fix image in readme + +### v2.2.0 + +* All new themes API– reference themes by name and pass in custom themes and + themesets (themesets get platform support autodetection done on them to + select the best theme). Theme mixins let you add features to all existing + themes. +* Much, much improved test coverage. + +### v2.1.0 + +* Got rid of ░ in the default platform, noUnicode, hasColor theme. Thanks + to @yongtw123 for pointing out this had snuck in. +* Fiddled with the demo output to make it easier to see the spinner spin. Also + added prints before each platforms test output. +* I forgot to include `signal-exit` in our deps. <.< Thank you @KenanY for + finding this. Then I was lazy and made a new commit instead of using his + PR. Again, thank you for your patience @KenenY. +* Drastically speed up travis testing. +* Add a small javascript demo (demo.js) for showing off the various themes + (and testing them on diff platforms). 
+* Change: The subsection separator from ⁄ and / (different chars) to >. +* Fix crasher: A show or pulse without a label would cause the template renderer + to complain about a missing value. +* New feature: Add the ability to disable the clean-up-on-exit behavior. + Not something I expect to be widely desirable, but important if you have + multiple distinct gauge instances in your app. +* Use our own color support detection. + The `has-color` module proved too magic for my needs, making assumptions + as to which stream we write to and reading command line arguments. + +### v2.0.0 + +This is a major rewrite of the internals. Externally there are fewer +changes: + +* On node>0.8 gauge object now prints updates at a fixed rate. This means + that when you call `show` it may wate up to `updateInterval` ms before it + actually prints an update. You override this behavior with the + `fixedFramerate` option. +* The gauge object now keeps the cursor hidden as long as it's enabled and + shown. +* The constructor's arguments have changed, now it takes a mandatory output + stream and an optional options object. The stream no longer needs to be + an `ansi`ified stream, although it can be if you want (but we won't make + use of its special features). +* Previously the gauge was disabled by default if `process.stdout` wasn't a + tty. Now it always defaults to enabled. If you want the previous + behavior set the `enabled` option to `process.stdout.isTTY`. +* The constructor's options have changed– see the docs for details. +* Themes are entirely different. If you were using a custom theme, or + referring to one directly (eg via `Gauge.unicode` or `Gauge.ascii`) then + you'll need to change your code. 
You can get the equivalent of the latter + with: + ``` + var themes = require('gauge/themes') + var unicodeTheme = themes(true, true) // returns the color unicode theme for your platform + ``` + The default themes no longer use any ambiguous width characters, so even + if you choose to display those as wide your progress bar should still + display correctly. +* Templates are entirely different and if you were using a custom one, you + should consult the documentation to learn how to recreate it. If you were + using the default, be aware that it has changed and the result looks quite + a bit different. diff --git a/node_modules/gauge/LICENSE b/node_modules/gauge/LICENSE new file mode 100644 index 0000000..e756052 --- /dev/null +++ b/node_modules/gauge/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2014, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/gauge/README.md b/node_modules/gauge/README.md new file mode 100644 index 0000000..4757649 --- /dev/null +++ b/node_modules/gauge/README.md @@ -0,0 +1,402 @@ +gauge +===== + +A nearly stateless terminal based horizontal gauge / progress bar. 
+ +```javascript +var Gauge = require("gauge") + +var gauge = new Gauge() + +gauge.show("working…", 0) +setTimeout(() => { gauge.pulse(); gauge.show("working…", 0.25) }, 500) +setTimeout(() => { gauge.pulse(); gauge.show("working…", 0.50) }, 1000) +setTimeout(() => { gauge.pulse(); gauge.show("working…", 0.75) }, 1500) +setTimeout(() => { gauge.pulse(); gauge.show("working…", 0.99) }, 2000) +setTimeout(() => gauge.hide(), 2300) +``` + +See also the [demos](demo.js): + +![](gauge-demo.gif) + + +### CHANGES FROM 1.x + +Gauge 2.x is breaking release, please see the [changelog] for details on +what's changed if you were previously a user of this module. + +[changelog]: CHANGELOG.md + +### THE GAUGE CLASS + +This is the typical interface to the module– it provides a pretty +fire-and-forget interface to displaying your status information. + +``` +var Gauge = require("gauge") + +var gauge = new Gauge([stream], [options]) +``` + +* **stream** – *(optional, default STDERR)* A stream that progress bar + updates are to be written to. Gauge honors backpressure and will pause + most writing if it is indicated. +* **options** – *(optional)* An option object. + +Constructs a new gauge. Gauges are drawn on a single line, and are not drawn +if **stream** isn't a tty and a tty isn't explicitly provided. + +If **stream** is a terminal or if you pass in **tty** to **options** then we +will detect terminal resizes and redraw to fit. We do this by watching for +`resize` events on the tty. (To work around a bug in versions of Node prior +to 2.5.0, we watch for them on stdout if the tty is stderr.) Resizes to +larger window sizes will be clean, but shrinking the window will always +result in some cruft. 
+ +**IMPORTANT:** If you previously were passing in a non-tty stream but you still +want output (for example, a stream wrapped by the `ansi` module) then you +need to pass in the **tty** option below, as `gauge` needs access to +the underlying tty in order to do things like terminal resizes and terminal +width detection. + +The **options** object can have the following properties, all of which are +optional: + +* **updateInterval**: How often gauge updates should be drawn, in milliseconds. +* **fixedFramerate**: Defaults to false on node 0.8, true on everything + else. When this is true a timer is created to trigger once every + `updateInterval` ms, when false, updates are printed as soon as they come + in but updates more often than `updateInterval` are ignored. The reason + 0.8 doesn't have this set to true is that it can't `unref` its timer and + so it would stop your program from exiting– if you want to use this + feature with 0.8 just make sure you call `gauge.disable()` before you + expect your program to exit. +* **themes**: A themeset to use when selecting the theme to use. Defaults + to `gauge/themes`, see the [themes] documentation for details. +* **theme**: Select a theme for use, it can be a: + * Theme object, in which case the **themes** is not used. + * The name of a theme, which will be looked up in the current *themes* + object. + * A configuration object with any of `hasUnicode`, `hasColor` or + `platform` keys, which if will be used to override our guesses when making + a default theme selection. + + If no theme is selected then a default is picked using a combination of our + best guesses at your OS, color support and unicode support. +* **template**: Describes what you want your gauge to look like. The + default is what npm uses. Detailed [documentation] is later in this + document. +* **hideCursor**: Defaults to true. If true, then the cursor will be hidden + while the gauge is displayed. +* **tty**: The tty that you're ultimately writing to. 
Defaults to the same + as **stream**. This is used for detecting the width of the terminal and + resizes. The width used is `tty.columns - 1`. If no tty is available then + a width of `79` is assumed. +* **enabled**: Defaults to true if `tty` is a TTY, false otherwise. If true + the gauge starts enabled. If disabled then all update commands are + ignored and no gauge will be printed until you call `.enable()`. +* **Plumbing**: The class to use to actually generate the gauge for + printing. This defaults to `require('gauge/plumbing')` and ordinarily you + shouldn't need to override this. +* **cleanupOnExit**: Defaults to true. Ordinarily we register an exit + handler to make sure your cursor is turned back on and the progress bar + erased when your process exits, even if you Ctrl-C out or otherwise exit + unexpectedly. You can disable this and it won't register the exit handler. + +[has-unicode]: https://www.npmjs.com/package/has-unicode +[themes]: #themes +[documentation]: #templates + +#### `gauge.show(section | status, [completed])` + +The first argument is either the section, the name of the current thing +contributing to progress, or an object with keys like **section**, +**subsection** & **completed** (or any others you have types for in a custom +template). If you don't want to update or set any of these you can pass +`null` and it will be ignored. + +The second argument is the percent completed as a value between 0 and 1. +Without it, completion is just not updated. You'll also note that completion +can be passed in as part of a status object as the first argument. If both +it and the completed argument are passed in, the completed argument wins. + +#### `gauge.hide([cb])` + +Removes the gauge from the terminal. Optionally, callback `cb` after IO has +had an opportunity to happen (currently this just means after `setImmediate` +has called back.) 
+ +It turns out this is important when you're pausing the progress bar on one +filehandle and printing to another– otherwise (with a big enough print) node +can end up printing the "end progress bar" bits to the progress bar filehandle +while other stuff is printing to another filehandle. These getting interleaved +can cause corruption in some terminals. + +#### `gauge.pulse([subsection])` + +* **subsection** – *(optional)* The specific thing that triggered this pulse + +Spins the spinner in the gauge to show output. If **subsection** is +included then it will be combined with the last name passed to `gauge.show`. + +#### `gauge.disable()` + +Hides the gauge and ignores further calls to `show` or `pulse`. + +#### `gauge.enable()` + +Shows the gauge and resumes updating when `show` or `pulse` is called. + +#### `gauge.isEnabled()` + +Returns true if the gauge is enabled. + +#### `gauge.setThemeset(themes)` + +Change the themeset to select a theme from. The same as the `themes` option +used in the constructor. The theme will be reselected from this themeset. + +#### `gauge.setTheme(theme)` + +Change the active theme, will be displayed with the next show or pulse. This can be: + +* Theme object, in which case the **themes** is not used. +* The name of a theme, which will be looked up in the current *themes* + object. +* A configuration object with any of `hasUnicode`, `hasColor` or + `platform` keys, which if will be used to override our guesses when making + a default theme selection. + +If no theme is selected then a default is picked using a combination of our +best guesses at your OS, color support and unicode support. + +#### `gauge.setTemplate(template)` + +Change the active template, will be displayed with the next show or pulse + +### Tracking Completion + +If you have more than one thing going on that you want to track completion +of, you may find the related [are-we-there-yet] helpful. 
It's `change` +event can be wired up to the `show` method to get a more traditional +progress bar interface. + +[are-we-there-yet]: https://www.npmjs.com/package/are-we-there-yet + +### THEMES + +``` +var themes = require('gauge/themes') + +// fetch the default color unicode theme for this platform +var ourTheme = themes({hasUnicode: true, hasColor: true}) + +// fetch the default non-color unicode theme for osx +var ourTheme = themes({hasUnicode: true, hasColor: false, platform: 'darwin'}) + +// create a new theme based on the color ascii theme for this platform +// that brackets the progress bar with arrows +var ourTheme = themes.newTheme(themes({hasUnicode: false, hasColor: true}), { + preProgressbar: '→', + postProgressbar: '←' +}) +``` + +The object returned by `gauge/themes` is an instance of the `ThemeSet` class. + +``` +var ThemeSet = require('gauge/theme-set') +var themes = new ThemeSet() +// or +var themes = require('gauge/themes') +var mythemes = themes.newThemeSet() // creates a new themeset based on the default themes +``` + +#### themes(opts) +#### themes.getDefault(opts) + +Theme objects are a function that fetches the default theme based on +platform, unicode and color support. + +Options is an object with the following properties: + +* **hasUnicode** - If true, fetch a unicode theme, if no unicode theme is + available then a non-unicode theme will be used. +* **hasColor** - If true, fetch a color theme, if no color theme is + available a non-color theme will be used. +* **platform** (optional) - Defaults to `process.platform`. If no + platform match is available then `fallback` is used instead. + +If no compatible theme can be found then an error will be thrown with a +`code` of `EMISSINGTHEME`. + +#### themes.addTheme(themeName, themeObj) +#### themes.addTheme(themeName, [parentTheme], newTheme) + +Adds a named theme to the themeset. 
You can pass in either a theme object, +as returned by `themes.newTheme` or the arguments you'd pass to +`themes.newTheme`. + +#### themes.getThemeNames() + +Return a list of all of the names of the themes in this themeset. Suitable +for use in `themes.getTheme(…)`. + +#### themes.getTheme(name) + +Returns the theme object from this theme set named `name`. + +If `name` does not exist in this themeset an error will be thrown with +a `code` of `EMISSINGTHEME`. + +#### themes.setDefault([opts], themeName) + +`opts` is an object with the following properties. + +* **platform** - Defaults to `'fallback'`. If your theme is platform + specific, specify that here with the platform from `process.platform`, eg, + `win32`, `darwin`, etc. +* **hasUnicode** - Defaults to `false`. If your theme uses unicode you + should set this to true. +* **hasColor** - Defaults to `false`. If your theme uses color you should + set this to true. + +`themeName` is the name of the theme (as given to `addTheme`) to use for +this set of `opts`. + +#### themes.newTheme([parentTheme,] newTheme) + +Create a new theme object based on `parentTheme`. If no `parentTheme` is +provided then a minimal parentTheme that defines functions for rendering the +activity indicator (spinner) and progress bar will be defined. (This +fallback parent is defined in `gauge/base-theme`.) + +newTheme should be a bare object– we'll start by discussing the properties +defined by the default themes: + +* **preProgressbar** - displayed prior to the progress bar, if the progress + bar is displayed. +* **postProgressbar** - displayed after the progress bar, if the progress bar + is displayed. +* **progressBarTheme** - The subtheme passed through to the progress bar + renderer, it's an object with `complete` and `remaining` properties + that are the strings you want repeated for those sections of the progress + bar. 
+* **activityIndicatorTheme** - The theme for the activity indicator (spinner), + this can either be a string, in which each character is a different step, or + an array of strings. +* **preSubsection** - Displayed as a separator between the `section` and + `subsection` when the latter is printed. + +More generally, themes can have any value that would be a valid value when rendering +templates. The properties in the theme are used when their name matches a type in +the template. Their values can be: + +* **strings & numbers** - They'll be included as is +* **function (values, theme, width)** - Should return what you want in your output. + *values* is an object with values provided via `gauge.show`, + *theme* is the theme specific to this item (see below) or this theme object, + and *width* is the number of characters wide your result should be. + +There are a couple of special prefixes: + +* **pre** - Is shown prior to the property, if its displayed. +* **post** - Is shown after the property, if its displayed. + +And one special suffix: + +* **Theme** - Its value is passed to a function-type item as the theme. + +#### themes.addToAllThemes(theme) + +This *mixes-in* `theme` into all themes currently defined. It also adds it +to the default parent theme for this themeset, so future themes added to +this themeset will get the values from `theme` by default. + +#### themes.newThemeSet() + +Copy the current themeset into a new one. This allows you to easily inherit +one themeset from another. + +### TEMPLATES + +A template is an array of objects and strings that, after being evaluated, +will be turned into the gauge line. 
The default template is: + +```javascript +[ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', kerning: 1, default: ''}, + {type: 'subsection', kerning: 1, default: ''} +] +``` + +The various template elements can either be **plain strings**, in which case they will +be be included verbatum in the output, or objects with the following properties: + +* *type* can be any of the following plus any keys you pass into `gauge.show` plus + any keys you have on a custom theme. + * `section` – What big thing you're working on now. + * `subsection` – What component of that thing is currently working. + * `activityIndicator` – Shows a spinner using the `activityIndicatorTheme` + from your active theme. + * `progressbar` – A progress bar representing your current `completed` + using the `progressbarTheme` from your active theme. +* *kerning* – Number of spaces that must be between this item and other + items, if this item is displayed at all. +* *maxLength* – The maximum length for this element. If its value is longer it + will be truncated. +* *minLength* – The minimum length for this element. If its value is shorter it + will be padded according to the *align* value. +* *align* – (Default: left) Possible values "left", "right" and "center". Works + as you'd expect from word processors. +* *length* – Provides a single value for both *minLength* and *maxLength*. If both + *length* and *minLength or *maxLength* are specified then the latter take precedence. +* *value* – A literal value to use for this template item. +* *default* – A default value to use for this template item if a value + wasn't otherwise passed in. + +### PLUMBING + +This is the super simple, assume nothing, do no magic internals used by gauge to +implement its ordinary interface. + +``` +var Plumbing = require('gauge/plumbing') +var gauge = new Plumbing(theme, template, width) +``` + +* **theme**: The theme to use. 
+* **template**: The template to use. +* **width**: How wide your gauge should be + +#### `gauge.setTheme(theme)` + +Change the active theme. + +#### `gauge.setTemplate(template)` + +Change the active template. + +#### `gauge.setWidth(width)` + +Change the width to render at. + +#### `gauge.hide()` + +Return the string necessary to hide the progress bar + +#### `gauge.hideCursor()` + +Return a string to hide the cursor. + +#### `gauge.showCursor()` + +Return a string to show the cursor. + +#### `gauge.show(status)` + +Using `status` for values, render the provided template with the theme and return +a string that is suitable for printing to update the gauge. diff --git a/node_modules/gauge/base-theme.js b/node_modules/gauge/base-theme.js new file mode 100644 index 0000000..0b67638 --- /dev/null +++ b/node_modules/gauge/base-theme.js @@ -0,0 +1,14 @@ +'use strict' +var spin = require('./spin.js') +var progressBar = require('./progress-bar.js') + +module.exports = { + activityIndicator: function (values, theme, width) { + if (values.spun == null) return + return spin(theme, values.spun) + }, + progressbar: function (values, theme, width) { + if (values.completed == null) return + return progressBar(theme, width, values.completed) + } +} diff --git a/node_modules/gauge/error.js b/node_modules/gauge/error.js new file mode 100644 index 0000000..d9914ba --- /dev/null +++ b/node_modules/gauge/error.js @@ -0,0 +1,24 @@ +'use strict' +var util = require('util') + +var User = exports.User = function User (msg) { + var err = new Error(msg) + Error.captureStackTrace(err, User) + err.code = 'EGAUGE' + return err +} + +exports.MissingTemplateValue = function MissingTemplateValue (item, values) { + var err = new User(util.format('Missing template value "%s"', item.type)) + Error.captureStackTrace(err, MissingTemplateValue) + err.template = item + err.values = values + return err +} + +exports.Internal = function Internal (msg) { + var err = new Error(msg) + 
Error.captureStackTrace(err, Internal) + err.code = 'EGAUGEINTERNAL' + return err +} diff --git a/node_modules/gauge/has-color.js b/node_modules/gauge/has-color.js new file mode 100644 index 0000000..16cba0e --- /dev/null +++ b/node_modules/gauge/has-color.js @@ -0,0 +1,4 @@ +'use strict' +var colorSupport = require('color-support') + +module.exports = colorSupport().hasBasic diff --git a/node_modules/gauge/index.js b/node_modules/gauge/index.js new file mode 100644 index 0000000..87a4bb9 --- /dev/null +++ b/node_modules/gauge/index.js @@ -0,0 +1,233 @@ +'use strict' +var Plumbing = require('./plumbing.js') +var hasUnicode = require('has-unicode') +var hasColor = require('./has-color.js') +var onExit = require('signal-exit') +var defaultThemes = require('./themes') +var setInterval = require('./set-interval.js') +var process = require('./process.js') +var setImmediate = require('./set-immediate') + +module.exports = Gauge + +function callWith (obj, method) { + return function () { + return method.call(obj) + } +} + +function Gauge (arg1, arg2) { + var options, writeTo + if (arg1 && arg1.write) { + writeTo = arg1 + options = arg2 || {} + } else if (arg2 && arg2.write) { + writeTo = arg2 + options = arg1 || {} + } else { + writeTo = process.stderr + options = arg1 || arg2 || {} + } + + this._status = { + spun: 0, + section: '', + subsection: '' + } + this._paused = false // are we paused for back pressure? + this._disabled = true // are all progress bar updates disabled? + this._showing = false // do we WANT the progress bar on screen + this._onScreen = false // IS the progress bar on screen + this._needsRedraw = false // should we print something at next tick? + this._hideCursor = options.hideCursor == null ? true : options.hideCursor + this._fixedFramerate = options.fixedFramerate == null + ? !(/^v0\.8\./.test(process.version)) + : options.fixedFramerate + this._lastUpdateAt = null + this._updateInterval = options.updateInterval == null ? 
50 : options.updateInterval + + this._themes = options.themes || defaultThemes + this._theme = options.theme + var theme = this._computeTheme(options.theme) + var template = options.template || [ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', kerning: 1, default: ''}, + {type: 'subsection', kerning: 1, default: ''} + ] + this.setWriteTo(writeTo, options.tty) + var PlumbingClass = options.Plumbing || Plumbing + this._gauge = new PlumbingClass(theme, template, this.getWidth()) + + this._$$doRedraw = callWith(this, this._doRedraw) + this._$$handleSizeChange = callWith(this, this._handleSizeChange) + + this._cleanupOnExit = options.cleanupOnExit == null || options.cleanupOnExit + this._removeOnExit = null + + if (options.enabled || (options.enabled == null && this._tty && this._tty.isTTY)) { + this.enable() + } else { + this.disable() + } +} +Gauge.prototype = {} + +Gauge.prototype.isEnabled = function () { + return !this._disabled +} + +Gauge.prototype.setTemplate = function (template) { + this._gauge.setTemplate(template) + if (this._showing) this._requestRedraw() +} + +Gauge.prototype._computeTheme = function (theme) { + if (!theme) theme = {} + if (typeof theme === 'string') { + theme = this._themes.getTheme(theme) + } else if (theme && (Object.keys(theme).length === 0 || theme.hasUnicode != null || theme.hasColor != null)) { + var useUnicode = theme.hasUnicode == null ? hasUnicode() : theme.hasUnicode + var useColor = theme.hasColor == null ? 
hasColor : theme.hasColor + theme = this._themes.getDefault({hasUnicode: useUnicode, hasColor: useColor, platform: theme.platform}) + } + return theme +} + +Gauge.prototype.setThemeset = function (themes) { + this._themes = themes + this.setTheme(this._theme) +} + +Gauge.prototype.setTheme = function (theme) { + this._gauge.setTheme(this._computeTheme(theme)) + if (this._showing) this._requestRedraw() + this._theme = theme +} + +Gauge.prototype._requestRedraw = function () { + this._needsRedraw = true + if (!this._fixedFramerate) this._doRedraw() +} + +Gauge.prototype.getWidth = function () { + return ((this._tty && this._tty.columns) || 80) - 1 +} + +Gauge.prototype.setWriteTo = function (writeTo, tty) { + var enabled = !this._disabled + if (enabled) this.disable() + this._writeTo = writeTo + this._tty = tty || + (writeTo === process.stderr && process.stdout.isTTY && process.stdout) || + (writeTo.isTTY && writeTo) || + this._tty + if (this._gauge) this._gauge.setWidth(this.getWidth()) + if (enabled) this.enable() +} + +Gauge.prototype.enable = function () { + if (!this._disabled) return + this._disabled = false + if (this._tty) this._enableEvents() + if (this._showing) this.show() +} + +Gauge.prototype.disable = function () { + if (this._disabled) return + if (this._showing) { + this._lastUpdateAt = null + this._showing = false + this._doRedraw() + this._showing = true + } + this._disabled = true + if (this._tty) this._disableEvents() +} + +Gauge.prototype._enableEvents = function () { + if (this._cleanupOnExit) { + this._removeOnExit = onExit(callWith(this, this.disable)) + } + this._tty.on('resize', this._$$handleSizeChange) + if (this._fixedFramerate) { + this.redrawTracker = setInterval(this._$$doRedraw, this._updateInterval) + if (this.redrawTracker.unref) this.redrawTracker.unref() + } +} + +Gauge.prototype._disableEvents = function () { + this._tty.removeListener('resize', this._$$handleSizeChange) + if (this._fixedFramerate) 
clearInterval(this.redrawTracker) + if (this._removeOnExit) this._removeOnExit() +} + +Gauge.prototype.hide = function (cb) { + if (this._disabled) return cb && process.nextTick(cb) + if (!this._showing) return cb && process.nextTick(cb) + this._showing = false + this._doRedraw() + cb && setImmediate(cb) +} + +Gauge.prototype.show = function (section, completed) { + this._showing = true + if (typeof section === 'string') { + this._status.section = section + } else if (typeof section === 'object') { + var sectionKeys = Object.keys(section) + for (var ii = 0; ii < sectionKeys.length; ++ii) { + var key = sectionKeys[ii] + this._status[key] = section[key] + } + } + if (completed != null) this._status.completed = completed + if (this._disabled) return + this._requestRedraw() +} + +Gauge.prototype.pulse = function (subsection) { + this._status.subsection = subsection || '' + this._status.spun++ + if (this._disabled) return + if (!this._showing) return + this._requestRedraw() +} + +Gauge.prototype._handleSizeChange = function () { + this._gauge.setWidth(this._tty.columns - 1) + this._requestRedraw() +} + +Gauge.prototype._doRedraw = function () { + if (this._disabled || this._paused) return + if (!this._fixedFramerate) { + var now = Date.now() + if (this._lastUpdateAt && now - this._lastUpdateAt < this._updateInterval) return + this._lastUpdateAt = now + } + if (!this._showing && this._onScreen) { + this._onScreen = false + var result = this._gauge.hide() + if (this._hideCursor) { + result += this._gauge.showCursor() + } + return this._writeTo.write(result) + } + if (!this._showing && !this._onScreen) return + if (this._showing && !this._onScreen) { + this._onScreen = true + this._needsRedraw = true + if (this._hideCursor) { + this._writeTo.write(this._gauge.hideCursor()) + } + } + if (!this._needsRedraw) return + if (!this._writeTo.write(this._gauge.show(this._status))) { + this._paused = true + this._writeTo.on('drain', callWith(this, function () { + this._paused = 
false + this._doRedraw() + })) + } +} diff --git a/node_modules/gauge/package.json b/node_modules/gauge/package.json new file mode 100644 index 0000000..d3f0354 --- /dev/null +++ b/node_modules/gauge/package.json @@ -0,0 +1,66 @@ +{ + "name": "gauge", + "version": "3.0.2", + "description": "A terminal based horizontal guage", + "main": "index.js", + "scripts": { + "test": "standard && tap test/*.js --coverage" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/gauge" + }, + "keywords": [ + "progressbar", + "progress", + "gauge" + ], + "author": "Rebecca Turner ", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/gauge/issues" + }, + "homepage": "https://github.com/npm/gauge", + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.2", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.1", + "object-assign": "^4.1.1", + "signal-exit": "^3.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.2" + }, + "devDependencies": { + "readable-stream": "^2.0.6", + "require-inject": "^1.4.0", + "standard": "^11.0.1", + "tap": "^12.0.1", + "through2": "^2.0.0" + }, + "files": [ + "base-theme.js", + "CHANGELOG.md", + "error.js", + "has-color.js", + "index.js", + "LICENSE", + "package.json", + "plumbing.js", + "process.js", + "progress-bar.js", + "README.md", + "render-template.js", + "set-immediate.js", + "set-interval.js", + "spin.js", + "template-item.js", + "theme-set.js", + "themes.js", + "wide-truncate.js" + ], + "engines": { + "node": ">=10" + } +} diff --git a/node_modules/gauge/plumbing.js b/node_modules/gauge/plumbing.js new file mode 100644 index 0000000..1afb4af --- /dev/null +++ b/node_modules/gauge/plumbing.js @@ -0,0 +1,48 @@ +'use strict' +var consoleControl = require('console-control-strings') +var renderTemplate = require('./render-template.js') +var validate = require('aproba') + +var Plumbing = module.exports = function (theme, template, width) { + if (!width) 
width = 80 + validate('OAN', [theme, template, width]) + this.showing = false + this.theme = theme + this.width = width + this.template = template +} +Plumbing.prototype = {} + +Plumbing.prototype.setTheme = function (theme) { + validate('O', [theme]) + this.theme = theme +} + +Plumbing.prototype.setTemplate = function (template) { + validate('A', [template]) + this.template = template +} + +Plumbing.prototype.setWidth = function (width) { + validate('N', [width]) + this.width = width +} + +Plumbing.prototype.hide = function () { + return consoleControl.gotoSOL() + consoleControl.eraseLine() +} + +Plumbing.prototype.hideCursor = consoleControl.hideCursor + +Plumbing.prototype.showCursor = consoleControl.showCursor + +Plumbing.prototype.show = function (status) { + var values = Object.create(this.theme) + for (var key in status) { + values[key] = status[key] + } + + return renderTemplate(this.width, this.template, values).trim() + + consoleControl.color('reset') + + consoleControl.eraseLine() + consoleControl.gotoSOL() +} diff --git a/node_modules/gauge/process.js b/node_modules/gauge/process.js new file mode 100644 index 0000000..05e8569 --- /dev/null +++ b/node_modules/gauge/process.js @@ -0,0 +1,3 @@ +'use strict' +// this exists so we can replace it during testing +module.exports = process diff --git a/node_modules/gauge/progress-bar.js b/node_modules/gauge/progress-bar.js new file mode 100644 index 0000000..1780a8a --- /dev/null +++ b/node_modules/gauge/progress-bar.js @@ -0,0 +1,35 @@ +'use strict' +var validate = require('aproba') +var renderTemplate = require('./render-template.js') +var wideTruncate = require('./wide-truncate') +var stringWidth = require('string-width') + +module.exports = function (theme, width, completed) { + validate('ONN', [theme, width, completed]) + if (completed < 0) completed = 0 + if (completed > 1) completed = 1 + if (width <= 0) return '' + var sofar = Math.round(width * completed) + var rest = width - sofar + var template = [ + 
{type: 'complete', value: repeat(theme.complete, sofar), length: sofar}, + {type: 'remaining', value: repeat(theme.remaining, rest), length: rest} + ] + return renderTemplate(width, template, theme) +} + +// lodash's way of repeating +function repeat (string, width) { + var result = '' + var n = width + do { + if (n % 2) { + result += string + } + n = Math.floor(n / 2) + /* eslint no-self-assign: 0 */ + string += string + } while (n && stringWidth(result) < width) + + return wideTruncate(result, width) +} diff --git a/node_modules/gauge/render-template.js b/node_modules/gauge/render-template.js new file mode 100644 index 0000000..9764c6e --- /dev/null +++ b/node_modules/gauge/render-template.js @@ -0,0 +1,178 @@ +'use strict' +var align = require('wide-align') +var validate = require('aproba') +var wideTruncate = require('./wide-truncate') +var error = require('./error') +var TemplateItem = require('./template-item') + +function renderValueWithValues (values) { + return function (item) { + return renderValue(item, values) + } +} + +var renderTemplate = module.exports = function (width, template, values) { + var items = prepareItems(width, template, values) + var rendered = items.map(renderValueWithValues(values)).join('') + return align.left(wideTruncate(rendered, width), width) +} + +function preType (item) { + var cappedTypeName = item.type[0].toUpperCase() + item.type.slice(1) + return 'pre' + cappedTypeName +} + +function postType (item) { + var cappedTypeName = item.type[0].toUpperCase() + item.type.slice(1) + return 'post' + cappedTypeName +} + +function hasPreOrPost (item, values) { + if (!item.type) return + return values[preType(item)] || values[postType(item)] +} + +function generatePreAndPost (baseItem, parentValues) { + var item = Object.assign({}, baseItem) + var values = Object.create(parentValues) + var template = [] + var pre = preType(item) + var post = postType(item) + if (values[pre]) { + template.push({value: values[pre]}) + values[pre] = null + 
} + item.minLength = null + item.length = null + item.maxLength = null + template.push(item) + values[item.type] = values[item.type] + if (values[post]) { + template.push({value: values[post]}) + values[post] = null + } + return function ($1, $2, length) { + return renderTemplate(length, template, values) + } +} + +function prepareItems (width, template, values) { + function cloneAndObjectify (item, index, arr) { + var cloned = new TemplateItem(item, width) + var type = cloned.type + if (cloned.value == null) { + if (!(type in values)) { + if (cloned.default == null) { + throw new error.MissingTemplateValue(cloned, values) + } else { + cloned.value = cloned.default + } + } else { + cloned.value = values[type] + } + } + if (cloned.value == null || cloned.value === '') return null + cloned.index = index + cloned.first = index === 0 + cloned.last = index === arr.length - 1 + if (hasPreOrPost(cloned, values)) cloned.value = generatePreAndPost(cloned, values) + return cloned + } + + var output = template.map(cloneAndObjectify).filter(function (item) { return item != null }) + + var remainingSpace = width + var variableCount = output.length + + function consumeSpace (length) { + if (length > remainingSpace) length = remainingSpace + remainingSpace -= length + } + + function finishSizing (item, length) { + if (item.finished) throw new error.Internal('Tried to finish template item that was already finished') + if (length === Infinity) throw new error.Internal('Length of template item cannot be infinity') + if (length != null) item.length = length + item.minLength = null + item.maxLength = null + --variableCount + item.finished = true + if (item.length == null) item.length = item.getBaseLength() + if (item.length == null) throw new error.Internal('Finished template items must have a length') + consumeSpace(item.getLength()) + } + + output.forEach(function (item) { + if (!item.kerning) return + var prevPadRight = item.first ? 
0 : output[item.index - 1].padRight + if (!item.first && prevPadRight < item.kerning) item.padLeft = item.kerning - prevPadRight + if (!item.last) item.padRight = item.kerning + }) + + // Finish any that have a fixed (literal or intuited) length + output.forEach(function (item) { + if (item.getBaseLength() == null) return + finishSizing(item) + }) + + var resized = 0 + var resizing + var hunkSize + do { + resizing = false + hunkSize = Math.round(remainingSpace / variableCount) + output.forEach(function (item) { + if (item.finished) return + if (!item.maxLength) return + if (item.getMaxLength() < hunkSize) { + finishSizing(item, item.maxLength) + resizing = true + } + }) + } while (resizing && resized++ < output.length) + if (resizing) throw new error.Internal('Resize loop iterated too many times while determining maxLength') + + resized = 0 + do { + resizing = false + hunkSize = Math.round(remainingSpace / variableCount) + output.forEach(function (item) { + if (item.finished) return + if (!item.minLength) return + if (item.getMinLength() >= hunkSize) { + finishSizing(item, item.minLength) + resizing = true + } + }) + } while (resizing && resized++ < output.length) + if (resizing) throw new error.Internal('Resize loop iterated too many times while determining minLength') + + hunkSize = Math.round(remainingSpace / variableCount) + output.forEach(function (item) { + if (item.finished) return + finishSizing(item, hunkSize) + }) + + return output +} + +function renderFunction (item, values, length) { + validate('OON', arguments) + if (item.type) { + return item.value(values, values[item.type + 'Theme'] || {}, length) + } else { + return item.value(values, {}, length) + } +} + +function renderValue (item, values) { + var length = item.getBaseLength() + var value = typeof item.value === 'function' ? 
renderFunction(item, values, length) : item.value + if (value == null || value === '') return '' + var alignWith = align[item.align] || align.left + var leftPadding = item.padLeft ? align.left('', item.padLeft) : '' + var rightPadding = item.padRight ? align.right('', item.padRight) : '' + var truncated = wideTruncate(String(value), length) + var aligned = alignWith(truncated, length) + return leftPadding + aligned + rightPadding +} diff --git a/node_modules/gauge/set-immediate.js b/node_modules/gauge/set-immediate.js new file mode 100644 index 0000000..6650a48 --- /dev/null +++ b/node_modules/gauge/set-immediate.js @@ -0,0 +1,7 @@ +'use strict' +var process = require('./process') +try { + module.exports = setImmediate +} catch (ex) { + module.exports = process.nextTick +} diff --git a/node_modules/gauge/set-interval.js b/node_modules/gauge/set-interval.js new file mode 100644 index 0000000..5761987 --- /dev/null +++ b/node_modules/gauge/set-interval.js @@ -0,0 +1,3 @@ +'use strict' +// this exists so we can replace it during testing +module.exports = setInterval diff --git a/node_modules/gauge/spin.js b/node_modules/gauge/spin.js new file mode 100644 index 0000000..34142ee --- /dev/null +++ b/node_modules/gauge/spin.js @@ -0,0 +1,5 @@ +'use strict' + +module.exports = function spin (spinstr, spun) { + return spinstr[spun % spinstr.length] +} diff --git a/node_modules/gauge/template-item.js b/node_modules/gauge/template-item.js new file mode 100644 index 0000000..4f02fef --- /dev/null +++ b/node_modules/gauge/template-item.js @@ -0,0 +1,72 @@ +'use strict' +var stringWidth = require('string-width') + +module.exports = TemplateItem + +function isPercent (num) { + if (typeof num !== 'string') return false + return num.slice(-1) === '%' +} + +function percent (num) { + return Number(num.slice(0, -1)) / 100 +} + +function TemplateItem (values, outputLength) { + this.overallOutputLength = outputLength + this.finished = false + this.type = null + this.value = null + 
this.length = null + this.maxLength = null + this.minLength = null + this.kerning = null + this.align = 'left' + this.padLeft = 0 + this.padRight = 0 + this.index = null + this.first = null + this.last = null + if (typeof values === 'string') { + this.value = values + } else { + for (var prop in values) this[prop] = values[prop] + } + // Realize percents + if (isPercent(this.length)) { + this.length = Math.round(this.overallOutputLength * percent(this.length)) + } + if (isPercent(this.minLength)) { + this.minLength = Math.round(this.overallOutputLength * percent(this.minLength)) + } + if (isPercent(this.maxLength)) { + this.maxLength = Math.round(this.overallOutputLength * percent(this.maxLength)) + } + return this +} + +TemplateItem.prototype = {} + +TemplateItem.prototype.getBaseLength = function () { + var length = this.length + if (length == null && typeof this.value === 'string' && this.maxLength == null && this.minLength == null) { + length = stringWidth(this.value) + } + return length +} + +TemplateItem.prototype.getLength = function () { + var length = this.getBaseLength() + if (length == null) return null + return length + this.padLeft + this.padRight +} + +TemplateItem.prototype.getMaxLength = function () { + if (this.maxLength == null) return null + return this.maxLength + this.padLeft + this.padRight +} + +TemplateItem.prototype.getMinLength = function () { + if (this.minLength == null) return null + return this.minLength + this.padLeft + this.padRight +} diff --git a/node_modules/gauge/theme-set.js b/node_modules/gauge/theme-set.js new file mode 100644 index 0000000..c022d61 --- /dev/null +++ b/node_modules/gauge/theme-set.js @@ -0,0 +1,114 @@ +'use strict' +var objectAssign = require('object-assign') + +module.exports = function () { + return ThemeSetProto.newThemeSet() +} + +var ThemeSetProto = {} + +ThemeSetProto.baseTheme = require('./base-theme.js') + +ThemeSetProto.newTheme = function (parent, theme) { + if (!theme) { + theme = parent + parent = 
this.baseTheme + } + return objectAssign({}, parent, theme) +} + +ThemeSetProto.getThemeNames = function () { + return Object.keys(this.themes) +} + +ThemeSetProto.addTheme = function (name, parent, theme) { + this.themes[name] = this.newTheme(parent, theme) +} + +ThemeSetProto.addToAllThemes = function (theme) { + var themes = this.themes + Object.keys(themes).forEach(function (name) { + objectAssign(themes[name], theme) + }) + objectAssign(this.baseTheme, theme) +} + +ThemeSetProto.getTheme = function (name) { + if (!this.themes[name]) throw this.newMissingThemeError(name) + return this.themes[name] +} + +ThemeSetProto.setDefault = function (opts, name) { + if (name == null) { + name = opts + opts = {} + } + var platform = opts.platform == null ? 'fallback' : opts.platform + var hasUnicode = !!opts.hasUnicode + var hasColor = !!opts.hasColor + if (!this.defaults[platform]) this.defaults[platform] = {true: {}, false: {}} + this.defaults[platform][hasUnicode][hasColor] = name +} + +ThemeSetProto.getDefault = function (opts) { + if (!opts) opts = {} + var platformName = opts.platform || process.platform + var platform = this.defaults[platformName] || this.defaults.fallback + var hasUnicode = !!opts.hasUnicode + var hasColor = !!opts.hasColor + if (!platform) throw this.newMissingDefaultThemeError(platformName, hasUnicode, hasColor) + if (!platform[hasUnicode][hasColor]) { + if (hasUnicode && hasColor && platform[!hasUnicode][hasColor]) { + hasUnicode = false + } else if (hasUnicode && hasColor && platform[hasUnicode][!hasColor]) { + hasColor = false + } else if (hasUnicode && hasColor && platform[!hasUnicode][!hasColor]) { + hasUnicode = false + hasColor = false + } else if (hasUnicode && !hasColor && platform[!hasUnicode][hasColor]) { + hasUnicode = false + } else if (!hasUnicode && hasColor && platform[hasUnicode][!hasColor]) { + hasColor = false + } else if (platform === this.defaults.fallback) { + throw this.newMissingDefaultThemeError(platformName, hasUnicode, 
hasColor) + } + } + if (platform[hasUnicode][hasColor]) { + return this.getTheme(platform[hasUnicode][hasColor]) + } else { + return this.getDefault(objectAssign({}, opts, {platform: 'fallback'})) + } +} + +ThemeSetProto.newMissingThemeError = function newMissingThemeError (name) { + var err = new Error('Could not find a gauge theme named "' + name + '"') + Error.captureStackTrace.call(err, newMissingThemeError) + err.theme = name + err.code = 'EMISSINGTHEME' + return err +} + +ThemeSetProto.newMissingDefaultThemeError = function newMissingDefaultThemeError (platformName, hasUnicode, hasColor) { + var err = new Error( + 'Could not find a gauge theme for your platform/unicode/color use combo:\n' + + ' platform = ' + platformName + '\n' + + ' hasUnicode = ' + hasUnicode + '\n' + + ' hasColor = ' + hasColor) + Error.captureStackTrace.call(err, newMissingDefaultThemeError) + err.platform = platformName + err.hasUnicode = hasUnicode + err.hasColor = hasColor + err.code = 'EMISSINGTHEME' + return err +} + +ThemeSetProto.newThemeSet = function () { + var themeset = function (opts) { + return themeset.getDefault(opts) + } + return objectAssign(themeset, ThemeSetProto, { + themes: objectAssign({}, this.themes), + baseTheme: objectAssign({}, this.baseTheme), + defaults: JSON.parse(JSON.stringify(this.defaults || {})) + }) +} diff --git a/node_modules/gauge/themes.js b/node_modules/gauge/themes.js new file mode 100644 index 0000000..df1184d --- /dev/null +++ b/node_modules/gauge/themes.js @@ -0,0 +1,56 @@ +'use strict' +var color = require('console-control-strings').color +var ThemeSet = require('./theme-set.js') + +var themes = module.exports = new ThemeSet() + +themes.addTheme('ASCII', { + preProgressbar: '[', + postProgressbar: ']', + progressbarTheme: { + complete: '#', + remaining: '.' 
+ }, + activityIndicatorTheme: '-\\|/', + preSubsection: '>' +}) + +themes.addTheme('colorASCII', themes.getTheme('ASCII'), { + progressbarTheme: { + preComplete: color('bgBrightWhite', 'brightWhite'), + complete: '#', + postComplete: color('reset'), + preRemaining: color('bgBrightBlack', 'brightBlack'), + remaining: '.', + postRemaining: color('reset') + } +}) + +themes.addTheme('brailleSpinner', { + preProgressbar: '⸨', + postProgressbar: '⸩', + progressbarTheme: { + complete: '#', + remaining: '⠂' + }, + activityIndicatorTheme: '⠋⠙⠹⠸⠼⠴⠦⠧⠇⠏', + preSubsection: '>' +}) + +themes.addTheme('colorBrailleSpinner', themes.getTheme('brailleSpinner'), { + progressbarTheme: { + preComplete: color('bgBrightWhite', 'brightWhite'), + complete: '#', + postComplete: color('reset'), + preRemaining: color('bgBrightBlack', 'brightBlack'), + remaining: '⠂', + postRemaining: color('reset') + } +}) + +themes.setDefault({}, 'ASCII') +themes.setDefault({hasColor: true}, 'colorASCII') +themes.setDefault({platform: 'darwin', hasUnicode: true}, 'brailleSpinner') +themes.setDefault({platform: 'darwin', hasUnicode: true, hasColor: true}, 'colorBrailleSpinner') +themes.setDefault({platform: 'linux', hasUnicode: true}, 'brailleSpinner') +themes.setDefault({platform: 'linux', hasUnicode: true, hasColor: true}, 'colorBrailleSpinner') diff --git a/node_modules/gauge/wide-truncate.js b/node_modules/gauge/wide-truncate.js new file mode 100644 index 0000000..c531bc4 --- /dev/null +++ b/node_modules/gauge/wide-truncate.js @@ -0,0 +1,25 @@ +'use strict' +var stringWidth = require('string-width') +var stripAnsi = require('strip-ansi') + +module.exports = wideTruncate + +function wideTruncate (str, target) { + if (stringWidth(str) === 0) return str + if (target <= 0) return '' + if (stringWidth(str) <= target) return str + + // We compute the number of bytes of ansi sequences here and add + // that to our initial truncation to ensure that we don't slice one + // that we want to keep in half. 
+ var noAnsi = stripAnsi(str) + var ansiSize = str.length + noAnsi.length + var truncated = str.slice(0, target + ansiSize) + + // we have to shrink the result to account for our ansi sequence buffer + // (if an ansi sequence was truncated) and double width characters. + while (stringWidth(truncated) > target) { + truncated = truncated.slice(0, -1) + } + return truncated +} diff --git a/node_modules/generic-pool/README.md b/node_modules/generic-pool/README.md new file mode 100644 index 0000000..9a3850d --- /dev/null +++ b/node_modules/generic-pool/README.md @@ -0,0 +1,407 @@ +[![build status](https://secure.travis-ci.org/coopernurse/node-pool.png)](http://travis-ci.org/coopernurse/node-pool) + +# Generic Pool + +## About + + Generic resource pool with Promise based API. Can be used to reuse or throttle usage of expensive resources such as database connections. + + + +**V3 upgrade warning** + +Version 3 contains many breaking changes. The differences are mostly minor and I hope easy to accommodate. There is a very rough and basic [upgrade guide](https://gist.github.com/sandfox/5ca20648b60a0cb959638c0cd6fcd02d) I've written, improvements and other attempts most welcome. + +If you are after the older version 2 of this library you should look at the [current github branch](https://github.com/coopernurse/node-pool/tree/v2.5) for it. + + +## History + +The history has been moved to the [CHANGELOG](CHANGELOG.md) + + +## Installation + +```sh +$ npm install generic-pool [--save] +``` + + +## Example + +Here is an example using a fictional generic database driver that doesn't implement any pooling whatsoever itself. 
+ +```js +const genericPool = require("generic-pool"); +const DbDriver = require("some-db-driver"); + +/** + * Step 1 - Create pool using a factory object + */ +const factory = { + create: function() { + return DbDriver.createClient(); + }, + destroy: function(client) { + client.disconnect(); + } +}; + +const opts = { + max: 10, // maximum size of the pool + min: 2 // minimum size of the pool +}; + +const myPool = genericPool.createPool(factory, opts); + +/** + * Step 2 - Use pool in your code to acquire/release resources + */ + +// acquire connection - Promise is resolved +// once a resource becomes available +const resourcePromise = myPool.acquire(); + +resourcePromise + .then(function(client) { + client.query("select * from foo", [], function() { + // return object back to pool + myPool.release(client); + }); + }) + .catch(function(err) { + // handle error - this is generally a timeout or maxWaitingClients + // error + }); + +/** + * Step 3 - Drain pool during shutdown (optional) + */ +// Only call this once in your application -- at the point you want +// to shutdown and stop using this pool. +myPool.drain().then(function() { + myPool.clear(); +}); + +``` + + +## Documentation + + + +### Creating a pool + +Whilst it is possible to directly instantiate the Pool class directly, it is recommended to use the `createPool` function exported by module as the constructor method signature may change in the future. + +### createPool + +The createPool function takes two arguments: + +- `factory` : an object containing functions to create/destroy/test resources for the `Pool` +- `opts` : an optional object/dictonary to allow configuring/altering behaviour of the `Pool` + +```js +const genericPool = require('generic-pool') +const pool = genericPool.createPool(factory, opts) +``` + +**factory** + +Can be any object/instance but must have the following properties: + +- `create` : a function that the pool will call when it wants a new resource. 
It should return a Promise that either resolves to a `resource` or rejects to an `Error` if it is unable to create a resource for whatever reason. +- `destroy`: a function that the pool will call when it wants to destroy a resource. It should accept one argument `resource` where `resource` is whatever `factory.create` made. The `destroy` function should return a `Promise` that resolves once it has destroyed the resource. + + +optionally it can also have the following property: + +- `validate`: a function that the pool will call if it wants to validate a resource. It should accept one argument `resource` where `resource` is whatever `factory.create` made. Should return a `Promise` that resolves a `boolean` where `true` indicates the resource is still valid or `false` if the resource is invalid. + +_Note: The values returned from `create`, `destroy`, and `validate` are all wrapped in a `Promise.resolve` by the pool before being used internally._ + +**opts** + +An optional object/dictionary with the any of the following properties: + +- `max`: maximum number of resources to create at any given time. (default=1) +- `min`: minimum number of resources to keep in pool at any given time. If this is set >= max, the pool will silently set the min to equal `max`. (default=0) +- `maxWaitingClients`: maximum number of queued requests allowed, additional `acquire` calls will be callback with an `err` in a future cycle of the event loop. +- `testOnBorrow`: `boolean`: should the pool validate resources before giving them to clients. Requires that `factory.validate` is specified. +- `acquireTimeoutMillis`: max milliseconds an `acquire` call will wait for a resource before timing out. (default no limit), if supplied should non-zero positive integer. +- `destroyTimeoutMillis`: max milliseconds a `destroy` call will wait for a resource before timing out. (default no limit), if supplied should non-zero positive integer. 
+- `fifo` : if true the oldest resources will be first to be allocated. If false the most recently released resources will be the first to be allocated. This in effect turns the pool's behaviour from a queue into a stack. `boolean`, (default true) +- `priorityRange`: int between 1 and x - if set, borrowers can specify their relative priority in the queue if no resources are available. + see example. (default 1) +- `autostart`: boolean, should the pool start creating resources, initialize the evictor, etc once the constructor is called. If false, the pool can be started by calling `pool.start`, otherwise the first call to `acquire` will start the pool. (default true) +- `evictionRunIntervalMillis`: How often to run eviction checks. Default: 0 (does not run). +- `numTestsPerEvictionRun`: Number of resources to check each eviction run. Default: 3. +- `softIdleTimeoutMillis`: amount of time an object may sit idle in the pool before it is eligible for eviction by the idle object evictor (if any), with the extra condition that at least "min idle" object instances remain in the pool. Default -1 (nothing can get evicted) +- `idleTimeoutMillis`: the minimum amount of time that an object may sit idle in the pool before it is eligible for eviction due to idle time. Supercedes `softIdleTimeoutMillis` Default: 30000 +- `Promise`: Promise lib, a Promises/A+ implementation that the pool should use. Defaults to whatever `global.Promise` is (usually native promises). + +### pool.acquire + +```js +const onfulfilled = function(resource){ + resource.doStuff() + // release/destroy/etc +} + +pool.acquire().then(onfulfilled) +//or +const priority = 2 +pool.acquire(priority).then(onfulfilled) +``` + +This function is for when you want to "borrow" a resource from the pool. + +`acquire` takes one optional argument: + +- `priority`: optional, number, see **Priority Queueing** below. 
+ +and returns a `Promise` +Once a resource in the pool is available, the promise will be resolved with a `resource` (whatever `factory.create` makes for you). If the Pool is unable to give a resource (e.g timeout) then the promise will be rejected with an `Error` + +### pool.release + +```js +pool.release(resource) +``` + +This function is for when you want to return a resource to the pool. + +`release` takes one required argument: + +- `resource`: a previously borrowed resource + +and returns a `Promise`. This promise will resolve once the `resource` is accepted by the pool, or reject if the pool is unable to accept the `resource` for any reason (e.g `resource` is not a resource or object that came from the pool). If you do not care the outcome it is safe to ignore this promise. + +### pool.isBorrowedResource + +```js +pool.isBorrowedResource(resource) +``` + +This function is for when you need to check if a resource has been acquired from the pool and not yet released/destroyed. + +`isBorrowedResource` takes one required argument: + +- `resource`: any object which you need to test + +and returns true (primitive, not Promise) if resource is currently borrowed from the pool, false otherwise. + +### pool.destroy + +```js +pool.destroy(resource) +``` + +This function is for when you want to return a resource to the pool but want it destroyed rather than being made available to other resources. E.g you may know the resource has timed out or crashed. + +`destroy` takes one required argument: + +- `resource`: a previously borrowed resource + +and returns a `Promise`. This promise will resolve once the `resource` is accepted by the pool, or reject if the pool is unable to accept the `resource` for any reason (e.g `resource` is not a resource or object that came from the pool). If you do not care the outcome it is safe to ignore this promise. 
+ +### pool.on + +```js +pool.on('factoryCreateError', function(err){ + //log stuff maybe +}) + +pool.on('factoryDestroyError', function(err){ + //log stuff maybe +}) +``` + +The pool is an event emitter. Below are the events it emits and any args for those events + +- `factoryCreateError` : emitted when a promise returned by `factory.create` is rejected. If this event has no listeners then the `error` will be silently discarded + - `error`: whatever `reason` the promise was rejected with. + +- `factoryDestroyError` : emitted when a promise returned by `factory.destroy` is rejected. If this event has no listeners then the `error` will be silently discarded + - `error`: whatever `reason` the promise was rejected with. + +### pool.start + +```js +pool.start() +``` + +If `autostart` is `false` then this method can be used to start the pool and therefore begin creation of resources, start the evictor, and any other internal logic. + +### pool.ready + +```js +pool.ready() +``` + +Waits for the pool to fully start. + +### pool.use + +```js + +const myTask = dbClient => { + return new Promise( (resolve, reject) => { + // do something with the client and resolve/reject + }) +} + +pool.use(myTask).then(/* a promise that will run after myTask resolves */) +``` + +This method handles acquiring a `resource` from the pool, handing it to your function and then calling `pool.release` or `pool.destroy` with resource after your function has finished. + +`use` takes one required argument: + +- `fn`: a function that accepts a `resource` and returns a `Promise`. Once that promise `resolve`s the `resource` is returned to the pool, else if it `reject`s then the resource is destroyed. +- `priority`: Optionally, you can specify the priority as number. See [Priority Queueing](#priority-queueing) section. + +and returns a `Promise` that either `resolve`s with the value from the user supplied `fn` or `reject`s with an error. 
+ +## Idle Object Eviction + +The pool has an evictor (off by default) which will inspect idle items in the pool and `destroy` them if they are too old. + +By default the evictor does not run, to enable it you must set the `evictionRunIntervalMillis` option to a non-zero value. Once enable the evictor will check at most `numTestsPerEvictionRun` each time, this is to stop it blocking your application if you have lots of resources in the pool. + + +## Priority Queueing + +The pool supports optional priority queueing. This becomes relevant when no resources are available and the caller has to wait. `acquire()` accepts an optional priority int which +specifies the caller's relative position in the queue. Each priority slot has it's own internal queue created for it. When a resource is available for borrowing, the first request in the highest priority queue will be given it. + +Specifying a `priority` to `acquire` that is outside the `priorityRange` set at `Pool` creation time will result in the `priority` being converted the lowest possible `priority` + +```js +// create pool with priorityRange of 3 +// borrowers can specify a priority 0 to 2 +const opts = { + priorityRange : 3 +} +const pool = genericPool.createPool(someFactory,opts); + +// acquire connection - no priority specified - will go onto lowest priority queue +pool.acquire().then(function(client) { + pool.release(client); +}); + +// acquire connection - high priority - will go into highest priority queue +pool.acquire(0).then(function(client) { + pool.release(client); +}); + +// acquire connection - medium priority - will go into 'mid' priority queue +pool.acquire(1).then(function(client) { + pool.release(client); +}); + +// etc.. +``` + +## Draining + +If you are shutting down a long-lived process, you may notice +that node fails to exit for 30 seconds or so. 
This is a side +effect of the idleTimeoutMillis behavior -- the pool has a +setTimeout() call registered that is in the event loop queue, so +node won't terminate until all resources have timed out, and the pool +stops trying to manage them. + +This behavior will be more problematic when you set factory.min > 0, +as the pool will never become empty, and the setTimeout calls will +never end. + +In these cases, use the pool.drain() function. This sets the pool +into a "draining" state which will gracefully wait until all +idle resources have timed out. For example, you can call: + +If you do this, your node process will exit gracefully. + +If you know you would like to terminate all the available resources in your pool before any timeouts they might have are reached, you can use `clear()` in conjunction with `drain()`: + +```js +const p = pool.drain() +.then(function() { + return pool.clear(); +}); +``` +The `promise` returned will resolve once all waiting clients have acquired and return resources, and any available resources have been destroyed + +One side-effect of calling `drain()` is that subsequent calls to `acquire()` +will throw an Error. 
+ +## Pooled function decoration + +This has now been extracted out it's own module [generic-pool-decorator](https://github.com/sandfox/generic-pool-decorator) + +## Pool info + +The following properties will let you get information about the pool: + +```js + +// How many many more resources can the pool manage/create +pool.spareResourceCapacity + +// returns number of resources in the pool regardless of +// whether they are free or in use +pool.size + +// returns number of unused resources in the pool +pool.available + +// number of resources that are currently acquired by userland code +pool.borrowed + +// returns number of callers waiting to acquire a resource +pool.pending + +// returns number of maxixmum number of resources allowed by pool +pool.max + +// returns number of minimum number of resources allowed by pool +pool.min + +``` + +## Run Tests + + $ npm install + $ npm test + +The tests are run/written using Tap. Most are ports from the old espresso tests and are not in great condition. Most cases are inside `test/generic-pool-test.js` with newer cases in their own files (legacy reasons). + +## Linting + +We use eslint combined with prettier + + +## License + +(The MIT License) + +Copyright (c) 2010-2016 James Cooper <james@bitmechanic.com> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/generic-pool/index.d.ts b/node_modules/generic-pool/index.d.ts new file mode 100644 index 0000000..8b4bae8 --- /dev/null +++ b/node_modules/generic-pool/index.d.ts @@ -0,0 +1,51 @@ +// Type definitions for node-pool 3.1 +// Derived from https://github.com/DefinitelyTyped/DefinitelyTyped +// -> https://github.com/DefinitelyTyped/DefinitelyTyped/blob/454dcbcbe5e932010b128dca9793758dd28adb45/types/generic-pool/index.d.ts + +/// + +import { EventEmitter } from "events"; + +export class Pool extends EventEmitter { + spareResourceCapacity: number; + size: number; + available: number; + borrowed: number; + pending: number; + max: number; + min: number; + + start(): void; + acquire(priority?: number): Promise; + release(resource: T): Promise; + destroy(resource: T): Promise; + drain(): Promise; + clear(): Promise; + use(cb: (resource: T) => U | Promise): Promise; + isBorrowedResource(resource: T): boolean; + ready(): Promise; +} + +export interface Factory { + create(): Promise; + destroy(client: T): Promise; + validate?(client: T): Promise; +} + +export interface Options { + max?: number; + min?: number; + maxWaitingClients?: number; + testOnBorrow?: boolean; + testOnReturn?: boolean; + acquireTimeoutMillis?: number; + fifo?: boolean; + priorityRange?: number; + autostart?: boolean; + evictionRunIntervalMillis?: number; + numTestsPerEvictionRun?: number; + softIdleTimeoutMillis?: number; + idleTimeoutMillis?: number; +} + +export function createPool(factory: Factory, opts?: Options): 
Pool; diff --git a/node_modules/generic-pool/index.js b/node_modules/generic-pool/index.js new file mode 100644 index 0000000..ddb53ae --- /dev/null +++ b/node_modules/generic-pool/index.js @@ -0,0 +1,13 @@ +const Pool = require("./lib/Pool"); +const Deque = require("./lib/Deque"); +const PriorityQueue = require("./lib/PriorityQueue"); +const DefaultEvictor = require("./lib/DefaultEvictor"); +module.exports = { + Pool: Pool, + Deque: Deque, + PriorityQueue: PriorityQueue, + DefaultEvictor: DefaultEvictor, + createPool: function(factory, config) { + return new Pool(DefaultEvictor, Deque, PriorityQueue, factory, config); + } +}; diff --git a/node_modules/generic-pool/lib/DefaultEvictor.js b/node_modules/generic-pool/lib/DefaultEvictor.js new file mode 100644 index 0000000..658d345 --- /dev/null +++ b/node_modules/generic-pool/lib/DefaultEvictor.js @@ -0,0 +1,23 @@ +"use strict"; + +class DefaultEvictor { + evict(config, pooledResource, availableObjectsCount) { + const idleTime = Date.now() - pooledResource.lastIdleTime; + + if ( + config.softIdleTimeoutMillis > 0 && + config.softIdleTimeoutMillis < idleTime && + config.min < availableObjectsCount + ) { + return true; + } + + if (config.idleTimeoutMillis < idleTime) { + return true; + } + + return false; + } +} + +module.exports = DefaultEvictor; diff --git a/node_modules/generic-pool/lib/Deferred.js b/node_modules/generic-pool/lib/Deferred.js new file mode 100644 index 0000000..b38c22a --- /dev/null +++ b/node_modules/generic-pool/lib/Deferred.js @@ -0,0 +1,49 @@ +"use strict"; + +/** + * This is apparently a bit like a Jquery deferred, hence the name + */ + +class Deferred { + constructor(Promise) { + this._state = Deferred.PENDING; + this._resolve = undefined; + this._reject = undefined; + + this._promise = new Promise((resolve, reject) => { + this._resolve = resolve; + this._reject = reject; + }); + } + + get state() { + return this._state; + } + + get promise() { + return this._promise; + } + + reject(reason) { + 
if (this._state !== Deferred.PENDING) { + return; + } + this._state = Deferred.REJECTED; + this._reject(reason); + } + + resolve(value) { + if (this._state !== Deferred.PENDING) { + return; + } + this._state = Deferred.FULFILLED; + this._resolve(value); + } +} + +// TODO: should these really live here? or be a seperate 'state' enum +Deferred.PENDING = "PENDING"; +Deferred.FULFILLED = "FULFILLED"; +Deferred.REJECTED = "REJECTED"; + +module.exports = Deferred; diff --git a/node_modules/generic-pool/lib/Deque.js b/node_modules/generic-pool/lib/Deque.js new file mode 100644 index 0000000..552ec3b --- /dev/null +++ b/node_modules/generic-pool/lib/Deque.js @@ -0,0 +1,106 @@ +"use strict"; + +const DoublyLinkedList = require("./DoublyLinkedList"); +const DequeIterator = require("./DequeIterator"); +/** + * DoublyLinkedList backed double ended queue + * implements just enough to keep the Pool + */ +class Deque { + constructor() { + this._list = new DoublyLinkedList(); + } + + /** + * removes and returns the first element from the queue + * @return {any} [description] + */ + shift() { + if (this.length === 0) { + return undefined; + } + + const node = this._list.head; + this._list.remove(node); + + return node.data; + } + + /** + * adds one elemts to the beginning of the queue + * @param {any} element [description] + * @return {any} [description] + */ + unshift(element) { + const node = DoublyLinkedList.createNode(element); + + this._list.insertBeginning(node); + } + + /** + * adds one to the end of the queue + * @param {any} element [description] + * @return {any} [description] + */ + push(element) { + const node = DoublyLinkedList.createNode(element); + + this._list.insertEnd(node); + } + + /** + * removes and returns the last element from the queue + */ + pop() { + if (this.length === 0) { + return undefined; + } + + const node = this._list.tail; + this._list.remove(node); + + return node.data; + } + + [Symbol.iterator]() { + return new DequeIterator(this._list); + } + + 
iterator() { + return new DequeIterator(this._list); + } + + reverseIterator() { + return new DequeIterator(this._list, true); + } + + /** + * get a reference to the item at the head of the queue + * @return {any} [description] + */ + get head() { + if (this.length === 0) { + return undefined; + } + const node = this._list.head; + return node.data; + } + + /** + * get a reference to the item at the tail of the queue + * @return {any} [description] + */ + get tail() { + if (this.length === 0) { + return undefined; + } + const node = this._list.tail; + return node.data; + } + + get length() { + return this._list.length; + } +} + +module.exports = Deque; diff --git a/node_modules/generic-pool/lib/DequeIterator.js b/node_modules/generic-pool/lib/DequeIterator.js new file mode 100644 index 0000000..0f1f461 --- /dev/null +++ b/node_modules/generic-pool/lib/DequeIterator.js @@ -0,0 +1,20 @@ +"use strict"; + +const DoublyLinkedListIterator = require("./DoublyLinkedListIterator"); +/** + * Thin wrapper around an underlying DDL iterator + */ +class DequeIterator extends DoublyLinkedListIterator { + next() { + const result = super.next(); + + // unwrap the node... + if (result.value) { + result.value = result.value.data; + } + + return result; + } +} + +module.exports = DequeIterator; diff --git a/node_modules/generic-pool/lib/DoublyLinkedList.js b/node_modules/generic-pool/lib/DoublyLinkedList.js new file mode 100644 index 0000000..a207a40 --- /dev/null +++ b/node_modules/generic-pool/lib/DoublyLinkedList.js @@ -0,0 +1,94 @@ +"use strict"; + +/** + * A Doubly Linked List, because there aren't enough in the world... 
+ * this is pretty much a direct JS port of the one wikipedia + * https://en.wikipedia.org/wiki/Doubly_linked_list + * + * For most usage 'insertBeginning' and 'insertEnd' should be enough + * + * nodes are expected to something like a POJSO like + * { + * prev: null, + * next: null, + * something: 'whatever you like' + * } + */ +class DoublyLinkedList { + constructor() { + this.head = null; + this.tail = null; + this.length = 0; + } + + insertBeginning(node) { + if (this.head === null) { + this.head = node; + this.tail = node; + node.prev = null; + node.next = null; + this.length++; + } else { + this.insertBefore(this.head, node); + } + } + + insertEnd(node) { + if (this.tail === null) { + this.insertBeginning(node); + } else { + this.insertAfter(this.tail, node); + } + } + + insertAfter(node, newNode) { + newNode.prev = node; + newNode.next = node.next; + if (node.next === null) { + this.tail = newNode; + } else { + node.next.prev = newNode; + } + node.next = newNode; + this.length++; + } + + insertBefore(node, newNode) { + newNode.prev = node.prev; + newNode.next = node; + if (node.prev === null) { + this.head = newNode; + } else { + node.prev.next = newNode; + } + node.prev = newNode; + this.length++; + } + + remove(node) { + if (node.prev === null) { + this.head = node.next; + } else { + node.prev.next = node.next; + } + if (node.next === null) { + this.tail = node.prev; + } else { + node.next.prev = node.prev; + } + node.prev = null; + node.next = null; + this.length--; + } + + // FIXME: this should not live here and has become a dumping ground... 
+ static createNode(data) { + return { + prev: null, + next: null, + data: data + }; + } +} + +module.exports = DoublyLinkedList; diff --git a/node_modules/generic-pool/lib/DoublyLinkedListIterator.js b/node_modules/generic-pool/lib/DoublyLinkedListIterator.js new file mode 100644 index 0000000..7304a1a --- /dev/null +++ b/node_modules/generic-pool/lib/DoublyLinkedListIterator.js @@ -0,0 +1,100 @@ +"use strict"; + +/** + * Creates an interator for a DoublyLinkedList starting at the given node + * It's internal cursor will remains relative to the last "iterated" node as that + * node moves through the list until it either iterates to the end of the list, + * or the the node it's tracking is removed from the list. Until the first 'next' + * call it tracks the head/tail of the linked list. This means that one can create + * an iterator on an empty list, then add nodes, and then the iterator will follow + * those nodes. Because the DoublyLinkedList nodes don't track their owning "list" and + * it's highly inefficient to walk the list for every iteration, the iterator won't know + * if the node has been detached from one List and added to another list, or if the iterator + * + * The created object is an es6 compatible iterator + */ +class DoublyLinkedListIterator { + /** + * @param {Object} doublyLinkedList a node that is part of a doublyLinkedList + * @param {Boolean} [reverse=false] is this a reverse iterator? default: false + */ + constructor(doublyLinkedList, reverse) { + this._list = doublyLinkedList; + // NOTE: these key names are tied to the DoublyLinkedListIterator + this._direction = reverse === true ? "prev" : "next"; + this._startPosition = reverse === true ? 
"tail" : "head"; + this._started = false; + this._cursor = null; + this._done = false; + } + + _start() { + this._cursor = this._list[this._startPosition]; + this._started = true; + } + + _advanceCursor() { + if (this._started === false) { + this._started = true; + this._cursor = this._list[this._startPosition]; + return; + } + this._cursor = this._cursor[this._direction]; + } + + reset() { + this._done = false; + this._started = false; + this._cursor = null; + } + + remove() { + if ( + this._started === false || + this._done === true || + this._isCursorDetached() + ) { + return false; + } + this._list.remove(this._cursor); + } + + next() { + if (this._done === true) { + return { done: true }; + } + + this._advanceCursor(); + + // if there is no node at the cursor or the node at the cursor is no longer part of + // a doubly linked list then we are done/finished/kaput + if (this._cursor === null || this._isCursorDetached()) { + this._done = true; + return { done: true }; + } + + return { + value: this._cursor, + done: false + }; + } + + /** + * Is the node detached from a list? + * NOTE: you can trick/bypass/confuse this check by removing a node from one DoublyLinkedList + * and adding it to another. 
+ * TODO: We can make this smarter by checking the direction of travel and only checking + * the required next/prev/head/tail rather than all of them + * @return {Boolean} [description] + */ + _isCursorDetached() { + return ( + this._cursor.prev === null && + this._cursor.next === null && + this._list.tail !== this._cursor && + this._list.head !== this._cursor + ); + } +} + +module.exports = DoublyLinkedListIterator; diff --git a/node_modules/generic-pool/lib/Pool.js b/node_modules/generic-pool/lib/Pool.js new file mode 100644 index 0000000..d014052 --- /dev/null +++ b/node_modules/generic-pool/lib/Pool.js @@ -0,0 +1,744 @@ +"use strict"; + +const EventEmitter = require("events").EventEmitter; + +const factoryValidator = require("./factoryValidator"); +const PoolOptions = require("./PoolOptions"); +const ResourceRequest = require("./ResourceRequest"); +const ResourceLoan = require("./ResourceLoan"); +const PooledResource = require("./PooledResource"); +const DefaultEvictor = require("./DefaultEvictor"); +const Deque = require("./Deque"); +const Deferred = require("./Deferred"); +const PriorityQueue = require("./PriorityQueue"); +const DequeIterator = require("./DequeIterator"); + +const reflector = require("./utils").reflector; + +/** + * TODO: move me + */ +const FACTORY_CREATE_ERROR = "factoryCreateError"; +const FACTORY_DESTROY_ERROR = "factoryDestroyError"; + +class Pool extends EventEmitter { + /** + * Generate an Object pool with a specified `factory` and `config`. + * + * @param {typeof DefaultEvictor} Evictor + * @param {typeof Deque} Deque + * @param {typeof PriorityQueue} PriorityQueue + * @param {Object} factory + * Factory to be used for generating and destroying the items. + * @param {Function} factory.create + * Should create the item to be acquired, + * and call it's first callback argument with the generated item as it's argument. + * @param {Function} factory.destroy + * Should gently close any resources that the item is using. 
+ * Called before the items is destroyed. + * @param {Function} factory.validate + * Test if a resource is still valid .Should return a promise that resolves to a boolean, true if resource is still valid and false + * If it should be removed from pool. + * @param {Object} options + */ + constructor(Evictor, Deque, PriorityQueue, factory, options) { + super(); + + factoryValidator(factory); + + this._config = new PoolOptions(options); + + // TODO: fix up this ugly glue-ing + this._Promise = this._config.Promise; + + this._factory = factory; + this._draining = false; + this._started = false; + /** + * Holds waiting clients + * @type {PriorityQueue} + */ + this._waitingClientsQueue = new PriorityQueue(this._config.priorityRange); + + /** + * Collection of promises for resource creation calls made by the pool to factory.create + * @type {Set} + */ + this._factoryCreateOperations = new Set(); + + /** + * Collection of promises for resource destruction calls made by the pool to factory.destroy + * @type {Set} + */ + this._factoryDestroyOperations = new Set(); + + /** + * A queue/stack of pooledResources awaiting acquisition + * TODO: replace with LinkedList backed array + * @type {Deque} + */ + this._availableObjects = new Deque(); + + /** + * Collection of references for any resource that are undergoing validation before being acquired + * @type {Set} + */ + this._testOnBorrowResources = new Set(); + + /** + * Collection of references for any resource that are undergoing validation before being returned + * @type {Set} + */ + this._testOnReturnResources = new Set(); + + /** + * Collection of promises for any validations currently in process + * @type {Set} + */ + this._validationOperations = new Set(); + + /** + * All objects associated with this pool in any state (except destroyed) + * @type {Set} + */ + this._allObjects = new Set(); + + /** + * Loans keyed by the borrowed resource + * @type {Map} + */ + this._resourceLoans = new Map(); + + /** + * Infinitely looping 
iterator over available object + * @type {DequeIterator} + */ + this._evictionIterator = this._availableObjects.iterator(); + + this._evictor = new Evictor(); + + /** + * handle for setTimeout for next eviction run + * @type {(number|null)} + */ + this._scheduledEviction = null; + + // create initial resources (if factory.min > 0) + if (this._config.autostart === true) { + this.start(); + } + } + + _destroy(pooledResource) { + // FIXME: do we need another state for "in destruction"? + pooledResource.invalidate(); + this._allObjects.delete(pooledResource); + // NOTE: this maybe very bad promise usage? + const destroyPromise = this._factory.destroy(pooledResource.obj); + const wrappedDestroyPromise = this._config.destroyTimeoutMillis + ? this._Promise.resolve(this._applyDestroyTimeout(destroyPromise)) + : this._Promise.resolve(destroyPromise); + + this._trackOperation( + wrappedDestroyPromise, + this._factoryDestroyOperations + ).catch(reason => { + this.emit(FACTORY_DESTROY_ERROR, reason); + }); + + // TODO: maybe ensuring minimum pool size should live outside here + this._ensureMinimum(); + } + + _applyDestroyTimeout(promise) { + const timeoutPromise = new this._Promise((resolve, reject) => { + setTimeout(() => { + reject(new Error("destroy timed out")); + }, this._config.destroyTimeoutMillis).unref(); + }); + return this._Promise.race([timeoutPromise, promise]); + } + + /** + * Attempt to move an available resource into test and then onto a waiting client + * @return {Boolean} could we move an available resource into test + */ + _testOnBorrow() { + if (this._availableObjects.length < 1) { + return false; + } + + const pooledResource = this._availableObjects.shift(); + // Mark the resource as in test + pooledResource.test(); + this._testOnBorrowResources.add(pooledResource); + const validationPromise = this._factory.validate(pooledResource.obj); + const wrappedValidationPromise = this._Promise.resolve(validationPromise); + + this._trackOperation( + 
wrappedValidationPromise, + this._validationOperations + ).then(isValid => { + this._testOnBorrowResources.delete(pooledResource); + + if (isValid === false) { + pooledResource.invalidate(); + this._destroy(pooledResource); + this._dispense(); + return; + } + this._dispatchPooledResourceToNextWaitingClient(pooledResource); + }); + + return true; + } + + /** + * Attempt to move an available resource to a waiting client + * @return {Boolean} [description] + */ + _dispatchResource() { + if (this._availableObjects.length < 1) { + return false; + } + + const pooledResource = this._availableObjects.shift(); + this._dispatchPooledResourceToNextWaitingClient(pooledResource); + return false; + } + + /** + * Attempt to resolve an outstanding resource request using an available resource from + * the pool, or creating new ones + * + * @private + */ + _dispense() { + /** + * Local variables for ease of reading/writing + * these don't (shouldn't) change across the execution of this fn + */ + const numWaitingClients = this._waitingClientsQueue.length; + + // If there aren't any waiting requests then there is nothing to do + // so lets short-circuit + if (numWaitingClients < 1) { + return; + } + + const resourceShortfall = + numWaitingClients - this._potentiallyAllocableResourceCount; + + const actualNumberOfResourcesToCreate = Math.min( + this.spareResourceCapacity, + resourceShortfall + ); + for (let i = 0; actualNumberOfResourcesToCreate > i; i++) { + this._createResource(); + } + + // If we are doing test-on-borrow see how many more resources need to be moved into test + // to help satisfy waitingClients + if (this._config.testOnBorrow === true) { + // how many available resources do we need to shift into test + const desiredNumberOfResourcesToMoveIntoTest = + numWaitingClients - this._testOnBorrowResources.size; + const actualNumberOfResourcesToMoveIntoTest = Math.min( + this._availableObjects.length, + desiredNumberOfResourcesToMoveIntoTest + ); + for (let i = 0; 
actualNumberOfResourcesToMoveIntoTest > i; i++) { + this._testOnBorrow(); + } + } + + // if we aren't testing-on-borrow then lets try to allocate what we can + if (this._config.testOnBorrow === false) { + const actualNumberOfResourcesToDispatch = Math.min( + this._availableObjects.length, + numWaitingClients + ); + for (let i = 0; actualNumberOfResourcesToDispatch > i; i++) { + this._dispatchResource(); + } + } + } + + /** + * Dispatches a pooledResource to the next waiting client (if any) else + * puts the PooledResource back on the available list + * @param {PooledResource} pooledResource [description] + * @return {Boolean} [description] + */ + _dispatchPooledResourceToNextWaitingClient(pooledResource) { + const clientResourceRequest = this._waitingClientsQueue.dequeue(); + if ( + clientResourceRequest === undefined || + clientResourceRequest.state !== Deferred.PENDING + ) { + // While we were away either all the waiting clients timed out + // or were somehow fulfilled. put our pooledResource back. + this._addPooledResourceToAvailableObjects(pooledResource); + // TODO: do need to trigger anything before we leave? 
+ return false; + } + const loan = new ResourceLoan(pooledResource, this._Promise); + this._resourceLoans.set(pooledResource.obj, loan); + pooledResource.allocate(); + clientResourceRequest.resolve(pooledResource.obj); + return true; + } + + /** + * tracks on operation using given set + * handles adding/removing from the set and resolve/rejects the value/reason + * @param {Promise} operation + * @param {Set} set Set holding operations + * @return {Promise} Promise that resolves once operation has been removed from set + */ + _trackOperation(operation, set) { + set.add(operation); + + return operation.then( + v => { + set.delete(operation); + return this._Promise.resolve(v); + }, + e => { + set.delete(operation); + return this._Promise.reject(e); + } + ); + } + + /** + * @private + */ + _createResource() { + // An attempt to create a resource + const factoryPromise = this._factory.create(); + const wrappedFactoryPromise = this._Promise + .resolve(factoryPromise) + .then(resource => { + const pooledResource = new PooledResource(resource); + this._allObjects.add(pooledResource); + this._addPooledResourceToAvailableObjects(pooledResource); + }); + + this._trackOperation(wrappedFactoryPromise, this._factoryCreateOperations) + .then(() => { + this._dispense(); + // Stop bluebird complaining about this side-effect only handler + // - a promise was created in a handler but was not returned from it + // https://goo.gl/rRqMUw + return null; + }) + .catch(reason => { + this.emit(FACTORY_CREATE_ERROR, reason); + this._dispense(); + }); + } + + /** + * @private + */ + _ensureMinimum() { + if (this._draining === true) { + return; + } + const minShortfall = this._config.min - this._count; + for (let i = 0; i < minShortfall; i++) { + this._createResource(); + } + } + + _evict() { + const testsToRun = Math.min( + this._config.numTestsPerEvictionRun, + this._availableObjects.length + ); + const evictionConfig = { + softIdleTimeoutMillis: this._config.softIdleTimeoutMillis, + 
idleTimeoutMillis: this._config.idleTimeoutMillis, + min: this._config.min + }; + for (let testsHaveRun = 0; testsHaveRun < testsToRun; ) { + const iterationResult = this._evictionIterator.next(); + + // Safety check incase we could get stuck in infinite loop because we + // somehow emptied the array after checking its length. + if (iterationResult.done === true && this._availableObjects.length < 1) { + this._evictionIterator.reset(); + return; + } + // If this happens it should just mean we reached the end of the + // list and can reset the cursor. + if (iterationResult.done === true && this._availableObjects.length > 0) { + this._evictionIterator.reset(); + continue; + } + + const resource = iterationResult.value; + + const shouldEvict = this._evictor.evict( + evictionConfig, + resource, + this._availableObjects.length + ); + testsHaveRun++; + + if (shouldEvict === true) { + // take it out of the _availableObjects list + this._evictionIterator.remove(); + this._destroy(resource); + } + } + } + + _scheduleEvictorRun() { + // Start eviction if set + if (this._config.evictionRunIntervalMillis > 0) { + // @ts-ignore + this._scheduledEviction = setTimeout(() => { + this._evict(); + this._scheduleEvictorRun(); + }, this._config.evictionRunIntervalMillis).unref(); + } + } + + _descheduleEvictorRun() { + if (this._scheduledEviction) { + clearTimeout(this._scheduledEviction); + } + this._scheduledEviction = null; + } + + start() { + if (this._draining === true) { + return; + } + if (this._started === true) { + return; + } + this._started = true; + this._scheduleEvictorRun(); + this._ensureMinimum(); + } + + /** + * Request a new resource. The callback will be called, + * when a new resource is available, passing the resource to the callback. + * TODO: should we add a seperate "acquireWithPriority" function + * + * @param {Number} [priority=0] + * Optional. Integer between 0 and (priorityRange - 1). 
Specifies the priority + * of the caller if there are no available resources. Lower numbers mean higher + * priority. + * + * @returns {Promise} + */ + acquire(priority) { + if (this._started === false && this._config.autostart === false) { + this.start(); + } + + if (this._draining) { + return this._Promise.reject( + new Error("pool is draining and cannot accept work") + ); + } + + // TODO: should we defer this check till after this event loop incase "the situation" changes in the meantime + if ( + this.spareResourceCapacity < 1 && + this._availableObjects.length < 1 && + this._config.maxWaitingClients !== undefined && + this._waitingClientsQueue.length >= this._config.maxWaitingClients + ) { + return this._Promise.reject( + new Error("max waitingClients count exceeded") + ); + } + + const resourceRequest = new ResourceRequest( + this._config.acquireTimeoutMillis, + this._Promise + ); + this._waitingClientsQueue.enqueue(resourceRequest, priority); + this._dispense(); + + return resourceRequest.promise; + } + + /** + * [use method, aquires a resource, passes the resource to a user supplied function and releases it] + * @param {Function} fn [a function that accepts a resource and returns a promise that resolves/rejects once it has finished using the resource] + * @return {Promise} [resolves once the resource is released to the pool] + */ + use(fn, priority) { + return this.acquire(priority).then(resource => { + return fn(resource).then( + result => { + this.release(resource); + return result; + }, + err => { + this.destroy(resource); + throw err; + } + ); + }); + } + + /** + * Check if resource is currently on loan from the pool + * + * @param {Function} resource + * Resource for checking. + * + * @returns {Boolean} + * True if resource belongs to this pool and false otherwise + */ + isBorrowedResource(resource) { + return this._resourceLoans.has(resource); + } + + /** + * Return the resource to the pool when it is no longer required. 
+ * + * @param {Object} resource + * The acquired object to be put back to the pool. + */ + release(resource) { + // check for an outstanding loan + const loan = this._resourceLoans.get(resource); + + if (loan === undefined) { + return this._Promise.reject( + new Error("Resource not currently part of this pool") + ); + } + + this._resourceLoans.delete(resource); + loan.resolve(); + const pooledResource = loan.pooledResource; + + pooledResource.deallocate(); + this._addPooledResourceToAvailableObjects(pooledResource); + + this._dispense(); + return this._Promise.resolve(); + } + + /** + * Request the resource to be destroyed. The factory's destroy handler + * will also be called. + * + * This should be called within an acquire() block as an alternative to release(). + * + * @param {Object} resource + * The acquired resource to be destoyed. + */ + destroy(resource) { + // check for an outstanding loan + const loan = this._resourceLoans.get(resource); + + if (loan === undefined) { + return this._Promise.reject( + new Error("Resource not currently part of this pool") + ); + } + + this._resourceLoans.delete(resource); + loan.resolve(); + const pooledResource = loan.pooledResource; + + pooledResource.deallocate(); + this._destroy(pooledResource); + + this._dispense(); + return this._Promise.resolve(); + } + + _addPooledResourceToAvailableObjects(pooledResource) { + pooledResource.idle(); + if (this._config.fifo === true) { + this._availableObjects.push(pooledResource); + } else { + this._availableObjects.unshift(pooledResource); + } + } + + /** + * Disallow any new acquire calls and let the request backlog dissapate. + * The Pool will no longer attempt to maintain a "min" number of resources + * and will only make new resources on demand. + * Resolves once all resource requests are fulfilled and all resources are returned to pool and available... 
+ * Should probably be called "drain work" + * @returns {Promise} + */ + drain() { + this._draining = true; + return this.__allResourceRequestsSettled() + .then(() => { + return this.__allResourcesReturned(); + }) + .then(() => { + this._descheduleEvictorRun(); + }); + } + + __allResourceRequestsSettled() { + if (this._waitingClientsQueue.length > 0) { + // wait for last waiting client to be settled + // FIXME: what if they can "resolve" out of order....? + return reflector(this._waitingClientsQueue.tail.promise); + } + return this._Promise.resolve(); + } + + // FIXME: this is a horrific mess + __allResourcesReturned() { + const ps = Array.from(this._resourceLoans.values()) + .map(loan => loan.promise) + .map(reflector); + return this._Promise.all(ps); + } + + /** + * Forcibly destroys all available resources regardless of timeout. Intended to be + * invoked as part of a drain. Does not prevent the creation of new + * resources as a result of subsequent calls to acquire. + * + * Note that if factory.min > 0 and the pool isn't "draining", the pool will destroy all idle resources + * in the pool, but replace them with newly created resources up to the + * specified factory.min value. If this is not desired, set factory.min + * to zero before calling clear() + * + */ + clear() { + const reflectedCreatePromises = Array.from( + this._factoryCreateOperations + ).map(reflector); + + // wait for outstanding factory.create to complete + return this._Promise.all(reflectedCreatePromises).then(() => { + // Destroy existing resources + // @ts-ignore + for (const resource of this._availableObjects) { + this._destroy(resource); + } + const reflectedDestroyPromises = Array.from( + this._factoryDestroyOperations + ).map(reflector); + return reflector(this._Promise.all(reflectedDestroyPromises)); + }); + } + + /** + * Waits until the pool is ready. + * We define ready by checking if the current resource number is at least + * the minimum number defined. 
+ * @returns {Promise} that resolves when the minimum number is ready. + */ + ready() { + return new this._Promise(resolve => { + const isReady = () => { + if (this.available >= this.min) { + resolve(); + } else { + setTimeout(isReady, 100); + } + }; + + isReady(); + }); + } + + /** + * How many resources are available to allocated + * (includes resources that have not been tested and may faul validation) + * NOTE: internal for now as the name is awful and might not be useful to anyone + * @return {Number} number of resources the pool has to allocate + */ + get _potentiallyAllocableResourceCount() { + return ( + this._availableObjects.length + + this._testOnBorrowResources.size + + this._testOnReturnResources.size + + this._factoryCreateOperations.size + ); + } + + /** + * The combined count of the currently created objects and those in the + * process of being created + * Does NOT include resources in the process of being destroyed + * sort of legacy... + * @return {Number} + */ + get _count() { + return this._allObjects.size + this._factoryCreateOperations.size; + } + + /** + * How many more resources does the pool have room for + * @return {Number} number of resources the pool could create before hitting any limits + */ + get spareResourceCapacity() { + return ( + this._config.max - + (this._allObjects.size + this._factoryCreateOperations.size) + ); + } + + /** + * see _count above + * @return {Number} [description] + */ + get size() { + return this._count; + } + + /** + * number of available resources + * @return {Number} [description] + */ + get available() { + return this._availableObjects.length; + } + + /** + * number of resources that are currently acquired + * @return {Number} [description] + */ + get borrowed() { + return this._resourceLoans.size; + } + + /** + * number of waiting acquire calls + * @return {Number} [description] + */ + get pending() { + return this._waitingClientsQueue.length; + } + + /** + * maximum size of the pool + * @return {Number} 
[description] + */ + get max() { + return this._config.max; + } + + /** + * minimum size of the pool + * @return {Number} [description] + */ + get min() { + return this._config.min; + } +} + +module.exports = Pool; diff --git a/node_modules/generic-pool/lib/PoolDefaults.js b/node_modules/generic-pool/lib/PoolDefaults.js new file mode 100644 index 0000000..56d004c --- /dev/null +++ b/node_modules/generic-pool/lib/PoolDefaults.js @@ -0,0 +1,34 @@ +"use strict"; +/** + * Create the default settings used by the pool + * + * @class + */ +class PoolDefaults { + constructor() { + this.fifo = true; + this.priorityRange = 1; + + this.testOnBorrow = false; + this.testOnReturn = false; + + this.autostart = true; + + this.evictionRunIntervalMillis = 0; + this.numTestsPerEvictionRun = 3; + this.softIdleTimeoutMillis = -1; + this.idleTimeoutMillis = 30000; + + // FIXME: no defaults! + this.acquireTimeoutMillis = null; + this.destroyTimeoutMillis = null; + this.maxWaitingClients = null; + + this.min = null; + this.max = null; + // FIXME: this seems odd? + this.Promise = Promise; + } +} + +module.exports = PoolDefaults; diff --git a/node_modules/generic-pool/lib/PoolOptions.js b/node_modules/generic-pool/lib/PoolOptions.js new file mode 100644 index 0000000..f355948 --- /dev/null +++ b/node_modules/generic-pool/lib/PoolOptions.js @@ -0,0 +1,109 @@ +"use strict"; + +const PoolDefaults = require("./PoolDefaults"); + +class PoolOptions { + /** + * @param {Object} opts + * configuration for the pool + * @param {Number} [opts.max=null] + * Maximum number of items that can exist at the same time. Default: 1. + * Any further acquire requests will be pushed to the waiting list. + * @param {Number} [opts.min=null] + * Minimum number of items in pool (including in-use). Default: 0. + * When the pool is created, or a resource destroyed, this minimum will + * be checked. If the pool resource count is below the minimum, a new + * resource will be created and added to the pool. 
+ * @param {Number} [opts.maxWaitingClients=null] + * maximum number of queued requests allowed after which acquire calls will be rejected + * @param {Boolean} [opts.testOnBorrow=false] + * should the pool validate resources before giving them to clients. Requires that + * `factory.validate` is specified. + * @param {Boolean} [opts.testOnReturn=false] + * should the pool validate resources before returning them to the pool. + * @param {Number} [opts.acquireTimeoutMillis=null] + * Delay in milliseconds after which the an `acquire` call will fail. optional. + * Default: undefined. Should be positive and non-zero + * @param {Number} [opts.destroyTimeoutMillis=null] + * Delay in milliseconds after which the an `destroy` call will fail, causing it to emit a factoryDestroyError event. optional. + * Default: undefined. Should be positive and non-zero + * @param {Number} [opts.priorityRange=1] + * The range from 1 to be treated as a valid priority + * @param {Boolean} [opts.fifo=true] + * Sets whether the pool has LIFO (last in, first out) behaviour with respect to idle objects. + * if false then pool has FIFO behaviour + * @param {Boolean} [opts.autostart=true] + * Should the pool start creating resources etc once the constructor is called + * @param {Number} [opts.evictionRunIntervalMillis=0] + * How often to run eviction checks. Default: 0 (does not run). + * @param {Number} [opts.numTestsPerEvictionRun=3] + * Number of resources to check each eviction run. Default: 3. + * @param {Number} [opts.softIdleTimeoutMillis=-1] + * amount of time an object may sit idle in the pool before it is eligible + * for eviction by the idle object evictor (if any), with the extra condition + * that at least "min idle" object instances remain in the pool. Default -1 (nothing can get evicted) + * @param {Number} [opts.idleTimeoutMillis=30000] + * the minimum amount of time that an object may sit idle in the pool before it is eligible for eviction + * due to idle time. 
Supercedes "softIdleTimeoutMillis" Default: 30000 + * @param {typeof Promise} [opts.Promise=Promise] + * What promise implementation should the pool use, defaults to native promises. + */ + constructor(opts) { + const poolDefaults = new PoolDefaults(); + + opts = opts || {}; + + this.fifo = typeof opts.fifo === "boolean" ? opts.fifo : poolDefaults.fifo; + this.priorityRange = opts.priorityRange || poolDefaults.priorityRange; + + this.testOnBorrow = + typeof opts.testOnBorrow === "boolean" + ? opts.testOnBorrow + : poolDefaults.testOnBorrow; + this.testOnReturn = + typeof opts.testOnReturn === "boolean" + ? opts.testOnReturn + : poolDefaults.testOnReturn; + + this.autostart = + typeof opts.autostart === "boolean" + ? opts.autostart + : poolDefaults.autostart; + + if (opts.acquireTimeoutMillis) { + // @ts-ignore + this.acquireTimeoutMillis = parseInt(opts.acquireTimeoutMillis, 10); + } + + if (opts.destroyTimeoutMillis) { + // @ts-ignore + this.destroyTimeoutMillis = parseInt(opts.destroyTimeoutMillis, 10); + } + + if (opts.maxWaitingClients !== undefined) { + // @ts-ignore + this.maxWaitingClients = parseInt(opts.maxWaitingClients, 10); + } + + // @ts-ignore + this.max = parseInt(opts.max, 10); + // @ts-ignore + this.min = parseInt(opts.min, 10); + + this.max = Math.max(isNaN(this.max) ? 1 : this.max, 1); + this.min = Math.min(isNaN(this.min) ? 0 : this.min, this.max); + + this.evictionRunIntervalMillis = + opts.evictionRunIntervalMillis || poolDefaults.evictionRunIntervalMillis; + this.numTestsPerEvictionRun = + opts.numTestsPerEvictionRun || poolDefaults.numTestsPerEvictionRun; + this.softIdleTimeoutMillis = + opts.softIdleTimeoutMillis || poolDefaults.softIdleTimeoutMillis; + this.idleTimeoutMillis = + opts.idleTimeoutMillis || poolDefaults.idleTimeoutMillis; + + this.Promise = opts.Promise != null ? 
opts.Promise : poolDefaults.Promise; + } +} + +module.exports = PoolOptions; diff --git a/node_modules/generic-pool/lib/PooledResource.js b/node_modules/generic-pool/lib/PooledResource.js new file mode 100644 index 0000000..9748748 --- /dev/null +++ b/node_modules/generic-pool/lib/PooledResource.js @@ -0,0 +1,49 @@ +"use strict"; + +const PooledResourceStateEnum = require("./PooledResourceStateEnum"); + +/** + * @class + * @private + */ +class PooledResource { + constructor(resource) { + this.creationTime = Date.now(); + this.lastReturnTime = null; + this.lastBorrowTime = null; + this.lastIdleTime = null; + this.obj = resource; + this.state = PooledResourceStateEnum.IDLE; + } + + // mark the resource as "allocated" + allocate() { + this.lastBorrowTime = Date.now(); + this.state = PooledResourceStateEnum.ALLOCATED; + } + + // mark the resource as "deallocated" + deallocate() { + this.lastReturnTime = Date.now(); + this.state = PooledResourceStateEnum.IDLE; + } + + invalidate() { + this.state = PooledResourceStateEnum.INVALID; + } + + test() { + this.state = PooledResourceStateEnum.VALIDATION; + } + + idle() { + this.lastIdleTime = Date.now(); + this.state = PooledResourceStateEnum.IDLE; + } + + returning() { + this.state = PooledResourceStateEnum.RETURNING; + } +} + +module.exports = PooledResource; diff --git a/node_modules/generic-pool/lib/PooledResourceStateEnum.js b/node_modules/generic-pool/lib/PooledResourceStateEnum.js new file mode 100644 index 0000000..aaab2d7 --- /dev/null +++ b/node_modules/generic-pool/lib/PooledResourceStateEnum.js @@ -0,0 +1,11 @@ +"use strict"; + +const PooledResourceStateEnum = { + ALLOCATED: "ALLOCATED", // In use + IDLE: "IDLE", // In the queue, not in use. 
+ INVALID: "INVALID", // Failed validation + RETURNING: "RETURNING", // Resource is in process of returning + VALIDATION: "VALIDATION" // Currently being tested +}; + +module.exports = PooledResourceStateEnum; diff --git a/node_modules/generic-pool/lib/PriorityQueue.js b/node_modules/generic-pool/lib/PriorityQueue.js new file mode 100644 index 0000000..ca9916b --- /dev/null +++ b/node_modules/generic-pool/lib/PriorityQueue.js @@ -0,0 +1,69 @@ +"use strict"; + +const Queue = require("./Queue"); + +/** + * @class + * @private + */ +class PriorityQueue { + constructor(size) { + this._size = Math.max(+size | 0, 1); + /** @type {Queue[]} */ + this._slots = []; + // initialize arrays to hold queue elements + for (let i = 0; i < this._size; i++) { + this._slots.push(new Queue()); + } + } + + get length() { + let _length = 0; + for (let i = 0, slots = this._slots.length; i < slots; i++) { + _length += this._slots[i].length; + } + return _length; + } + + enqueue(obj, priority) { + // Convert to integer with a default value of 0. 
+ priority = (priority && +priority | 0) || 0; + + if (priority) { + if (priority < 0 || priority >= this._size) { + priority = this._size - 1; + // put obj at the end of the line + } + } + this._slots[priority].push(obj); + } + + dequeue() { + for (let i = 0, sl = this._slots.length; i < sl; i += 1) { + if (this._slots[i].length) { + return this._slots[i].shift(); + } + } + return; + } + + get head() { + for (let i = 0, sl = this._slots.length; i < sl; i += 1) { + if (this._slots[i].length > 0) { + return this._slots[i].head; + } + } + return; + } + + get tail() { + for (let i = this._slots.length - 1; i >= 0; i--) { + if (this._slots[i].length > 0) { + return this._slots[i].tail; + } + } + return; + } +} + +module.exports = PriorityQueue; diff --git a/node_modules/generic-pool/lib/Queue.js b/node_modules/generic-pool/lib/Queue.js new file mode 100644 index 0000000..94cab13 --- /dev/null +++ b/node_modules/generic-pool/lib/Queue.js @@ -0,0 +1,35 @@ +"use strict"; + +const DoublyLinkedList = require("./DoublyLinkedList"); +const Deque = require("./Deque"); + +/** + * Sort of a internal queue for holding the waiting + * resource requets for a given "priority". + * Also handles managing timeouts rejections on items (is this the best place for this?) 
+ * This is the last point where we know which queue a resourceRequest is in + * + */ +class Queue extends Deque { + /** + * Adds the obj to the end of the list for this slot + * we completely override the parent method because we need access to the + * node for our rejection handler + * @param {any} resourceRequest [description] + */ + push(resourceRequest) { + const node = DoublyLinkedList.createNode(resourceRequest); + resourceRequest.promise.catch(this._createTimeoutRejectionHandler(node)); + this._list.insertEnd(node); + } + + _createTimeoutRejectionHandler(node) { + return reason => { + if (reason.name === "TimeoutError") { + this._list.remove(node); + } + }; + } +} + +module.exports = Queue; diff --git a/node_modules/generic-pool/lib/ResourceLoan.js b/node_modules/generic-pool/lib/ResourceLoan.js new file mode 100644 index 0000000..f657cb4 --- /dev/null +++ b/node_modules/generic-pool/lib/ResourceLoan.js @@ -0,0 +1,29 @@ +"use strict"; + +const Deferred = require("./Deferred"); + +/** + * Plan is to maybe add tracking via Error objects + * and other fun stuff! 
+ */ + +class ResourceLoan extends Deferred { + /** + * + * @param {any} pooledResource the PooledResource this loan belongs to + * @return {any} [description] + */ + constructor(pooledResource, Promise) { + super(Promise); + this._creationTimestamp = Date.now(); + this.pooledResource = pooledResource; + } + + reject() { + /** + * Loans can only be resolved at the moment + */ + } +} + +module.exports = ResourceLoan; diff --git a/node_modules/generic-pool/lib/ResourceRequest.js b/node_modules/generic-pool/lib/ResourceRequest.js new file mode 100644 index 0000000..aa30619 --- /dev/null +++ b/node_modules/generic-pool/lib/ResourceRequest.js @@ -0,0 +1,76 @@ +"use strict"; + +const Deferred = require("./Deferred"); +const errors = require("./errors"); + +function fbind(fn, ctx) { + return function bound() { + return fn.apply(ctx, arguments); + }; +} + +/** + * Wraps a users request for a resource + * Basically a promise mashed in with a timeout + * @private + */ +class ResourceRequest extends Deferred { + /** + * [constructor description] + * @param {Number} ttl timeout + */ + constructor(ttl, Promise) { + super(Promise); + this._creationTimestamp = Date.now(); + this._timeout = null; + + if (ttl !== undefined) { + this.setTimeout(ttl); + } + } + + setTimeout(delay) { + if (this._state !== ResourceRequest.PENDING) { + return; + } + const ttl = parseInt(delay, 10); + + if (isNaN(ttl) || ttl <= 0) { + throw new Error("delay must be a positive int"); + } + + const age = Date.now() - this._creationTimestamp; + + if (this._timeout) { + this.removeTimeout(); + } + + this._timeout = setTimeout( + fbind(this._fireTimeout, this), + Math.max(ttl - age, 0) + ); + } + + removeTimeout() { + if (this._timeout) { + clearTimeout(this._timeout); + } + this._timeout = null; + } + + _fireTimeout() { + this.reject(new errors.TimeoutError("ResourceRequest timed out")); + } + + reject(reason) { + this.removeTimeout(); + super.reject(reason); + } + + resolve(value) { + this.removeTimeout(); 
+ super.resolve(value); + } +} + +module.exports = ResourceRequest; diff --git a/node_modules/generic-pool/lib/errors.js b/node_modules/generic-pool/lib/errors.js new file mode 100644 index 0000000..b02d822 --- /dev/null +++ b/node_modules/generic-pool/lib/errors.js @@ -0,0 +1,27 @@ +"use strict"; + +class ExtendableError extends Error { + constructor(message) { + super(message); + // @ts-ignore + this.name = this.constructor.name; + this.message = message; + if (typeof Error.captureStackTrace === "function") { + Error.captureStackTrace(this, this.constructor); + } else { + this.stack = new Error(message).stack; + } + } +} + +/* eslint-disable no-useless-constructor */ +class TimeoutError extends ExtendableError { + constructor(m) { + super(m); + } +} +/* eslint-enable no-useless-constructor */ + +module.exports = { + TimeoutError: TimeoutError +}; diff --git a/node_modules/generic-pool/lib/factoryValidator.js b/node_modules/generic-pool/lib/factoryValidator.js new file mode 100644 index 0000000..2f2fb5e --- /dev/null +++ b/node_modules/generic-pool/lib/factoryValidator.js @@ -0,0 +1,16 @@ +module.exports = function(factory) { + if (typeof factory.create !== "function") { + throw new TypeError("factory.create must be a function"); + } + + if (typeof factory.destroy !== "function") { + throw new TypeError("factory.destroy must be a function"); + } + + if ( + typeof factory.validate !== "undefined" && + typeof factory.validate !== "function" + ) { + throw new TypeError("factory.validate must be a function"); + } +}; diff --git a/node_modules/generic-pool/lib/utils.js b/node_modules/generic-pool/lib/utils.js new file mode 100644 index 0000000..19d7f31 --- /dev/null +++ b/node_modules/generic-pool/lib/utils.js @@ -0,0 +1,13 @@ +"use strict"; + +function noop() {} + +/** + * Reflects a promise but does not expose any + * underlying value or rejection from that promise. 
+ * @param {Promise} promise [description] + * @return {Promise} [description] + */ +exports.reflector = function(promise) { + return promise.then(noop, noop); +}; diff --git a/node_modules/generic-pool/package.json b/node_modules/generic-pool/package.json new file mode 100644 index 0000000..4daf20a --- /dev/null +++ b/node_modules/generic-pool/package.json @@ -0,0 +1,297 @@ +{ + "name": "generic-pool", + "description": "Generic resource pooling for Node.JS", + "homepage": "https://github.com/coopernurse/node-pool#readme", + "version": "3.9.0", + "main": "index.js", + "author": { + "email": "james@bitmechanic.com", + "name": "James Cooper" + }, + "contributors": [ + { + "name": "James Butler", + "email": "james.butler@sandfox.co.uk" + }, + { + "name": "Kiko Beats", + "email": "josefrancisco.verdu@gmail.com" + }, + { + "name": "Felipe Machado", + "email": "felipou@gmail.com" + }, + { + "name": "Idan Attias", + "email": "idana@wix.com" + }, + { + "name": "Bryan Donovan", + "email": "bdondo@gmail.com" + }, + { + "name": "C-h-e-r-r-y", + "email": "C-h-e-r-r-y@users.noreply.github.com" + }, + { + "name": "rebareba", + "email": "forcdc1990@gmail.com" + }, + { + "name": "t3hmrman", + "email": "t3hmrman@gmail.com" + }, + { + "name": "Thomas Dimson", + "email": "tdimson@gmail.com" + }, + { + "name": "Anup Baldawa", + "email": "anup@joinhoney.com" + }, + { + "name": "Kevin Burke", + "email": "burke@shyp.com" + }, + { + "name": "Teow Hua Jun", + "email": "huajun@Teows-MacBook-Pro.local" + }, + { + "name": "Joe Z", + "email": "jzarate@gmail.com" + }, + { + "name": "Peter Galiba", + "email": "poetro@poetro.hu" + }, + { + "name": "Asbjørn Sannes", + "email": "asbjorn.sannes@interhost.no" + }, + { + "name": "san00", + "email": "c5d59d07@opayq.com" + }, + { + "name": "Christian d'Heureuse", + "email": "chdh@inventec.ch" + }, + { + "name": "Ryan Dao", + "email": "ddao@paypal.com" + }, + { + "name": "Diego Rodríguez Baquero", + "email": "diegorbaquero@gmail.com" + }, + { + "name": 
"Felix Becker", + "email": "felix.b@outlook.com" + }, + { + "name": "Geovani de Souza", + "email": "geovanisouza92@gmail.com" + }, + { + "name": "Jemila", + "email": "jemila.abulhawa@gmail.com" + }, + { + "name": "Justin Robinson", + "email": "jrobinson@redventures.com" + }, + { + "name": "linchuang", + "email": "linchuang@tencent.com" + }, + { + "name": "Nayana Hettiarachchi", + "email": "nayana@corp-gems.com" + }, + { + "name": "restjohn", + "email": "restjohn@users.noreply.github.com" + }, + { + "name": "Sushant", + "email": "sushantdhiman@outlook.com" + }, + { + "name": "travis4all", + "email": "travis4all@diamon.dz" + }, + { + "name": "Will Shaver", + "email": "will.shaver@emberex.com" + }, + { + "name": "windyrobin", + "email": "windyrobin@Gmail.com" + }, + { + "name": "王秋石", + "email": "12qiushi@163.com" + }, + { + "name": "Stephen Cresswell", + "email": "229672+cressie176@users.noreply.github.com" + }, + { + "name": "Arek Flinik", + "email": "aflinik@gmail.com" + }, + { + "name": "Alexander Tesfamichael", + "email": "Alex.Tesfamichael@gmail.com" + }, + { + "name": "calibr", + "email": "awcalibr@gmail.com" + }, + { + "name": "benny-medflyt", + "email": "benny@medflyt.com" + }, + { + "name": "Bryan Kaplan", + "email": "bryan@pinchit.com" + }, + { + "name": "Dumitru Uzun", + "email": "contact@duzun.me" + }, + { + "name": "Douglas Christopher Wilson", + "email": "doug@somethingdoug.com" + }, + { + "name": "Drew R", + "email": "drewrathbone@gmail.com" + }, + { + "name": "Magnus Eide", + "email": "eide@iterate.no" + }, + { + "name": "gdusbabek", + "email": "gdusbabek@gmail.com" + }, + { + "name": "Jason Rhodes", + "email": "jason.matthew.rhodes@gmail.com" + }, + { + "name": "John Dooley", + "email": "john.j.dooley@gmail.com" + }, + { + "name": "Lewis Ellis", + "email": "lewis@getsentry.com" + }, + { + "name": "Tom MacWright", + "email": "macwright@gmail.com" + }, + { + "name": "Louis Roché", + "email": "mail+github@louisroche.net" + }, + { + "name": "Mike 
Morris", + "email": "mikemorris@users.noreply.github.com" + }, + { + "name": "molipet", + "email": "molipet@gmail.com" + }, + { + "name": "An Nguyen Le", + "email": "nguyenan169@gmail.com" + }, + { + "name": "Piotr", + "email": "pwalc@agora.pl" + }, + { + "name": "Randall Leeds", + "email": "randall.leeds@gmail.com" + }, + { + "name": "Roy Binux", + "email": "root@binux.me" + }, + { + "name": "Sandro Santilli", + "email": "strk@keybit.net" + }, + { + "name": "Teemu Ikonen", + "email": "teemu.ikonen@iki.fi" + }, + { + "name": "Tevye Krynski", + "email": "tevye@mog.com" + }, + { + "name": "Thom Seddon", + "email": "thom@nightworld.com" + }, + { + "name": "Thomas Watson Steen", + "email": "w@tson.dk" + }, + { + "name": "Wilfred van der Deijl", + "email": "wilfred@vanderdeijl.com" + }, + { + "name": "Yanlong Wang", + "email": "yanlong.wang@naiver.org" + }, + { + "name": "Young Hahn", + "email": "young@developmentseed.org" + }, + { + "name": "Rajesh kumar", + "email": "zazzel.cvs@gmail.com" + } + ], + "repository": { + "type": "git", + "url": "git+ssh://git@github.com/coopernurse/node-pool.git" + }, + "bugs": { + "url": "https://github.com/coopernurse/node-pool/issues" + }, + "keywords": [ + "pool", + "pooling", + "throttle" + ], + "devDependencies": { + "@types/node": "^8.5.1", + "eslint": "^4.9.0", + "eslint-config-prettier": "^2.6.0", + "eslint-plugin-prettier": "^2.3.1", + "eslint-plugin-promise": "^3.3.0", + "prettier": "^1.7.4", + "tap": "^8.0.0", + "typescript": "^2.6.2" + }, + "engines": { + "node": ">= 4" + }, + "files": [ + "index.d.ts", + "index.js", + "lib" + ], + "license": "MIT", + "scripts": { + "lint": "eslint lib test index.js .eslintrc.js", + "lint-fix": "eslint --fix lib test index.js .eslintrc.js", + "test": "tap test/*-test.js " + } +} \ No newline at end of file diff --git a/node_modules/glob/LICENSE b/node_modules/glob/LICENSE new file mode 100644 index 0000000..42ca266 --- /dev/null +++ b/node_modules/glob/LICENSE @@ -0,0 +1,21 @@ +The ISC 
License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +## Glob Logo + +Glob's logo created by Tanya Brassie , licensed +under a Creative Commons Attribution-ShareAlike 4.0 International License +https://creativecommons.org/licenses/by-sa/4.0/ diff --git a/node_modules/glob/README.md b/node_modules/glob/README.md new file mode 100644 index 0000000..83f0c83 --- /dev/null +++ b/node_modules/glob/README.md @@ -0,0 +1,378 @@ +# Glob + +Match files using the patterns the shell uses, like stars and stuff. + +[![Build Status](https://travis-ci.org/isaacs/node-glob.svg?branch=master)](https://travis-ci.org/isaacs/node-glob/) [![Build Status](https://ci.appveyor.com/api/projects/status/kd7f3yftf7unxlsx?svg=true)](https://ci.appveyor.com/project/isaacs/node-glob) [![Coverage Status](https://coveralls.io/repos/isaacs/node-glob/badge.svg?branch=master&service=github)](https://coveralls.io/github/isaacs/node-glob?branch=master) + +This is a glob implementation in JavaScript. It uses the `minimatch` +library to do its matching. 
+ +![a fun cartoon logo made of glob characters](logo/glob.png) + +## Usage + +Install with npm + +``` +npm i glob +``` + +```javascript +var glob = require("glob") + +// options is optional +glob("**/*.js", options, function (er, files) { + // files is an array of filenames. + // If the `nonull` option is set, and nothing + // was found, then files is ["**/*.js"] + // er is an error object or null. +}) +``` + +## Glob Primer + +"Globs" are the patterns you type when you do stuff like `ls *.js` on +the command line, or put `build/*` in a `.gitignore` file. + +Before parsing the path part patterns, braced sections are expanded +into a set. Braced sections start with `{` and end with `}`, with any +number of comma-delimited sections within. Braced sections may contain +slash characters, so `a{/b/c,bcd}` would expand into `a/b/c` and `abcd`. + +The following characters have special magic meaning when used in a +path portion: + +* `*` Matches 0 or more characters in a single path portion +* `?` Matches 1 character +* `[...]` Matches a range of characters, similar to a RegExp range. + If the first character of the range is `!` or `^` then it matches + any character not in the range. +* `!(pattern|pattern|pattern)` Matches anything that does not match + any of the patterns provided. +* `?(pattern|pattern|pattern)` Matches zero or one occurrence of the + patterns provided. +* `+(pattern|pattern|pattern)` Matches one or more occurrences of the + patterns provided. +* `*(a|b|c)` Matches zero or more occurrences of the patterns provided +* `@(pattern|pat*|pat?erN)` Matches exactly one of the patterns + provided +* `**` If a "globstar" is alone in a path portion, then it matches + zero or more directories and subdirectories searching for matches. + It does not crawl symlinked directories. 
+ +### Dots + +If a file or directory path portion has a `.` as the first character, +then it will not match any glob pattern unless that pattern's +corresponding path part also has a `.` as its first character. + +For example, the pattern `a/.*/c` would match the file at `a/.b/c`. +However the pattern `a/*/c` would not, because `*` does not start with +a dot character. + +You can make glob treat dots as normal characters by setting +`dot:true` in the options. + +### Basename Matching + +If you set `matchBase:true` in the options, and the pattern has no +slashes in it, then it will seek for any file anywhere in the tree +with a matching basename. For example, `*.js` would match +`test/simple/basic.js`. + +### Empty Sets + +If no matching files are found, then an empty array is returned. This +differs from the shell, where the pattern itself is returned. For +example: + + $ echo a*s*d*f + a*s*d*f + +To get the bash-style behavior, set the `nonull:true` in the options. + +### See Also: + +* `man sh` +* `man bash` (Search for "Pattern Matching") +* `man 3 fnmatch` +* `man 5 gitignore` +* [minimatch documentation](https://github.com/isaacs/minimatch) + +## glob.hasMagic(pattern, [options]) + +Returns `true` if there are any special characters in the pattern, and +`false` otherwise. + +Note that the options affect the results. If `noext:true` is set in +the options object, then `+(a|b)` will not be considered a magic +pattern. If the pattern has a brace expansion, like `a/{b/c,x/y}` +then that is considered magical, unless `nobrace:true` is set in the +options. + +## glob(pattern, [options], cb) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* `cb` `{Function}` + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Perform an asynchronous glob search. 
+ +## glob.sync(pattern, [options]) + +* `pattern` `{String}` Pattern to be matched +* `options` `{Object}` +* return: `{Array}` filenames found matching the pattern + +Perform a synchronous glob search. + +## Class: glob.Glob + +Create a Glob object by instantiating the `glob.Glob` class. + +```javascript +var Glob = require("glob").Glob +var mg = new Glob(pattern, options, cb) +``` + +It's an EventEmitter, and starts walking the filesystem to find matches +immediately. + +### new glob.Glob(pattern, [options], [cb]) + +* `pattern` `{String}` pattern to search for +* `options` `{Object}` +* `cb` `{Function}` Called when an error occurs, or matches are found + * `err` `{Error | null}` + * `matches` `{Array}` filenames found matching the pattern + +Note that if the `sync` flag is set in the options, then matches will +be immediately available on the `g.found` member. + +### Properties + +* `minimatch` The minimatch object that the glob uses. +* `options` The options object passed in. +* `aborted` Boolean which is set to true when calling `abort()`. There + is no way at this time to continue a glob search after aborting, but + you can re-use the statCache to avoid having to duplicate syscalls. +* `cache` Convenience object. Each field has the following possible + values: + * `false` - Path does not exist + * `true` - Path exists + * `'FILE'` - Path exists, and is not a directory + * `'DIR'` - Path exists, and is a directory + * `[file, entries, ...]` - Path exists, is a directory, and the + array value is the results of `fs.readdir` +* `statCache` Cache of `fs.stat` results, to prevent statting the same + path multiple times. +* `symlinks` A record of which paths are symbolic links, which is + relevant in resolving `**` patterns. +* `realpathCache` An optional object which is passed to `fs.realpath` + to minimize unnecessary syscalls. It is stored on the instantiated + Glob object, and may be re-used. 
+ +### Events + +* `end` When the matching is finished, this is emitted with all the + matches found. If the `nonull` option is set, and no match was found, + then the `matches` list contains the original pattern. The matches + are sorted, unless the `nosort` flag is set. +* `match` Every time a match is found, this is emitted with the specific + thing that matched. It is not deduplicated or resolved to a realpath. +* `error` Emitted when an unexpected error is encountered, or whenever + any fs error occurs if `options.strict` is set. +* `abort` When `abort()` is called, this event is raised. + +### Methods + +* `pause` Temporarily stop the search +* `resume` Resume the search +* `abort` Stop the search forever + +### Options + +All the options that can be passed to Minimatch can also be passed to +Glob to change pattern matching behavior. Also, some have been added, +or have glob-specific ramifications. + +All options are false by default, unless otherwise noted. + +All options are added to the Glob object, as well. + +If you are running many `glob` operations, you can pass a Glob object +as the `options` argument to a subsequent operation to shortcut some +`stat` and `readdir` calls. At the very least, you may pass in shared +`symlinks`, `statCache`, `realpathCache`, and `cache` options, so that +parallel glob operations will be sped up by sharing information about +the filesystem. + +* `cwd` The current working directory in which to search. Defaults + to `process.cwd()`. +* `root` The place where patterns starting with `/` will be mounted + onto. Defaults to `path.resolve(options.cwd, "/")` (`/` on Unix + systems, and `C:\` or some such on Windows.) +* `dot` Include `.dot` files in normal matches and `globstar` matches. + Note that an explicit dot in a portion of the pattern will always + match dot files. +* `nomount` By default, a pattern starting with a forward-slash will be + "mounted" onto the root setting, so that a valid filesystem path is + returned. 
Set this flag to disable that behavior. +* `mark` Add a `/` character to directory matches. Note that this + requires additional stat calls. +* `nosort` Don't sort the results. +* `stat` Set to true to stat *all* results. This reduces performance + somewhat, and is completely unnecessary, unless `readdir` is presumed + to be an untrustworthy indicator of file existence. +* `silent` When an unusual error is encountered when attempting to + read a directory, a warning will be printed to stderr. Set the + `silent` option to true to suppress these warnings. +* `strict` When an unusual error is encountered when attempting to + read a directory, the process will just continue on in search of + other matches. Set the `strict` option to raise an error in these + cases. +* `cache` See `cache` property above. Pass in a previously generated + cache object to save some fs calls. +* `statCache` A cache of results of filesystem information, to prevent + unnecessary stat calls. While it should not normally be necessary + to set this, you may pass the statCache from one glob() call to the + options object of another, if you know that the filesystem will not + change between calls. (See "Race Conditions" below.) +* `symlinks` A cache of known symbolic links. You may pass in a + previously generated `symlinks` object to save `lstat` calls when + resolving `**` matches. +* `sync` DEPRECATED: use `glob.sync(pattern, opts)` instead. +* `nounique` In some cases, brace-expanded patterns can result in the + same file showing up multiple times in the result set. By default, + this implementation prevents duplicates in the result set. Set this + flag to disable that behavior. +* `nonull` Set to never return an empty set, instead returning a set + containing the pattern itself. This is the default in glob(3). +* `debug` Set to enable debug logging in minimatch and glob. +* `nobrace` Do not expand `{a,b}` and `{1..3}` brace sets. +* `noglobstar` Do not match `**` against multiple filenames. 
(Ie, + treat it as a normal `*` instead.) +* `noext` Do not match `+(a|b)` "extglob" patterns. +* `nocase` Perform a case-insensitive match. Note: on + case-insensitive filesystems, non-magic patterns will match by + default, since `stat` and `readdir` will not raise errors. +* `matchBase` Perform a basename-only match if the pattern does not + contain any slash characters. That is, `*.js` would be treated as + equivalent to `**/*.js`, matching all js files in all directories. +* `nodir` Do not match directories, only files. (Note: to match + *only* directories, simply put a `/` at the end of the pattern.) +* `ignore` Add a pattern or an array of glob patterns to exclude matches. + Note: `ignore` patterns are *always* in `dot:true` mode, regardless + of any other settings. +* `follow` Follow symlinked directories when expanding `**` patterns. + Note that this can result in a lot of duplicate references in the + presence of cyclic links. +* `realpath` Set to true to call `fs.realpath` on all of the results. + In the case of a symlink that cannot be resolved, the full absolute + path to the matched entry is returned (though it will usually be a + broken symlink) +* `absolute` Set to true to always receive absolute paths for matched + files. Unlike `realpath`, this also affects the values returned in + the `match` event. +* `fs` File-system object with Node's `fs` API. By default, the built-in + `fs` module will be used. Set to a volume provided by a library like + `memfs` to avoid using the "real" file-system. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between node-glob and other +implementations, and are intentional. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. This is supported in the manner of bsdglob +and bash 4.3, where `**` only has special significance if it is the only +thing in a path part. 
That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +Note that symlinked directories are not crawled as part of a `**`, +though their contents may match against subsequent portions of the +pattern. This prevents infinite loops and duplicates and the like. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then glob returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`glob.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. + +### Comments and Negation + +Previously, this module let you mark a pattern as a "comment" if it +started with a `#` character, or a "negated" pattern if it started +with a `!` character. + +These options were deprecated in version 5, and removed in version 6. + +To specify things that should not match, use the `ignore` option. + +## Windows + +**Please only use forward-slashes in glob expressions.** + +Though windows uses either `/` or `\` as its path separator, only `/` +characters are used by this glob implementation. You must use +forward-slashes **only** in glob expressions. Back-slashes will always +be interpreted as escape characters, not path separators. + +Results from absolute patterns such as `/foo/*` are mounted onto the +root setting using `path.join`. On windows, this will by default result +in `/foo/*` matching `C:\foo\bar.txt`. + +## Race Conditions + +Glob searching, by its very nature, is susceptible to race conditions, +since it relies on directory walking and such. 
+ +As a result, it is possible that a file that exists when glob looks for +it may have been deleted or modified by the time it returns the result. + +As part of its internal implementation, this program caches all stat +and readdir calls that it makes, in order to cut down on system +overhead. However, this also makes it even more susceptible to races, +especially if the cache or statCache objects are reused between glob +calls. + +Users are thus advised not to use a glob result as a guarantee of +filesystem state in the face of rapid changes. For the vast majority +of operations, this is never a problem. + +## Glob Logo +Glob's logo was created by [Tanya Brassie](http://tanyabrassie.com/). Logo files can be found [here](https://github.com/isaacs/node-glob/tree/master/logo). + +The logo is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/). + +## Contributing + +Any change to behavior (including bugfixes) must come with a test. + +Patches that fail tests or reduce performance will be rejected. 
+ +``` +# to run tests +npm test + +# to re-generate test fixtures +npm run test-regen + +# to benchmark against bash/zsh +npm run bench + +# to profile javascript +npm run prof +``` + +![](oh-my-glob.gif) diff --git a/node_modules/glob/common.js b/node_modules/glob/common.js new file mode 100644 index 0000000..424c46e --- /dev/null +++ b/node_modules/glob/common.js @@ -0,0 +1,238 @@ +exports.setopts = setopts +exports.ownProp = ownProp +exports.makeAbs = makeAbs +exports.finish = finish +exports.mark = mark +exports.isIgnored = isIgnored +exports.childrenIgnored = childrenIgnored + +function ownProp (obj, field) { + return Object.prototype.hasOwnProperty.call(obj, field) +} + +var fs = require("fs") +var path = require("path") +var minimatch = require("minimatch") +var isAbsolute = require("path-is-absolute") +var Minimatch = minimatch.Minimatch + +function alphasort (a, b) { + return a.localeCompare(b, 'en') +} + +function setupIgnores (self, options) { + self.ignore = options.ignore || [] + + if (!Array.isArray(self.ignore)) + self.ignore = [self.ignore] + + if (self.ignore.length) { + self.ignore = self.ignore.map(ignoreMap) + } +} + +// ignore patterns are always in dot:true mode. +function ignoreMap (pattern) { + var gmatcher = null + if (pattern.slice(-3) === '/**') { + var gpattern = pattern.replace(/(\/\*\*)+$/, '') + gmatcher = new Minimatch(gpattern, { dot: true }) + } + + return { + matcher: new Minimatch(pattern, { dot: true }), + gmatcher: gmatcher + } +} + +function setopts (self, pattern, options) { + if (!options) + options = {} + + // base-matching: just use globstar for that. 
+ if (options.matchBase && -1 === pattern.indexOf("/")) { + if (options.noglobstar) { + throw new Error("base matching requires globstar") + } + pattern = "**/" + pattern + } + + self.silent = !!options.silent + self.pattern = pattern + self.strict = options.strict !== false + self.realpath = !!options.realpath + self.realpathCache = options.realpathCache || Object.create(null) + self.follow = !!options.follow + self.dot = !!options.dot + self.mark = !!options.mark + self.nodir = !!options.nodir + if (self.nodir) + self.mark = true + self.sync = !!options.sync + self.nounique = !!options.nounique + self.nonull = !!options.nonull + self.nosort = !!options.nosort + self.nocase = !!options.nocase + self.stat = !!options.stat + self.noprocess = !!options.noprocess + self.absolute = !!options.absolute + self.fs = options.fs || fs + + self.maxLength = options.maxLength || Infinity + self.cache = options.cache || Object.create(null) + self.statCache = options.statCache || Object.create(null) + self.symlinks = options.symlinks || Object.create(null) + + setupIgnores(self, options) + + self.changedCwd = false + var cwd = process.cwd() + if (!ownProp(options, "cwd")) + self.cwd = cwd + else { + self.cwd = path.resolve(options.cwd) + self.changedCwd = self.cwd !== cwd + } + + self.root = options.root || path.resolve(self.cwd, "/") + self.root = path.resolve(self.root) + if (process.platform === "win32") + self.root = self.root.replace(/\\/g, "/") + + // TODO: is an absolute `cwd` supposed to be resolved against `root`? + // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') + self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) + if (process.platform === "win32") + self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") + self.nomount = !!options.nomount + + // disable comments and negation in Minimatch. + // Note that they are not supported in Glob itself anyway. 
+ options.nonegate = true + options.nocomment = true + // always treat \ in patterns as escapes, not path separators + options.allowWindowsEscape = false + + self.minimatch = new Minimatch(pattern, options) + self.options = self.minimatch.options +} + +function finish (self) { + var nou = self.nounique + var all = nou ? [] : Object.create(null) + + for (var i = 0, l = self.matches.length; i < l; i ++) { + var matches = self.matches[i] + if (!matches || Object.keys(matches).length === 0) { + if (self.nonull) { + // do like the shell, and spit out the literal glob + var literal = self.minimatch.globSet[i] + if (nou) + all.push(literal) + else + all[literal] = true + } + } else { + // had matches + var m = Object.keys(matches) + if (nou) + all.push.apply(all, m) + else + m.forEach(function (m) { + all[m] = true + }) + } + } + + if (!nou) + all = Object.keys(all) + + if (!self.nosort) + all = all.sort(alphasort) + + // at *some* point we statted all of these + if (self.mark) { + for (var i = 0; i < all.length; i++) { + all[i] = self._mark(all[i]) + } + if (self.nodir) { + all = all.filter(function (e) { + var notDir = !(/\/$/.test(e)) + var c = self.cache[e] || self.cache[makeAbs(self, e)] + if (notDir && c) + notDir = c !== 'DIR' && !Array.isArray(c) + return notDir + }) + } + } + + if (self.ignore.length) + all = all.filter(function(m) { + return !isIgnored(self, m) + }) + + self.found = all +} + +function mark (self, p) { + var abs = makeAbs(self, p) + var c = self.cache[abs] + var m = p + if (c) { + var isDir = c === 'DIR' || Array.isArray(c) + var slash = p.slice(-1) === '/' + + if (isDir && !slash) + m += '/' + else if (!isDir && slash) + m = m.slice(0, -1) + + if (m !== p) { + var mabs = makeAbs(self, m) + self.statCache[mabs] = self.statCache[abs] + self.cache[mabs] = self.cache[abs] + } + } + + return m +} + +// lotta situps... 
+function makeAbs (self, f) { + var abs = f + if (f.charAt(0) === '/') { + abs = path.join(self.root, f) + } else if (isAbsolute(f) || f === '') { + abs = f + } else if (self.changedCwd) { + abs = path.resolve(self.cwd, f) + } else { + abs = path.resolve(f) + } + + if (process.platform === 'win32') + abs = abs.replace(/\\/g, '/') + + return abs +} + + +// Return true, if pattern ends with globstar '**', for the accompanying parent directory. +// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents +function isIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) + }) +} + +function childrenIgnored (self, path) { + if (!self.ignore.length) + return false + + return self.ignore.some(function(item) { + return !!(item.gmatcher && item.gmatcher.match(path)) + }) +} diff --git a/node_modules/glob/glob.js b/node_modules/glob/glob.js new file mode 100644 index 0000000..37a4d7e --- /dev/null +++ b/node_modules/glob/glob.js @@ -0,0 +1,790 @@ +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. 
+// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. + +module.exports = glob + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var inherits = require('inherits') +var EE = require('events').EventEmitter +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var globSync = require('./sync.js') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = require('inflight') +var util = require('util') +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +var once = require('once') + +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} + + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } + + return new Glob(pattern, options, cb) +} + +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync + +// old api surface +glob.glob = glob + +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } + + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin +} + +glob.hasMagic = function (pattern, options_) 
{ + var options = extend({}, options_) + options.noprocess = true + + var g = new Glob(pattern, options) + var set = g.minimatch.set + + if (!pattern) + return false + + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true + } + + return false +} + +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) + } + + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) + + setopts(this, pattern, options) + this._didRealPath = false + + // process each pattern in the minimatch set + var n = this.minimatch.set.length + + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. 
+ this.matches = new Array(n) + + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) + } + + var self = this + this._processing = 0 + + this._emitQueue = [] + this._processQueue = [] + this.paused = false + + if (this.noprocess) + return this + + if (n === 0) + return done() + + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false + + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } + } + } +} + +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return + + if (this.realpath && !this._didRealpath) + return this._realpath() + + common.finish(this) + this.emit('end', this.found) +} + +Glob.prototype._realpath = function () { + if (this._didRealpath) + return + + this._didRealpath = true + + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) + + function next () { + if (--n === 0) + self._finish() + } +} + +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() + + var found = Object.keys(matchset) + var self = this + var n = found.length + + if (n === 0) + return cb() + + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. 
+ p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here + + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} + +Glob.prototype._mark = function (p) { + return common.mark(this, p) +} + +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} + +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} + +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} + +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } + } +} + +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') + + if (this.aborted) + return + + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return + } + + //console.error('PROCESS %d', this._processing, pattern) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return + + case 0: + // pattern *starts* with some non-trivial item. 
+ // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) +} + +Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + +Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return cb() + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' 
|| dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return cb() + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. + + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return cb() + } + + // now test all matched entries as stand-ins for that part + // of the pattern. + remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) + } + cb() +} + +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return + + if (isIgnored(this, e)) + return + + if (this.paused) { + this._emitQueue.push([index, e]) + return + } + + var abs = isAbsolute(e) ? 
e : this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) + e = abs + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) + + this.emit('match', e) +} + +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return + + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) + + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + self.fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. 
+ if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) + } +} + +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return + + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return + + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() + + if (Array.isArray(c)) + return cb(null, c) + } + + var self = this + self.fs.readdir(abs, readdirCb(this, abs, cb)) +} + +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} + +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return + + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + return cb(null, entries) +} + +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return + + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. 
+ var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. + this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() + } + if (!this.silent) + console.error('glob error', er) + break + } + + return cb() +} + +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} + + +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) + + var isSym = this.symlinks[abs] + var len = entries.length + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' 
&& !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) + } + + cb() +} + +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? + var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { + + //console.error('ps2', prefix, exists) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} + +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return cb() + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + + if (needDir && c === 'FILE') + return cb() + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. 
+ } + + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } + } + + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + self.fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. + return self.fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } + } +} + +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() + } + + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat + + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) +} diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json new file mode 100644 index 0000000..5940b64 --- /dev/null +++ b/node_modules/glob/package.json @@ -0,0 +1,55 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "name": "glob", + "description": "a little globber", + "version": "7.2.3", + "publishConfig": { + "tag": "v7-legacy" + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "main": "glob.js", + "files": [ + "glob.js", + "sync.js", + "common.js" + ], + "engines": { + "node": "*" + }, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "devDependencies": { + "memfs": "^3.2.0", + "mkdirp": "0", + "rimraf": "^2.2.8", + "tap": "^15.0.6", + "tick": "0.0.6" + }, + "tap": { + "before": "test/00-setup.js", + "after": "test/zz-cleanup.js", + "jobs": 1 + }, + "scripts": { + "prepublish": "npm run benchclean", + "profclean": "rm -f v8.log profile.txt", + "test": "tap", + "test-regen": "npm run profclean && TEST_REGEN=1 node test/00-setup.js", + "bench": "bash benchmark.sh", + "prof": "bash prof.sh && cat profile.txt", + "benchclean": "node benchclean.js" + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + } +} diff --git a/node_modules/glob/sync.js b/node_modules/glob/sync.js new file mode 100644 index 0000000..2c4f480 --- /dev/null +++ b/node_modules/glob/sync.js @@ -0,0 +1,486 @@ +module.exports = globSync +globSync.GlobSync = GlobSync + +var rp = require('fs.realpath') +var minimatch = require('minimatch') +var Minimatch = minimatch.Minimatch +var Glob = require('./glob.js').Glob +var util = require('util') +var path = require('path') +var assert = require('assert') +var isAbsolute = require('path-is-absolute') +var common = require('./common.js') +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ 
+ 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found +} + +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') + + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) + + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false) + } + this._finish() +} + +GlobSync.prototype._finish = function () { + assert.ok(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } + } + }) + } + common.finish(this) +} + + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert.ok(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. 
+ // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break + } + + var remain = pattern.slice(n) + + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || + isAbsolute(pattern.map(function (p) { + return typeof p === 'string' ? p : '[*]' + }).join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix + + var abs = this._makeAbs(read) + + //if ignored, skip processing + if (childrenIgnored(this, read)) + return + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} + + +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) + + // if the abs isn't a dir, then nothing can match! + if (!entries) + return + + // It will only match dot entries if it starts with a dot, or if + // dot is set. Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' + + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } + + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return + + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. 
+ + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) + } + // This was the last one, and no stats were needed + return + } + + // now test all matched entries as stand-ins for that part + // of the pattern. + remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} + + +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return + + var abs = this._makeAbs(e) + + if (this.mark) + e = this._mark(e) + + if (this.absolute) { + e = abs + } + + if (this.matches[index][e]) + return + + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } + + this.matches[index][e] = true + + if (this.stat) + this._stat(e) +} + + +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) + + var entries + var lstat + var stat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null + } + } + + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. 
+ if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} + +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries + + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) + + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null + + if (Array.isArray(c)) + return c + } + + try { + return this._readdirEntries(abs, this.fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } +} + +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } + } + + this.cache[abs] = entries + + // mark and cache dir-ness + return entries +} + +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break + + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break + + default: // some unusual error. Treat as failure. 
+ this.cache[this._makeAbs(f)] = false + if (this.strict) + throw er + if (!this.silent) + console.error('glob error', er) + break + } +} + +GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + + var entries = this._readdir(abs, inGlobStar) + + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return + + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) + + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false) + + var len = entries.length + var isSym = this.symlinks[abs] + + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return + + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue + + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true) + + var below = gspref.concat(entries[i], remain) + this._process(below, index, true) + } +} + +GlobSync.prototype._processSimple = function (prefix, index) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
+ var exists = this._stat(prefix) + + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + // If it doesn't exist, then just mark the lack of results + if (!exists) + return + + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' + } + } + + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') + + // Mark this as a match + this._emitMatch(index, prefix) +} + +// Returns either 'DIR', 'FILE', or false +GlobSync.prototype._stat = function (f) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' + + if (f.length > this.maxLength) + return false + + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] + + if (Array.isArray(c)) + c = 'DIR' + + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return c + + if (needDir && c === 'FILE') + return false + + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. + } + + var exists + var stat = this.statCache[abs] + if (!stat) { + var lstat + try { + lstat = this.fs.lstatSync(abs) + } catch (er) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return false + } + } + + if (lstat && lstat.isSymbolicLink()) { + try { + stat = this.fs.statSync(abs) + } catch (er) { + stat = lstat + } + } else { + stat = lstat + } + } + + this.statCache[abs] = stat + + var c = true + if (stat) + c = stat.isDirectory() ? 
'DIR' : 'FILE' + + this.cache[abs] = this.cache[abs] || c + + if (needDir && c === 'FILE') + return false + + return c +} + +GlobSync.prototype._mark = function (p) { + return common.mark(this, p) +} + +GlobSync.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} diff --git a/node_modules/has-unicode/LICENSE b/node_modules/has-unicode/LICENSE new file mode 100644 index 0000000..d42e25e --- /dev/null +++ b/node_modules/has-unicode/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2014, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/has-unicode/README.md b/node_modules/has-unicode/README.md new file mode 100644 index 0000000..5a03e59 --- /dev/null +++ b/node_modules/has-unicode/README.md @@ -0,0 +1,43 @@ +has-unicode +=========== + +Try to guess if your terminal supports unicode + +```javascript +var hasUnicode = require("has-unicode") + +if (hasUnicode()) { + // the terminal probably has unicode support +} +``` +```javascript +var hasUnicode = require("has-unicode").tryHarder +hasUnicode(function(unicodeSupported) { + if (unicodeSupported) { + // the terminal probably has unicode support + } +}) +``` + +## Detecting Unicode + +What we actually detect is UTF-8 support, as that's what Node itself supports. 
+If you have a UTF-16 locale then you won't be detected as unicode capable. + +### Windows + +Since at least Windows 7, `cmd` and `powershell` have been unicode capable, +but unfortunately even then it's not guaranteed. In many localizations it +still uses legacy code pages and there's no facility short of running +programs or linking C++ that will let us detect this. As such, we +report any Windows installation as NOT unicode capable, and recommend +that you encourage your users to override this via config. + +### Unix Like Operating Systems + +We look at the environment variables `LC_ALL`, `LC_CTYPE`, and `LANG` in +that order. For `LC_ALL` and `LANG`, it looks for `.UTF-8` in the value. +For `LC_CTYPE` it looks to see if the value is `UTF-8`. This is sufficient +for most POSIX systems. While locale data can be put in `/etc/locale.conf` +as well, AFAIK it's always copied into the environment. + diff --git a/node_modules/has-unicode/index.js b/node_modules/has-unicode/index.js new file mode 100644 index 0000000..9b0fe44 --- /dev/null +++ b/node_modules/has-unicode/index.js @@ -0,0 +1,16 @@ +"use strict" +var os = require("os") + +var hasUnicode = module.exports = function () { + // Recent Win32 platforms (>XP) CAN support unicode in the console but + // don't have to, and in non-english locales often use traditional local + // code pages. There's no way, short of windows system calls or execing + // the chcp command line program to figure this out. As such, we default + // this to false and encourage your users to override it via config if + // appropriate. 
+ if (os.type() == "Windows_NT") { return false } + + var isUTF8 = /UTF-?8$/i + var ctype = process.env.LC_ALL || process.env.LC_CTYPE || process.env.LANG + return isUTF8.test(ctype) +} diff --git a/node_modules/has-unicode/package.json b/node_modules/has-unicode/package.json new file mode 100644 index 0000000..ebe9d76 --- /dev/null +++ b/node_modules/has-unicode/package.json @@ -0,0 +1,30 @@ +{ + "name": "has-unicode", + "version": "2.0.1", + "description": "Try to guess if your terminal supports unicode", + "main": "index.js", + "scripts": { + "test": "tap test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/iarna/has-unicode" + }, + "keywords": [ + "unicode", + "terminal" + ], + "files": [ + "index.js" + ], + "author": "Rebecca Turner ", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/has-unicode/issues" + }, + "homepage": "https://github.com/iarna/has-unicode", + "devDependencies": { + "require-inject": "^1.3.0", + "tap": "^2.3.1" + } +} diff --git a/node_modules/https-proxy-agent/README.md b/node_modules/https-proxy-agent/README.md new file mode 100644 index 0000000..328656a --- /dev/null +++ b/node_modules/https-proxy-agent/README.md @@ -0,0 +1,137 @@ +https-proxy-agent +================ +### An HTTP(s) proxy `http.Agent` implementation for HTTPS +[![Build Status](https://github.com/TooTallNate/node-https-proxy-agent/workflows/Node%20CI/badge.svg)](https://github.com/TooTallNate/node-https-proxy-agent/actions?workflow=Node+CI) + +This module provides an `http.Agent` implementation that connects to a specified +HTTP or HTTPS proxy server, and can be used with the built-in `https` module. + +Specifically, this `Agent` implementation connects to an intermediary "proxy" +server and issues the [CONNECT HTTP method][CONNECT], which tells the proxy to +open a direct TCP connection to the destination server. 
+ +Since this agent implements the CONNECT HTTP method, it also works with other +protocols that use this method when connecting over proxies (i.e. WebSockets). +See the "Examples" section below for more. + + +Installation +------------ + +Install with `npm`: + +``` bash +$ npm install https-proxy-agent +``` + + +Examples +-------- + +#### `https` module example + +``` js +var url = require('url'); +var https = require('https'); +var HttpsProxyAgent = require('https-proxy-agent'); + +// HTTP/HTTPS proxy to connect to +var proxy = process.env.http_proxy || 'http://168.63.76.32:3128'; +console.log('using proxy server %j', proxy); + +// HTTPS endpoint for the proxy to connect to +var endpoint = process.argv[2] || 'https://graph.facebook.com/tootallnate'; +console.log('attempting to GET %j', endpoint); +var options = url.parse(endpoint); + +// create an instance of the `HttpsProxyAgent` class with the proxy server information +var agent = new HttpsProxyAgent(proxy); +options.agent = agent; + +https.get(options, function (res) { + console.log('"response" event!', res.headers); + res.pipe(process.stdout); +}); +``` + +#### `ws` WebSocket connection example + +``` js +var url = require('url'); +var WebSocket = require('ws'); +var HttpsProxyAgent = require('https-proxy-agent'); + +// HTTP/HTTPS proxy to connect to +var proxy = process.env.http_proxy || 'http://168.63.76.32:3128'; +console.log('using proxy server %j', proxy); + +// WebSocket endpoint for the proxy to connect to +var endpoint = process.argv[2] || 'ws://echo.websocket.org'; +var parsed = url.parse(endpoint); +console.log('attempting to connect to WebSocket %j', endpoint); + +// create an instance of the `HttpsProxyAgent` class with the proxy server information +var options = url.parse(proxy); + +var agent = new HttpsProxyAgent(options); + +// finally, initiate the WebSocket connection +var socket = new WebSocket(endpoint, { agent: agent }); + +socket.on('open', function () { + console.log('"open" event!'); + 
socket.send('hello world'); +}); + +socket.on('message', function (data, flags) { + console.log('"message" event! %j %j', data, flags); + socket.close(); +}); +``` + +API +--- + +### new HttpsProxyAgent(Object options) + +The `HttpsProxyAgent` class implements an `http.Agent` subclass that connects +to the specified "HTTP(s) proxy server" in order to proxy HTTPS and/or WebSocket +requests. This is achieved by using the [HTTP `CONNECT` method][CONNECT]. + +The `options` argument may either be a string URI of the proxy server to use, or an +"options" object with more specific properties: + + * `host` - String - Proxy host to connect to (may use `hostname` as well). Required. + * `port` - Number - Proxy port to connect to. Required. + * `protocol` - String - If `https:`, then use TLS to connect to the proxy. + * `headers` - Object - Additional HTTP headers to be sent on the HTTP CONNECT method. + * Any other options given are passed to the `net.connect()`/`tls.connect()` functions. + + +License +------- + +(The MIT License) + +Copyright (c) 2013 Nathan Rajlich <nathan@tootallnate.net> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +[CONNECT]: http://en.wikipedia.org/wiki/HTTP_tunnel#HTTP_CONNECT_Tunneling diff --git a/node_modules/https-proxy-agent/dist/agent.d.ts b/node_modules/https-proxy-agent/dist/agent.d.ts new file mode 100644 index 0000000..4f1c636 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/agent.d.ts @@ -0,0 +1,30 @@ +/// +import net from 'net'; +import { Agent, ClientRequest, RequestOptions } from 'agent-base'; +import { HttpsProxyAgentOptions } from '.'; +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. + * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +export default class HttpsProxyAgent extends Agent { + private secureProxy; + private proxy; + constructor(_opts: string | HttpsProxyAgentOptions); + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. 
+ * + * @api protected + */ + callback(req: ClientRequest, opts: RequestOptions): Promise; +} diff --git a/node_modules/https-proxy-agent/dist/agent.js b/node_modules/https-proxy-agent/dist/agent.js new file mode 100644 index 0000000..75d1136 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/agent.js @@ -0,0 +1,177 @@ +"use strict"; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const net_1 = __importDefault(require("net")); +const tls_1 = __importDefault(require("tls")); +const url_1 = __importDefault(require("url")); +const assert_1 = __importDefault(require("assert")); +const debug_1 = __importDefault(require("debug")); +const agent_base_1 = require("agent-base"); +const parse_proxy_response_1 = __importDefault(require("./parse-proxy-response")); +const debug = debug_1.default('https-proxy-agent:agent'); +/** + * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to + * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. 
+ * + * Outgoing HTTP requests are first tunneled through the proxy server using the + * `CONNECT` HTTP request method to establish a connection to the proxy server, + * and then the proxy server connects to the destination target and issues the + * HTTP request from the proxy server. + * + * `https:` requests have their socket connection upgraded to TLS once + * the connection to the proxy server has been established. + * + * @api public + */ +class HttpsProxyAgent extends agent_base_1.Agent { + constructor(_opts) { + let opts; + if (typeof _opts === 'string') { + opts = url_1.default.parse(_opts); + } + else { + opts = _opts; + } + if (!opts) { + throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); + } + debug('creating new HttpsProxyAgent instance: %o', opts); + super(opts); + const proxy = Object.assign({}, opts); + // If `true`, then connect to the proxy server over TLS. + // Defaults to `false`. + this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); + // Prefer `hostname` over `host`, and set the `port` if needed. + proxy.host = proxy.hostname || proxy.host; + if (typeof proxy.port === 'string') { + proxy.port = parseInt(proxy.port, 10); + } + if (!proxy.port && proxy.host) { + proxy.port = this.secureProxy ? 443 : 80; + } + // ALPN is supported by Node.js >= v5. + // attempt to negotiate http/1.1 for proxy servers that support http/2 + if (this.secureProxy && !('ALPNProtocols' in proxy)) { + proxy.ALPNProtocols = ['http 1.1']; + } + if (proxy.host && proxy.path) { + // If both a `host` and `path` are specified then it's most likely + // the result of a `url.parse()` call... we need to remove the + // `path` portion so that `net.connect()` doesn't attempt to open + // that as a Unix socket file. + delete proxy.path; + delete proxy.pathname; + } + this.proxy = proxy; + } + /** + * Called when the node-core HTTP client library is creating a + * new HTTP request. 
+ * + * @api protected + */ + callback(req, opts) { + return __awaiter(this, void 0, void 0, function* () { + const { proxy, secureProxy } = this; + // Create a socket connection to the proxy server. + let socket; + if (secureProxy) { + debug('Creating `tls.Socket`: %o', proxy); + socket = tls_1.default.connect(proxy); + } + else { + debug('Creating `net.Socket`: %o', proxy); + socket = net_1.default.connect(proxy); + } + const headers = Object.assign({}, proxy.headers); + const hostname = `${opts.host}:${opts.port}`; + let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; + // Inject the `Proxy-Authorization` header if necessary. + if (proxy.auth) { + headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; + } + // The `Host` header should only include the port + // number when it is not the default port. + let { host, port, secureEndpoint } = opts; + if (!isDefaultPort(port, secureEndpoint)) { + host += `:${port}`; + } + headers.Host = host; + headers.Connection = 'close'; + for (const name of Object.keys(headers)) { + payload += `${name}: ${headers[name]}\r\n`; + } + const proxyResponsePromise = parse_proxy_response_1.default(socket); + socket.write(`${payload}\r\n`); + const { statusCode, buffered } = yield proxyResponsePromise; + if (statusCode === 200) { + req.once('socket', resume); + if (opts.secureEndpoint) { + // The proxy is connecting to a TLS server, so upgrade + // this socket connection to a TLS connection. + debug('Upgrading socket connection to TLS'); + const servername = opts.servername || opts.host; + return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, + servername })); + } + return socket; + } + // Some other status code that's not 200... need to re-play the HTTP + // header "data" events onto the socket once the HTTP machinery is + // attached so that the node core `http` can parse and handle the + // error status code. 
+ // Close the original socket, and a new "fake" socket is returned + // instead, so that the proxy doesn't get the HTTP request + // written to it (which may contain `Authorization` headers or other + // sensitive data). + // + // See: https://hackerone.com/reports/541502 + socket.destroy(); + const fakeSocket = new net_1.default.Socket({ writable: false }); + fakeSocket.readable = true; + // Need to wait for the "socket" event to re-play the "data" events. + req.once('socket', (s) => { + debug('replaying proxy buffer for failed request'); + assert_1.default(s.listenerCount('data') > 0); + // Replay the "buffered" Buffer onto the fake `socket`, since at + // this point the HTTP module machinery has been hooked up for + // the user. + s.push(buffered); + s.push(null); + }); + return fakeSocket; + }); + } +} +exports.default = HttpsProxyAgent; +function resume(socket) { + socket.resume(); +} +function isDefaultPort(port, secure) { + return Boolean((!secure && port === 80) || (secure && port === 443)); +} +function isHTTPS(protocol) { + return typeof protocol === 'string' ? 
/^https:?$/i.test(protocol) : false; +} +function omit(obj, ...keys) { + const ret = {}; + let key; + for (key in obj) { + if (!keys.includes(key)) { + ret[key] = obj[key]; + } + } + return ret; +} +//# sourceMappingURL=agent.js.map \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/agent.js.map b/node_modules/https-proxy-agent/dist/agent.js.map new file mode 100644 index 0000000..0af6c17 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/agent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"agent.js","sourceRoot":"","sources":["../src/agent.ts"],"names":[],"mappings":";;;;;;;;;;;;;;AAAA,8CAAsB;AACtB,8CAAsB;AACtB,8CAAsB;AACtB,oDAA4B;AAC5B,kDAAgC;AAEhC,2CAAkE;AAElE,kFAAwD;AAExD,MAAM,KAAK,GAAG,eAAW,CAAC,yBAAyB,CAAC,CAAC;AAErD;;;;;;;;;;;;;GAaG;AACH,MAAqB,eAAgB,SAAQ,kBAAK;IAIjD,YAAY,KAAsC;QACjD,IAAI,IAA4B,CAAC;QACjC,IAAI,OAAO,KAAK,KAAK,QAAQ,EAAE;YAC9B,IAAI,GAAG,aAAG,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC;SACxB;aAAM;YACN,IAAI,GAAG,KAAK,CAAC;SACb;QACD,IAAI,CAAC,IAAI,EAAE;YACV,MAAM,IAAI,KAAK,CACd,8DAA8D,CAC9D,CAAC;SACF;QACD,KAAK,CAAC,2CAA2C,EAAE,IAAI,CAAC,CAAC;QACzD,KAAK,CAAC,IAAI,CAAC,CAAC;QAEZ,MAAM,KAAK,qBAAgC,IAAI,CAAE,CAAC;QAElD,wDAAwD;QACxD,uBAAuB;QACvB,IAAI,CAAC,WAAW,GAAG,IAAI,CAAC,WAAW,IAAI,OAAO,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC;QAE/D,+DAA+D;QAC/D,KAAK,CAAC,IAAI,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,IAAI,CAAC;QAC1C,IAAI,OAAO,KAAK,CAAC,IAAI,KAAK,QAAQ,EAAE;YACnC,KAAK,CAAC,IAAI,GAAG,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;SACtC;QACD,IAAI,CAAC,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC9B,KAAK,CAAC,IAAI,GAAG,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC;SACzC;QAED,sCAAsC;QACtC,sEAAsE;QACtE,IAAI,IAAI,CAAC,WAAW,IAAI,CAAC,CAAC,eAAe,IAAI,KAAK,CAAC,EAAE;YACpD,KAAK,CAAC,aAAa,GAAG,CAAC,UAAU,CAAC,CAAC;SACnC;QAED,IAAI,KAAK,CAAC,IAAI,IAAI,KAAK,CAAC,IAAI,EAAE;YAC7B,kEAAkE;YAClE,8DAA8D;YAC9D,iEAAiE;YACjE,8BAA8B;YAC9B,OAAO,KAAK,CAAC,IAAI,CAAC;YAClB,OAAO,KAAK,CAAC,QAAQ,CAAC;SACtB;QAED,IAAI,CAAC,KAAK,GAAG,KAAK,CAAC;IACpB,CAAC;IAED;;;;;OAKG;IACG,QAAQ,CACb,G
AAkB,EAClB,IAAoB;;YAEpB,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,GAAG,IAAI,CAAC;YAEpC,kDAAkD;YAClD,IAAI,MAAkB,CAAC;YACvB,IAAI,WAAW,EAAE;gBAChB,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA8B,CAAC,CAAC;aACrD;iBAAM;gBACN,KAAK,CAAC,2BAA2B,EAAE,KAAK,CAAC,CAAC;gBAC1C,MAAM,GAAG,aAAG,CAAC,OAAO,CAAC,KAA2B,CAAC,CAAC;aAClD;YAED,MAAM,OAAO,qBAA6B,KAAK,CAAC,OAAO,CAAE,CAAC;YAC1D,MAAM,QAAQ,GAAG,GAAG,IAAI,CAAC,IAAI,IAAI,IAAI,CAAC,IAAI,EAAE,CAAC;YAC7C,IAAI,OAAO,GAAG,WAAW,QAAQ,eAAe,CAAC;YAEjD,wDAAwD;YACxD,IAAI,KAAK,CAAC,IAAI,EAAE;gBACf,OAAO,CAAC,qBAAqB,CAAC,GAAG,SAAS,MAAM,CAAC,IAAI,CACpD,KAAK,CAAC,IAAI,CACV,CAAC,QAAQ,CAAC,QAAQ,CAAC,EAAE,CAAC;aACvB;YAED,iDAAiD;YACjD,0CAA0C;YAC1C,IAAI,EAAE,IAAI,EAAE,IAAI,EAAE,cAAc,EAAE,GAAG,IAAI,CAAC;YAC1C,IAAI,CAAC,aAAa,CAAC,IAAI,EAAE,cAAc,CAAC,EAAE;gBACzC,IAAI,IAAI,IAAI,IAAI,EAAE,CAAC;aACnB;YACD,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;YAEpB,OAAO,CAAC,UAAU,GAAG,OAAO,CAAC;YAC7B,KAAK,MAAM,IAAI,IAAI,MAAM,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE;gBACxC,OAAO,IAAI,GAAG,IAAI,KAAK,OAAO,CAAC,IAAI,CAAC,MAAM,CAAC;aAC3C;YAED,MAAM,oBAAoB,GAAG,8BAAkB,CAAC,MAAM,CAAC,CAAC;YAExD,MAAM,CAAC,KAAK,CAAC,GAAG,OAAO,MAAM,CAAC,CAAC;YAE/B,MAAM,EACL,UAAU,EACV,QAAQ,EACR,GAAG,MAAM,oBAAoB,CAAC;YAE/B,IAAI,UAAU,KAAK,GAAG,EAAE;gBACvB,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,MAAM,CAAC,CAAC;gBAE3B,IAAI,IAAI,CAAC,cAAc,EAAE;oBACxB,sDAAsD;oBACtD,8CAA8C;oBAC9C,KAAK,CAAC,oCAAoC,CAAC,CAAC;oBAC5C,MAAM,UAAU,GAAG,IAAI,CAAC,UAAU,IAAI,IAAI,CAAC,IAAI,CAAC;oBAChD,OAAO,aAAG,CAAC,OAAO,iCACd,IAAI,CAAC,IAAI,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,MAAM,CAAC,KACjD,MAAM;wBACN,UAAU,IACT,CAAC;iBACH;gBAED,OAAO,MAAM,CAAC;aACd;YAED,oEAAoE;YACpE,kEAAkE;YAClE,iEAAiE;YACjE,qBAAqB;YAErB,iEAAiE;YACjE,0DAA0D;YAC1D,oEAAoE;YACpE,mBAAmB;YACnB,EAAE;YACF,4CAA4C;YAC5C,MAAM,CAAC,OAAO,EAAE,CAAC;YAEjB,MAAM,UAAU,GAAG,IAAI,aAAG,CAAC,MAAM,CAAC,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAC;YACvD,UAAU,CAAC,QAAQ,GAAG,IAAI,CAAC;YAE3B,oEAAoE;YACpE,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,CAAC,CAAa,EAAE,EAAE;gBACpC,KAAK,CAAC,2CAA2C,CAAC,CAAC;gBACnD,gBAAM,CAAC,CAAC,CAAC,aA
Aa,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC;gBAEpC,gEAAgE;gBAChE,8DAA8D;gBAC9D,YAAY;gBACZ,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;gBACjB,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;YACd,CAAC,CAAC,CAAC;YAEH,OAAO,UAAU,CAAC;QACnB,CAAC;KAAA;CACD;AA3JD,kCA2JC;AAED,SAAS,MAAM,CAAC,MAAkC;IACjD,MAAM,CAAC,MAAM,EAAE,CAAC;AACjB,CAAC;AAED,SAAS,aAAa,CAAC,IAAY,EAAE,MAAe;IACnD,OAAO,OAAO,CAAC,CAAC,CAAC,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC,IAAI,CAAC,MAAM,IAAI,IAAI,KAAK,GAAG,CAAC,CAAC,CAAC;AACtE,CAAC;AAED,SAAS,OAAO,CAAC,QAAwB;IACxC,OAAO,OAAO,QAAQ,KAAK,QAAQ,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC;AAC3E,CAAC;AAED,SAAS,IAAI,CACZ,GAAM,EACN,GAAG,IAAO;IAIV,MAAM,GAAG,GAAG,EAEX,CAAC;IACF,IAAI,GAAqB,CAAC;IAC1B,KAAK,GAAG,IAAI,GAAG,EAAE;QAChB,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,EAAE;YACxB,GAAG,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC;SACpB;KACD;IACD,OAAO,GAAG,CAAC;AACZ,CAAC"} \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/index.d.ts b/node_modules/https-proxy-agent/dist/index.d.ts new file mode 100644 index 0000000..0d60062 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/index.d.ts @@ -0,0 +1,23 @@ +/// +import net from 'net'; +import tls from 'tls'; +import { Url } from 'url'; +import { AgentOptions } from 'agent-base'; +import { OutgoingHttpHeaders } from 'http'; +import _HttpsProxyAgent from './agent'; +declare function createHttpsProxyAgent(opts: string | createHttpsProxyAgent.HttpsProxyAgentOptions): _HttpsProxyAgent; +declare namespace createHttpsProxyAgent { + interface BaseHttpsProxyAgentOptions { + headers?: OutgoingHttpHeaders; + secureProxy?: boolean; + host?: string | null; + path?: string | null; + port?: string | number | null; + } + export interface HttpsProxyAgentOptions extends AgentOptions, BaseHttpsProxyAgentOptions, Partial> { + } + export type HttpsProxyAgent = _HttpsProxyAgent; + export const HttpsProxyAgent: typeof _HttpsProxyAgent; + export {}; +} +export = createHttpsProxyAgent; diff --git 
a/node_modules/https-proxy-agent/dist/index.js b/node_modules/https-proxy-agent/dist/index.js new file mode 100644 index 0000000..b03e763 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/index.js @@ -0,0 +1,14 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +const agent_1 = __importDefault(require("./agent")); +function createHttpsProxyAgent(opts) { + return new agent_1.default(opts); +} +(function (createHttpsProxyAgent) { + createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; + createHttpsProxyAgent.prototype = agent_1.default.prototype; +})(createHttpsProxyAgent || (createHttpsProxyAgent = {})); +module.exports = createHttpsProxyAgent; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/index.js.map b/node_modules/https-proxy-agent/dist/index.js.map new file mode 100644 index 0000000..f3ce559 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":";;;;AAKA,oDAAuC;AAEvC,SAAS,qBAAqB,CAC7B,IAA2D;IAE3D,OAAO,IAAI,eAAgB,CAAC,IAAI,CAAC,CAAC;AACnC,CAAC;AAED,WAAU,qBAAqB;IAoBjB,qCAAe,GAAG,eAAgB,CAAC;IAEhD,qBAAqB,CAAC,SAAS,GAAG,eAAgB,CAAC,SAAS,CAAC;AAC9D,CAAC,EAvBS,qBAAqB,KAArB,qBAAqB,QAuB9B;AAED,iBAAS,qBAAqB,CAAC"} \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts b/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts new file mode 100644 index 0000000..7565674 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/parse-proxy-response.d.ts @@ -0,0 +1,7 @@ +/// +import { Readable } from 'stream'; +export interface ProxyResponse { + statusCode: number; + buffered: Buffer; +} +export default function parseProxyResponse(socket: Readable): Promise; diff --git 
a/node_modules/https-proxy-agent/dist/parse-proxy-response.js b/node_modules/https-proxy-agent/dist/parse-proxy-response.js new file mode 100644 index 0000000..aa5ce3c --- /dev/null +++ b/node_modules/https-proxy-agent/dist/parse-proxy-response.js @@ -0,0 +1,66 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const debug_1 = __importDefault(require("debug")); +const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); +function parseProxyResponse(socket) { + return new Promise((resolve, reject) => { + // we need to buffer any HTTP traffic that happens with the proxy before we get + // the CONNECT response, so that if the response is anything other than an "200" + // response code, then we can re-play the "data" events on the socket once the + // HTTP parser is hooked up... + let buffersLength = 0; + const buffers = []; + function read() { + const b = socket.read(); + if (b) + ondata(b); + else + socket.once('readable', read); + } + function cleanup() { + socket.removeListener('end', onend); + socket.removeListener('error', onerror); + socket.removeListener('close', onclose); + socket.removeListener('readable', read); + } + function onclose(err) { + debug('onclose had error %o', err); + } + function onend() { + debug('onend'); + } + function onerror(err) { + cleanup(); + debug('onerror %o', err); + reject(err); + } + function ondata(b) { + buffers.push(b); + buffersLength += b.length; + const buffered = Buffer.concat(buffers, buffersLength); + const endOfHeaders = buffered.indexOf('\r\n\r\n'); + if (endOfHeaders === -1) { + // keep buffering + debug('have not received end of HTTP headers yet...'); + read(); + return; + } + const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); + const statusCode = +firstLine.split(' ')[1]; + debug('got proxy server response: %o', 
firstLine); + resolve({ + statusCode, + buffered + }); + } + socket.on('error', onerror); + socket.on('close', onclose); + socket.on('end', onend); + read(); + }); +} +exports.default = parseProxyResponse; +//# sourceMappingURL=parse-proxy-response.js.map \ No newline at end of file diff --git a/node_modules/https-proxy-agent/dist/parse-proxy-response.js.map b/node_modules/https-proxy-agent/dist/parse-proxy-response.js.map new file mode 100644 index 0000000..bacdb84 --- /dev/null +++ b/node_modules/https-proxy-agent/dist/parse-proxy-response.js.map @@ -0,0 +1 @@ +{"version":3,"file":"parse-proxy-response.js","sourceRoot":"","sources":["../src/parse-proxy-response.ts"],"names":[],"mappings":";;;;;AAAA,kDAAgC;AAGhC,MAAM,KAAK,GAAG,eAAW,CAAC,wCAAwC,CAAC,CAAC;AAOpE,SAAwB,kBAAkB,CACzC,MAAgB;IAEhB,OAAO,IAAI,OAAO,CAAC,CAAC,OAAO,EAAE,MAAM,EAAE,EAAE;QACtC,+EAA+E;QAC/E,gFAAgF;QAChF,8EAA8E;QAC9E,8BAA8B;QAC9B,IAAI,aAAa,GAAG,CAAC,CAAC;QACtB,MAAM,OAAO,GAAa,EAAE,CAAC;QAE7B,SAAS,IAAI;YACZ,MAAM,CAAC,GAAG,MAAM,CAAC,IAAI,EAAE,CAAC;YACxB,IAAI,CAAC;gBAAE,MAAM,CAAC,CAAC,CAAC,CAAC;;gBACZ,MAAM,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;QACpC,CAAC;QAED,SAAS,OAAO;YACf,MAAM,CAAC,cAAc,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;YACpC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YACxC,MAAM,CAAC,cAAc,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;YACxC,MAAM,CAAC,cAAc,CAAC,UAAU,EAAE,IAAI,CAAC,CAAC;QACzC,CAAC;QAED,SAAS,OAAO,CAAC,GAAW;YAC3B,KAAK,CAAC,sBAAsB,EAAE,GAAG,CAAC,CAAC;QACpC,CAAC;QAED,SAAS,KAAK;YACb,KAAK,CAAC,OAAO,CAAC,CAAC;QAChB,CAAC;QAED,SAAS,OAAO,CAAC,GAAU;YAC1B,OAAO,EAAE,CAAC;YACV,KAAK,CAAC,YAAY,EAAE,GAAG,CAAC,CAAC;YACzB,MAAM,CAAC,GAAG,CAAC,CAAC;QACb,CAAC;QAED,SAAS,MAAM,CAAC,CAAS;YACxB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;YAChB,aAAa,IAAI,CAAC,CAAC,MAAM,CAAC;YAE1B,MAAM,QAAQ,GAAG,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE,aAAa,CAAC,CAAC;YACvD,MAAM,YAAY,GAAG,QAAQ,CAAC,OAAO,CAAC,UAAU,CAAC,CAAC;YAElD,IAAI,YAAY,KAAK,CAAC,CAAC,EAAE;gBACxB,iBAAiB;gBACjB,KAAK,CAAC,8CAA8C,CAAC,CAAC;gBACtD,IAAI,EAAE,CAAC;gBACP,OAAO;aACP;YAED,MAAM,SAAS,GAAG,QAAQ,C
AAC,QAAQ,CAClC,OAAO,EACP,CAAC,EACD,QAAQ,CAAC,OAAO,CAAC,MAAM,CAAC,CACxB,CAAC;YACF,MAAM,UAAU,GAAG,CAAC,SAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;YAC5C,KAAK,CAAC,+BAA+B,EAAE,SAAS,CAAC,CAAC;YAClD,OAAO,CAAC;gBACP,UAAU;gBACV,QAAQ;aACR,CAAC,CAAC;QACJ,CAAC;QAED,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,CAAC,CAAC;QAC5B,MAAM,CAAC,EAAE,CAAC,KAAK,EAAE,KAAK,CAAC,CAAC;QAExB,IAAI,EAAE,CAAC;IACR,CAAC,CAAC,CAAC;AACJ,CAAC;AAvED,qCAuEC"} \ No newline at end of file diff --git a/node_modules/https-proxy-agent/node_modules/debug/LICENSE b/node_modules/https-proxy-agent/node_modules/debug/LICENSE new file mode 100644 index 0000000..1a9820e --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/LICENSE @@ -0,0 +1,20 @@ +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software +and associated documentation files (the 'Software'), to deal in the Software without restriction, +including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial +portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT +LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ diff --git a/node_modules/https-proxy-agent/node_modules/debug/README.md b/node_modules/https-proxy-agent/node_modules/debug/README.md new file mode 100644 index 0000000..9ebdfbf --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/README.md @@ -0,0 +1,481 @@ +# debug +[![OpenCollective](https://opencollective.com/debug/backers/badge.svg)](#backers) +[![OpenCollective](https://opencollective.com/debug/sponsors/badge.svg)](#sponsors) + + + +A tiny JavaScript debugging utility modelled after Node.js core's debugging +technique. Works in Node.js and web browsers. + +## Installation + +```bash +$ npm install debug +``` + +## Usage + +`debug` exposes a function; simply pass this function the name of your module, and it will return a decorated version of `console.error` for you to pass debug statements to. This will allow you to toggle the debug output for different parts of your module as well as the module as a whole. + +Example [_app.js_](./examples/node/app.js): + +```js +var debug = require('debug')('http') + , http = require('http') + , name = 'My App'; + +// fake app + +debug('booting %o', name); + +http.createServer(function(req, res){ + debug(req.method + ' ' + req.url); + res.end('hello\n'); +}).listen(3000, function(){ + debug('listening'); +}); + +// fake worker of some kind + +require('./worker'); +``` + +Example [_worker.js_](./examples/node/worker.js): + +```js +var a = require('debug')('worker:a') + , b = require('debug')('worker:b'); + +function work() { + a('doing lots of uninteresting work'); + setTimeout(work, Math.random() * 1000); +} + +work(); + +function workb() { + b('doing some work'); + setTimeout(workb, Math.random() * 2000); +} + +workb(); +``` + +The `DEBUG` environment variable is then used to enable these based on space or +comma-delimited names. 
+ +Here are some examples: + +screen shot 2017-08-08 at 12 53 04 pm +screen shot 2017-08-08 at 12 53 38 pm +screen shot 2017-08-08 at 12 53 25 pm + +#### Windows command prompt notes + +##### CMD + +On Windows the environment variable is set using the `set` command. + +```cmd +set DEBUG=*,-not_this +``` + +Example: + +```cmd +set DEBUG=* & node app.js +``` + +##### PowerShell (VS Code default) + +PowerShell uses different syntax to set environment variables. + +```cmd +$env:DEBUG = "*,-not_this" +``` + +Example: + +```cmd +$env:DEBUG='app';node app.js +``` + +Then, run the program to be debugged as usual. + +npm script example: +```js + "windowsDebug": "@powershell -Command $env:DEBUG='*';node app.js", +``` + +## Namespace Colors + +Every debug instance has a color generated for it based on its namespace name. +This helps when visually parsing the debug output to identify which debug instance +a debug line belongs to. + +#### Node.js + +In Node.js, colors are enabled when stderr is a TTY. You also _should_ install +the [`supports-color`](https://npmjs.org/supports-color) module alongside debug, +otherwise debug will only use a small handful of basic colors. + + + +#### Web Browser + +Colors are also enabled on "Web Inspectors" that understand the `%c` formatting +option. These are WebKit web inspectors, Firefox ([since version +31](https://hacks.mozilla.org/2014/05/editable-box-model-multiple-selection-sublime-text-keys-much-more-firefox-developer-tools-episode-31/)) +and the Firebug plugin for Firefox (any version). + + + + +## Millisecond diff + +When actively developing an application it can be useful to see when the time spent between one `debug()` call and the next. Suppose for example you invoke `debug()` before requesting a resource, and after as well, the "+NNNms" will show you how much time was spent between calls. 
+ + + +When stdout is not a TTY, `Date#toISOString()` is used, making it more useful for logging the debug information as shown below: + + + + +## Conventions + +If you're using this in one or more of your libraries, you _should_ use the name of your library so that developers may toggle debugging as desired without guessing names. If you have more than one debuggers you _should_ prefix them with your library name and use ":" to separate features. For example "bodyParser" from Connect would then be "connect:bodyParser". If you append a "*" to the end of your name, it will always be enabled regardless of the setting of the DEBUG environment variable. You can then use it for normal output as well as debug output. + +## Wildcards + +The `*` character may be used as a wildcard. Suppose for example your library has +debuggers named "connect:bodyParser", "connect:compress", "connect:session", +instead of listing all three with +`DEBUG=connect:bodyParser,connect:compress,connect:session`, you may simply do +`DEBUG=connect:*`, or to run everything using this module simply use `DEBUG=*`. + +You can also exclude specific debuggers by prefixing them with a "-" character. +For example, `DEBUG=*,-connect:*` would include all debuggers except those +starting with "connect:". + +## Environment Variables + +When running through Node.js, you can set a few environment variables that will +change the behavior of the debug logging: + +| Name | Purpose | +|-----------|-------------------------------------------------| +| `DEBUG` | Enables/disables specific debugging namespaces. | +| `DEBUG_HIDE_DATE` | Hide date from debug output (non-TTY). | +| `DEBUG_COLORS`| Whether or not to use colors in the debug output. | +| `DEBUG_DEPTH` | Object inspection depth. | +| `DEBUG_SHOW_HIDDEN` | Shows hidden properties on inspected objects. | + + +__Note:__ The environment variables beginning with `DEBUG_` end up being +converted into an Options object that gets used with `%o`/`%O` formatters. 
+See the Node.js documentation for +[`util.inspect()`](https://nodejs.org/api/util.html#util_util_inspect_object_options) +for the complete list. + +## Formatters + +Debug uses [printf-style](https://wikipedia.org/wiki/Printf_format_string) formatting. +Below are the officially supported formatters: + +| Formatter | Representation | +|-----------|----------------| +| `%O` | Pretty-print an Object on multiple lines. | +| `%o` | Pretty-print an Object all on a single line. | +| `%s` | String. | +| `%d` | Number (both integer and float). | +| `%j` | JSON. Replaced with the string '[Circular]' if the argument contains circular references. | +| `%%` | Single percent sign ('%'). This does not consume an argument. | + + +### Custom formatters + +You can add custom formatters by extending the `debug.formatters` object. +For example, if you wanted to add support for rendering a Buffer as hex with +`%h`, you could do something like: + +```js +const createDebug = require('debug') +createDebug.formatters.h = (v) => { + return v.toString('hex') +} + +// …elsewhere +const debug = createDebug('foo') +debug('this is hex: %h', new Buffer('hello world')) +// foo this is hex: 68656c6c6f20776f726c6421 +0ms +``` + + +## Browser Support + +You can build a browser-ready script using [browserify](https://github.com/substack/node-browserify), +or just use the [browserify-as-a-service](https://wzrd.in/) [build](https://wzrd.in/standalone/debug@latest), +if you don't want to build it yourself. + +Debug's enable state is currently persisted by `localStorage`. +Consider the situation shown below where you have `worker:a` and `worker:b`, +and wish to debug both. You can enable this using `localStorage.debug`: + +```js +localStorage.debug = 'worker:*' +``` + +And then refresh the page. 
+ +```js +a = debug('worker:a'); +b = debug('worker:b'); + +setInterval(function(){ + a('doing some work'); +}, 1000); + +setInterval(function(){ + b('doing some work'); +}, 1200); +``` + +In Chromium-based web browsers (e.g. Brave, Chrome, and Electron), the JavaScript console will—by default—only show messages logged by `debug` if the "Verbose" log level is _enabled_. + + + +## Output streams + + By default `debug` will log to stderr, however this can be configured per-namespace by overriding the `log` method: + +Example [_stdout.js_](./examples/node/stdout.js): + +```js +var debug = require('debug'); +var error = debug('app:error'); + +// by default stderr is used +error('goes to stderr!'); + +var log = debug('app:log'); +// set this namespace to log via console.log +log.log = console.log.bind(console); // don't forget to bind to console! +log('goes to stdout'); +error('still goes to stderr!'); + +// set all output to go via console.info +// overrides all per-namespace log settings +debug.log = console.info.bind(console); +error('now goes to stdout via console.info'); +log('still goes to stdout, but via console.info now'); +``` + +## Extend +You can simply extend debugger +```js +const log = require('debug')('auth'); + +//creates new debug instance with extended namespace +const logSign = log.extend('sign'); +const logLogin = log.extend('login'); + +log('hello'); // auth hello +logSign('hello'); //auth:sign hello +logLogin('hello'); //auth:login hello +``` + +## Set dynamically + +You can also enable debug dynamically by calling the `enable()` method : + +```js +let debug = require('debug'); + +console.log(1, debug.enabled('test')); + +debug.enable('test'); +console.log(2, debug.enabled('test')); + +debug.disable(); +console.log(3, debug.enabled('test')); + +``` + +print : +``` +1 false +2 true +3 false +``` + +Usage : +`enable(namespaces)` +`namespaces` can include modes separated by a colon and wildcards. 
+ +Note that calling `enable()` completely overrides previously set DEBUG variable : + +``` +$ DEBUG=foo node -e 'var dbg = require("debug"); dbg.enable("bar"); console.log(dbg.enabled("foo"))' +=> false +``` + +`disable()` + +Will disable all namespaces. The functions returns the namespaces currently +enabled (and skipped). This can be useful if you want to disable debugging +temporarily without knowing what was enabled to begin with. + +For example: + +```js +let debug = require('debug'); +debug.enable('foo:*,-foo:bar'); +let namespaces = debug.disable(); +debug.enable(namespaces); +``` + +Note: There is no guarantee that the string will be identical to the initial +enable string, but semantically they will be identical. + +## Checking whether a debug target is enabled + +After you've created a debug instance, you can determine whether or not it is +enabled by checking the `enabled` property: + +```javascript +const debug = require('debug')('http'); + +if (debug.enabled) { + // do stuff... +} +``` + +You can also manually toggle this property to force the debug instance to be +enabled or disabled. + +## Usage in child processes + +Due to the way `debug` detects if the output is a TTY or not, colors are not shown in child processes when `stderr` is piped. A solution is to pass the `DEBUG_COLORS=1` environment variable to the child process. +For example: + +```javascript +worker = fork(WORKER_WRAP_PATH, [workerPath], { + stdio: [ + /* stdin: */ 0, + /* stdout: */ 'pipe', + /* stderr: */ 'pipe', + 'ipc', + ], + env: Object.assign({}, process.env, { + DEBUG_COLORS: 1 // without this settings, colors won't be shown + }), +}); + +worker.stderr.pipe(process.stderr, { end: false }); +``` + + +## Authors + + - TJ Holowaychuk + - Nathan Rajlich + - Andrew Rhyne + - Josh Junon + +## Backers + +Support us with a monthly donation and help us continue our activities. 
[[Become a backer](https://opencollective.com/debug#backer)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## Sponsors + +Become a sponsor and get your logo on our README on Github with a link to your site. [[Become a sponsor](https://opencollective.com/debug#sponsor)] + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +## License + +(The MIT License) + +Copyright (c) 2014-2017 TJ Holowaychuk <tj@vision-media.ca> +Copyright (c) 2018-2021 Josh Junon + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/https-proxy-agent/node_modules/debug/package.json b/node_modules/https-proxy-agent/node_modules/debug/package.json new file mode 100644 index 0000000..ee8abb5 --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/package.json @@ -0,0 +1,64 @@ +{ + "name": "debug", + "version": "4.4.3", + "repository": { + "type": "git", + "url": "git://github.com/debug-js/debug.git" + }, + "description": "Lightweight debugging utility for Node.js and the browser", + "keywords": [ + "debug", + "log", + "debugger" + ], + "files": [ + "src", + "LICENSE", + "README.md" + ], + "author": "Josh Junon (https://github.com/qix-)", + "contributors": [ + "TJ Holowaychuk ", + "Nathan Rajlich (http://n8.io)", + "Andrew Rhyne " + ], + "license": "MIT", + "scripts": { + "lint": "xo", + "test": "npm run test:node && npm run test:browser && npm run lint", + "test:node": "mocha test.js test.node.js", + "test:browser": "karma start --single-run", + "test:coverage": "cat ./coverage/lcov.info | coveralls" + }, + "dependencies": { + "ms": "^2.1.3" + }, + "devDependencies": { + "brfs": "^2.0.1", + "browserify": "^16.2.3", + "coveralls": "^3.0.2", + "karma": "^3.1.4", + "karma-browserify": "^6.0.0", + "karma-chrome-launcher": "^2.2.0", + "karma-mocha": "^1.3.0", + "mocha": "^5.2.0", + "mocha-lcov-reporter": "^1.2.0", + "sinon": "^14.0.0", + "xo": "^0.23.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + }, + "main": "./src/index.js", + "browser": "./src/browser.js", + "engines": { + "node": ">=6.0" + }, + "xo": { + "rules": { + "import/extensions": "off" + } + } +} diff --git a/node_modules/https-proxy-agent/node_modules/debug/src/browser.js b/node_modules/https-proxy-agent/node_modules/debug/src/browser.js new file mode 100644 index 0000000..5993451 --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/src/browser.js @@ -0,0 +1,272 @@ +/* eslint-env browser */ + +/** + * This is the web browser implementation of 
`debug()`. + */ + +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.storage = localstorage(); +exports.destroy = (() => { + let warned = false; + + return () => { + if (!warned) { + warned = true; + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + }; +})(); + +/** + * Colors. + */ + +exports.colors = [ + '#0000CC', + '#0000FF', + '#0033CC', + '#0033FF', + '#0066CC', + '#0066FF', + '#0099CC', + '#0099FF', + '#00CC00', + '#00CC33', + '#00CC66', + '#00CC99', + '#00CCCC', + '#00CCFF', + '#3300CC', + '#3300FF', + '#3333CC', + '#3333FF', + '#3366CC', + '#3366FF', + '#3399CC', + '#3399FF', + '#33CC00', + '#33CC33', + '#33CC66', + '#33CC99', + '#33CCCC', + '#33CCFF', + '#6600CC', + '#6600FF', + '#6633CC', + '#6633FF', + '#66CC00', + '#66CC33', + '#9900CC', + '#9900FF', + '#9933CC', + '#9933FF', + '#99CC00', + '#99CC33', + '#CC0000', + '#CC0033', + '#CC0066', + '#CC0099', + '#CC00CC', + '#CC00FF', + '#CC3300', + '#CC3333', + '#CC3366', + '#CC3399', + '#CC33CC', + '#CC33FF', + '#CC6600', + '#CC6633', + '#CC9900', + '#CC9933', + '#CCCC00', + '#CCCC33', + '#FF0000', + '#FF0033', + '#FF0066', + '#FF0099', + '#FF00CC', + '#FF00FF', + '#FF3300', + '#FF3333', + '#FF3366', + '#FF3399', + '#FF33CC', + '#FF33FF', + '#FF6600', + '#FF6633', + '#FF9900', + '#FF9933', + '#FFCC00', + '#FFCC33' +]; + +/** + * Currently only WebKit-based Web Inspectors, Firefox >= v31, + * and the Firebug extension (any Firefox version) are known + * to support "%c" CSS customizations. + * + * TODO: add a `localStorage` variable to explicitly enable/disable colors + */ + +// eslint-disable-next-line complexity +function useColors() { + // NB: In an Electron preload script, document will be defined but not fully + // initialized. 
Since we know we're in Chrome, we'll just detect this case + // explicitly + if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { + return true; + } + + // Internet Explorer and Edge do not support colors. + if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { + return false; + } + + let m; + + // Is webkit? http://stackoverflow.com/a/16459606/376773 + // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 + // eslint-disable-next-line no-return-assign + return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || + // Is firebug? http://stackoverflow.com/a/398120/376773 + (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || + // Is firefox >= v31? + // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages + (typeof navigator !== 'undefined' && navigator.userAgent && (m = navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)) && parseInt(m[1], 10) >= 31) || + // Double check webkit in userAgent just in case we are in a worker + (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); +} + +/** + * Colorize log arguments if enabled. + * + * @api public + */ + +function formatArgs(args) { + args[0] = (this.useColors ? '%c' : '') + + this.namespace + + (this.useColors ? ' %c' : ' ') + + args[0] + + (this.useColors ? 
'%c ' : ' ') + + '+' + module.exports.humanize(this.diff); + + if (!this.useColors) { + return; + } + + const c = 'color: ' + this.color; + args.splice(1, 0, c, 'color: inherit'); + + // The final "%c" is somewhat tricky, because there could be other + // arguments passed either before or after the %c, so we need to + // figure out the correct index to insert the CSS into + let index = 0; + let lastC = 0; + args[0].replace(/%[a-zA-Z%]/g, match => { + if (match === '%%') { + return; + } + index++; + if (match === '%c') { + // We only are interested in the *last* %c + // (the user may have provided their own) + lastC = index; + } + }); + + args.splice(lastC, 0, c); +} + +/** + * Invokes `console.debug()` when available. + * No-op when `console.debug` is not a "function". + * If `console.debug` is not available, falls back + * to `console.log`. + * + * @api public + */ +exports.log = console.debug || console.log || (() => {}); + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + try { + if (namespaces) { + exports.storage.setItem('debug', namespaces); + } else { + exports.storage.removeItem('debug'); + } + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ +function load() { + let r; + try { + r = exports.storage.getItem('debug') || exports.storage.getItem('DEBUG') ; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } + + // If debug isn't set in LS, and we're in Electron, try to load $DEBUG + if (!r && typeof process !== 'undefined' && 'env' in process) { + r = process.env.DEBUG; + } + + return r; +} + +/** + * Localstorage attempts to return the localstorage. + * + * This is necessary because safari throws + * when a user disables cookies/localstorage + * and you attempt to access it. 
+ * + * @return {LocalStorage} + * @api private + */ + +function localstorage() { + try { + // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context + // The Browser also has localStorage in the global context. + return localStorage; + } catch (error) { + // Swallow + // XXX (@Qix-) should we be logging these? + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. + */ + +formatters.j = function (v) { + try { + return JSON.stringify(v); + } catch (error) { + return '[UnexpectedJSONParseError]: ' + error.message; + } +}; diff --git a/node_modules/https-proxy-agent/node_modules/debug/src/common.js b/node_modules/https-proxy-agent/node_modules/debug/src/common.js new file mode 100644 index 0000000..141cb57 --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/src/common.js @@ -0,0 +1,292 @@ + +/** + * This is the common logic for both the Node.js and web browser + * implementations of `debug()`. + */ + +function setup(env) { + createDebug.debug = createDebug; + createDebug.default = createDebug; + createDebug.coerce = coerce; + createDebug.disable = disable; + createDebug.enable = enable; + createDebug.enabled = enabled; + createDebug.humanize = require('ms'); + createDebug.destroy = destroy; + + Object.keys(env).forEach(key => { + createDebug[key] = env[key]; + }); + + /** + * The currently active debug mode names, and names to skip. + */ + + createDebug.names = []; + createDebug.skips = []; + + /** + * Map of special "%n" handling functions, for the debug "format" argument. + * + * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". 
+ */ + createDebug.formatters = {}; + + /** + * Selects a color for a debug namespace + * @param {String} namespace The namespace string for the debug instance to be colored + * @return {Number|String} An ANSI color code for the given namespace + * @api private + */ + function selectColor(namespace) { + let hash = 0; + + for (let i = 0; i < namespace.length; i++) { + hash = ((hash << 5) - hash) + namespace.charCodeAt(i); + hash |= 0; // Convert to 32bit integer + } + + return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; + } + createDebug.selectColor = selectColor; + + /** + * Create a debugger with the given `namespace`. + * + * @param {String} namespace + * @return {Function} + * @api public + */ + function createDebug(namespace) { + let prevTime; + let enableOverride = null; + let namespacesCache; + let enabledCache; + + function debug(...args) { + // Disabled? + if (!debug.enabled) { + return; + } + + const self = debug; + + // Set `diff` timestamp + const curr = Number(new Date()); + const ms = curr - (prevTime || curr); + self.diff = ms; + self.prev = prevTime; + self.curr = curr; + prevTime = curr; + + args[0] = createDebug.coerce(args[0]); + + if (typeof args[0] !== 'string') { + // Anything else let's inspect with %O + args.unshift('%O'); + } + + // Apply any `formatters` transformations + let index = 0; + args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { + // If we encounter an escaped % then don't increase the array index + if (match === '%%') { + return '%'; + } + index++; + const formatter = createDebug.formatters[format]; + if (typeof formatter === 'function') { + const val = args[index]; + match = formatter.call(self, val); + + // Now we need to remove `args[index]` since it's inlined in the `format` + args.splice(index, 1); + index--; + } + return match; + }); + + // Apply env-specific formatting (colors, etc.) 
+ createDebug.formatArgs.call(self, args); + + const logFn = self.log || createDebug.log; + logFn.apply(self, args); + } + + debug.namespace = namespace; + debug.useColors = createDebug.useColors(); + debug.color = createDebug.selectColor(namespace); + debug.extend = extend; + debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. + + Object.defineProperty(debug, 'enabled', { + enumerable: true, + configurable: false, + get: () => { + if (enableOverride !== null) { + return enableOverride; + } + if (namespacesCache !== createDebug.namespaces) { + namespacesCache = createDebug.namespaces; + enabledCache = createDebug.enabled(namespace); + } + + return enabledCache; + }, + set: v => { + enableOverride = v; + } + }); + + // Env-specific initialization logic for debug instances + if (typeof createDebug.init === 'function') { + createDebug.init(debug); + } + + return debug; + } + + function extend(namespace, delimiter) { + const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); + newDebug.log = this.log; + return newDebug; + } + + /** + * Enables a debug mode by namespaces. This can include modes + * separated by a colon and wildcards. + * + * @param {String} namespaces + * @api public + */ + function enable(namespaces) { + createDebug.save(namespaces); + createDebug.namespaces = namespaces; + + createDebug.names = []; + createDebug.skips = []; + + const split = (typeof namespaces === 'string' ? namespaces : '') + .trim() + .replace(/\s+/g, ',') + .split(',') + .filter(Boolean); + + for (const ns of split) { + if (ns[0] === '-') { + createDebug.skips.push(ns.slice(1)); + } else { + createDebug.names.push(ns); + } + } + } + + /** + * Checks if the given string matches a namespace template, honoring + * asterisks as wildcards. 
+ * + * @param {String} search + * @param {String} template + * @return {Boolean} + */ + function matchesTemplate(search, template) { + let searchIndex = 0; + let templateIndex = 0; + let starIndex = -1; + let matchIndex = 0; + + while (searchIndex < search.length) { + if (templateIndex < template.length && (template[templateIndex] === search[searchIndex] || template[templateIndex] === '*')) { + // Match character or proceed with wildcard + if (template[templateIndex] === '*') { + starIndex = templateIndex; + matchIndex = searchIndex; + templateIndex++; // Skip the '*' + } else { + searchIndex++; + templateIndex++; + } + } else if (starIndex !== -1) { // eslint-disable-line no-negated-condition + // Backtrack to the last '*' and try to match more characters + templateIndex = starIndex + 1; + matchIndex++; + searchIndex = matchIndex; + } else { + return false; // No match + } + } + + // Handle trailing '*' in template + while (templateIndex < template.length && template[templateIndex] === '*') { + templateIndex++; + } + + return templateIndex === template.length; + } + + /** + * Disable debug output. + * + * @return {String} namespaces + * @api public + */ + function disable() { + const namespaces = [ + ...createDebug.names, + ...createDebug.skips.map(namespace => '-' + namespace) + ].join(','); + createDebug.enable(''); + return namespaces; + } + + /** + * Returns true if the given mode name is enabled, false otherwise. + * + * @param {String} name + * @return {Boolean} + * @api public + */ + function enabled(name) { + for (const skip of createDebug.skips) { + if (matchesTemplate(name, skip)) { + return false; + } + } + + for (const ns of createDebug.names) { + if (matchesTemplate(name, ns)) { + return true; + } + } + + return false; + } + + /** + * Coerce `val`. 
+ * + * @param {Mixed} val + * @return {Mixed} + * @api private + */ + function coerce(val) { + if (val instanceof Error) { + return val.stack || val.message; + } + return val; + } + + /** + * XXX DO NOT USE. This is a temporary stub function. + * XXX It WILL be removed in the next major release. + */ + function destroy() { + console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); + } + + createDebug.enable(createDebug.load()); + + return createDebug; +} + +module.exports = setup; diff --git a/node_modules/https-proxy-agent/node_modules/debug/src/index.js b/node_modules/https-proxy-agent/node_modules/debug/src/index.js new file mode 100644 index 0000000..bf4c57f --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/src/index.js @@ -0,0 +1,10 @@ +/** + * Detect Electron renderer / nwjs process, which is node, but we should + * treat as a browser. + */ + +if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { + module.exports = require('./browser.js'); +} else { + module.exports = require('./node.js'); +} diff --git a/node_modules/https-proxy-agent/node_modules/debug/src/node.js b/node_modules/https-proxy-agent/node_modules/debug/src/node.js new file mode 100644 index 0000000..715560a --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/debug/src/node.js @@ -0,0 +1,263 @@ +/** + * Module dependencies. + */ + +const tty = require('tty'); +const util = require('util'); + +/** + * This is the Node.js implementation of `debug()`. + */ + +exports.init = init; +exports.log = log; +exports.formatArgs = formatArgs; +exports.save = save; +exports.load = load; +exports.useColors = useColors; +exports.destroy = util.deprecate( + () => {}, + 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' +); + +/** + * Colors. 
+ */ + +exports.colors = [6, 2, 3, 4, 5, 1]; + +try { + // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) + // eslint-disable-next-line import/no-extraneous-dependencies + const supportsColor = require('supports-color'); + + if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { + exports.colors = [ + 20, + 21, + 26, + 27, + 32, + 33, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 56, + 57, + 62, + 63, + 68, + 69, + 74, + 75, + 76, + 77, + 78, + 79, + 80, + 81, + 92, + 93, + 98, + 99, + 112, + 113, + 128, + 129, + 134, + 135, + 148, + 149, + 160, + 161, + 162, + 163, + 164, + 165, + 166, + 167, + 168, + 169, + 170, + 171, + 172, + 173, + 178, + 179, + 184, + 185, + 196, + 197, + 198, + 199, + 200, + 201, + 202, + 203, + 204, + 205, + 206, + 207, + 208, + 209, + 214, + 215, + 220, + 221 + ]; + } +} catch (error) { + // Swallow - we only care if `supports-color` is available; it doesn't have to be. +} + +/** + * Build up the default `inspectOpts` object from the environment variables. + * + * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js + */ + +exports.inspectOpts = Object.keys(process.env).filter(key => { + return /^debug_/i.test(key); +}).reduce((obj, key) => { + // Camel-case + const prop = key + .substring(6) + .toLowerCase() + .replace(/_([a-z])/g, (_, k) => { + return k.toUpperCase(); + }); + + // Coerce string value into JS value + let val = process.env[key]; + if (/^(yes|on|true|enabled)$/i.test(val)) { + val = true; + } else if (/^(no|off|false|disabled)$/i.test(val)) { + val = false; + } else if (val === 'null') { + val = null; + } else { + val = Number(val); + } + + obj[prop] = val; + return obj; +}, {}); + +/** + * Is stdout a TTY? Colored output is enabled when `true`. + */ + +function useColors() { + return 'colors' in exports.inspectOpts ? 
+ Boolean(exports.inspectOpts.colors) : + tty.isatty(process.stderr.fd); +} + +/** + * Adds ANSI color escape codes if enabled. + * + * @api public + */ + +function formatArgs(args) { + const {namespace: name, useColors} = this; + + if (useColors) { + const c = this.color; + const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); + const prefix = ` ${colorCode};1m${name} \u001B[0m`; + + args[0] = prefix + args[0].split('\n').join('\n' + prefix); + args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); + } else { + args[0] = getDate() + name + ' ' + args[0]; + } +} + +function getDate() { + if (exports.inspectOpts.hideDate) { + return ''; + } + return new Date().toISOString() + ' '; +} + +/** + * Invokes `util.formatWithOptions()` with the specified arguments and writes to stderr. + */ + +function log(...args) { + return process.stderr.write(util.formatWithOptions(exports.inspectOpts, ...args) + '\n'); +} + +/** + * Save `namespaces`. + * + * @param {String} namespaces + * @api private + */ +function save(namespaces) { + if (namespaces) { + process.env.DEBUG = namespaces; + } else { + // If you set a process.env field to null or undefined, it gets cast to the + // string 'null' or 'undefined'. Just delete instead. + delete process.env.DEBUG; + } +} + +/** + * Load `namespaces`. + * + * @return {String} returns the previously persisted debug modes + * @api private + */ + +function load() { + return process.env.DEBUG; +} + +/** + * Init logic for `debug` instances. + * + * Create a new `inspectOpts` object in case `useColors` is set + * differently for a particular `debug` instance. 
+ */ + +function init(debug) { + debug.inspectOpts = {}; + + const keys = Object.keys(exports.inspectOpts); + for (let i = 0; i < keys.length; i++) { + debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; + } +} + +module.exports = require('./common')(exports); + +const {formatters} = module.exports; + +/** + * Map %o to `util.inspect()`, all on a single line. + */ + +formatters.o = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts) + .split('\n') + .map(str => str.trim()) + .join(' '); +}; + +/** + * Map %O to `util.inspect()`, allowing multiple lines if needed. + */ + +formatters.O = function (v) { + this.inspectOpts.colors = this.useColors; + return util.inspect(v, this.inspectOpts); +}; diff --git a/node_modules/https-proxy-agent/node_modules/ms/index.js b/node_modules/https-proxy-agent/node_modules/ms/index.js new file mode 100644 index 0000000..ea734fb --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/ms/index.js @@ -0,0 +1,162 @@ +/** + * Helpers. + */ + +var s = 1000; +var m = s * 60; +var h = m * 60; +var d = h * 24; +var w = d * 7; +var y = d * 365.25; + +/** + * Parse or format the given `val`. + * + * Options: + * + * - `long` verbose formatting [false] + * + * @param {String|Number} val + * @param {Object} [options] + * @throws {Error} throw an error if val is not a non-empty string or a number + * @return {String|Number} + * @api public + */ + +module.exports = function (val, options) { + options = options || {}; + var type = typeof val; + if (type === 'string' && val.length > 0) { + return parse(val); + } else if (type === 'number' && isFinite(val)) { + return options.long ? fmtLong(val) : fmtShort(val); + } + throw new Error( + 'val is not a non-empty string or a valid number. val=' + + JSON.stringify(val) + ); +}; + +/** + * Parse the given `str` and return milliseconds. 
+ * + * @param {String} str + * @return {Number} + * @api private + */ + +function parse(str) { + str = String(str); + if (str.length > 100) { + return; + } + var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( + str + ); + if (!match) { + return; + } + var n = parseFloat(match[1]); + var type = (match[2] || 'ms').toLowerCase(); + switch (type) { + case 'years': + case 'year': + case 'yrs': + case 'yr': + case 'y': + return n * y; + case 'weeks': + case 'week': + case 'w': + return n * w; + case 'days': + case 'day': + case 'd': + return n * d; + case 'hours': + case 'hour': + case 'hrs': + case 'hr': + case 'h': + return n * h; + case 'minutes': + case 'minute': + case 'mins': + case 'min': + case 'm': + return n * m; + case 'seconds': + case 'second': + case 'secs': + case 'sec': + case 's': + return n * s; + case 'milliseconds': + case 'millisecond': + case 'msecs': + case 'msec': + case 'ms': + return n; + default: + return undefined; + } +} + +/** + * Short format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtShort(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return Math.round(ms / d) + 'd'; + } + if (msAbs >= h) { + return Math.round(ms / h) + 'h'; + } + if (msAbs >= m) { + return Math.round(ms / m) + 'm'; + } + if (msAbs >= s) { + return Math.round(ms / s) + 's'; + } + return ms + 'ms'; +} + +/** + * Long format for `ms`. + * + * @param {Number} ms + * @return {String} + * @api private + */ + +function fmtLong(ms) { + var msAbs = Math.abs(ms); + if (msAbs >= d) { + return plural(ms, msAbs, d, 'day'); + } + if (msAbs >= h) { + return plural(ms, msAbs, h, 'hour'); + } + if (msAbs >= m) { + return plural(ms, msAbs, m, 'minute'); + } + if (msAbs >= s) { + return plural(ms, msAbs, s, 'second'); + } + return ms + ' ms'; +} + +/** + * Pluralization helper. 
+ */ + +function plural(ms, msAbs, n, name) { + var isPlural = msAbs >= n * 1.5; + return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : ''); +} diff --git a/node_modules/https-proxy-agent/node_modules/ms/license.md b/node_modules/https-proxy-agent/node_modules/ms/license.md new file mode 100644 index 0000000..fa5d39b --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/ms/license.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2020 Vercel, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/https-proxy-agent/node_modules/ms/package.json b/node_modules/https-proxy-agent/node_modules/ms/package.json new file mode 100644 index 0000000..4997189 --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/ms/package.json @@ -0,0 +1,38 @@ +{ + "name": "ms", + "version": "2.1.3", + "description": "Tiny millisecond conversion utility", + "repository": "vercel/ms", + "main": "./index", + "files": [ + "index.js" + ], + "scripts": { + "precommit": "lint-staged", + "lint": "eslint lib/* bin/*", + "test": "mocha tests.js" + }, + "eslintConfig": { + "extends": "eslint:recommended", + "env": { + "node": true, + "es6": true + } + }, + "lint-staged": { + "*.js": [ + "npm run lint", + "prettier --single-quote --write", + "git add" + ] + }, + "license": "MIT", + "devDependencies": { + "eslint": "4.18.2", + "expect.js": "0.3.1", + "husky": "0.14.3", + "lint-staged": "5.0.0", + "mocha": "4.0.1", + "prettier": "2.0.5" + } +} diff --git a/node_modules/https-proxy-agent/node_modules/ms/readme.md b/node_modules/https-proxy-agent/node_modules/ms/readme.md new file mode 100644 index 0000000..0fc1abb --- /dev/null +++ b/node_modules/https-proxy-agent/node_modules/ms/readme.md @@ -0,0 +1,59 @@ +# ms + +![CI](https://github.com/vercel/ms/workflows/CI/badge.svg) + +Use this package to easily convert various time formats to milliseconds. 
+ +## Examples + +```js +ms('2 days') // 172800000 +ms('1d') // 86400000 +ms('10h') // 36000000 +ms('2.5 hrs') // 9000000 +ms('2h') // 7200000 +ms('1m') // 60000 +ms('5s') // 5000 +ms('1y') // 31557600000 +ms('100') // 100 +ms('-3 days') // -259200000 +ms('-1h') // -3600000 +ms('-200') // -200 +``` + +### Convert from Milliseconds + +```js +ms(60000) // "1m" +ms(2 * 60000) // "2m" +ms(-3 * 60000) // "-3m" +ms(ms('10 hours')) // "10h" +``` + +### Time Format Written-Out + +```js +ms(60000, { long: true }) // "1 minute" +ms(2 * 60000, { long: true }) // "2 minutes" +ms(-3 * 60000, { long: true }) // "-3 minutes" +ms(ms('10 hours'), { long: true }) // "10 hours" +``` + +## Features + +- Works both in [Node.js](https://nodejs.org) and in the browser +- If a number is supplied to `ms`, a string with a unit is returned +- If a string that contains the number is supplied, it returns it as a number (e.g.: it returns `100` for `'100'`) +- If you pass a string with a number and a valid unit, the number of equivalent milliseconds is returned + +## Related Packages + +- [ms.macro](https://github.com/knpwrs/ms.macro) - Run `ms` as a macro at build-time. + +## Caught a Bug? + +1. [Fork](https://help.github.com/articles/fork-a-repo/) this repository to your own GitHub account and then [clone](https://help.github.com/articles/cloning-a-repository/) it to your local device +2. Link the package to the global module directory: `npm link` +3. Within the module you want to test your local development instance of ms, just link it to the dependencies: `npm link ms`. Instead of the default one from npm, Node.js will now use your clone of ms! 
+ +As always, you can run the tests using: `npm test` diff --git a/node_modules/https-proxy-agent/package.json b/node_modules/https-proxy-agent/package.json new file mode 100644 index 0000000..fb2aba1 --- /dev/null +++ b/node_modules/https-proxy-agent/package.json @@ -0,0 +1,56 @@ +{ + "name": "https-proxy-agent", + "version": "5.0.1", + "description": "An HTTP(s) proxy `http.Agent` implementation for HTTPS", + "main": "dist/index", + "types": "dist/index", + "files": [ + "dist" + ], + "scripts": { + "prebuild": "rimraf dist", + "build": "tsc", + "test": "mocha --reporter spec", + "test-lint": "eslint src --ext .js,.ts", + "prepublishOnly": "npm run build" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/node-https-proxy-agent.git" + }, + "keywords": [ + "https", + "proxy", + "endpoint", + "agent" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/node-https-proxy-agent/issues" + }, + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "devDependencies": { + "@types/debug": "4", + "@types/node": "^12.12.11", + "@typescript-eslint/eslint-plugin": "1.6.0", + "@typescript-eslint/parser": "1.1.0", + "eslint": "5.16.0", + "eslint-config-airbnb": "17.1.0", + "eslint-config-prettier": "4.1.0", + "eslint-import-resolver-typescript": "1.1.1", + "eslint-plugin-import": "2.16.0", + "eslint-plugin-jsx-a11y": "6.2.1", + "eslint-plugin-react": "7.12.4", + "mocha": "^6.2.2", + "proxy": "1", + "rimraf": "^3.0.0", + "typescript": "^3.5.3" + }, + "engines": { + "node": ">= 6" + } +} diff --git a/node_modules/inflight/LICENSE b/node_modules/inflight/LICENSE new file mode 100644 index 0000000..05eeeb8 --- /dev/null +++ b/node_modules/inflight/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/inflight/README.md b/node_modules/inflight/README.md new file mode 100644 index 0000000..6dc8929 --- /dev/null +++ b/node_modules/inflight/README.md @@ -0,0 +1,37 @@ +# inflight + +Add callbacks to requests in flight to avoid async duplication + +## USAGE + +```javascript +var inflight = require('inflight') + +// some request that does some stuff +function req(key, callback) { + // key is any random string. like a url or filename or whatever. + // + // will return either a falsey value, indicating that the + // request for this key is already in flight, or a new callback + // which when called will call all callbacks passed to inflightk + // with the same key + callback = inflight(key, callback) + + // If we got a falsey value back, then there's already a req going + if (!callback) return + + // this is where you'd fetch the url or whatever + // callback is also once()-ified, so it can safely be assigned + // to multiple events etc. First call wins. 
+ setTimeout(function() { + callback(null, key) + }, 100) +} + +// only assigns a single setTimeout +// when it dings, all cbs get called +req('foo', cb1) +req('foo', cb2) +req('foo', cb3) +req('foo', cb4) +``` diff --git a/node_modules/inflight/inflight.js b/node_modules/inflight/inflight.js new file mode 100644 index 0000000..48202b3 --- /dev/null +++ b/node_modules/inflight/inflight.js @@ -0,0 +1,54 @@ +var wrappy = require('wrappy') +var reqs = Object.create(null) +var once = require('once') + +module.exports = wrappy(inflight) + +function inflight (key, cb) { + if (reqs[key]) { + reqs[key].push(cb) + return null + } else { + reqs[key] = [cb] + return makeres(key) + } +} + +function makeres (key) { + return once(function RES () { + var cbs = reqs[key] + var len = cbs.length + var args = slice(arguments) + + // XXX It's somewhat ambiguous whether a new callback added in this + // pass should be queued for later execution if something in the + // list of callbacks throws, or if it should just be discarded. + // However, it's such an edge case that it hardly matters, and either + // choice is likely as surprising as the other. + // As it happens, we do go ahead and schedule it for later execution. + try { + for (var i = 0; i < len; i++) { + cbs[i].apply(null, args) + } + } finally { + if (cbs.length > len) { + // added more in the interim. + // de-zalgo, just in case, but don't call again. 
+ cbs.splice(0, len) + process.nextTick(function () { + RES.apply(null, args) + }) + } else { + delete reqs[key] + } + } + }) +} + +function slice (args) { + var length = args.length + var array = [] + + for (var i = 0; i < length; i++) array[i] = args[i] + return array +} diff --git a/node_modules/inflight/package.json b/node_modules/inflight/package.json new file mode 100644 index 0000000..6084d35 --- /dev/null +++ b/node_modules/inflight/package.json @@ -0,0 +1,29 @@ +{ + "name": "inflight", + "version": "1.0.6", + "description": "Add callbacks to requests in flight to avoid async duplication", + "main": "inflight.js", + "files": [ + "inflight.js" + ], + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.1.2" + }, + "scripts": { + "test": "tap test.js --100" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/inflight.git" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "bugs": { + "url": "https://github.com/isaacs/inflight/issues" + }, + "homepage": "https://github.com/isaacs/inflight", + "license": "ISC" +} diff --git a/node_modules/is-fullwidth-code-point/index.d.ts b/node_modules/is-fullwidth-code-point/index.d.ts new file mode 100644 index 0000000..729d202 --- /dev/null +++ b/node_modules/is-fullwidth-code-point/index.d.ts @@ -0,0 +1,17 @@ +/** +Check if the character represented by a given [Unicode code point](https://en.wikipedia.org/wiki/Code_point) is [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms). + +@param codePoint - The [code point](https://en.wikipedia.org/wiki/Code_point) of a character. 
+ +@example +``` +import isFullwidthCodePoint from 'is-fullwidth-code-point'; + +isFullwidthCodePoint('谢'.codePointAt(0)); +//=> true + +isFullwidthCodePoint('a'.codePointAt(0)); +//=> false +``` +*/ +export default function isFullwidthCodePoint(codePoint: number): boolean; diff --git a/node_modules/is-fullwidth-code-point/index.js b/node_modules/is-fullwidth-code-point/index.js new file mode 100644 index 0000000..671f97f --- /dev/null +++ b/node_modules/is-fullwidth-code-point/index.js @@ -0,0 +1,50 @@ +/* eslint-disable yoda */ +'use strict'; + +const isFullwidthCodePoint = codePoint => { + if (Number.isNaN(codePoint)) { + return false; + } + + // Code points are derived from: + // http://www.unix.org/Public/UNIDATA/EastAsianWidth.txt + if ( + codePoint >= 0x1100 && ( + codePoint <= 0x115F || // Hangul Jamo + codePoint === 0x2329 || // LEFT-POINTING ANGLE BRACKET + codePoint === 0x232A || // RIGHT-POINTING ANGLE BRACKET + // CJK Radicals Supplement .. Enclosed CJK Letters and Months + (0x2E80 <= codePoint && codePoint <= 0x3247 && codePoint !== 0x303F) || + // Enclosed CJK Letters and Months .. CJK Unified Ideographs Extension A + (0x3250 <= codePoint && codePoint <= 0x4DBF) || + // CJK Unified Ideographs .. Yi Radicals + (0x4E00 <= codePoint && codePoint <= 0xA4C6) || + // Hangul Jamo Extended-A + (0xA960 <= codePoint && codePoint <= 0xA97C) || + // Hangul Syllables + (0xAC00 <= codePoint && codePoint <= 0xD7A3) || + // CJK Compatibility Ideographs + (0xF900 <= codePoint && codePoint <= 0xFAFF) || + // Vertical Forms + (0xFE10 <= codePoint && codePoint <= 0xFE19) || + // CJK Compatibility Forms .. 
Small Form Variants + (0xFE30 <= codePoint && codePoint <= 0xFE6B) || + // Halfwidth and Fullwidth Forms + (0xFF01 <= codePoint && codePoint <= 0xFF60) || + (0xFFE0 <= codePoint && codePoint <= 0xFFE6) || + // Kana Supplement + (0x1B000 <= codePoint && codePoint <= 0x1B001) || + // Enclosed Ideographic Supplement + (0x1F200 <= codePoint && codePoint <= 0x1F251) || + // CJK Unified Ideographs Extension B .. Tertiary Ideographic Plane + (0x20000 <= codePoint && codePoint <= 0x3FFFD) + ) + ) { + return true; + } + + return false; +}; + +module.exports = isFullwidthCodePoint; +module.exports.default = isFullwidthCodePoint; diff --git a/node_modules/is-fullwidth-code-point/license b/node_modules/is-fullwidth-code-point/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/is-fullwidth-code-point/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/is-fullwidth-code-point/package.json b/node_modules/is-fullwidth-code-point/package.json new file mode 100644 index 0000000..2137e88 --- /dev/null +++ b/node_modules/is-fullwidth-code-point/package.json @@ -0,0 +1,42 @@ +{ + "name": "is-fullwidth-code-point", + "version": "3.0.0", + "description": "Check if the character represented by a given Unicode code point is fullwidth", + "license": "MIT", + "repository": "sindresorhus/is-fullwidth-code-point", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd-check" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "fullwidth", + "full-width", + "full", + "width", + "unicode", + "character", + "string", + "codepoint", + "code", + "point", + "is", + "detect", + "check" + ], + "devDependencies": { + "ava": "^1.3.1", + "tsd-check": "^0.5.0", + "xo": "^0.24.0" + } +} diff --git a/node_modules/is-fullwidth-code-point/readme.md b/node_modules/is-fullwidth-code-point/readme.md new file mode 100644 index 0000000..4236bba --- /dev/null +++ b/node_modules/is-fullwidth-code-point/readme.md @@ -0,0 +1,39 @@ +# is-fullwidth-code-point [![Build Status](https://travis-ci.org/sindresorhus/is-fullwidth-code-point.svg?branch=master)](https://travis-ci.org/sindresorhus/is-fullwidth-code-point) + +> Check if the character represented by a given [Unicode code point](https://en.wikipedia.org/wiki/Code_point) is [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) + + +## Install + +``` +$ npm install is-fullwidth-code-point +``` + + +## Usage + +```js +const isFullwidthCodePoint = require('is-fullwidth-code-point'); + +isFullwidthCodePoint('谢'.codePointAt(0)); +//=> true + +isFullwidthCodePoint('a'.codePointAt(0)); +//=> false +``` + + +## API + +### isFullwidthCodePoint(codePoint) + +#### codePoint + +Type: `number` + +The [code 
point](https://en.wikipedia.org/wiki/Code_point) of a character. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/make-dir/index.d.ts b/node_modules/make-dir/index.d.ts new file mode 100644 index 0000000..3a78251 --- /dev/null +++ b/node_modules/make-dir/index.d.ts @@ -0,0 +1,66 @@ +/// +import * as fs from 'fs'; + +declare namespace makeDir { + interface Options { + /** + Directory [permissions](https://x-team.com/blog/file-system-permissions-umask-node-js/). + + @default 0o777 + */ + readonly mode?: number; + + /** + Use a custom `fs` implementation. For example [`graceful-fs`](https://github.com/isaacs/node-graceful-fs). + + Using a custom `fs` implementation will block the use of the native `recursive` option if `fs.mkdir` or `fs.mkdirSync` is not the native function. + + @default require('fs') + */ + readonly fs?: typeof fs; + } +} + +declare const makeDir: { + /** + Make a directory and its parents if needed - Think `mkdir -p`. + + @param path - Directory to create. + @returns The path to the created directory. + + @example + ``` + import makeDir = require('make-dir'); + + (async () => { + const path = await makeDir('unicorn/rainbow/cake'); + + console.log(path); + //=> '/Users/sindresorhus/fun/unicorn/rainbow/cake' + + // Multiple directories: + const paths = await Promise.all([ + makeDir('unicorn/rainbow'), + makeDir('foo/bar') + ]); + + console.log(paths); + // [ + // '/Users/sindresorhus/fun/unicorn/rainbow', + // '/Users/sindresorhus/fun/foo/bar' + // ] + })(); + ``` + */ + (path: string, options?: makeDir.Options): Promise; + + /** + Synchronously make a directory and its parents if needed - Think `mkdir -p`. + + @param path - Directory to create. + @returns The path to the created directory. 
+ */ + sync(path: string, options?: makeDir.Options): string; +}; + +export = makeDir; diff --git a/node_modules/make-dir/index.js b/node_modules/make-dir/index.js new file mode 100644 index 0000000..75889d8 --- /dev/null +++ b/node_modules/make-dir/index.js @@ -0,0 +1,156 @@ +'use strict'; +const fs = require('fs'); +const path = require('path'); +const {promisify} = require('util'); +const semver = require('semver'); + +const useNativeRecursiveOption = semver.satisfies(process.version, '>=10.12.0'); + +// https://github.com/nodejs/node/issues/8987 +// https://github.com/libuv/libuv/pull/1088 +const checkPath = pth => { + if (process.platform === 'win32') { + const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, '')); + + if (pathHasInvalidWinCharacters) { + const error = new Error(`Path contains invalid characters: ${pth}`); + error.code = 'EINVAL'; + throw error; + } + } +}; + +const processOptions = options => { + // https://github.com/sindresorhus/make-dir/issues/18 + const defaults = { + mode: 0o777, + fs + }; + + return { + ...defaults, + ...options + }; +}; + +const permissionError = pth => { + // This replicates the exception of `fs.mkdir` with native the + // `recusive` option when run on an invalid drive under Windows. 
+ const error = new Error(`operation not permitted, mkdir '${pth}'`); + error.code = 'EPERM'; + error.errno = -4048; + error.path = pth; + error.syscall = 'mkdir'; + return error; +}; + +const makeDir = async (input, options) => { + checkPath(input); + options = processOptions(options); + + const mkdir = promisify(options.fs.mkdir); + const stat = promisify(options.fs.stat); + + if (useNativeRecursiveOption && options.fs.mkdir === fs.mkdir) { + const pth = path.resolve(input); + + await mkdir(pth, { + mode: options.mode, + recursive: true + }); + + return pth; + } + + const make = async pth => { + try { + await mkdir(pth, options.mode); + + return pth; + } catch (error) { + if (error.code === 'EPERM') { + throw error; + } + + if (error.code === 'ENOENT') { + if (path.dirname(pth) === pth) { + throw permissionError(pth); + } + + if (error.message.includes('null bytes')) { + throw error; + } + + await make(path.dirname(pth)); + + return make(pth); + } + + try { + const stats = await stat(pth); + if (!stats.isDirectory()) { + throw new Error('The path is not a directory'); + } + } catch (_) { + throw error; + } + + return pth; + } + }; + + return make(path.resolve(input)); +}; + +module.exports = makeDir; + +module.exports.sync = (input, options) => { + checkPath(input); + options = processOptions(options); + + if (useNativeRecursiveOption && options.fs.mkdirSync === fs.mkdirSync) { + const pth = path.resolve(input); + + fs.mkdirSync(pth, { + mode: options.mode, + recursive: true + }); + + return pth; + } + + const make = pth => { + try { + options.fs.mkdirSync(pth, options.mode); + } catch (error) { + if (error.code === 'EPERM') { + throw error; + } + + if (error.code === 'ENOENT') { + if (path.dirname(pth) === pth) { + throw permissionError(pth); + } + + if (error.message.includes('null bytes')) { + throw error; + } + + make(path.dirname(pth)); + return make(pth); + } + + try { + if (!options.fs.statSync(pth).isDirectory()) { + throw new Error('The path is not a 
directory'); + } + } catch (_) { + throw error; + } + } + + return pth; + }; + + return make(path.resolve(input)); +}; diff --git a/node_modules/make-dir/license b/node_modules/make-dir/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/make-dir/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/make-dir/node_modules/.bin/semver b/node_modules/make-dir/node_modules/.bin/semver new file mode 100644 index 0000000..97c5327 --- /dev/null +++ b/node_modules/make-dir/node_modules/.bin/semver @@ -0,0 +1,16 @@ +#!/bin/sh +basedir=$(dirname "$(echo "$0" | sed -e 's,\\,/,g')") + +case `uname` in + *CYGWIN*|*MINGW*|*MSYS*) + if command -v cygpath > /dev/null 2>&1; then + basedir=`cygpath -w "$basedir"` + fi + ;; +esac + +if [ -x "$basedir/node" ]; then + exec "$basedir/node" "$basedir/../semver/bin/semver.js" "$@" +else + exec node "$basedir/../semver/bin/semver.js" "$@" +fi diff --git a/node_modules/make-dir/node_modules/.bin/semver.cmd b/node_modules/make-dir/node_modules/.bin/semver.cmd new file mode 100644 index 0000000..9913fa9 --- /dev/null +++ b/node_modules/make-dir/node_modules/.bin/semver.cmd @@ -0,0 +1,17 @@ +@ECHO off +GOTO start +:find_dp0 +SET dp0=%~dp0 +EXIT /b +:start +SETLOCAL +CALL :find_dp0 + +IF EXIST "%dp0%\node.exe" ( + SET "_prog=%dp0%\node.exe" +) ELSE ( + SET "_prog=node" + SET PATHEXT=%PATHEXT:;.JS;=;% +) + +endLocal & goto #_undefined_# 2>NUL || title %COMSPEC% & "%_prog%" "%dp0%\..\semver\bin\semver.js" %* diff --git a/node_modules/make-dir/node_modules/.bin/semver.ps1 b/node_modules/make-dir/node_modules/.bin/semver.ps1 new file mode 100644 index 0000000..314717a --- /dev/null +++ b/node_modules/make-dir/node_modules/.bin/semver.ps1 @@ -0,0 +1,28 @@ +#!/usr/bin/env pwsh +$basedir=Split-Path $MyInvocation.MyCommand.Definition -Parent + +$exe="" +if ($PSVersionTable.PSVersion -lt "6.0" -or $IsWindows) { + # Fix case when both the Windows and Linux builds of Node + # are installed in the same directory + $exe=".exe" +} +$ret=0 +if (Test-Path "$basedir/node$exe") { + # Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args + } else { + & "$basedir/node$exe" "$basedir/../semver/bin/semver.js" $args + } + $ret=$LASTEXITCODE +} else { + # 
Support pipeline input + if ($MyInvocation.ExpectingInput) { + $input | & "node$exe" "$basedir/../semver/bin/semver.js" $args + } else { + & "node$exe" "$basedir/../semver/bin/semver.js" $args + } + $ret=$LASTEXITCODE +} +exit $ret diff --git a/node_modules/make-dir/node_modules/semver/LICENSE b/node_modules/make-dir/node_modules/semver/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/make-dir/node_modules/semver/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/make-dir/node_modules/semver/README.md b/node_modules/make-dir/node_modules/semver/README.md new file mode 100644 index 0000000..2293a14 --- /dev/null +++ b/node_modules/make-dir/node_modules/semver/README.md @@ -0,0 +1,443 @@ +semver(1) -- The semantic versioner for npm +=========================================== + +## Install + +```bash +npm install semver +```` + +## Usage + +As a node module: + +```js +const semver = require('semver') + +semver.valid('1.2.3') // '1.2.3' +semver.valid('a.b.c') // null +semver.clean(' =v1.2.3 ') // '1.2.3' +semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true +semver.gt('1.2.3', '9.8.7') // false +semver.lt('1.2.3', '9.8.7') // true +semver.minVersion('>=1.0.0') // '1.0.0' +semver.valid(semver.coerce('v2')) // '2.0.0' +semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' +``` + +As a command-line utility: + +``` +$ semver -h + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, or prerelease. Default level is 'patch'. + Only one version may be specified. + +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. + +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. 
+ +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them. +``` + +## Versions + +A "version" is described by the `v2.0.0` specification found at +. + +A leading `"="` or `"v"` character is stripped off and ignored. + +## Ranges + +A `version range` is a set of `comparators` which specify versions +that satisfy the range. + +A `comparator` is composed of an `operator` and a `version`. The set +of primitive `operators` is: + +* `<` Less than +* `<=` Less than or equal to +* `>` Greater than +* `>=` Greater than or equal to +* `=` Equal. If no operator is specified, then equality is assumed, + so this operator is optional, but MAY be included. + +For example, the comparator `>=1.2.7` would match the versions +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` +or `1.1.0`. + +Comparators can be joined by whitespace to form a `comparator set`, +which is satisfied by the **intersection** of all of the comparators +it includes. + +A range is composed of one or more comparator sets, joined by `||`. A +version matches a range if and only if every comparator in at least +one of the `||`-separated comparator sets is satisfied by the version. + +For example, the range `>=1.2.7 <1.3.0` would match the versions +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, +or `1.1.0`. + +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. + +### Prerelease Tags + +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then +it will only be allowed to satisfy comparator sets if at least one +comparator with the same `[major, minor, patch]` tuple also has a +prerelease tag. 
+ +For example, the range `>1.2.3-alpha.3` would be allowed to match the +version `1.2.3-alpha.7`, but it would *not* be satisfied by +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version +range only accepts prerelease tags on the `1.2.3` version. The +version `3.4.5` *would* satisfy the range, because it does not have a +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. + +The purpose for this behavior is twofold. First, prerelease versions +frequently are updated very quickly, and contain many breaking changes +that are (by the author's design) not yet fit for public consumption. +Therefore, by default, they are excluded from range matching +semantics. + +Second, a user who has opted into using a prerelease version has +clearly indicated the intent to use *that specific* set of +alpha/beta/rc versions. By including a prerelease tag in the range, +the user is indicating that they are aware of the risk. However, it +is still not appropriate to assume that they have opted into taking a +similar risk on the *next* set of prerelease versions. + +Note that this behavior can be suppressed (treating all prerelease +versions as if they were normal versions, for the purpose of range +matching) by setting the `includePrerelease` flag on the options +object to any +[functions](https://github.com/npm/node-semver#functions) that do +range matching. 
+ +#### Prerelease Identifiers + +The method `.inc` takes an additional `identifier` string argument that +will append the value of the string as a prerelease identifier: + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta') +// '1.2.4-beta.0' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta +1.2.4-beta.0 +``` + +Which then can be used to increment further: + +```bash +$ semver 1.2.4-beta.0 -i prerelease +1.2.4-beta.1 +``` + +### Advanced Range Syntax + +Advanced range syntax desugars to primitive comparators in +deterministic ways. + +Advanced ranges may be combined in the same way as primitive +comparators using white space or `||`. + +#### Hyphen Ranges `X.Y.Z - A.B.C` + +Specifies an inclusive set. + +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` + +If a partial version is provided as the first version in the inclusive +range, then the missing pieces are replaced with zeroes. + +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` + +If a partial version is provided as the second version in the +inclusive range, then all versions that start with the supplied parts +of the tuple are accepted, but nothing that would be greater than the +provided tuple parts. + +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0` +* `1.2.3 - 2` := `>=1.2.3 <3.0.0` + +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` + +Any of `X`, `x`, or `*` may be used to "stand in" for one of the +numeric values in the `[major, minor, patch]` tuple. + +* `*` := `>=0.0.0` (Any version satisfies) +* `1.x` := `>=1.0.0 <2.0.0` (Matching major version) +* `1.2.x` := `>=1.2.0 <1.3.0` (Matching major and minor versions) + +A partial version range is treated as an X-Range, so the special +character is in fact optional. + +* `""` (empty string) := `*` := `>=0.0.0` +* `1` := `1.x.x` := `>=1.0.0 <2.0.0` +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0` + +#### Tilde Ranges `~1.2.3` `~1.2` `~1` + +Allows patch-level changes if a minor version is specified on the +comparator. Allows minor-level changes if not. 
+ +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0` +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0` (Same as `1.2.x`) +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0` (Same as `1.x`) +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0` +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0` (Same as `0.2.x`) +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0` (Same as `0.x`) +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. + +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` + +Allows changes that do not modify the left-most non-zero element in the +`[major, minor, patch]` tuple. In other words, this allows patch and +minor updates for versions `1.0.0` and above, patch updates for +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. + +Many authors treat a `0.x` version as if the `x` were the major +"breaking-change" indicator. + +Caret ranges are ideal when an author may make breaking changes +between `0.2.4` and `0.3.0` releases, which is a common practice. +However, it presumes that there will *not* be breaking changes between +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be +additive (but non-breaking), according to commonly observed practices. + +* `^1.2.3` := `>=1.2.3 <2.0.0` +* `^0.2.3` := `>=0.2.3 <0.3.0` +* `^0.0.3` := `>=0.0.3 <0.0.4` +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. 
+* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4` Note that prereleases in the + `0.0.3` version *only* will be allowed, if they are greater than or + equal to `beta`. So, `0.0.3-pr.2` would be allowed. + +When parsing caret ranges, a missing `patch` value desugars to the +number `0`, but will allow flexibility within that value, even if the +major and minor versions are both `0`. + +* `^1.2.x` := `>=1.2.0 <2.0.0` +* `^0.0.x` := `>=0.0.0 <0.1.0` +* `^0.0` := `>=0.0.0 <0.1.0` + +A missing `minor` and `patch` values will desugar to zero, but also +allow flexibility within those values, even if the major version is +zero. + +* `^1.x` := `>=1.0.0 <2.0.0` +* `^0.x` := `>=0.0.0 <1.0.0` + +### Range Grammar + +Putting all this together, here is a Backus-Naur grammar for ranges, +for the benefit of parser authors: + +```bnf +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ +``` + +## Functions + +All methods and classes take a final `options` object argument. All +options in this object are `false` by default. The options supported +are: + +- `loose` Be more forgiving about not-quite-valid semver strings. + (Any resulting output will always be 100% strict compliant, of + course.) For backwards compatibility reasons, if the `options` + argument is a boolean value instead of an object, it is interpreted + to be the `loose` param. 
+- `includePrerelease` Set to suppress the [default + behavior](https://github.com/npm/node-semver#prerelease-tags) of + excluding prerelease tagged versions from ranges unless they are + explicitly opted into. + +Strict-mode Comparators and Ranges will be strict about the SemVer +strings that they parse. + +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, release)`: Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, or `prerelease`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, the `prerelease` will work the + same as `prepatch`. It increments the patch version, then makes a + prerelease. If the input version is already a prerelease it simply + increments it. +* `prerelease(v)`: Returns an array of prerelease components, or null + if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` +* `major(v)`: Return the major version number. +* `minor(v)`: Return the minor version number. +* `patch(v)`: Return the patch version number. +* `intersects(r1, r2, loose)`: Return true if the two supplied ranges + or comparators intersect. +* `parse(v)`: Attempt to parse a string as a semantic version, returning either + a `SemVer` object or `null`. + +### Comparison + +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, + even if they're not the exact same string. You already know how to + compare strings. +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call + the corresponding function above. 
`"==="` and `"!=="` do simple + string comparison, but are included for completeness. Throws if an + invalid comparison string is provided. +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `rcompare(v1, v2)`: The reverse of compare. Sorts an array of versions + in descending order when passed to `Array.sort()`. +* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions + are equal. Sorts in ascending order if passed to `Array.sort()`. + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `diff(v1, v2)`: Returns difference between two versions by the release type + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), + or null if the versions are the same. + +### Comparators + +* `intersects(comparator)`: Return true if the comparators intersect + +### Ranges + +* `validRange(range)`: Return the valid range or null if it's not valid +* `satisfies(version, range)`: Return true if the version satisfies the + range. +* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. +* `minSatisfying(versions, range)`: Return the lowest version in the list + that satisfies the range, or `null` if none of them do. +* `minVersion(range)`: Return the lowest version that can possibly match + the given range. +* `gtr(version, range)`: Return `true` if version is greater than all the + versions possible in the range. +* `ltr(version, range)`: Return `true` if version is less than all the + versions possible in the range. +* `outside(version, range, hilo)`: Return true if the version is outside + the bounds of the range in either the high or low direction. The + `hilo` argument must be either the string `'>'` or `'<'`. (This is + the function called by `gtr` and `ltr`.) 
+* `intersects(range)`: Return true if any of the ranges comparators intersect + +Note that, since ranges may be non-contiguous, a version might not be +greater than a range, less than a range, *or* satisfy a range! For +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` +until `2.0.0`, so the version `1.2.10` would not be greater than the +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not +satisfy the range. + +If you want to know if a version satisfies or does not satisfy a +range, use the `satisfies(version, range)` function. + +### Coercion + +* `coerce(version, options)`: Coerces a string to semver if possible + +This aims to provide a very forgiving translation of a non-semver string to +semver. It looks for the first digit in a string, and consumes all +remaining characters which satisfy at least a partial semver (e.g., `1`, +`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer +versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All +surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes +`3.4.0`). Only text which lacks digits will fail coercion (`version one` +is not valid). The maximum length for any semver component considered for +coercion is 16 characters; longer components will be ignored +(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any +semver component is `Integer.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value +components are invalid (`9999999999999999.4.7.4` is likely invalid). + +If the `options.rtl` flag is set, then `coerce` will return the right-most +coercible tuple that does not share an ending index with a longer coercible +tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not +`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of +any other overlapping SemVer tuple. 
+ +### Clean + +* `clean(version)`: Clean a string to be a valid semver if possible + +This will return a cleaned and trimmed semver version. If the provided version is not valid a null will be returned. This does not work for ranges. + +ex. +* `s.clean(' = v 2.1.5foo')`: `null` +* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean(' = v 2.1.5-foo')`: `null` +* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean('=v2.1.5')`: `'2.1.5'` +* `s.clean(' =v2.1.5')`: `2.1.5` +* `s.clean(' 2.1.5 ')`: `'2.1.5'` +* `s.clean('~1.0.0')`: `null` diff --git a/node_modules/make-dir/node_modules/semver/bin/semver.js b/node_modules/make-dir/node_modules/semver/bin/semver.js new file mode 100644 index 0000000..666034a --- /dev/null +++ b/node_modules/make-dir/node_modules/semver/bin/semver.js @@ -0,0 +1,174 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
+ +var argv = process.argv.slice(2) + +var versions = [] + +var range = [] + +var inc = null + +var version = require('../package.json').version + +var loose = false + +var includePrerelease = false + +var coerce = false + +var rtl = false + +var identifier + +var semver = require('../semver') + +var reverse = false + +var options = {} + +main() + +function main () { + if (!argv.length) return help() + while (argv.length) { + var a = argv.shift() + var indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + a = a.slice(0, indexOfEqualSign) + argv.unshift(a.slice(indexOfEqualSign + 1)) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-c': case '--coerce': + coerce = true + break + case '--rtl': + rtl = true + break + case '--ltr': + rtl = false + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + var options = { loose: loose, includePrerelease: includePrerelease, rtl: rtl } + + versions = versions.map(function (v) { + return coerce ? 
(semver.coerce(v, options) || { version: v }).version : v + }).filter(function (v) { + return semver.valid(v) + }) + if (!versions.length) return fail() + if (inc && (versions.length !== 1 || range.length)) { return failInc() } + + for (var i = 0, l = range.length; i < l; i++) { + versions = versions.filter(function (v) { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) return fail() + } + return success(versions) +} + +function failInc () { + console.error('--inc can only be used on a single version with no range') + fail() +} + +function fail () { process.exit(1) } + +function success () { + var compare = reverse ? 'rcompare' : 'compare' + versions.sort(function (a, b) { + return semver[compare](a, b, options) + }).map(function (v) { + return semver.clean(v, options) + }).map(function (v) { + return inc ? semver.inc(v, inc, options, identifier) : v + }).forEach(function (v, i, _) { console.log(v) }) +} + +function help () { + console.log(['SemVer ' + version, + '', + 'A JavaScript implementation of the https://semver.org/ specification', + 'Copyright Isaac Z. Schlueter', + '', + 'Usage: semver [options] [ [...]]', + 'Prints valid versions sorted by SemVer precedence', + '', + 'Options:', + '-r --range ', + ' Print versions that match the specified range.', + '', + '-i --increment []', + ' Increment a version by the specified level. Level can', + ' be one of: major, minor, patch, premajor, preminor,', + " prepatch, or prerelease. 
Default level is 'patch'.", + ' Only one version may be specified.', + '', + '--preid ', + ' Identifier to be used to prefix premajor, preminor,', + ' prepatch or prerelease version increments.', + '', + '-l --loose', + ' Interpret versions and ranges loosely', + '', + '-p --include-prerelease', + ' Always include prerelease versions in range matching', + '', + '-c --coerce', + ' Coerce a string into SemVer if possible', + ' (does not imply --loose)', + '', + '--rtl', + ' Coerce version strings right to left', + '', + '--ltr', + ' Coerce version strings left to right (default)', + '', + 'Program exits successfully if any valid version satisfies', + 'all supplied ranges, and prints all satisfying versions.', + '', + 'If no satisfying versions are found, then exits failure.', + '', + 'Versions are printed in ascending order, so supplying', + 'multiple versions to the utility will just sort them.' + ].join('\n')) +} diff --git a/node_modules/make-dir/node_modules/semver/package.json b/node_modules/make-dir/node_modules/semver/package.json new file mode 100644 index 0000000..6b970a6 --- /dev/null +++ b/node_modules/make-dir/node_modules/semver/package.json @@ -0,0 +1,38 @@ +{ + "name": "semver", + "version": "6.3.1", + "description": "The semantic version parser used by npm.", + "main": "semver.js", + "scripts": { + "test": "tap test/ --100 --timeout=30", + "lint": "echo linting disabled", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap test/ --100 --timeout=30", + "posttest": "npm run lint" + }, + "devDependencies": { + "@npmcli/template-oss": "4.17.0", + "tap": "^12.7.0" + }, + "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/npm/node-semver.git" + }, + "bin": { + "semver": "./bin/semver.js" + }, + "files": [ + "bin", + "range.bnf", + "semver.js" + ], + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is 
partially managed by @npmcli/template-oss. Edits may be overwritten.", + "content": "./scripts/template-oss", + "version": "4.17.0" + } +} diff --git a/node_modules/make-dir/node_modules/semver/range.bnf b/node_modules/make-dir/node_modules/semver/range.bnf new file mode 100644 index 0000000..d4c6ae0 --- /dev/null +++ b/node_modules/make-dir/node_modules/semver/range.bnf @@ -0,0 +1,16 @@ +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | [1-9] ( [0-9] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ diff --git a/node_modules/make-dir/node_modules/semver/semver.js b/node_modules/make-dir/node_modules/semver/semver.js new file mode 100644 index 0000000..39319c1 --- /dev/null +++ b/node_modules/make-dir/node_modules/semver/semver.js @@ -0,0 +1,1643 @@ +exports = module.exports = SemVer + +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} +} + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' + +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. 
+var MAX_SAFE_COMPONENT_LENGTH = 16 + +var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +// The actual regexps go on exports.re +var re = exports.re = [] +var safeRe = exports.safeRe = [] +var src = exports.src = [] +var t = exports.tokens = {} +var R = 0 + +function tok (n) { + t[n] = R++ +} + +var LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +var safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +function makeSafeRe (value) { + for (var i = 0; i < safeRegexReplacements.length; i++) { + var token = safeRegexReplacements[i][0] + var max = safeRegexReplacements[i][1] + value = value + .split(token + '*').join(token + '{0,' + max + '}') + .split(token + '+').join(token + '{1,' + max + '}') + } + return value +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +tok('NUMERICIDENTIFIER') +src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' +tok('NUMERICIDENTIFIERLOOSE') +src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. + +tok('NONNUMERICIDENTIFIER') +src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' + +// ## Main Version +// Three dot-separated numeric identifiers. + +tok('MAINVERSION') +src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' 
+ + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIER] + ')' + +tok('MAINVERSIONLOOSE') +src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. + +tok('PRERELEASEIDENTIFIER') +src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +tok('PRERELEASEIDENTIFIERLOOSE') +src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + + '|' + src[t.NONNUMERICIDENTIFIER] + ')' + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +tok('PRERELEASE') +src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' + +tok('PRERELEASELOOSE') +src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +tok('BUILDIDENTIFIER') +src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +tok('BUILD') +src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. + +tok('FULL') +tok('FULLPLAIN') +src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + + src[t.PRERELEASE] + '?' + + src[t.BUILD] + '?' 
+ +src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +tok('LOOSEPLAIN') +src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + + src[t.PRERELEASELOOSE] + '?' + + src[t.BUILD] + '?' + +tok('LOOSE') +src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' + +tok('GTLT') +src[t.GTLT] = '((?:<|>)?=?)' + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +tok('XRANGEIDENTIFIERLOOSE') +src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +tok('XRANGEIDENTIFIER') +src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' + +tok('XRANGEPLAIN') +src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + + '(?:' + src[t.PRERELEASE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGEPLAINLOOSE') +src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[t.PRERELEASELOOSE] + ')?' + + src[t.BUILD] + '?' + + ')?)?' + +tok('XRANGE') +src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' +tok('XRANGELOOSE') +src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' + +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +tok('COERCE') +src[t.COERCE] = '(^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' 
+ + '(?:$|[^\\d])' +tok('COERCERTL') +re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') +safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +tok('LONETILDE') +src[t.LONETILDE] = '(?:~>?)' + +tok('TILDETRIM') +src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' +re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') +safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') +var tildeTrimReplace = '$1~' + +tok('TILDE') +src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' +tok('TILDELOOSE') +src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +tok('LONECARET') +src[t.LONECARET] = '(?:\\^)' + +tok('CARETTRIM') +src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' +re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') +safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') +var caretTrimReplace = '$1^' + +tok('CARET') +src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' +tok('CARETLOOSE') +src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +tok('COMPARATORLOOSE') +src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' +tok('COMPARATOR') +src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +tok('COMPARATORTRIM') +src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' + +// this one has to use the /g flag +re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') +safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') +var comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note 
that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +tok('HYPHENRANGE') +src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAIN] + ')' + + '\\s*$' + +tok('HYPHENRANGELOOSE') +src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[t.XRANGEPLAINLOOSE] + ')' + + '\\s*$' + +// Star ranges basically just allow anything at all. +tok('STAR') +src[t.STAR] = '(<|>)?=?\\s*\\*' + +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) + + // Replace all greedy whitespace to prevent regex dos issues. These regex are + // used internally via the safeRe object since all inputs in this library get + // normalized first to trim and collapse all extra whitespace. The original + // regexes are exported for userland consumption and lower level usage. A + // future breaking change could export the safer regex only with a note that + // all input should have extra whitespace removed. + safeRe[i] = new RegExp(makeSafeRe(src[i])) + } +} + +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + if (version.length > MAX_LENGTH) { + return null + } + + var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] + if (!r.test(version)) { + return null + } + + try { + return new SemVer(version, options) + } catch (er) { + return null + } +} + +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? 
v.version : null +} + +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null +} + +exports.SemVer = SemVer + +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } + + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + + var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) + + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() +} + +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' 
+ this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} + +SemVer.prototype.toString = function () { + return this.version +} + +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return this.compareMain(other) || this.comparePre(other) +} + +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +SemVer.prototype.compareBuild = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + var i = 0 + do { + var a = this.build[i] + var b = other.build[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while 
(++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) + } + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. 
+ // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } + } + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } + } + } + return defaultResult // may be undefined + } +} + +exports.compareIdentifiers = compareIdentifiers + +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} + +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} + +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} + +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} + +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} + +exports.compareBuild = compareBuild +function compareBuild (a, b, loose) { + var versionA = new SemVer(a, loose) + var versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} + +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} + +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(a, b, loose) + }) +} + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.compareBuild(b, a, loose) + }) +} + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} + +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} + +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} + +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} + +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} + +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} + +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b + + case '!==': + 
if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } +} + +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) +} + +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] + var m = comp.match(r) + + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} + +Comparator.prototype.toString = function () { + return this.value +} + +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) +} + +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + var rangeTmp + + if (this.operator === '') { + if (this.value === '') { + return true + } + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' 
|| this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + return new Range(range.value, options) + } + + if (!(this instanceof Range)) { + return new Range(range, options) + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. + this.raw = range + .trim() + .split(/\s+/) + .join(' ') + + // First, split based on boolean or || + this.set = this.raw.split('||').map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + this.raw) + } + + this.format() +} + +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + }).join('||').trim() + return this.range +} + +Range.prototype.toString = function () { + return this.range +} + +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? 
safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) + + // normalize spaces + range = range.split(/\s+/).join(' ') + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) + + return set +} + +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some(function (thisComparators) { + return ( + isSatisfiable(thisComparators, options) && + range.set.some(function (rangeComparators) { + return ( + isSatisfiable(rangeComparators, options) && + thisComparators.every(function (thisComparator) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) +} + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +function isSatisfiable (comparators, options) { + var result = true + var remainingComparators = comparators.slice() + var testComparator = remainingComparators.pop() + + while (result && 
remainingComparators.length) { + result = remainingComparators.every(function (otherComparator) { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. +function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} + +function replaceTilde (comp, options) { + var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' 
+ (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} + +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' 
+ p + + ' <' + (+M + 1) + '.0.0' + } + } + + debug('caret return', ret) + return ret + }) +} + +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} + +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? '-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + ret = gtlt + M + '.' + m + '.' + p + pr + } else if (xm) { + ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr + } else if (xp) { + ret = '>=' + M + '.' + m + '.0' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + pr + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. 
+function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(safeRe[t.STAR], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr + } else { + to = '<=' + to + } + + return (from + ' ' + to).trim() +} + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} + +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. 
+ for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} + +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} + +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} + +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} + +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's 
semver object. + var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) + } + }) + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} + +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} + +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} + +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. 
+ + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + var high = null + var low = null + + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? parsed.prerelease : null +} + +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version, options) { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + var match = null + if (!options.rtl) { + match = version.match(safeRe[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. 
+ // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + var next + while ((next = safeRe[t.COERCERTL].exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + safeRe[t.COERCERTL].lastIndex = -1 + } + + if (match === null) { + return null + } + + return parse(match[2] + + '.' + (match[3] || '0') + + '.' + (match[4] || '0'), options) +} diff --git a/node_modules/make-dir/package.json b/node_modules/make-dir/package.json new file mode 100644 index 0000000..98403ea --- /dev/null +++ b/node_modules/make-dir/package.json @@ -0,0 +1,59 @@ +{ + "name": "make-dir", + "version": "3.1.0", + "description": "Make a directory and its parents if needed - Think `mkdir -p`", + "license": "MIT", + "repository": "sindresorhus/make-dir", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && nyc ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "mkdir", + "mkdirp", + "make", + "directories", + "dir", + "dirs", + "folders", + "directory", + "folder", + "path", + "parent", + "parents", + "intermediate", + "recursively", + "recursive", + "create", + "fs", + "filesystem", + "file-system" + ], + "dependencies": { + "semver": "^6.0.0" + }, + "devDependencies": { + "@types/graceful-fs": "^4.1.3", + "@types/node": "^13.7.1", + "ava": "^1.4.0", + "codecov": "^3.2.0", + 
"graceful-fs": "^4.1.15", + "nyc": "^15.0.0", + "path-type": "^4.0.0", + "tempy": "^0.2.1", + "tsd": "^0.11.0", + "xo": "^0.25.4" + } +} diff --git a/node_modules/make-dir/readme.md b/node_modules/make-dir/readme.md new file mode 100644 index 0000000..a10a1a4 --- /dev/null +++ b/node_modules/make-dir/readme.md @@ -0,0 +1,125 @@ +# make-dir [![Build Status](https://travis-ci.org/sindresorhus/make-dir.svg?branch=master)](https://travis-ci.org/sindresorhus/make-dir) [![codecov](https://codecov.io/gh/sindresorhus/make-dir/branch/master/graph/badge.svg)](https://codecov.io/gh/sindresorhus/make-dir) + +> Make a directory and its parents if needed - Think `mkdir -p` + +## Advantages over [`mkdirp`](https://github.com/substack/node-mkdirp) + +- Promise API *(Async/await ready!)* +- Fixes many `mkdirp` issues: [#96](https://github.com/substack/node-mkdirp/pull/96) [#70](https://github.com/substack/node-mkdirp/issues/70) [#66](https://github.com/substack/node-mkdirp/issues/66) +- 100% test coverage +- CI-tested on macOS, Linux, and Windows +- Actively maintained +- Doesn't bundle a CLI +- Uses the native `fs.mkdir/mkdirSync` [`recursive` option](https://nodejs.org/dist/latest/docs/api/fs.html#fs_fs_mkdir_path_options_callback) in Node.js >=10.12.0 unless [overridden](#fs) + +## Install + +``` +$ npm install make-dir +``` + +## Usage + +``` +$ pwd +/Users/sindresorhus/fun +$ tree +. +``` + +```js +const makeDir = require('make-dir'); + +(async () => { + const path = await makeDir('unicorn/rainbow/cake'); + + console.log(path); + //=> '/Users/sindresorhus/fun/unicorn/rainbow/cake' +})(); +``` + +``` +$ tree +. 
+└── unicorn + └── rainbow + └── cake +``` + +Multiple directories: + +```js +const makeDir = require('make-dir'); + +(async () => { + const paths = await Promise.all([ + makeDir('unicorn/rainbow'), + makeDir('foo/bar') + ]); + + console.log(paths); + /* + [ + '/Users/sindresorhus/fun/unicorn/rainbow', + '/Users/sindresorhus/fun/foo/bar' + ] + */ +})(); +``` + +## API + +### makeDir(path, options?) + +Returns a `Promise` for the path to the created directory. + +### makeDir.sync(path, options?) + +Returns the path to the created directory. + +#### path + +Type: `string` + +Directory to create. + +#### options + +Type: `object` + +##### mode + +Type: `integer`\ +Default: `0o777` + +Directory [permissions](https://x-team.com/blog/file-system-permissions-umask-node-js/). + +##### fs + +Type: `object`\ +Default: `require('fs')` + +Use a custom `fs` implementation. For example [`graceful-fs`](https://github.com/isaacs/node-graceful-fs). + +Using a custom `fs` implementation will block the use of the native `recursive` option if `fs.mkdir` or `fs.mkdirSync` is not the native function. + +## Related + +- [make-dir-cli](https://github.com/sindresorhus/make-dir-cli) - CLI for this module +- [del](https://github.com/sindresorhus/del) - Delete files and directories +- [globby](https://github.com/sindresorhus/globby) - User-friendly glob matching +- [cpy](https://github.com/sindresorhus/cpy) - Copy files +- [cpy-cli](https://github.com/sindresorhus/cpy-cli) - Copy files on the command-line +- [move-file](https://github.com/sindresorhus/move-file) - Move a file + +--- + +
+ + Get professional support for this package with a Tidelift subscription + +
+ + Tidelift helps make open source sustainable for maintainers while giving companies
assurances about security, maintenance, and licensing for their dependencies. +
+
diff --git a/node_modules/minimatch/LICENSE b/node_modules/minimatch/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/minimatch/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/minimatch/README.md b/node_modules/minimatch/README.md new file mode 100644 index 0000000..33ede1d --- /dev/null +++ b/node_modules/minimatch/README.md @@ -0,0 +1,230 @@ +# minimatch + +A minimal matching utility. + +[![Build Status](https://travis-ci.org/isaacs/minimatch.svg?branch=master)](http://travis-ci.org/isaacs/minimatch) + + +This is the matching library used internally by npm. + +It works by converting glob expressions into JavaScript `RegExp` +objects. + +## Usage + +```javascript +var minimatch = require("minimatch") + +minimatch("bar.foo", "*.foo") // true! +minimatch("bar.foo", "*.bar") // false! +minimatch("bar.foo", "*.+(bar|foo)", { debug: true }) // true, and noisy! +``` + +## Features + +Supports these glob features: + +* Brace Expansion +* Extended glob matching +* "Globstar" `**` matching + +See: + +* `man sh` +* `man bash` +* `man 3 fnmatch` +* `man 5 gitignore` + +## Minimatch Class + +Create a minimatch object by instantiating the `minimatch.Minimatch` class. 
+ +```javascript +var Minimatch = require("minimatch").Minimatch +var mm = new Minimatch(pattern, options) +``` + +### Properties + +* `pattern` The original pattern the minimatch object represents. +* `options` The options supplied to the constructor. +* `set` A 2-dimensional array of regexp or string expressions. + Each row in the + array corresponds to a brace-expanded pattern. Each item in the row + corresponds to a single path-part. For example, the pattern + `{a,b/c}/d` would expand to a set of patterns like: + + [ [ a, d ] + , [ b, c, d ] ] + + If a portion of the pattern doesn't have any "magic" in it + (that is, it's something like `"foo"` rather than `fo*o?`), then it + will be left as a string rather than converted to a regular + expression. + +* `regexp` Created by the `makeRe` method. A single regular expression + expressing the entire pattern. This is useful in cases where you wish + to use the pattern somewhat like `fnmatch(3)` with `FNM_PATH` enabled. +* `negate` True if the pattern is negated. +* `comment` True if the pattern is a comment. +* `empty` True if the pattern is `""`. + +### Methods + +* `makeRe` Generate the `regexp` member if necessary, and return it. + Will return `false` if the pattern is invalid. +* `match(fname)` Return true if the filename matches the pattern, or + false otherwise. +* `matchOne(fileArray, patternArray, partial)` Take a `/`-split + filename, and match it against a single row in the `regExpSet`. This + method is mainly for internal use, but is exposed so that it can be + used by a glob-walker that needs to avoid excessive filesystem calls. + +All other methods are internal, and will be called as necessary. + +### minimatch(path, pattern, options) + +Main export. Tests a path against the pattern using the options. 
+ +```javascript +var isJS = minimatch(file, "*.js", { matchBase: true }) +``` + +### minimatch.filter(pattern, options) + +Returns a function that tests its +supplied argument, suitable for use with `Array.filter`. Example: + +```javascript +var javascripts = fileList.filter(minimatch.filter("*.js", {matchBase: true})) +``` + +### minimatch.match(list, pattern, options) + +Match against the list of +files, in the style of fnmatch or glob. If nothing is matched, and +options.nonull is set, then return a list containing the pattern itself. + +```javascript +var javascripts = minimatch.match(fileList, "*.js", {matchBase: true})) +``` + +### minimatch.makeRe(pattern, options) + +Make a regular expression object from the pattern. + +## Options + +All options are `false` by default. + +### debug + +Dump a ton of stuff to stderr. + +### nobrace + +Do not expand `{a,b}` and `{1..3}` brace sets. + +### noglobstar + +Disable `**` matching against multiple folder names. + +### dot + +Allow patterns to match filenames starting with a period, even if +the pattern does not explicitly have a period in that spot. + +Note that by default, `a/**/b` will **not** match `a/.d/b`, unless `dot` +is set. + +### noext + +Disable "extglob" style patterns like `+(a|b)`. + +### nocase + +Perform a case-insensitive match. + +### nonull + +When a match is not found by `minimatch.match`, return a list containing +the pattern itself if this option is set. When not set, an empty list +is returned if there are no matches. + +### matchBase + +If set, then patterns without slashes will be matched +against the basename of the path if it contains slashes. For example, +`a?b` would match the path `/xyz/123/acb`, but not `/xyz/acb/123`. + +### nocomment + +Suppress the behavior of treating `#` at the start of a pattern as a +comment. + +### nonegate + +Suppress the behavior of treating a leading `!` character as negation. 
+ +### flipNegate + +Returns from negate expressions the same as if they were not negated. +(Ie, true on a hit, false on a miss.) + +### partial + +Compare a partial path to a pattern. As long as the parts of the path that +are present are not contradicted by the pattern, it will be treated as a +match. This is useful in applications where you're walking through a +folder structure, and don't yet have the full path, but want to ensure that +you do not walk down paths that can never be a match. + +For example, + +```js +minimatch('/a/b', '/a/*/c/d', { partial: true }) // true, might be /a/b/c/d +minimatch('/a/b', '/**/d', { partial: true }) // true, might be /a/b/.../d +minimatch('/x/y/z', '/a/**/z', { partial: true }) // false, because x !== a +``` + +### allowWindowsEscape + +Windows path separator `\` is by default converted to `/`, which +prohibits the usage of `\` as a escape character. This flag skips that +behavior and allows using the escape character. + +## Comparisons to other fnmatch/glob implementations + +While strict compliance with the existing standards is a worthwhile +goal, some discrepancies exist between minimatch and other +implementations, and are intentional. + +If the pattern starts with a `!` character, then it is negated. Set the +`nonegate` flag to suppress this behavior, and treat leading `!` +characters normally. This is perhaps relevant if you wish to start the +pattern with a negative extglob pattern like `!(a|B)`. Multiple `!` +characters at the start of a pattern will negate the pattern multiple +times. + +If a pattern starts with `#`, then it is treated as a comment, and +will not match anything. Use `\#` to match a literal `#` at the +start of a line, or set the `nocomment` flag to suppress this behavior. + +The double-star character `**` is supported by default, unless the +`noglobstar` flag is set. 
This is supported in the manner of bsdglob +and bash 4.1, where `**` only has special significance if it is the only +thing in a path part. That is, `a/**/b` will match `a/x/y/b`, but +`a/**b` will not. + +If an escaped pattern has no matches, and the `nonull` flag is set, +then minimatch.match returns the pattern as-provided, rather than +interpreting the character escapes. For example, +`minimatch.match([], "\\*a\\?")` will return `"\\*a\\?"` rather than +`"*a?"`. This is akin to setting the `nullglob` option in bash, except +that it does not resolve escaped pattern characters. + +If brace expansion is not disabled, then it is performed before any +other interpretation of the glob pattern. Thus, a pattern like +`+(a|{b),c)}`, which would not be valid in bash or zsh, is expanded +**first** into the set of `+(a|b)` and `+(a|c)`, and those patterns are +checked for validity. Since those two are valid, matching proceeds. diff --git a/node_modules/minimatch/minimatch.js b/node_modules/minimatch/minimatch.js new file mode 100644 index 0000000..fda45ad --- /dev/null +++ b/node_modules/minimatch/minimatch.js @@ -0,0 +1,947 @@ +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = (function () { try { return require('path') } catch (e) {}}()) || { + sep: '/' +} +minimatch.sep = path.sep + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = require('brace-expansion') + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. 
+var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + b = b || {} + var t = {} + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return minimatch + } + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + m.Minimatch.defaults = function defaults (options) { + return orig.defaults(ext(def, options)).Minimatch + } + + m.filter = function filter (pattern, options) { + return orig.filter(pattern, ext(def, options)) + } + + m.defaults = function defaults (options) { + return orig.defaults(ext(def, options)) + } + + m.makeRe = function makeRe (pattern, options) { + return orig.makeRe(pattern, ext(def, options)) + } + + m.braceExpand = function braceExpand (pattern, options) { + return orig.braceExpand(pattern, ext(def, options)) + } + + m.match = function (list, pattern, options) { + return orig.match(list, pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + return minimatch.defaults(def).Minimatch +} + +function minimatch 
(p, pattern, options) { + assertValidPattern(pattern) + + if (!options) options = {} + + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + assertValidPattern(pattern) + + if (!options) options = {} + + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (!options.allowWindowsEscape && path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + this.partial = !!options.partial + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. 
+ // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + assertValidPattern(pattern) + + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. 
+ return [pattern] + } + + return expand(pattern) +} + +var MAX_PATTERN_LENGTH = 1024 * 64 +var assertValidPattern = function (pattern) { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern') + } + + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long') + } +} + +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + assertValidPattern(pattern) + + var options = this.options + + // shortcuts + if (pattern === '**') { + if (!options.noglobstar) + return GLOBSTAR + else + pattern = '*' + } + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. 
+ switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break + } + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + /* istanbul ignore next */ + case '/': { + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + } + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. + case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? 
'(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + + // finish up the class. 
+ hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' + } + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? 
qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '[': case '.': case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. + if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. 
+ if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) /* istanbul ignore next - should be impossible */ { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' 
+ re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) /* istanbul ignore next - should be impossible */ { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = function match (f, partial) { + if (typeof partial === 'undefined') partial = this.partial + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. + + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. 
+ if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + /* istanbul ignore if */ + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' 
|| + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true + } + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } + } + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + /* istanbul ignore if */ + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true + } + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + hit = f === p + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. 
+ // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else /* istanbul ignore else */ if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. + // a/* should match a/b/ + return (fi === fl - 1) && (file[fi] === '') + } + + // should be unreachable. + /* istanbul ignore next */ + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} diff --git a/node_modules/minimatch/package.json b/node_modules/minimatch/package.json new file mode 100644 index 0000000..566efdf --- /dev/null +++ b/node_modules/minimatch/package.json @@ -0,0 +1,33 @@ +{ + "author": "Isaac Z. 
Schlueter (http://blog.izs.me)", + "name": "minimatch", + "description": "a glob matcher in javascript", + "version": "3.1.2", + "publishConfig": { + "tag": "v3-legacy" + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/minimatch.git" + }, + "main": "minimatch.js", + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "engines": { + "node": "*" + }, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "devDependencies": { + "tap": "^15.1.6" + }, + "license": "ISC", + "files": [ + "minimatch.js" + ] +} diff --git a/node_modules/minipass/LICENSE b/node_modules/minipass/LICENSE new file mode 100644 index 0000000..97f8e32 --- /dev/null +++ b/node_modules/minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/minipass/README.md b/node_modules/minipass/README.md new file mode 100644 index 0000000..6108809 --- /dev/null +++ b/node_modules/minipass/README.md @@ -0,0 +1,769 @@ +# minipass + +A _very_ minimal implementation of a [PassThrough +stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough) + +[It's very +fast](https://docs.google.com/spreadsheets/d/1K_HR5oh3r80b8WVMWCPPjfuWXUgfkmhlX7FGI6JJ8tY/edit?usp=sharing) +for objects, strings, and buffers. + +Supports `pipe()`ing (including multi-`pipe()` and backpressure +transmission), buffering data until either a `data` event handler +or `pipe()` is added (so you don't lose the first chunk), and +most other cases where PassThrough is a good idea. + +There is a `read()` method, but it's much more efficient to +consume data from this stream via `'data'` events or by calling +`pipe()` into some other stream. Calling `read()` requires the +buffer to be flattened in some cases, which requires copying +memory. + +If you set `objectMode: true` in the options, then whatever is +written will be emitted. Otherwise, it'll do a minimal amount of +Buffer copying to ensure proper Streams semantics when `read(n)` +is called. + +`objectMode` can also be set by doing `stream.objectMode = true`, +or by writing any non-string/non-buffer data. `objectMode` cannot +be set to false once it is set. + +This is not a `through` or `through2` stream. It doesn't +transform the data, it just passes it right through. If you want +to transform the data, extend the class, and override the +`write()` method. Once you're done transforming the data however +you want, call `super.write()` with the transform output. 
+ +For some examples of streams that extend Minipass in various +ways, check out: + +- [minizlib](http://npm.im/minizlib) +- [fs-minipass](http://npm.im/fs-minipass) +- [tar](http://npm.im/tar) +- [minipass-collect](http://npm.im/minipass-collect) +- [minipass-flush](http://npm.im/minipass-flush) +- [minipass-pipeline](http://npm.im/minipass-pipeline) +- [tap](http://npm.im/tap) +- [tap-parser](http://npm.im/tap-parser) +- [treport](http://npm.im/treport) +- [minipass-fetch](http://npm.im/minipass-fetch) +- [pacote](http://npm.im/pacote) +- [make-fetch-happen](http://npm.im/make-fetch-happen) +- [cacache](http://npm.im/cacache) +- [ssri](http://npm.im/ssri) +- [npm-registry-fetch](http://npm.im/npm-registry-fetch) +- [minipass-json-stream](http://npm.im/minipass-json-stream) +- [minipass-sized](http://npm.im/minipass-sized) + +## Differences from Node.js Streams + +There are several things that make Minipass streams different +from (and in some ways superior to) Node.js core streams. + +Please read these caveats if you are familiar with node-core +streams and intend to use Minipass streams in your programs. + +You can avoid most of these differences entirely (for a very +small performance penalty) by setting `{async: true}` in the +constructor options. + +### Timing + +Minipass streams are designed to support synchronous use-cases. +Thus, data is emitted as soon as it is available, always. It is +buffered until read, but no longer. Another way to look at it is +that Minipass streams are exactly as synchronous as the logic +that writes into them. + +This can be surprising if your code relies on +`PassThrough.write()` always providing data on the next tick +rather than the current one, or being able to call `resume()` and +not have the entire buffer disappear immediately. + +However, without this synchronicity guarantee, there would be no +way for Minipass to achieve the speeds it does, or support the +synchronous use cases that it does. 
Simply put, waiting takes +time. + +This non-deferring approach makes Minipass streams much easier to +reason about, especially in the context of Promises and other +flow-control mechanisms. + +Example: + +```js +// hybrid module, either works +import { Minipass } from 'minipass' +// or: +const { Minipass } = require('minipass') + +const stream = new Minipass() +stream.on('data', () => console.log('data event')) +console.log('before write') +stream.write('hello') +console.log('after write') +// output: +// before write +// data event +// after write +``` + +### Exception: Async Opt-In + +If you wish to have a Minipass stream with behavior that more +closely mimics Node.js core streams, you can set the stream in +async mode either by setting `async: true` in the constructor +options, or by setting `stream.async = true` later on. + +```js +// hybrid module, either works +import { Minipass } from 'minipass' +// or: +const { Minipass } = require('minipass') + +const asyncStream = new Minipass({ async: true }) +asyncStream.on('data', () => console.log('data event')) +console.log('before write') +asyncStream.write('hello') +console.log('after write') +// output: +// before write +// after write +// data event <-- this is deferred until the next tick +``` + +Switching _out_ of async mode is unsafe, as it could cause data +corruption, and so is not enabled. Example: + +```js +import { Minipass } from 'minipass' +const stream = new Minipass({ encoding: 'utf8' }) +stream.on('data', chunk => console.log(chunk)) +stream.async = true +console.log('before writes') +stream.write('hello') +setStreamSyncAgainSomehow(stream) // <-- this doesn't actually exist! +stream.write('world') +console.log('after writes') +// hypothetical output would be: +// before writes +// world +// after writes +// hello +// NOT GOOD! +``` + +To avoid this problem, once set into async mode, any attempt to +make the stream sync again will be ignored. 
+ +```js +const { Minipass } = require('minipass') +const stream = new Minipass({ encoding: 'utf8' }) +stream.on('data', chunk => console.log(chunk)) +stream.async = true +console.log('before writes') +stream.write('hello') +stream.async = false // <-- no-op, stream already async +stream.write('world') +console.log('after writes') +// actual output: +// before writes +// after writes +// hello +// world +``` + +### No High/Low Water Marks + +Node.js core streams will optimistically fill up a buffer, +returning `true` on all writes until the limit is hit, even if +the data has nowhere to go. Then, they will not attempt to draw +more data in until the buffer size dips below a minimum value. + +Minipass streams are much simpler. The `write()` method will +return `true` if the data has somewhere to go (which is to say, +given the timing guarantees, that the data is already there by +the time `write()` returns). + +If the data has nowhere to go, then `write()` returns false, and +the data sits in a buffer, to be drained out immediately as soon +as anyone consumes it. + +Since nothing is ever buffered unnecessarily, there is much less +copying data, and less bookkeeping about buffer capacity levels. + +### Hazards of Buffering (or: Why Minipass Is So Fast) + +Since data written to a Minipass stream is immediately written +all the way through the pipeline, and `write()` always returns +true/false based on whether the data was fully flushed, +backpressure is communicated immediately to the upstream caller. +This minimizes buffering. 
+ +Consider this case: + +```js +const { PassThrough } = require('stream') +const p1 = new PassThrough({ highWaterMark: 1024 }) +const p2 = new PassThrough({ highWaterMark: 1024 }) +const p3 = new PassThrough({ highWaterMark: 1024 }) +const p4 = new PassThrough({ highWaterMark: 1024 }) + +p1.pipe(p2).pipe(p3).pipe(p4) +p4.on('data', () => console.log('made it through')) + +// this returns false and buffers, then writes to p2 on next tick (1) +// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2) +// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3) +// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain' +// on next tick (4) +// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and +// 'drain' on next tick (5) +// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6) +// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next +// tick (7) + +p1.write(Buffer.alloc(2048)) // returns false +``` + +Along the way, the data was buffered and deferred at each stage, +and multiple event deferrals happened, for an unblocked pipeline +where it was perfectly safe to write all the way through! + +Furthermore, setting a `highWaterMark` of `1024` might lead +someone reading the code to think an advisory maximum of 1KiB is +being set for the pipeline. However, the actual advisory +buffering level is the _sum_ of `highWaterMark` values, since +each one has its own bucket. 
+ +Consider the Minipass case: + +```js +const m1 = new Minipass() +const m2 = new Minipass() +const m3 = new Minipass() +const m4 = new Minipass() + +m1.pipe(m2).pipe(m3).pipe(m4) +m4.on('data', () => console.log('made it through')) + +// m1 is flowing, so it writes the data to m2 immediately +// m2 is flowing, so it writes the data to m3 immediately +// m3 is flowing, so it writes the data to m4 immediately +// m4 is flowing, so it fires the 'data' event immediately, returns true +// m4's write returned true, so m3 is still flowing, returns true +// m3's write returned true, so m2 is still flowing, returns true +// m2's write returned true, so m1 is still flowing, returns true +// No event deferrals or buffering along the way! + +m1.write(Buffer.alloc(2048)) // returns true +``` + +It is extremely unlikely that you _don't_ want to buffer any data +written, or _ever_ buffer data that can be flushed all the way +through. Neither node-core streams nor Minipass ever fail to +buffer written data, but node-core streams do a lot of +unnecessary buffering and pausing. + +As always, the faster implementation is the one that does less +stuff and waits less time to do it. + +### Immediately emit `end` for empty streams (when not paused) + +If a stream is not paused, and `end()` is called before writing +any data into it, then it will emit `end` immediately. + +If you have logic that occurs on the `end` event which you don't +want to potentially happen immediately (for example, closing file +descriptors, moving on to the next entry in an archive parse +stream, etc.) then be sure to call `stream.pause()` on creation, +and then `stream.resume()` once you are ready to respond to the +`end` event. + +However, this is _usually_ not a problem because: + +### Emit `end` When Asked + +One hazard of immediately emitting `'end'` is that you may not +yet have had a chance to add a listener. 
In order to avoid this +hazard, Minipass streams safely re-emit the `'end'` event if a +new listener is added after `'end'` has been emitted. + +Ie, if you do `stream.on('end', someFunction)`, and the stream +has already emitted `end`, then it will call the handler right +away. (You can think of this somewhat like attaching a new +`.then(fn)` to a previously-resolved Promise.) + +To prevent calling handlers multiple times who would not expect +multiple ends to occur, all listeners are removed from the +`'end'` event whenever it is emitted. + +### Emit `error` When Asked + +The most recent error object passed to the `'error'` event is +stored on the stream. If a new `'error'` event handler is added, +and an error was previously emitted, then the event handler will +be called immediately (or on `process.nextTick` in the case of +async streams). + +This makes it much more difficult to end up trying to interact +with a broken stream, if the error handler is added after an +error was previously emitted. + +### Impact of "immediate flow" on Tee-streams + +A "tee stream" is a stream piping to multiple destinations: + +```js +const tee = new Minipass() +t.pipe(dest1) +t.pipe(dest2) +t.write('foo') // goes to both destinations +``` + +Since Minipass streams _immediately_ process any pending data +through the pipeline when a new pipe destination is added, this +can have surprising effects, especially when a stream comes in +from some other function and may or may not have data in its +buffer. + +```js +// WARNING! WILL LOSE DATA! +const src = new Minipass() +src.write('foo') +src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone +src.pipe(dest2) // gets nothing! +``` + +One solution is to create a dedicated tee-stream junction that +pipes to both locations, and then pipe to _that_ instead. 
+ +```js +// Safe example: tee to both places +const src = new Minipass() +src.write('foo') +const tee = new Minipass() +tee.pipe(dest1) +tee.pipe(dest2) +src.pipe(tee) // tee gets 'foo', pipes to both locations +``` + +The same caveat applies to `on('data')` event listeners. The +first one added will _immediately_ receive all of the data, +leaving nothing for the second: + +```js +// WARNING! WILL LOSE DATA! +const src = new Minipass() +src.write('foo') +src.on('data', handler1) // receives 'foo' right away +src.on('data', handler2) // nothing to see here! +``` + +Using a dedicated tee-stream can be used in this case as well: + +```js +// Safe example: tee to both data handlers +const src = new Minipass() +src.write('foo') +const tee = new Minipass() +tee.on('data', handler1) +tee.on('data', handler2) +src.pipe(tee) +``` + +All of the hazards in this section are avoided by setting `{ +async: true }` in the Minipass constructor, or by setting +`stream.async = true` afterwards. Note that this does add some +overhead, so should only be done in cases where you are willing +to lose a bit of performance in order to avoid having to refactor +program logic. + +## USAGE + +It's a stream! Use it like a stream and it'll most likely do what +you want. + +```js +import { Minipass } from 'minipass' +const mp = new Minipass(options) // optional: { encoding, objectMode } +mp.write('foo') +mp.pipe(someOtherStream) +mp.end('bar') +``` + +### OPTIONS + +- `encoding` How would you like the data coming _out_ of the + stream to be encoded? Accepts any values that can be passed to + `Buffer.toString()`. +- `objectMode` Emit data exactly as it comes in. This will be + flipped on by default if you write() something other than a + string or Buffer at any point. Setting `objectMode: true` will + prevent setting any encoding value. +- `async` Defaults to `false`. Set to `true` to defer data + emission until next tick. 
This reduces performance slightly, + but makes Minipass streams use timing behavior closer to Node + core streams. See [Timing](#timing) for more details. +- `signal` An `AbortSignal` that will cause the stream to unhook + itself from everything and become as inert as possible. Note + that providing a `signal` parameter will make `'error'` events + no longer throw if they are unhandled, but they will still be + emitted to handlers if any are attached. + +### API + +Implements the user-facing portions of Node.js's `Readable` and +`Writable` streams. + +### Methods + +- `write(chunk, [encoding], [callback])` - Put data in. (Note + that, in the base Minipass class, the same data will come out.) + Returns `false` if the stream will buffer the next write, or + true if it's still in "flowing" mode. +- `end([chunk, [encoding]], [callback])` - Signal that you have + no more data to write. This will queue an `end` event to be + fired when all the data has been consumed. +- `setEncoding(encoding)` - Set the encoding for data coming of + the stream. This can only be done once. +- `pause()` - No more data for a while, please. This also + prevents `end` from being emitted for empty streams until the + stream is resumed. +- `resume()` - Resume the stream. If there's data in the buffer, + it is all discarded. Any buffered events are immediately + emitted. +- `pipe(dest)` - Send all output to the stream provided. When + data is emitted, it is immediately written to any and all pipe + destinations. (Or written on next tick in `async` mode.) +- `unpipe(dest)` - Stop piping to the destination stream. This is + immediate, meaning that any asynchronously queued data will + _not_ make it to the destination when running in `async` mode. + - `options.end` - Boolean, end the destination stream when the + source stream ends. Default `true`. + - `options.proxyErrors` - Boolean, proxy `error` events from + the source stream to the destination stream. 
Note that errors + are _not_ proxied after the pipeline terminates, either due + to the source emitting `'end'` or manually unpiping with + `src.unpipe(dest)`. Default `false`. +- `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are + EventEmitters. Some events are given special treatment, + however. (See below under "events".) +- `promise()` - Returns a Promise that resolves when the stream + emits `end`, or rejects if the stream emits `error`. +- `collect()` - Return a Promise that resolves on `end` with an + array containing each chunk of data that was emitted, or + rejects if the stream emits `error`. Note that this consumes + the stream data. +- `concat()` - Same as `collect()`, but concatenates the data + into a single Buffer object. Will reject the returned promise + if the stream is in objectMode, or if it goes into objectMode + by the end of the data. +- `read(n)` - Consume `n` bytes of data out of the buffer. If `n` + is not provided, then consume all of it. If `n` bytes are not + available, then it returns null. **Note** consuming streams in + this way is less efficient, and can lead to unnecessary Buffer + copying. +- `destroy([er])` - Destroy the stream. If an error is provided, + then an `'error'` event is emitted. If the stream has a + `close()` method, and has not emitted a `'close'` event yet, + then `stream.close()` will be called. Any Promises returned by + `.promise()`, `.collect()` or `.concat()` will be rejected. + After being destroyed, writing to the stream will emit an + error. No more data will be emitted if the stream is destroyed, + even if it was previously buffered. + +### Properties + +- `bufferLength` Read-only. Total number of bytes buffered, or in + the case of objectMode, the total number of objects. +- `encoding` The encoding that has been set. (Setting this is + equivalent to calling `setEncoding(enc)` and has the same + prohibition against setting multiple times.) +- `flowing` Read-only. 
Boolean indicating whether a chunk written + to the stream will be immediately emitted. +- `emittedEnd` Read-only. Boolean indicating whether the end-ish + events (ie, `end`, `prefinish`, `finish`) have been emitted. + Note that listening on any end-ish event will immediateyl + re-emit it if it has already been emitted. +- `writable` Whether the stream is writable. Default `true`. Set + to `false` when `end()` +- `readable` Whether the stream is readable. Default `true`. +- `pipes` An array of Pipe objects referencing streams that this + stream is piping into. +- `destroyed` A getter that indicates whether the stream was + destroyed. +- `paused` True if the stream has been explicitly paused, + otherwise false. +- `objectMode` Indicates whether the stream is in `objectMode`. + Once set to `true`, it cannot be set to `false`. +- `aborted` Readonly property set when the `AbortSignal` + dispatches an `abort` event. + +### Events + +- `data` Emitted when there's data to read. Argument is the data + to read. This is never emitted while not flowing. If a listener + is attached, that will resume the stream. +- `end` Emitted when there's no more data to read. This will be + emitted immediately for empty streams when `end()` is called. + If a listener is attached, and `end` was already emitted, then + it will be emitted again. All listeners are removed when `end` + is emitted. +- `prefinish` An end-ish event that follows the same logic as + `end` and is emitted in the same conditions where `end` is + emitted. Emitted after `'end'`. +- `finish` An end-ish event that follows the same logic as `end` + and is emitted in the same conditions where `end` is emitted. + Emitted after `'prefinish'`. +- `close` An indication that an underlying resource has been + released. Minipass does not emit this event, but will defer it + until after `end` has been emitted, since it throws off some + stream libraries otherwise. 
+- `drain` Emitted when the internal buffer empties, and it is + again suitable to `write()` into the stream. +- `readable` Emitted when data is buffered and ready to be read + by a consumer. +- `resume` Emitted when stream changes state from buffering to + flowing mode. (Ie, when `resume` is called, `pipe` is called, + or a `data` event listener is added.) + +### Static Methods + +- `Minipass.isStream(stream)` Returns `true` if the argument is a + stream, and false otherwise. To be considered a stream, the + object must be either an instance of Minipass, or an + EventEmitter that has either a `pipe()` method, or both + `write()` and `end()` methods. (Pretty much any stream in + node-land will return `true` for this.) + +## EXAMPLES + +Here are some examples of things you can do with Minipass +streams. + +### simple "are you done yet" promise + +```js +mp.promise().then( + () => { + // stream is finished + }, + er => { + // stream emitted an error + } +) +``` + +### collecting + +```js +mp.collect().then(all => { + // all is an array of all the data emitted + // encoding is supported in this case, so + // so the result will be a collection of strings if + // an encoding is specified, or buffers/objects if not. + // + // In an async function, you may do + // const data = await stream.collect() +}) +``` + +### collecting into a single blob + +This is a bit slower because it concatenates the data into one +chunk for you, but if you're going to do it yourself anyway, it's +convenient this way: + +```js +mp.concat().then(onebigchunk => { + // onebigchunk is a string if the stream + // had an encoding set, or a buffer otherwise. +}) +``` + +### iteration + +You can iterate over streams synchronously or asynchronously in +platforms that support it. + +Synchronous iteration will end when the currently available data +is consumed, even if the `end` event has not been reached. 
In +string and buffer mode, the data is concatenated, so unless +multiple writes are occurring in the same tick as the `read()`, +sync iteration loops will generally only have a single iteration. + +To consume chunks in this way exactly as they have been written, +with no flattening, create the stream with the `{ objectMode: +true }` option. + +```js +const mp = new Minipass({ objectMode: true }) +mp.write('a') +mp.write('b') +for (let letter of mp) { + console.log(letter) // a, b +} +mp.write('c') +mp.write('d') +for (let letter of mp) { + console.log(letter) // c, d +} +mp.write('e') +mp.end() +for (let letter of mp) { + console.log(letter) // e +} +for (let letter of mp) { + console.log(letter) // nothing +} +``` + +Asynchronous iteration will continue until the end event is reached, +consuming all of the data. + +```js +const mp = new Minipass({ encoding: 'utf8' }) + +// some source of some data +let i = 5 +const inter = setInterval(() => { + if (i-- > 0) mp.write(Buffer.from('foo\n', 'utf8')) + else { + mp.end() + clearInterval(inter) + } +}, 100) + +// consume the data with asynchronous iteration +async function consume() { + for await (let chunk of mp) { + console.log(chunk) + } + return 'ok' +} + +consume().then(res => console.log(res)) +// logs `foo\n` 5 times, and then `ok` +``` + +### subclass that `console.log()`s everything written into it + +```js +class Logger extends Minipass { + write(chunk, encoding, callback) { + console.log('WRITE', chunk, encoding) + return super.write(chunk, encoding, callback) + } + end(chunk, encoding, callback) { + console.log('END', chunk, encoding) + return super.end(chunk, encoding, callback) + } +} + +someSource.pipe(new Logger()).pipe(someDest) +``` + +### same thing, but using an inline anonymous class + +```js +// js classes are fun +someSource + .pipe( + new (class extends Minipass { + emit(ev, ...data) { + // let's also log events, because debugging some weird thing + console.log('EMIT', ev) + return super.emit(ev, 
...data) + } + write(chunk, encoding, callback) { + console.log('WRITE', chunk, encoding) + return super.write(chunk, encoding, callback) + } + end(chunk, encoding, callback) { + console.log('END', chunk, encoding) + return super.end(chunk, encoding, callback) + } + })() + ) + .pipe(someDest) +``` + +### subclass that defers 'end' for some reason + +```js +class SlowEnd extends Minipass { + emit(ev, ...args) { + if (ev === 'end') { + console.log('going to end, hold on a sec') + setTimeout(() => { + console.log('ok, ready to end now') + super.emit('end', ...args) + }, 100) + } else { + return super.emit(ev, ...args) + } + } +} +``` + +### transform that creates newline-delimited JSON + +```js +class NDJSONEncode extends Minipass { + write(obj, cb) { + try { + // JSON.stringify can throw, emit an error on that + return super.write(JSON.stringify(obj) + '\n', 'utf8', cb) + } catch (er) { + this.emit('error', er) + } + } + end(obj, cb) { + if (typeof obj === 'function') { + cb = obj + obj = undefined + } + if (obj !== undefined) { + this.write(obj) + } + return super.end(cb) + } +} +``` + +### transform that parses newline-delimited JSON + +```js +class NDJSONDecode extends Minipass { + constructor (options) { + // always be in object mode, as far as Minipass is concerned + super({ objectMode: true }) + this._jsonBuffer = '' + } + write (chunk, encoding, cb) { + if (typeof chunk === 'string' && + typeof encoding === 'string' && + encoding !== 'utf8') { + chunk = Buffer.from(chunk, encoding).toString() + } else if (Buffer.isBuffer(chunk)) { + chunk = chunk.toString() + } + if (typeof encoding === 'function') { + cb = encoding + } + const jsonData = (this._jsonBuffer + chunk).split('\n') + this._jsonBuffer = jsonData.pop() + for (let i = 0; i < jsonData.length; i++) { + try { + // JSON.parse can throw, emit an error on that + super.write(JSON.parse(jsonData[i])) + } catch (er) { + this.emit('error', er) + continue + } + } + if (cb) + cb() + } +} +``` diff --git 
a/node_modules/minipass/index.d.ts b/node_modules/minipass/index.d.ts new file mode 100644 index 0000000..86851f9 --- /dev/null +++ b/node_modules/minipass/index.d.ts @@ -0,0 +1,152 @@ +/// + +// Note: marking anything protected or private in the exported +// class will limit Minipass's ability to be used as the base +// for mixin classes. +import { EventEmitter } from 'events' +import { Stream } from 'stream' + +export namespace Minipass { + export type Encoding = BufferEncoding | 'buffer' | null + + export interface Writable extends EventEmitter { + end(): any + write(chunk: any, ...args: any[]): any + } + + export interface Readable extends EventEmitter { + pause(): any + resume(): any + pipe(): any + } + + export type DualIterable = Iterable & AsyncIterable + + export type ContiguousData = + | Buffer + | ArrayBufferLike + | ArrayBufferView + | string + + export type BufferOrString = Buffer | string + + export interface SharedOptions { + async?: boolean + signal?: AbortSignal + } + + export interface StringOptions extends SharedOptions { + encoding: BufferEncoding + objectMode?: boolean + } + + export interface BufferOptions extends SharedOptions { + encoding?: null | 'buffer' + objectMode?: boolean + } + + export interface ObjectModeOptions extends SharedOptions { + objectMode: true + } + + export interface PipeOptions { + end?: boolean + proxyErrors?: boolean + } + + export type Options = T extends string + ? StringOptions + : T extends Buffer + ? BufferOptions + : ObjectModeOptions +} + +export class Minipass< + RType extends any = Buffer, + WType extends any = RType extends Minipass.BufferOrString + ? 
Minipass.ContiguousData + : RType + > + extends Stream + implements Minipass.DualIterable +{ + static isStream(stream: any): stream is Minipass.Readable | Minipass.Writable + + readonly bufferLength: number + readonly flowing: boolean + readonly writable: boolean + readonly readable: boolean + readonly aborted: boolean + readonly paused: boolean + readonly emittedEnd: boolean + readonly destroyed: boolean + + /** + * Technically writable, but mutating it can change the type, + * so is not safe to do in TypeScript. + */ + readonly objectMode: boolean + async: boolean + + /** + * Note: encoding is not actually read-only, and setEncoding(enc) + * exists. However, this type definition will insist that TypeScript + * programs declare the type of a Minipass stream up front, and if + * that type is string, then an encoding MUST be set in the ctor. If + * the type is Buffer, then the encoding must be missing, or set to + * 'buffer' or null. If the type is anything else, then objectMode + * must be set in the constructor options. So there is effectively + * no allowed way that a TS program can set the encoding after + * construction, as doing so will destroy any hope of type safety. + * TypeScript does not provide many options for changing the type of + * an object at run-time, which is what changing the encoding does. + */ + readonly encoding: Minipass.Encoding + // setEncoding(encoding: Encoding): void + + // Options required if not reading buffers + constructor( + ...args: RType extends Buffer + ? [] | [Minipass.Options] + : [Minipass.Options] + ) + + write(chunk: WType, cb?: () => void): boolean + write(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): boolean + read(size?: number): RType + end(cb?: () => void): this + end(chunk: any, cb?: () => void): this + end(chunk: any, encoding?: Minipass.Encoding, cb?: () => void): this + pause(): void + resume(): void + promise(): Promise + collect(): Promise + + concat(): RType extends Minipass.BufferOrString ? 
Promise : never + destroy(er?: any): void + pipe(dest: W, opts?: Minipass.PipeOptions): W + unpipe(dest: W): void + + /** + * alias for on() + */ + addEventHandler(event: string, listener: (...args: any[]) => any): this + + on(event: string, listener: (...args: any[]) => any): this + on(event: 'data', listener: (chunk: RType) => any): this + on(event: 'error', listener: (error: any) => any): this + on( + event: + | 'readable' + | 'drain' + | 'resume' + | 'end' + | 'prefinish' + | 'finish' + | 'close', + listener: () => any + ): this + + [Symbol.iterator](): Generator + [Symbol.asyncIterator](): AsyncGenerator +} diff --git a/node_modules/minipass/index.js b/node_modules/minipass/index.js new file mode 100644 index 0000000..ed07c17 --- /dev/null +++ b/node_modules/minipass/index.js @@ -0,0 +1,702 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? process + : { + stdout: null, + stderr: null, + } +const EE = require('events') +const Stream = require('stream') +const stringdecoder = require('string_decoder') +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const 
EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = 
null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? 
new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + 
end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. + if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) 
dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. 
+ const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + this.removeListener(DESTROYED, ondestroy) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + 
next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener(DESTROYED, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + this.once(DESTROYED, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + +exports.Minipass = Minipass diff --git a/node_modules/minipass/index.mjs b/node_modules/minipass/index.mjs new file mode 100644 index 0000000..6ef6cd8 --- /dev/null +++ b/node_modules/minipass/index.mjs @@ -0,0 +1,702 @@ +'use strict' +const proc = + typeof process === 'object' && process + ? 
process + : { + stdout: null, + stderr: null, + } +import EE from 'events' +import Stream from 'stream' +import stringdecoder from 'string_decoder' +const SD = stringdecoder.StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFER = Symbol('buffer') +const PIPES = Symbol('pipes') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +// internal event when stream is destroyed +const DESTROYED = Symbol('destroyed') +// internal event when stream has an error +const ERROR = Symbol('error') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') +const ABORT = Symbol('abort') +const ABORTED = Symbol('aborted') +const SIGNAL = Symbol('signal') + +const defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = + (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented') +const ITERATOR = + (doIter && Symbol.iterator) || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. 
+const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish' + +const isArrayBuffer = b => + b instanceof ArrayBuffer || + (typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0) + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor(src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe() { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors() {} + end() { + this.unpipe() + if (this.opts.end) this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe() { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor(src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +export class Minipass extends Stream { + constructor(options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this[PIPES] = [] + this[BUFFER] = [] + this[OBJECTMODE] = (options && options.objectMode) || false + if (this[OBJECTMODE]) this[ENCODING] = null + else this[ENCODING] = (options && options.encoding) || null + if (this[ENCODING] === 'buffer') this[ENCODING] = null + this[ASYNC] = (options && !!options.async) || false + this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + if (options && options.debugExposeBuffer === true) { + Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] }) + } + if (options && options.debugExposePipes === true) { + Object.defineProperty(this, 'pipes', { get: () => this[PIPES] }) + } + this[SIGNAL] = options && options.signal + this[ABORTED] = false + if (this[SIGNAL]) { + this[SIGNAL].addEventListener('abort', () => this[ABORT]()) + if (this[SIGNAL].aborted) { + this[ABORT]() + } + } + } + + get bufferLength() { + return this[BUFFERLENGTH] + } + + get encoding() { + return this[ENCODING] + } + set encoding(enc) { + if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode') + + if ( + this[ENCODING] && + enc !== this[ENCODING] && + ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH]) + ) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? 
new SD(enc) : null + if (this[BUFFER].length) + this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding(enc) { + this.encoding = enc + } + + get objectMode() { + return this[OBJECTMODE] + } + set objectMode(om) { + this[OBJECTMODE] = this[OBJECTMODE] || !!om + } + + get ['async']() { + return this[ASYNC] + } + set ['async'](a) { + this[ASYNC] = this[ASYNC] || !!a + } + + // drop everything and get out of the flow completely + [ABORT]() { + this[ABORTED] = true + this.emit('abort', this[SIGNAL].reason) + this.destroy(this[SIGNAL].reason) + } + + get aborted() { + return this[ABORTED] + } + set aborted(_) {} + + write(chunk, encoding, cb) { + if (this[ABORTED]) return false + if (this[EOF]) throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit( + 'error', + Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + ) + ) + return true + } + + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + + if (!encoding) encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! + // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? 
*/ + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + if (cb) fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if ( + typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed) + ) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true) + + if (this.flowing) this.emit('data', chunk) + else this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) this.emit('readable') + + if (cb) fn(cb) + + return this.flowing + } + + read(n) { + if (this[DESTROYED]) return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + + if (this[OBJECTMODE]) n = null + + if (this[BUFFER].length > 1 && !this[OBJECTMODE]) { + if (this.encoding) this[BUFFER] = [this[BUFFER].join('')] + else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this[BUFFER][0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ](n, chunk) { + if (n === chunk.length || n === null) this[BUFFERSHIFT]() + else { + this[BUFFER][0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this[BUFFER].length && !this[EOF]) this.emit('drain') + + return chunk + } + + 
end(chunk, encoding, cb) { + if (typeof chunk === 'function') (cb = chunk), (chunk = null) + if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8') + if (chunk) this.write(chunk, encoding) + if (cb) this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. + if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME]() { + if (this[DESTROYED]) return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this[BUFFER].length) this[FLUSH]() + else if (this[EOF]) this[MAYBE_EMIT_END]() + else this.emit('drain') + } + + resume() { + return this[RESUME]() + } + + pause() { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed() { + return this[DESTROYED] + } + + get flowing() { + return this[FLOWING] + } + + get paused() { + return this[PAUSED] + } + + [BUFFERPUSH](chunk) { + if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1 + else this[BUFFERLENGTH] += chunk.length + this[BUFFER].push(chunk) + } + + [BUFFERSHIFT]() { + if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1 + else this[BUFFERLENGTH] -= this[BUFFER][0].length + return this[BUFFER].shift() + } + + [FLUSH](noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length) + + if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain') + } + + [FLUSHCHUNK](chunk) { + this.emit('data', chunk) + return this.flowing + } + + pipe(dest, opts) { + if (this[DESTROYED]) return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) opts.end = false + else opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) 
dest.end() + } else { + this[PIPES].push( + !opts.proxyErrors + ? new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts) + ) + if (this[ASYNC]) defer(() => this[RESUME]()) + else this[RESUME]() + } + + return dest + } + + unpipe(dest) { + const p = this[PIPES].find(p => p.dest === dest) + if (p) { + this[PIPES].splice(this[PIPES].indexOf(p), 1) + p.unpipe() + } + } + + addListener(ev, fn) { + return this.on(ev, fn) + } + + on(ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR])) + else fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd() { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END]() { + if ( + !this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this[BUFFER].length === 0 && + this[EOF] + ) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) this.emit('close') + this[EMITTING_END] = false + } + } + + emit(ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !this[OBJECTMODE] && !data + ? false + : this[ASYNC] + ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + super.emit(ERROR, data) + const ret = + !this[SIGNAL] || this.listeners('error').length + ? super.emit('error', data) + : false + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA](data) { + for (const p of this[PIPES]) { + if (p.dest.write(data) === false) this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND]() { + if (this[EMITTED_END]) return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) defer(() => this[EMITEND2]()) + else this[EMITEND2]() + } + + [EMITEND2]() { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this[PIPES]) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this[PIPES]) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect() { + const buf = [] + if (!this[OBJECTMODE]) buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. 
+ const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat() { + return this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] + ? buf.join('') + : Buffer.concat(buf, buf.dataLength) + ) + } + + // stream.promise().then(() => done, er => emitted error) + promise() { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + stopped = true + return Promise.resolve({ done: true }) + } + const next = () => { + if (stopped) return stop() + const res = this.read() + if (res !== null) return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) return stop() + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + stop() + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.removeListener(DESTROYED, ondestroy) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + this.removeListener(DESTROYED, ondestroy) + stop() + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { + 
next, + throw: stop, + return: stop, + [ASYNCITERATOR]() { + return this + }, + } + } + + // for (let chunk of stream) + [ITERATOR]() { + let stopped = false + const stop = () => { + this.pause() + this.removeListener(ERROR, stop) + this.removeListener(DESTROYED, stop) + this.removeListener('end', stop) + stopped = true + return { done: true } + } + + const next = () => { + if (stopped) return stop() + const value = this.read() + return value === null ? stop() : { value } + } + this.once('end', stop) + this.once(ERROR, stop) + this.once(DESTROYED, stop) + + return { + next, + throw: stop, + return: stop, + [ITERATOR]() { + return this + }, + } + } + + destroy(er) { + if (this[DESTROYED]) { + if (er) this.emit('error', er) + else this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this[BUFFER].length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) this.close() + + if (er) this.emit('error', er) + // if no error to emit, still reject pending promises + else this.emit(DESTROYED) + + return this + } + + static isStream(s) { + return ( + !!s && + (s instanceof Minipass || + s instanceof Stream || + (s instanceof EE && + // readable + (typeof s.pipe === 'function' || + // writable + (typeof s.write === 'function' && typeof s.end === 'function')))) + ) + } +} + + diff --git a/node_modules/minipass/package.json b/node_modules/minipass/package.json new file mode 100644 index 0000000..0e20e98 --- /dev/null +++ b/node_modules/minipass/package.json @@ -0,0 +1,76 @@ +{ + "name": "minipass", + "version": "5.0.0", + "description": "minimal implementation of a PassThrough stream", + "main": "./index.js", + "module": "./index.mjs", + "types": "./index.d.ts", + "exports": { + ".": { + "import": { + "types": "./index.d.ts", + "default": "./index.mjs" + }, + "require": { + "types": "./index.d.ts", + "default": "./index.js" + } + }, + "./package.json": "./package.json" + 
}, + "devDependencies": { + "@types/node": "^17.0.41", + "end-of-stream": "^1.4.0", + "node-abort-controller": "^3.1.1", + "prettier": "^2.6.2", + "tap": "^16.2.0", + "through2": "^2.0.3", + "ts-node": "^10.8.1", + "typedoc": "^0.23.24", + "typescript": "^4.7.3" + }, + "scripts": { + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "node ./scripts/transpile-to-esm.js", + "snap": "tap", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "typedoc": "typedoc ./index.d.ts", + "format": "prettier --write . --loglevel warn" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minipass.git" + }, + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "index.d.ts", + "index.js", + "index.mjs" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">=8" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/node_modules/minizlib/LICENSE b/node_modules/minizlib/LICENSE new file mode 100644 index 0000000..ffce738 --- /dev/null +++ b/node_modules/minizlib/LICENSE @@ -0,0 +1,26 @@ +Minizlib was created by Isaac Z. Schlueter. +It is a derivative work of the Node.js project. + +""" +Copyright Isaac Z. Schlueter and Contributors +Copyright Node.js contributors. All rights reserved. +Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +""" diff --git a/node_modules/minizlib/README.md b/node_modules/minizlib/README.md new file mode 100644 index 0000000..80e067a --- /dev/null +++ b/node_modules/minizlib/README.md @@ -0,0 +1,60 @@ +# minizlib + +A fast zlib stream built on [minipass](http://npm.im/minipass) and +Node.js's zlib binding. + +This module was created to serve the needs of +[node-tar](http://npm.im/tar) and +[minipass-fetch](http://npm.im/minipass-fetch). + +Brotli is supported in versions of node with a Brotli binding. + +## How does this differ from the streams in `require('zlib')`? + +First, there are no convenience methods to compress or decompress a +buffer. If you want those, use the built-in `zlib` module. This is +only streams. That being said, Minipass streams to make it fairly easy to +use as one-liners: `new zlib.Deflate().end(data).read()` will return the +deflate compressed result. 
+ +This module compresses and decompresses the data as fast as you feed +it in. It is synchronous, and runs on the main process thread. Zlib +and Brotli operations can be high CPU, but they're very fast, and doing it +this way means much less bookkeeping and artificial deferral. + +Node's built in zlib streams are built on top of `stream.Transform`. +They do the maximally safe thing with respect to consistent +asynchrony, buffering, and backpressure. + +See [Minipass](http://npm.im/minipass) for more on the differences between +Node.js core streams and Minipass streams, and the convenience methods +provided by that class. + +## Classes + +- Deflate +- Inflate +- Gzip +- Gunzip +- DeflateRaw +- InflateRaw +- Unzip +- BrotliCompress (Node v10 and higher) +- BrotliDecompress (Node v10 and higher) + +## USAGE + +```js +const zlib = require('minizlib') +const input = sourceOfCompressedData() +const decode = new zlib.BrotliDecompress() +const output = whereToWriteTheDecodedData() +input.pipe(decode).pipe(output) +``` + +## REPRODUCIBLE BUILDS + +To create reproducible gzip compressed files across different operating +systems, set `portable: true` in the options. This causes minizlib to set +the `OS` indicator in byte 9 of the extended gzip header to `0xFF` for +'unknown'. diff --git a/node_modules/minizlib/constants.js b/node_modules/minizlib/constants.js new file mode 100644 index 0000000..641ebc7 --- /dev/null +++ b/node_modules/minizlib/constants.js @@ -0,0 +1,115 @@ +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. 
+const realZlibConstants = require('zlib').constants || + /* istanbul ignore next */ { ZLIB_VERNUM: 4736 } + +module.exports = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + 
BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)) diff --git a/node_modules/minizlib/index.js b/node_modules/minizlib/index.js new file mode 100644 index 0000000..fbaf69e --- /dev/null +++ b/node_modules/minizlib/index.js @@ -0,0 +1,348 @@ +'use strict' + +const assert = require('assert') +const Buffer = require('buffer').Buffer +const realZlib = require('zlib') + +const constants = exports.constants = require('./constants.js') +const Minipass = require('minipass') + +const OriginalBufferConcat = Buffer.concat + +const _superWrite = Symbol('_superWrite') +class 
ZlibError extends Error { + constructor (err) { + super('zlib: ' + err.message) + this.code = err.code + this.errno = err.errno + /* istanbul ignore if */ + if (!this.code) + this.code = 'ZLIB_ERROR' + + this.message = 'zlib: ' + err.message + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'ZlibError' + } +} + +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _opts = Symbol('opts') +const _flushFlag = Symbol('flushFlag') +const _finishFlushFlag = Symbol('finishFlushFlag') +const _fullFlushFlag = Symbol('fullFlushFlag') +const _handle = Symbol('handle') +const _onError = Symbol('onError') +const _sawError = Symbol('sawError') +const _level = Symbol('level') +const _strategy = Symbol('strategy') +const _ended = Symbol('ended') +const _defaultFullFlush = Symbol('_defaultFullFlush') + +class ZlibBase extends Minipass { + constructor (opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor') + + super(opts) + this[_sawError] = false + this[_ended] = false + this[_opts] = opts + + this[_flushFlag] = opts.flush + this[_finishFlushFlag] = opts.finishFlush + // this will throw if any options are invalid for the class selected + try { + this[_handle] = new realZlib[mode](opts) + } catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er) + } + + this[_onError] = (err) => { + // no sense raising multiple errors, since we abort on the first one. + if (this[_sawError]) + return + + this[_sawError] = true + + // there is no way to cleanly recover. + // continuing only obscures problems. 
+ this.close() + this.emit('error', err) + } + + this[_handle].on('error', er => this[_onError](new ZlibError(er))) + this.once('end', () => this.close) + } + + close () { + if (this[_handle]) { + this[_handle].close() + this[_handle] = null + this.emit('close') + } + } + + reset () { + if (!this[_sawError]) { + assert(this[_handle], 'zlib binding closed') + return this[_handle].reset() + } + } + + flush (flushFlag) { + if (this.ended) + return + + if (typeof flushFlag !== 'number') + flushFlag = this[_fullFlushFlag] + this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag })) + } + + end (chunk, encoding, cb) { + if (chunk) + this.write(chunk, encoding) + this.flush(this[_finishFlushFlag]) + this[_ended] = true + return super.end(null, null, cb) + } + + get ended () { + return this[_ended] + } + + write (chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + + if (typeof chunk === 'string') + chunk = Buffer.from(chunk, encoding) + + if (this[_sawError]) + return + assert(this[_handle], 'zlib binding closed') + + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + const nativeHandle = this[_handle]._handle + const originalNativeClose = nativeHandle.close + nativeHandle.close = () => {} + const originalClose = this[_handle].close + this[_handle].close = () => {} + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + Buffer.concat = (args) => args + let result + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? 
chunk[_flushFlag] : this[_flushFlag] + result = this[_handle]._processChunk(chunk, flushFlag) + // if we don't throw, reset it back how it was + Buffer.concat = OriginalBufferConcat + } catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + Buffer.concat = OriginalBufferConcat + this[_onError](new ZlibError(err)) + } finally { + if (this[_handle]) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + this[_handle]._handle = nativeHandle + nativeHandle.close = originalNativeClose + this[_handle].close = originalClose + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. + this[_handle].removeAllListeners('error') + // make sure OUR error listener is still attached tho + } + } + + if (this[_handle]) + this[_handle].on('error', er => this[_onError](new ZlibError(er))) + + let writeReturn + if (result) { + if (Array.isArray(result) && result.length > 0) { + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. 
+ writeReturn = this[_superWrite](Buffer.from(result[0])) + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]) + } + } else { + writeReturn = this[_superWrite](Buffer.from(result)) + } + } + + if (cb) + cb() + return writeReturn + } + + [_superWrite] (data) { + return super.write(data) + } +} + +class Zlib extends ZlibBase { + constructor (opts, mode) { + opts = opts || {} + + opts.flush = opts.flush || constants.Z_NO_FLUSH + opts.finishFlush = opts.finishFlush || constants.Z_FINISH + super(opts, mode) + + this[_fullFlushFlag] = constants.Z_FULL_FLUSH + this[_level] = opts.level + this[_strategy] = opts.strategy + } + + params (level, strategy) { + if (this[_sawError]) + return + + if (!this[_handle]) + throw new Error('cannot switch params when binding is closed') + + // no way to test this without also not supporting params at all + /* istanbul ignore if */ + if (!this[_handle].params) + throw new Error('not supported in this implementation') + + if (this[_level] !== level || this[_strategy] !== strategy) { + this.flush(constants.Z_SYNC_FLUSH) + assert(this[_handle], 'zlib binding closed') + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. 
+ const origFlush = this[_handle].flush + this[_handle].flush = (flushFlag, cb) => { + this.flush(flushFlag) + cb() + } + try { + this[_handle].params(level, strategy) + } finally { + this[_handle].flush = origFlush + } + /* istanbul ignore else */ + if (this[_handle]) { + this[_level] = level + this[_strategy] = strategy + } + } + } +} + +// minimal 2-byte header +class Deflate extends Zlib { + constructor (opts) { + super(opts, 'Deflate') + } +} + +class Inflate extends Zlib { + constructor (opts) { + super(opts, 'Inflate') + } +} + +// gzip - bigger header, same deflate compression +const _portable = Symbol('_portable') +class Gzip extends Zlib { + constructor (opts) { + super(opts, 'Gzip') + this[_portable] = opts && !!opts.portable + } + + [_superWrite] (data) { + if (!this[_portable]) + return super[_superWrite](data) + + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this[_portable] = false + data[9] = 255 + return super[_superWrite](data) + } +} + +class Gunzip extends Zlib { + constructor (opts) { + super(opts, 'Gunzip') + } +} + +// raw - no header +class DeflateRaw extends Zlib { + constructor (opts) { + super(opts, 'DeflateRaw') + } +} + +class InflateRaw extends Zlib { + constructor (opts) { + super(opts, 'InflateRaw') + } +} + +// auto-detect header. 
+class Unzip extends Zlib { + constructor (opts) { + super(opts, 'Unzip') + } +} + +class Brotli extends ZlibBase { + constructor (opts, mode) { + opts = opts || {} + + opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS + opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH + + super(opts, mode) + + this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH + } +} + +class BrotliCompress extends Brotli { + constructor (opts) { + super(opts, 'BrotliCompress') + } +} + +class BrotliDecompress extends Brotli { + constructor (opts) { + super(opts, 'BrotliDecompress') + } +} + +exports.Deflate = Deflate +exports.Inflate = Inflate +exports.Gzip = Gzip +exports.Gunzip = Gunzip +exports.DeflateRaw = DeflateRaw +exports.InflateRaw = InflateRaw +exports.Unzip = Unzip +/* istanbul ignore else */ +if (typeof realZlib.BrotliCompress === 'function') { + exports.BrotliCompress = BrotliCompress + exports.BrotliDecompress = BrotliDecompress +} else { + exports.BrotliCompress = exports.BrotliDecompress = class { + constructor () { + throw new Error('Brotli is not supported in this version of Node.js') + } + } +} diff --git a/node_modules/minizlib/node_modules/minipass/LICENSE b/node_modules/minizlib/node_modules/minipass/LICENSE new file mode 100644 index 0000000..bf1dece --- /dev/null +++ b/node_modules/minizlib/node_modules/minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/minizlib/node_modules/minipass/README.md b/node_modules/minizlib/node_modules/minipass/README.md new file mode 100644 index 0000000..2cde46c --- /dev/null +++ b/node_modules/minizlib/node_modules/minipass/README.md @@ -0,0 +1,728 @@ +# minipass + +A _very_ minimal implementation of a [PassThrough +stream](https://nodejs.org/api/stream.html#stream_class_stream_passthrough) + +[It's very +fast](https://docs.google.com/spreadsheets/d/1oObKSrVwLX_7Ut4Z6g3fZW-AX1j1-k6w-cDsrkaSbHM/edit#gid=0) +for objects, strings, and buffers. + +Supports `pipe()`ing (including multi-`pipe()` and backpressure transmission), +buffering data until either a `data` event handler or `pipe()` is added (so +you don't lose the first chunk), and most other cases where PassThrough is +a good idea. + +There is a `read()` method, but it's much more efficient to consume data +from this stream via `'data'` events or by calling `pipe()` into some other +stream. Calling `read()` requires the buffer to be flattened in some +cases, which requires copying memory. + +If you set `objectMode: true` in the options, then whatever is written will +be emitted. Otherwise, it'll do a minimal amount of Buffer copying to +ensure proper Streams semantics when `read(n)` is called. + +`objectMode` can also be set by doing `stream.objectMode = true`, or by +writing any non-string/non-buffer data. `objectMode` cannot be set to +false once it is set. + +This is not a `through` or `through2` stream. It doesn't transform the +data, it just passes it right through. If you want to transform the data, +extend the class, and override the `write()` method. 
Once you're done +transforming the data however you want, call `super.write()` with the +transform output. + +For some examples of streams that extend Minipass in various ways, check +out: + +- [minizlib](http://npm.im/minizlib) +- [fs-minipass](http://npm.im/fs-minipass) +- [tar](http://npm.im/tar) +- [minipass-collect](http://npm.im/minipass-collect) +- [minipass-flush](http://npm.im/minipass-flush) +- [minipass-pipeline](http://npm.im/minipass-pipeline) +- [tap](http://npm.im/tap) +- [tap-parser](http://npm.im/tap-parser) +- [treport](http://npm.im/treport) +- [minipass-fetch](http://npm.im/minipass-fetch) +- [pacote](http://npm.im/pacote) +- [make-fetch-happen](http://npm.im/make-fetch-happen) +- [cacache](http://npm.im/cacache) +- [ssri](http://npm.im/ssri) +- [npm-registry-fetch](http://npm.im/npm-registry-fetch) +- [minipass-json-stream](http://npm.im/minipass-json-stream) +- [minipass-sized](http://npm.im/minipass-sized) + +## Differences from Node.js Streams + +There are several things that make Minipass streams different from (and in +some ways superior to) Node.js core streams. + +Please read these caveats if you are familiar with node-core streams and +intend to use Minipass streams in your programs. + +You can avoid most of these differences entirely (for a very +small performance penalty) by setting `{async: true}` in the +constructor options. + +### Timing + +Minipass streams are designed to support synchronous use-cases. Thus, data +is emitted as soon as it is available, always. It is buffered until read, +but no longer. Another way to look at it is that Minipass streams are +exactly as synchronous as the logic that writes into them. + +This can be surprising if your code relies on `PassThrough.write()` always +providing data on the next tick rather than the current one, or being able +to call `resume()` and not have the entire buffer disappear immediately. 
+ +However, without this synchronicity guarantee, there would be no way for +Minipass to achieve the speeds it does, or support the synchronous use +cases that it does. Simply put, waiting takes time. + +This non-deferring approach makes Minipass streams much easier to reason +about, especially in the context of Promises and other flow-control +mechanisms. + +Example: + +```js +const Minipass = require('minipass') +const stream = new Minipass({ async: true }) +stream.on('data', () => console.log('data event')) +console.log('before write') +stream.write('hello') +console.log('after write') +// output: +// before write +// data event +// after write +``` + +### Exception: Async Opt-In + +If you wish to have a Minipass stream with behavior that more +closely mimics Node.js core streams, you can set the stream in +async mode either by setting `async: true` in the constructor +options, or by setting `stream.async = true` later on. + +```js +const Minipass = require('minipass') +const asyncStream = new Minipass({ async: true }) +asyncStream.on('data', () => console.log('data event')) +console.log('before write') +asyncStream.write('hello') +console.log('after write') +// output: +// before write +// after write +// data event <-- this is deferred until the next tick +``` + +Switching _out_ of async mode is unsafe, as it could cause data +corruption, and so is not enabled. Example: + +```js +const Minipass = require('minipass') +const stream = new Minipass({ encoding: 'utf8' }) +stream.on('data', chunk => console.log(chunk)) +stream.async = true +console.log('before writes') +stream.write('hello') +setStreamSyncAgainSomehow(stream) // <-- this doesn't actually exist! +stream.write('world') +console.log('after writes') +// hypothetical output would be: +// before writes +// world +// after writes +// hello +// NOT GOOD! +``` + +To avoid this problem, once set into async mode, any attempt to +make the stream sync again will be ignored. 
+ +```js +const Minipass = require('minipass') +const stream = new Minipass({ encoding: 'utf8' }) +stream.on('data', chunk => console.log(chunk)) +stream.async = true +console.log('before writes') +stream.write('hello') +stream.async = false // <-- no-op, stream already async +stream.write('world') +console.log('after writes') +// actual output: +// before writes +// after writes +// hello +// world +``` + +### No High/Low Water Marks + +Node.js core streams will optimistically fill up a buffer, returning `true` +on all writes until the limit is hit, even if the data has nowhere to go. +Then, they will not attempt to draw more data in until the buffer size dips +below a minimum value. + +Minipass streams are much simpler. The `write()` method will return `true` +if the data has somewhere to go (which is to say, given the timing +guarantees, that the data is already there by the time `write()` returns). + +If the data has nowhere to go, then `write()` returns false, and the data +sits in a buffer, to be drained out immediately as soon as anyone consumes +it. + +Since nothing is ever buffered unnecessarily, there is much less +copying data, and less bookkeeping about buffer capacity levels. + +### Hazards of Buffering (or: Why Minipass Is So Fast) + +Since data written to a Minipass stream is immediately written all the way +through the pipeline, and `write()` always returns true/false based on +whether the data was fully flushed, backpressure is communicated +immediately to the upstream caller. This minimizes buffering. 
+ +Consider this case: + +```js +const {PassThrough} = require('stream') +const p1 = new PassThrough({ highWaterMark: 1024 }) +const p2 = new PassThrough({ highWaterMark: 1024 }) +const p3 = new PassThrough({ highWaterMark: 1024 }) +const p4 = new PassThrough({ highWaterMark: 1024 }) + +p1.pipe(p2).pipe(p3).pipe(p4) +p4.on('data', () => console.log('made it through')) + +// this returns false and buffers, then writes to p2 on next tick (1) +// p2 returns false and buffers, pausing p1, then writes to p3 on next tick (2) +// p3 returns false and buffers, pausing p2, then writes to p4 on next tick (3) +// p4 returns false and buffers, pausing p3, then emits 'data' and 'drain' +// on next tick (4) +// p3 sees p4's 'drain' event, and calls resume(), emitting 'resume' and +// 'drain' on next tick (5) +// p2 sees p3's 'drain', calls resume(), emits 'resume' and 'drain' on next tick (6) +// p1 sees p2's 'drain', calls resume(), emits 'resume' and 'drain' on next +// tick (7) + +p1.write(Buffer.alloc(2048)) // returns false +``` + +Along the way, the data was buffered and deferred at each stage, and +multiple event deferrals happened, for an unblocked pipeline where it was +perfectly safe to write all the way through! + +Furthermore, setting a `highWaterMark` of `1024` might lead someone reading +the code to think an advisory maximum of 1KiB is being set for the +pipeline. However, the actual advisory buffering level is the _sum_ of +`highWaterMark` values, since each one has its own bucket. 
+ +Consider the Minipass case: + +```js +const m1 = new Minipass() +const m2 = new Minipass() +const m3 = new Minipass() +const m4 = new Minipass() + +m1.pipe(m2).pipe(m3).pipe(m4) +m4.on('data', () => console.log('made it through')) + +// m1 is flowing, so it writes the data to m2 immediately +// m2 is flowing, so it writes the data to m3 immediately +// m3 is flowing, so it writes the data to m4 immediately +// m4 is flowing, so it fires the 'data' event immediately, returns true +// m4's write returned true, so m3 is still flowing, returns true +// m3's write returned true, so m2 is still flowing, returns true +// m2's write returned true, so m1 is still flowing, returns true +// No event deferrals or buffering along the way! + +m1.write(Buffer.alloc(2048)) // returns true +``` + +It is extremely unlikely that you _don't_ want to buffer any data written, +or _ever_ buffer data that can be flushed all the way through. Neither +node-core streams nor Minipass ever fail to buffer written data, but +node-core streams do a lot of unnecessary buffering and pausing. + +As always, the faster implementation is the one that does less stuff and +waits less time to do it. + +### Immediately emit `end` for empty streams (when not paused) + +If a stream is not paused, and `end()` is called before writing any data +into it, then it will emit `end` immediately. + +If you have logic that occurs on the `end` event which you don't want to +potentially happen immediately (for example, closing file descriptors, +moving on to the next entry in an archive parse stream, etc.) then be sure +to call `stream.pause()` on creation, and then `stream.resume()` once you +are ready to respond to the `end` event. + +However, this is _usually_ not a problem because: + +### Emit `end` When Asked + +One hazard of immediately emitting `'end'` is that you may not yet have had +a chance to add a listener. 
In order to avoid this hazard, Minipass +streams safely re-emit the `'end'` event if a new listener is added after +`'end'` has been emitted. + +Ie, if you do `stream.on('end', someFunction)`, and the stream has already +emitted `end`, then it will call the handler right away. (You can think of +this somewhat like attaching a new `.then(fn)` to a previously-resolved +Promise.) + +To prevent calling handlers multiple times who would not expect multiple +ends to occur, all listeners are removed from the `'end'` event whenever it +is emitted. + +### Emit `error` When Asked + +The most recent error object passed to the `'error'` event is +stored on the stream. If a new `'error'` event handler is added, +and an error was previously emitted, then the event handler will +be called immediately (or on `process.nextTick` in the case of +async streams). + +This makes it much more difficult to end up trying to interact +with a broken stream, if the error handler is added after an +error was previously emitted. + +### Impact of "immediate flow" on Tee-streams + +A "tee stream" is a stream piping to multiple destinations: + +```js +const tee = new Minipass() +t.pipe(dest1) +t.pipe(dest2) +t.write('foo') // goes to both destinations +``` + +Since Minipass streams _immediately_ process any pending data through the +pipeline when a new pipe destination is added, this can have surprising +effects, especially when a stream comes in from some other function and may +or may not have data in its buffer. + +```js +// WARNING! WILL LOSE DATA! +const src = new Minipass() +src.write('foo') +src.pipe(dest1) // 'foo' chunk flows to dest1 immediately, and is gone +src.pipe(dest2) // gets nothing! +``` + +One solution is to create a dedicated tee-stream junction that pipes to +both locations, and then pipe to _that_ instead. 
+ +```js +// Safe example: tee to both places +const src = new Minipass() +src.write('foo') +const tee = new Minipass() +tee.pipe(dest1) +tee.pipe(dest2) +src.pipe(tee) // tee gets 'foo', pipes to both locations +``` + +The same caveat applies to `on('data')` event listeners. The first one +added will _immediately_ receive all of the data, leaving nothing for the +second: + +```js +// WARNING! WILL LOSE DATA! +const src = new Minipass() +src.write('foo') +src.on('data', handler1) // receives 'foo' right away +src.on('data', handler2) // nothing to see here! +``` + +Using a dedicated tee-stream can be used in this case as well: + +```js +// Safe example: tee to both data handlers +const src = new Minipass() +src.write('foo') +const tee = new Minipass() +tee.on('data', handler1) +tee.on('data', handler2) +src.pipe(tee) +``` + +All of the hazards in this section are avoided by setting `{ +async: true }` in the Minipass constructor, or by setting +`stream.async = true` afterwards. Note that this does add some +overhead, so should only be done in cases where you are willing +to lose a bit of performance in order to avoid having to refactor +program logic. + +## USAGE + +It's a stream! Use it like a stream and it'll most likely do what you +want. + +```js +const Minipass = require('minipass') +const mp = new Minipass(options) // optional: { encoding, objectMode } +mp.write('foo') +mp.pipe(someOtherStream) +mp.end('bar') +``` + +### OPTIONS + +* `encoding` How would you like the data coming _out_ of the stream to be + encoded? Accepts any values that can be passed to `Buffer.toString()`. +* `objectMode` Emit data exactly as it comes in. This will be flipped on + by default if you write() something other than a string or Buffer at any + point. Setting `objectMode: true` will prevent setting any encoding + value. +* `async` Defaults to `false`. Set to `true` to defer data + emission until next tick. 
This reduces performance slightly, + but makes Minipass streams use timing behavior closer to Node + core streams. See [Timing](#timing) for more details. + +### API + +Implements the user-facing portions of Node.js's `Readable` and `Writable` +streams. + +### Methods + +* `write(chunk, [encoding], [callback])` - Put data in. (Note that, in the + base Minipass class, the same data will come out.) Returns `false` if + the stream will buffer the next write, or true if it's still in "flowing" + mode. +* `end([chunk, [encoding]], [callback])` - Signal that you have no more + data to write. This will queue an `end` event to be fired when all the + data has been consumed. +* `setEncoding(encoding)` - Set the encoding for data coming of the stream. + This can only be done once. +* `pause()` - No more data for a while, please. This also prevents `end` + from being emitted for empty streams until the stream is resumed. +* `resume()` - Resume the stream. If there's data in the buffer, it is all + discarded. Any buffered events are immediately emitted. +* `pipe(dest)` - Send all output to the stream provided. When + data is emitted, it is immediately written to any and all pipe + destinations. (Or written on next tick in `async` mode.) +* `unpipe(dest)` - Stop piping to the destination stream. This + is immediate, meaning that any asynchronously queued data will + _not_ make it to the destination when running in `async` mode. + * `options.end` - Boolean, end the destination stream when + the source stream ends. Default `true`. + * `options.proxyErrors` - Boolean, proxy `error` events from + the source stream to the destination stream. Note that + errors are _not_ proxied after the pipeline terminates, + either due to the source emitting `'end'` or manually + unpiping with `src.unpipe(dest)`. Default `false`. +* `on(ev, fn)`, `emit(ev, fn)` - Minipass streams are EventEmitters. Some + events are given special treatment, however. (See below under "events".) 
+* `promise()` - Returns a Promise that resolves when the stream emits + `end`, or rejects if the stream emits `error`. +* `collect()` - Return a Promise that resolves on `end` with an array + containing each chunk of data that was emitted, or rejects if the stream + emits `error`. Note that this consumes the stream data. +* `concat()` - Same as `collect()`, but concatenates the data into a single + Buffer object. Will reject the returned promise if the stream is in + objectMode, or if it goes into objectMode by the end of the data. +* `read(n)` - Consume `n` bytes of data out of the buffer. If `n` is not + provided, then consume all of it. If `n` bytes are not available, then + it returns null. **Note** consuming streams in this way is less + efficient, and can lead to unnecessary Buffer copying. +* `destroy([er])` - Destroy the stream. If an error is provided, then an + `'error'` event is emitted. If the stream has a `close()` method, and + has not emitted a `'close'` event yet, then `stream.close()` will be + called. Any Promises returned by `.promise()`, `.collect()` or + `.concat()` will be rejected. After being destroyed, writing to the + stream will emit an error. No more data will be emitted if the stream is + destroyed, even if it was previously buffered. + +### Properties + +* `bufferLength` Read-only. Total number of bytes buffered, or in the case + of objectMode, the total number of objects. +* `encoding` The encoding that has been set. (Setting this is equivalent + to calling `setEncoding(enc)` and has the same prohibition against + setting multiple times.) +* `flowing` Read-only. Boolean indicating whether a chunk written to the + stream will be immediately emitted. +* `emittedEnd` Read-only. Boolean indicating whether the end-ish events + (ie, `end`, `prefinish`, `finish`) have been emitted. Note that + listening on any end-ish event will immediateyl re-emit it if it has + already been emitted. +* `writable` Whether the stream is writable. 
Default `true`. Set to + `false` when `end()` +* `readable` Whether the stream is readable. Default `true`. +* `buffer` A [yallist](http://npm.im/yallist) linked list of chunks written + to the stream that have not yet been emitted. (It's probably a bad idea + to mess with this.) +* `pipes` A [yallist](http://npm.im/yallist) linked list of streams that + this stream is piping into. (It's probably a bad idea to mess with + this.) +* `destroyed` A getter that indicates whether the stream was destroyed. +* `paused` True if the stream has been explicitly paused, otherwise false. +* `objectMode` Indicates whether the stream is in `objectMode`. Once set + to `true`, it cannot be set to `false`. + +### Events + +* `data` Emitted when there's data to read. Argument is the data to read. + This is never emitted while not flowing. If a listener is attached, that + will resume the stream. +* `end` Emitted when there's no more data to read. This will be emitted + immediately for empty streams when `end()` is called. If a listener is + attached, and `end` was already emitted, then it will be emitted again. + All listeners are removed when `end` is emitted. +* `prefinish` An end-ish event that follows the same logic as `end` and is + emitted in the same conditions where `end` is emitted. Emitted after + `'end'`. +* `finish` An end-ish event that follows the same logic as `end` and is + emitted in the same conditions where `end` is emitted. Emitted after + `'prefinish'`. +* `close` An indication that an underlying resource has been released. + Minipass does not emit this event, but will defer it until after `end` + has been emitted, since it throws off some stream libraries otherwise. +* `drain` Emitted when the internal buffer empties, and it is again + suitable to `write()` into the stream. +* `readable` Emitted when data is buffered and ready to be read by a + consumer. +* `resume` Emitted when stream changes state from buffering to flowing + mode. 
(Ie, when `resume` is called, `pipe` is called, or a `data` event + listener is added.) + +### Static Methods + +* `Minipass.isStream(stream)` Returns `true` if the argument is a stream, + and false otherwise. To be considered a stream, the object must be + either an instance of Minipass, or an EventEmitter that has either a + `pipe()` method, or both `write()` and `end()` methods. (Pretty much any + stream in node-land will return `true` for this.) + +## EXAMPLES + +Here are some examples of things you can do with Minipass streams. + +### simple "are you done yet" promise + +```js +mp.promise().then(() => { + // stream is finished +}, er => { + // stream emitted an error +}) +``` + +### collecting + +```js +mp.collect().then(all => { + // all is an array of all the data emitted + // encoding is supported in this case, so + // so the result will be a collection of strings if + // an encoding is specified, or buffers/objects if not. + // + // In an async function, you may do + // const data = await stream.collect() +}) +``` + +### collecting into a single blob + +This is a bit slower because it concatenates the data into one chunk for +you, but if you're going to do it yourself anyway, it's convenient this +way: + +```js +mp.concat().then(onebigchunk => { + // onebigchunk is a string if the stream + // had an encoding set, or a buffer otherwise. +}) +``` + +### iteration + +You can iterate over streams synchronously or asynchronously in platforms +that support it. + +Synchronous iteration will end when the currently available data is +consumed, even if the `end` event has not been reached. In string and +buffer mode, the data is concatenated, so unless multiple writes are +occurring in the same tick as the `read()`, sync iteration loops will +generally only have a single iteration. + +To consume chunks in this way exactly as they have been written, with no +flattening, create the stream with the `{ objectMode: true }` option. 
+ +```js +const mp = new Minipass({ objectMode: true }) +mp.write('a') +mp.write('b') +for (let letter of mp) { + console.log(letter) // a, b +} +mp.write('c') +mp.write('d') +for (let letter of mp) { + console.log(letter) // c, d +} +mp.write('e') +mp.end() +for (let letter of mp) { + console.log(letter) // e +} +for (let letter of mp) { + console.log(letter) // nothing +} +``` + +Asynchronous iteration will continue until the end event is reached, +consuming all of the data. + +```js +const mp = new Minipass({ encoding: 'utf8' }) + +// some source of some data +let i = 5 +const inter = setInterval(() => { + if (i-- > 0) + mp.write(Buffer.from('foo\n', 'utf8')) + else { + mp.end() + clearInterval(inter) + } +}, 100) + +// consume the data with asynchronous iteration +async function consume () { + for await (let chunk of mp) { + console.log(chunk) + } + return 'ok' +} + +consume().then(res => console.log(res)) +// logs `foo\n` 5 times, and then `ok` +``` + +### subclass that `console.log()`s everything written into it + +```js +class Logger extends Minipass { + write (chunk, encoding, callback) { + console.log('WRITE', chunk, encoding) + return super.write(chunk, encoding, callback) + } + end (chunk, encoding, callback) { + console.log('END', chunk, encoding) + return super.end(chunk, encoding, callback) + } +} + +someSource.pipe(new Logger()).pipe(someDest) +``` + +### same thing, but using an inline anonymous class + +```js +// js classes are fun +someSource + .pipe(new (class extends Minipass { + emit (ev, ...data) { + // let's also log events, because debugging some weird thing + console.log('EMIT', ev) + return super.emit(ev, ...data) + } + write (chunk, encoding, callback) { + console.log('WRITE', chunk, encoding) + return super.write(chunk, encoding, callback) + } + end (chunk, encoding, callback) { + console.log('END', chunk, encoding) + return super.end(chunk, encoding, callback) + } + })) + .pipe(someDest) +``` + +### subclass that defers 'end' for some 
reason + +```js +class SlowEnd extends Minipass { + emit (ev, ...args) { + if (ev === 'end') { + console.log('going to end, hold on a sec') + setTimeout(() => { + console.log('ok, ready to end now') + super.emit('end', ...args) + }, 100) + } else { + return super.emit(ev, ...args) + } + } +} +``` + +### transform that creates newline-delimited JSON + +```js +class NDJSONEncode extends Minipass { + write (obj, cb) { + try { + // JSON.stringify can throw, emit an error on that + return super.write(JSON.stringify(obj) + '\n', 'utf8', cb) + } catch (er) { + this.emit('error', er) + } + } + end (obj, cb) { + if (typeof obj === 'function') { + cb = obj + obj = undefined + } + if (obj !== undefined) { + this.write(obj) + } + return super.end(cb) + } +} +``` + +### transform that parses newline-delimited JSON + +```js +class NDJSONDecode extends Minipass { + constructor (options) { + // always be in object mode, as far as Minipass is concerned + super({ objectMode: true }) + this._jsonBuffer = '' + } + write (chunk, encoding, cb) { + if (typeof chunk === 'string' && + typeof encoding === 'string' && + encoding !== 'utf8') { + chunk = Buffer.from(chunk, encoding).toString() + } else if (Buffer.isBuffer(chunk)) + chunk = chunk.toString() + } + if (typeof encoding === 'function') { + cb = encoding + } + const jsonData = (this._jsonBuffer + chunk).split('\n') + this._jsonBuffer = jsonData.pop() + for (let i = 0; i < jsonData.length; i++) { + try { + // JSON.parse can throw, emit an error on that + super.write(JSON.parse(jsonData[i])) + } catch (er) { + this.emit('error', er) + continue + } + } + if (cb) + cb() + } +} +``` diff --git a/node_modules/minizlib/node_modules/minipass/index.d.ts b/node_modules/minizlib/node_modules/minipass/index.d.ts new file mode 100644 index 0000000..65faf63 --- /dev/null +++ b/node_modules/minizlib/node_modules/minipass/index.d.ts @@ -0,0 +1,155 @@ +/// +import { EventEmitter } from 'events' +import { Stream } from 'stream' + +declare namespace 
Minipass { + type Encoding = BufferEncoding | 'buffer' | null + + interface Writable extends EventEmitter { + end(): any + write(chunk: any, ...args: any[]): any + } + + interface Readable extends EventEmitter { + pause(): any + resume(): any + pipe(): any + } + + interface Pipe { + src: Minipass + dest: Writable + opts: PipeOptions + } + + type DualIterable = Iterable & AsyncIterable + + type ContiguousData = Buffer | ArrayBufferLike | ArrayBufferView | string + + type BufferOrString = Buffer | string + + interface StringOptions { + encoding: BufferEncoding + objectMode?: boolean + async?: boolean + } + + interface BufferOptions { + encoding?: null | 'buffer' + objectMode?: boolean + async?: boolean + } + + interface ObjectModeOptions { + objectMode: true + async?: boolean + } + + interface PipeOptions { + end?: boolean + proxyErrors?: boolean + } + + type Options = T extends string + ? StringOptions + : T extends Buffer + ? BufferOptions + : ObjectModeOptions +} + +declare class Minipass< + RType extends any = Buffer, + WType extends any = RType extends Minipass.BufferOrString + ? Minipass.ContiguousData + : RType + > + extends Stream + implements Minipass.DualIterable +{ + static isStream(stream: any): stream is Minipass.Readable | Minipass.Writable + + readonly bufferLength: number + readonly flowing: boolean + readonly writable: boolean + readonly readable: boolean + readonly paused: boolean + readonly emittedEnd: boolean + readonly destroyed: boolean + + /** + * Not technically private or readonly, but not safe to mutate. + */ + private readonly buffer: RType[] + private readonly pipes: Minipass.Pipe[] + + /** + * Technically writable, but mutating it can change the type, + * so is not safe to do in TypeScript. + */ + readonly objectMode: boolean + async: boolean + + /** + * Note: encoding is not actually read-only, and setEncoding(enc) + * exists. 
However, this type definition will insist that TypeScript + * programs declare the type of a Minipass stream up front, and if + * that type is string, then an encoding MUST be set in the ctor. If + * the type is Buffer, then the encoding must be missing, or set to + * 'buffer' or null. If the type is anything else, then objectMode + * must be set in the constructor options. So there is effectively + * no allowed way that a TS program can set the encoding after + * construction, as doing so will destroy any hope of type safety. + * TypeScript does not provide many options for changing the type of + * an object at run-time, which is what changing the encoding does. + */ + readonly encoding: Minipass.Encoding + // setEncoding(encoding: Encoding): void + + // Options required if not reading buffers + constructor( + ...args: RType extends Buffer + ? [] | [Minipass.Options] + : [Minipass.Options] + ) + + write(chunk: WType, cb?: () => void): boolean + write(chunk: WType, encoding?: Minipass.Encoding, cb?: () => void): boolean + read(size?: number): RType + end(cb?: () => void): this + end(chunk: any, cb?: () => void): this + end(chunk: any, encoding?: Minipass.Encoding, cb?: () => void): this + pause(): void + resume(): void + promise(): Promise + collect(): Promise + + concat(): RType extends Minipass.BufferOrString ? 
Promise : never + destroy(er?: any): void + pipe(dest: W, opts?: Minipass.PipeOptions): W + unpipe(dest: W): void + + /** + * alias for on() + */ + addEventHandler(event: string, listener: (...args: any[]) => any): this + + on(event: string, listener: (...args: any[]) => any): this + on(event: 'data', listener: (chunk: RType) => any): this + on(event: 'error', listener: (error: any) => any): this + on( + event: + | 'readable' + | 'drain' + | 'resume' + | 'end' + | 'prefinish' + | 'finish' + | 'close', + listener: () => any + ): this + + [Symbol.iterator](): Iterator + [Symbol.asyncIterator](): AsyncIterator +} + +export = Minipass diff --git a/node_modules/minizlib/node_modules/minipass/index.js b/node_modules/minizlib/node_modules/minipass/index.js new file mode 100644 index 0000000..e8797aa --- /dev/null +++ b/node_modules/minizlib/node_modules/minipass/index.js @@ -0,0 +1,649 @@ +'use strict' +const proc = typeof process === 'object' && process ? process : { + stdout: null, + stderr: null, +} +const EE = require('events') +const Stream = require('stream') +const SD = require('string_decoder').StringDecoder + +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const EMITTED_ERROR = Symbol('emittedError') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +const DESTROYED = Symbol('destroyed') +const EMITDATA = Symbol('emitData') +const EMITEND = Symbol('emitEnd') +const EMITEND2 = Symbol('emitEnd2') +const ASYNC = Symbol('async') + +const 
defer = fn => Promise.resolve().then(fn) + +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = doIter && Symbol.asyncIterator + || Symbol('asyncIterator not implemented') +const ITERATOR = doIter && Symbol.iterator + || Symbol('iterator not implemented') + +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after emitting. +const isEndish = ev => + ev === 'end' || + ev === 'finish' || + ev === 'prefinish' + +const isArrayBuffer = b => b instanceof ArrayBuffer || + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0 + +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + +class Pipe { + constructor (src, dest, opts) { + this.src = src + this.dest = dest + this.opts = opts + this.ondrain = () => src[RESUME]() + dest.on('drain', this.ondrain) + } + unpipe () { + this.dest.removeListener('drain', this.ondrain) + } + // istanbul ignore next - only here for the prototype + proxyErrors () {} + end () { + this.unpipe() + if (this.opts.end) + this.dest.end() + } +} + +class PipeProxyErrors extends Pipe { + unpipe () { + this.src.removeListener('error', this.proxyErrors) + super.unpipe() + } + constructor (src, dest, opts) { + super(src, dest, opts) + this.proxyErrors = er => dest.emit('error', er) + src.on('error', this.proxyErrors) + } +} + +module.exports = class Minipass extends Stream { + constructor (options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this.pipes = [] + this.buffer = [] + this[OBJECTMODE] = options && options.objectMode || false + if (this[OBJECTMODE]) + this[ENCODING] = null + else + this[ENCODING] = options && options.encoding || null + if (this[ENCODING] === 'buffer') + this[ENCODING] = null + this[ASYNC] = options && !!options.async || false + this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this[EMITTED_ERROR] = null + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + } + + get bufferLength () { return this[BUFFERLENGTH] } + + get encoding () { return this[ENCODING] } + set encoding (enc) { + if (this[OBJECTMODE]) + throw new Error('cannot set encoding in objectMode') + + if (this[ENCODING] && enc !== this[ENCODING] && + (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) + throw new Error('cannot change encoding') + + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? new SD(enc) : null + if (this.buffer.length) + this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) + } + + this[ENCODING] = enc + } + + setEncoding (enc) { + this.encoding = enc + } + + get objectMode () { return this[OBJECTMODE] } + set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om } + + get ['async'] () { return this[ASYNC] } + set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a } + + write (chunk, encoding, cb) { + if (this[EOF]) + throw new Error('write after end') + + if (this[DESTROYED]) { + this.emit('error', Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + )) + return true + } + + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + + if (!encoding) + encoding = 'utf8' + + const fn = this[ASYNC] ? defer : f => f() + + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! 
+ // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) + chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } + + // handle object mode up front, since it's simpler + // this yields better performance, fewer checks later. + if (this[OBJECTMODE]) { + /* istanbul ignore if - maybe impossible? */ + if (this.flowing && this[BUFFERLENGTH] !== 0) + this[FLUSH](true) + + if (this.flowing) + this.emit('data', chunk) + else + this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) + this.emit('readable') + + if (cb) + fn(cb) + + return this.flowing + } + + // at this point the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!chunk.length) { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable') + if (cb) + fn(cb) + return this.flowing + } + + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { + chunk = Buffer.from(chunk, encoding) + } + + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) + + // Note: flushing CAN potentially switch us into not-flowing mode + if (this.flowing && this[BUFFERLENGTH] !== 0) + this[FLUSH](true) + + if (this.flowing) + this.emit('data', chunk) + else + this[BUFFERPUSH](chunk) + + if (this[BUFFERLENGTH] !== 0) + this.emit('readable') + + if (cb) + fn(cb) + + return this.flowing + } + + read (n) { + if (this[DESTROYED]) + return null + + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) { + this[MAYBE_EMIT_END]() + return null + } + 
+ if (this[OBJECTMODE]) + n = null + + if (this.buffer.length > 1 && !this[OBJECTMODE]) { + if (this.encoding) + this.buffer = [this.buffer.join('')] + else + this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])] + } + + const ret = this[READ](n || null, this.buffer[0]) + this[MAYBE_EMIT_END]() + return ret + } + + [READ] (n, chunk) { + if (n === chunk.length || n === null) + this[BUFFERSHIFT]() + else { + this.buffer[0] = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } + + this.emit('data', chunk) + + if (!this.buffer.length && !this[EOF]) + this.emit('drain') + + return chunk + } + + end (chunk, encoding, cb) { + if (typeof chunk === 'function') + cb = chunk, chunk = null + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + if (chunk) + this.write(chunk, encoding) + if (cb) + this.once('end', cb) + this[EOF] = true + this.writable = false + + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. 
+ if (this.flowing || !this[PAUSED]) + this[MAYBE_EMIT_END]() + return this + } + + // don't let the internal resume be overwritten + [RESUME] () { + if (this[DESTROYED]) + return + + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this.buffer.length) + this[FLUSH]() + else if (this[EOF]) + this[MAYBE_EMIT_END]() + else + this.emit('drain') + } + + resume () { + return this[RESUME]() + } + + pause () { + this[FLOWING] = false + this[PAUSED] = true + } + + get destroyed () { + return this[DESTROYED] + } + + get flowing () { + return this[FLOWING] + } + + get paused () { + return this[PAUSED] + } + + [BUFFERPUSH] (chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1 + else + this[BUFFERLENGTH] += chunk.length + this.buffer.push(chunk) + } + + [BUFFERSHIFT] () { + if (this.buffer.length) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1 + else + this[BUFFERLENGTH] -= this.buffer[0].length + } + return this.buffer.shift() + } + + [FLUSH] (noDrain) { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) + + if (!noDrain && !this.buffer.length && !this[EOF]) + this.emit('drain') + } + + [FLUSHCHUNK] (chunk) { + return chunk ? (this.emit('data', chunk), this.flowing) : false + } + + pipe (dest, opts) { + if (this[DESTROYED]) + return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === proc.stdout || dest === proc.stderr) + opts.end = false + else + opts.end = opts.end !== false + opts.proxyErrors = !!opts.proxyErrors + + // piping an ended stream ends immediately + if (ended) { + if (opts.end) + dest.end() + } else { + this.pipes.push(!opts.proxyErrors ? 
new Pipe(this, dest, opts) + : new PipeProxyErrors(this, dest, opts)) + if (this[ASYNC]) + defer(() => this[RESUME]()) + else + this[RESUME]() + } + + return dest + } + + unpipe (dest) { + const p = this.pipes.find(p => p.dest === dest) + if (p) { + this.pipes.splice(this.pipes.indexOf(p), 1) + p.unpipe() + } + } + + addListener (ev, fn) { + return this.on(ev, fn) + } + + on (ev, fn) { + const ret = super.on(ev, fn) + if (ev === 'data' && !this.pipes.length && !this.flowing) + this[RESUME]() + else if (ev === 'readable' && this[BUFFERLENGTH] !== 0) + super.emit('readable') + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) + } else if (ev === 'error' && this[EMITTED_ERROR]) { + if (this[ASYNC]) + defer(() => fn.call(this, this[EMITTED_ERROR])) + else + fn.call(this, this[EMITTED_ERROR]) + } + return ret + } + + get emittedEnd () { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END] () { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this.buffer.length === 0 && + this[EOF]) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) + this.emit('close') + this[EMITTING_END] = false + } + } + + emit (ev, data, ...extra) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + return !data ? false + : this[ASYNC] ? 
defer(() => this[EMITDATA](data)) + : this[EMITDATA](data) + } else if (ev === 'end') { + return this[EMITEND]() + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return + const ret = super.emit('close') + this.removeAllListeners('close') + return ret + } else if (ev === 'error') { + this[EMITTED_ERROR] = data + const ret = super.emit('error', data) + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'resume') { + const ret = super.emit('resume') + this[MAYBE_EMIT_END]() + return ret + } else if (ev === 'finish' || ev === 'prefinish') { + const ret = super.emit(ev) + this.removeAllListeners(ev) + return ret + } + + // Some other unknown event + const ret = super.emit(ev, data, ...extra) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITDATA] (data) { + for (const p of this.pipes) { + if (p.dest.write(data) === false) + this.pause() + } + const ret = super.emit('data', data) + this[MAYBE_EMIT_END]() + return ret + } + + [EMITEND] () { + if (this[EMITTED_END]) + return + + this[EMITTED_END] = true + this.readable = false + if (this[ASYNC]) + defer(() => this[EMITEND2]()) + else + this[EMITEND2]() + } + + [EMITEND2] () { + if (this[DECODER]) { + const data = this[DECODER].end() + if (data) { + for (const p of this.pipes) { + p.dest.write(data) + } + super.emit('data', data) + } + } + + for (const p of this.pipes) { + p.end() + } + const ret = super.emit('end') + this.removeAllListeners('end') + return ret + } + + // const all = await stream.collect() + collect () { + const buf = [] + if (!this[OBJECTMODE]) + buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) + buf.dataLength += c.length + }) + return p.then(() => buf) + } + + // const data = await stream.concat() + concat () { + return this[OBJECTMODE] + ? 
Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength)) + } + + // stream.promise().then(() => done, er => emitted error) + promise () { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('error', er => reject(er)) + this.on('end', () => resolve()) + }) + } + + // for await (let chunk of stream) + [ASYNCITERATOR] () { + const next = () => { + const res = this.read() + if (res !== null) + return Promise.resolve({ done: false, value: res }) + + if (this[EOF]) + return Promise.resolve({ done: true }) + + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } + + return { next } + } + + // for (let chunk of stream) + [ITERATOR] () { + const next = () => { + const value = this.read() + const done = value === null + return { value, done } + } + return { next } + } + + destroy (er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er) + else + this.emit(DESTROYED) + return this + } + + this[DESTROYED] = true + + // throw away all buffered data, it's never coming out + this.buffer.length = 0 + this[BUFFERLENGTH] = 0 + + if (typeof this.close === 'function' && !this[CLOSED]) + 
this.close() + + if (er) + this.emit('error', er) + else // if no error to emit, still reject pending promises + this.emit(DESTROYED) + + return this + } + + static isStream (s) { + return !!s && (s instanceof Minipass || s instanceof Stream || + s instanceof EE && ( + typeof s.pipe === 'function' || // readable + (typeof s.write === 'function' && typeof s.end === 'function') // writable + )) + } +} diff --git a/node_modules/minizlib/node_modules/minipass/package.json b/node_modules/minizlib/node_modules/minipass/package.json new file mode 100644 index 0000000..548d03f --- /dev/null +++ b/node_modules/minizlib/node_modules/minipass/package.json @@ -0,0 +1,56 @@ +{ + "name": "minipass", + "version": "3.3.6", + "description": "minimal implementation of a PassThrough stream", + "main": "index.js", + "types": "index.d.ts", + "dependencies": { + "yallist": "^4.0.0" + }, + "devDependencies": { + "@types/node": "^17.0.41", + "end-of-stream": "^1.4.0", + "prettier": "^2.6.2", + "tap": "^16.2.0", + "through2": "^2.0.3", + "ts-node": "^10.8.1", + "typescript": "^4.7.3" + }, + "scripts": { + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minipass.git" + }, + "keywords": [ + "passthrough", + "stream" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "files": [ + "index.d.ts", + "index.js" + ], + "tap": { + "check-coverage": true + }, + "engines": { + "node": ">=8" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/node_modules/minizlib/package.json b/node_modules/minizlib/package.json new file mode 100644 index 0000000..98825a5 --- /dev/null +++ b/node_modules/minizlib/package.json @@ -0,0 +1,42 @@ +{ + "name": "minizlib", + "version": "2.1.2", + "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.", + "main": "index.js", + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "scripts": { + "test": "tap test/*.js --100 -J", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minizlib.git" + }, + "keywords": [ + "zlib", + "gzip", + "gunzip", + "deflate", + "inflate", + "compression", + "zip", + "unzip" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "MIT", + "devDependencies": { + "tap": "^14.6.9" + }, + "files": [ + "index.js", + "constants.js" + ], + "engines": { + "node": ">= 8" + } +} diff --git a/node_modules/mkdirp/CHANGELOG.md b/node_modules/mkdirp/CHANGELOG.md new file mode 100644 index 0000000..8145838 --- /dev/null +++ b/node_modules/mkdirp/CHANGELOG.md @@ -0,0 +1,15 @@ +# Changers Lorgs! + +## 1.0 + +Full rewrite. Essentially a brand new module. + +- Return a promise instead of taking a callback. +- Use native `fs.mkdir(path, { recursive: true })` when available. +- Drop support for outdated Node.js versions. (Technically still works on + Node.js v8, but only 10 and above are officially supported.) 
+ +## 0.x + +Original and most widely used recursive directory creation implementation +in JavaScript, dating back to 2010. diff --git a/node_modules/mkdirp/LICENSE b/node_modules/mkdirp/LICENSE new file mode 100644 index 0000000..13fcd15 --- /dev/null +++ b/node_modules/mkdirp/LICENSE @@ -0,0 +1,21 @@ +Copyright James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me) + +This project is free software released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/mkdirp/bin/cmd.js b/node_modules/mkdirp/bin/cmd.js new file mode 100644 index 0000000..6e0aa8d --- /dev/null +++ b/node_modules/mkdirp/bin/cmd.js @@ -0,0 +1,68 @@ +#!/usr/bin/env node + +const usage = () => ` +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories + that don't yet exist. + + If the directory already exists, do nothing. 
+ +OPTIONS are: + + -m If a directory needs to be created, set the mode as an octal + --mode= permission string. + + -v --version Print the mkdirp version number + + -h --help Print this helpful banner + + -p --print Print the first directories created for each path provided + + --manual Use manual implementation, even if native is available +` + +const dirs = [] +const opts = {} +let print = false +let dashdash = false +let manual = false +for (const arg of process.argv.slice(2)) { + if (dashdash) + dirs.push(arg) + else if (arg === '--') + dashdash = true + else if (arg === '--manual') + manual = true + else if (/^-h/.test(arg) || /^--help/.test(arg)) { + console.log(usage()) + process.exit(0) + } else if (arg === '-v' || arg === '--version') { + console.log(require('../package.json').version) + process.exit(0) + } else if (arg === '-p' || arg === '--print') { + print = true + } else if (/^-m/.test(arg) || /^--mode=/.test(arg)) { + const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8) + if (isNaN(mode)) { + console.error(`invalid mode argument: ${arg}\nMust be an octal number.`) + process.exit(1) + } + opts.mode = mode + } else + dirs.push(arg) +} + +const mkdirp = require('../') +const impl = manual ? mkdirp.manual : mkdirp +if (dirs.length === 0) + console.error(usage()) + +Promise.all(dirs.map(dir => impl(dir, opts))) + .then(made => print ? 
made.forEach(m => m && console.log(m)) : null) + .catch(er => { + console.error(er.message) + if (er.code) + console.error(' code: ' + er.code) + process.exit(1) + }) diff --git a/node_modules/mkdirp/index.js b/node_modules/mkdirp/index.js new file mode 100644 index 0000000..ad7a16c --- /dev/null +++ b/node_modules/mkdirp/index.js @@ -0,0 +1,31 @@ +const optsArg = require('./lib/opts-arg.js') +const pathArg = require('./lib/path-arg.js') + +const {mkdirpNative, mkdirpNativeSync} = require('./lib/mkdirp-native.js') +const {mkdirpManual, mkdirpManualSync} = require('./lib/mkdirp-manual.js') +const {useNative, useNativeSync} = require('./lib/use-native.js') + + +const mkdirp = (path, opts) => { + path = pathArg(path) + opts = optsArg(opts) + return useNative(opts) + ? mkdirpNative(path, opts) + : mkdirpManual(path, opts) +} + +const mkdirpSync = (path, opts) => { + path = pathArg(path) + opts = optsArg(opts) + return useNativeSync(opts) + ? mkdirpNativeSync(path, opts) + : mkdirpManualSync(path, opts) +} + +mkdirp.sync = mkdirpSync +mkdirp.native = (path, opts) => mkdirpNative(pathArg(path), optsArg(opts)) +mkdirp.manual = (path, opts) => mkdirpManual(pathArg(path), optsArg(opts)) +mkdirp.nativeSync = (path, opts) => mkdirpNativeSync(pathArg(path), optsArg(opts)) +mkdirp.manualSync = (path, opts) => mkdirpManualSync(pathArg(path), optsArg(opts)) + +module.exports = mkdirp diff --git a/node_modules/mkdirp/lib/find-made.js b/node_modules/mkdirp/lib/find-made.js new file mode 100644 index 0000000..022e492 --- /dev/null +++ b/node_modules/mkdirp/lib/find-made.js @@ -0,0 +1,29 @@ +const {dirname} = require('path') + +const findMade = (opts, parent, path = undefined) => { + // we never want the 'made' return value to be a root directory + if (path === parent) + return Promise.resolve() + + return opts.statAsync(parent).then( + st => st.isDirectory() ? path : undefined, // will fail later + er => er.code === 'ENOENT' + ? 
findMade(opts, dirname(parent), parent) + : undefined + ) +} + +const findMadeSync = (opts, parent, path = undefined) => { + if (path === parent) + return undefined + + try { + return opts.statSync(parent).isDirectory() ? path : undefined + } catch (er) { + return er.code === 'ENOENT' + ? findMadeSync(opts, dirname(parent), parent) + : undefined + } +} + +module.exports = {findMade, findMadeSync} diff --git a/node_modules/mkdirp/lib/mkdirp-manual.js b/node_modules/mkdirp/lib/mkdirp-manual.js new file mode 100644 index 0000000..2eb18cd --- /dev/null +++ b/node_modules/mkdirp/lib/mkdirp-manual.js @@ -0,0 +1,64 @@ +const {dirname} = require('path') + +const mkdirpManual = (path, opts, made) => { + opts.recursive = false + const parent = dirname(path) + if (parent === path) { + return opts.mkdirAsync(path, opts).catch(er => { + // swallowed by recursive implementation on posix systems + // any other error is a failure + if (er.code !== 'EISDIR') + throw er + }) + } + + return opts.mkdirAsync(path, opts).then(() => made || path, er => { + if (er.code === 'ENOENT') + return mkdirpManual(parent, opts) + .then(made => mkdirpManual(path, opts, made)) + if (er.code !== 'EEXIST' && er.code !== 'EROFS') + throw er + return opts.statAsync(path).then(st => { + if (st.isDirectory()) + return made + else + throw er + }, () => { throw er }) + }) +} + +const mkdirpManualSync = (path, opts, made) => { + const parent = dirname(path) + opts.recursive = false + + if (parent === path) { + try { + return opts.mkdirSync(path, opts) + } catch (er) { + // swallowed by recursive implementation on posix systems + // any other error is a failure + if (er.code !== 'EISDIR') + throw er + else + return + } + } + + try { + opts.mkdirSync(path, opts) + return made || path + } catch (er) { + if (er.code === 'ENOENT') + return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made)) + if (er.code !== 'EEXIST' && er.code !== 'EROFS') + throw er + try { + if 
(!opts.statSync(path).isDirectory()) + throw er + } catch (_) { + throw er + } + } +} + +module.exports = {mkdirpManual, mkdirpManualSync} diff --git a/node_modules/mkdirp/lib/mkdirp-native.js b/node_modules/mkdirp/lib/mkdirp-native.js new file mode 100644 index 0000000..c7a6b69 --- /dev/null +++ b/node_modules/mkdirp/lib/mkdirp-native.js @@ -0,0 +1,39 @@ +const {dirname} = require('path') +const {findMade, findMadeSync} = require('./find-made.js') +const {mkdirpManual, mkdirpManualSync} = require('./mkdirp-manual.js') + +const mkdirpNative = (path, opts) => { + opts.recursive = true + const parent = dirname(path) + if (parent === path) + return opts.mkdirAsync(path, opts) + + return findMade(opts, path).then(made => + opts.mkdirAsync(path, opts).then(() => made) + .catch(er => { + if (er.code === 'ENOENT') + return mkdirpManual(path, opts) + else + throw er + })) +} + +const mkdirpNativeSync = (path, opts) => { + opts.recursive = true + const parent = dirname(path) + if (parent === path) + return opts.mkdirSync(path, opts) + + const made = findMadeSync(opts, path) + try { + opts.mkdirSync(path, opts) + return made + } catch (er) { + if (er.code === 'ENOENT') + return mkdirpManualSync(path, opts) + else + throw er + } +} + +module.exports = {mkdirpNative, mkdirpNativeSync} diff --git a/node_modules/mkdirp/lib/opts-arg.js b/node_modules/mkdirp/lib/opts-arg.js new file mode 100644 index 0000000..2fa4833 --- /dev/null +++ b/node_modules/mkdirp/lib/opts-arg.js @@ -0,0 +1,23 @@ +const { promisify } = require('util') +const fs = require('fs') +const optsArg = opts => { + if (!opts) + opts = { mode: 0o777, fs } + else if (typeof opts === 'object') + opts = { mode: 0o777, fs, ...opts } + else if (typeof opts === 'number') + opts = { mode: opts, fs } + else if (typeof opts === 'string') + opts = { mode: parseInt(opts, 8), fs } + else + throw new TypeError('invalid options argument') + + opts.mkdir = opts.mkdir || opts.fs.mkdir || fs.mkdir + opts.mkdirAsync = 
promisify(opts.mkdir) + opts.stat = opts.stat || opts.fs.stat || fs.stat + opts.statAsync = promisify(opts.stat) + opts.statSync = opts.statSync || opts.fs.statSync || fs.statSync + opts.mkdirSync = opts.mkdirSync || opts.fs.mkdirSync || fs.mkdirSync + return opts +} +module.exports = optsArg diff --git a/node_modules/mkdirp/lib/path-arg.js b/node_modules/mkdirp/lib/path-arg.js new file mode 100644 index 0000000..cc07de5 --- /dev/null +++ b/node_modules/mkdirp/lib/path-arg.js @@ -0,0 +1,29 @@ +const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform +const { resolve, parse } = require('path') +const pathArg = path => { + if (/\0/.test(path)) { + // simulate same failure that node raises + throw Object.assign( + new TypeError('path must be a string without null bytes'), + { + path, + code: 'ERR_INVALID_ARG_VALUE', + } + ) + } + + path = resolve(path) + if (platform === 'win32') { + const badWinChars = /[*|"<>?:]/ + const {root} = parse(path) + if (badWinChars.test(path.substr(root.length))) { + throw Object.assign(new Error('Illegal characters in path.'), { + path, + code: 'EINVAL', + }) + } + } + + return path +} +module.exports = pathArg diff --git a/node_modules/mkdirp/lib/use-native.js b/node_modules/mkdirp/lib/use-native.js new file mode 100644 index 0000000..079361d --- /dev/null +++ b/node_modules/mkdirp/lib/use-native.js @@ -0,0 +1,10 @@ +const fs = require('fs') + +const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version +const versArr = version.replace(/^v/, '').split('.') +const hasNative = +versArr[0] > 10 || +versArr[0] === 10 && +versArr[1] >= 12 + +const useNative = !hasNative ? () => false : opts => opts.mkdir === fs.mkdir +const useNativeSync = !hasNative ? 
() => false : opts => opts.mkdirSync === fs.mkdirSync + +module.exports = {useNative, useNativeSync} diff --git a/node_modules/mkdirp/package.json b/node_modules/mkdirp/package.json new file mode 100644 index 0000000..2913ed0 --- /dev/null +++ b/node_modules/mkdirp/package.json @@ -0,0 +1,44 @@ +{ + "name": "mkdirp", + "description": "Recursively mkdir, like `mkdir -p`", + "version": "1.0.4", + "main": "index.js", + "keywords": [ + "mkdir", + "directory", + "make dir", + "make", + "dir", + "recursive", + "native" + ], + "repository": { + "type": "git", + "url": "https://github.com/isaacs/node-mkdirp.git" + }, + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags" + }, + "tap": { + "check-coverage": true, + "coverage-map": "map.js" + }, + "devDependencies": { + "require-inject": "^1.4.4", + "tap": "^14.10.7" + }, + "bin": "bin/cmd.js", + "license": "MIT", + "engines": { + "node": ">=10" + }, + "files": [ + "bin", + "lib", + "index.js" + ] +} diff --git a/node_modules/mkdirp/readme.markdown b/node_modules/mkdirp/readme.markdown new file mode 100644 index 0000000..827de59 --- /dev/null +++ b/node_modules/mkdirp/readme.markdown @@ -0,0 +1,266 @@ +# mkdirp + +Like `mkdir -p`, but in Node.js! + +Now with a modern API and no\* bugs! 
+ +\* may contain some bugs + +# example + +## pow.js + +```js +const mkdirp = require('mkdirp') + +// return value is a Promise resolving to the first directory created +mkdirp('/tmp/foo/bar/baz').then(made => + console.log(`made directories, starting with ${made}`)) +``` + +Output (where `/tmp/foo` already exists) + +``` +made directories, starting with /tmp/foo/bar +``` + +Or, if you don't have time to wait around for promises: + +```js +const mkdirp = require('mkdirp') + +// return value is the first directory created +const made = mkdirp.sync('/tmp/foo/bar/baz') +console.log(`made directories, starting with ${made}`) +``` + +And now /tmp/foo/bar/baz exists, huzzah! + +# methods + +```js +const mkdirp = require('mkdirp') +``` + +## mkdirp(dir, [opts]) -> Promise + +Create a new directory and any necessary subdirectories at `dir` with octal +permission string `opts.mode`. If `opts` is a string or number, it will be +treated as the `opts.mode`. + +If `opts.mode` isn't specified, it defaults to `0o777 & +(~process.umask())`. + +Promise resolves to first directory `made` that had to be created, or +`undefined` if everything already exists. Promise rejects if any errors +are encountered. Note that, in the case of promise rejection, some +directories _may_ have been created, as recursive directory creation is not +an atomic operation. + +You can optionally pass in an alternate `fs` implementation by passing in +`opts.fs`. Your implementation should have `opts.fs.mkdir(path, opts, cb)` +and `opts.fs.stat(path, cb)`. + +You can also override just one or the other of `mkdir` and `stat` by +passing in `opts.stat` or `opts.mkdir`, or providing an `fs` option that +only overrides one of these. + +## mkdirp.sync(dir, opts) -> String|null + +Synchronously create a new directory and any necessary subdirectories at +`dir` with octal permission string `opts.mode`. If `opts` is a string or +number, it will be treated as the `opts.mode`. 
+ +If `opts.mode` isn't specified, it defaults to `0o777 & +(~process.umask())`. + +Returns the first directory that had to be created, or undefined if +everything already exists. + +You can optionally pass in an alternate `fs` implementation by passing in +`opts.fs`. Your implementation should have `opts.fs.mkdirSync(path, mode)` +and `opts.fs.statSync(path)`. + +You can also override just one or the other of `mkdirSync` and `statSync` +by passing in `opts.statSync` or `opts.mkdirSync`, or providing an `fs` +option that only overrides one of these. + +## mkdirp.manual, mkdirp.manualSync + +Use the manual implementation (not the native one). This is the default +when the native implementation is not available or the stat/mkdir +implementation is overridden. + +## mkdirp.native, mkdirp.nativeSync + +Use the native implementation (not the manual one). This is the default +when the native implementation is available and stat/mkdir are not +overridden. + +# implementation + +On Node.js v10.12.0 and above, use the native `fs.mkdir(p, +{recursive:true})` option, unless `fs.mkdir`/`fs.mkdirSync` has been +overridden by an option. + +## native implementation + +- If the path is a root directory, then pass it to the underlying + implementation and return the result/error. (In this case, it'll either + succeed or fail, but we aren't actually creating any dirs.) +- Walk up the path statting each directory, to find the first path that + will be created, `made`. +- Call `fs.mkdir(path, { recursive: true })` (or `fs.mkdirSync`) +- If error, raise it to the caller. +- Return `made`. 
+ +## manual implementation + +- Call underlying `fs.mkdir` implementation, with `recursive: false` +- If error: + - If path is a root directory, raise to the caller and do not handle it + - If ENOENT, mkdirp parent dir, store result as `made` + - stat(path) + - If error, raise original `mkdir` error + - If directory, return `made` + - Else, raise original `mkdir` error +- else + - return `undefined` if a root dir, or `made` if set, or `path` + +## windows vs unix caveat + +On Windows file systems, attempts to create a root directory (ie, a drive +letter or root UNC path) will fail. If the root directory exists, then it +will fail with `EPERM`. If the root directory does not exist, then it will +fail with `ENOENT`. + +On posix file systems, attempts to create a root directory (in recursive +mode) will succeed silently, as it is treated like just another directory +that already exists. (In non-recursive mode, of course, it fails with +`EEXIST`.) + +In order to preserve this system-specific behavior (and because it's not as +if we can create the parent of a root directory anyway), attempts to create +a root directory are passed directly to the `fs` implementation, and any +errors encountered are not handled. + +## native error caveat + +The native implementation (as of at least Node.js v13.4.0) does not provide +appropriate errors in some cases (see +[nodejs/node#31481](https://github.com/nodejs/node/issues/31481) and +[nodejs/node#28015](https://github.com/nodejs/node/issues/28015)). + +In order to work around this issue, the native implementation will fall +back to the manual implementation if an `ENOENT` error is encountered. + +# choosing a recursive mkdir implementation + +There are a few to choose from! Use the one that suits your needs best :D + +## use `fs.mkdir(path, {recursive: true}, cb)` if: + +- You wish to optimize performance even at the expense of other factors. +- You don't need to know the first dir created. 
+- You are ok with getting `ENOENT` as the error when some other problem is + the actual cause. +- You can limit your platforms to Node.js v10.12 and above. +- You're ok with using callbacks instead of promises. +- You don't need/want a CLI. +- You don't need to override the `fs` methods in use. + +## use this module (mkdirp 1.x) if: + +- You need to know the first directory that was created. +- You wish to use the native implementation if available, but fall back + when it's not. +- You prefer promise-returning APIs to callback-taking APIs. +- You want more useful error messages than the native recursive mkdir + provides (at least as of Node.js v13.4), and are ok with re-trying on + `ENOENT` to achieve this. +- You need (or at least, are ok with) a CLI. +- You need to override the `fs` methods in use. + +## use [`make-dir`](http://npm.im/make-dir) if: + +- You do not need to know the first dir created (and wish to save a few + `stat` calls when using the native implementation for this reason). +- You wish to use the native implementation if available, but fall back + when it's not. +- You prefer promise-returning APIs to callback-taking APIs. +- You are ok with occasionally getting `ENOENT` errors for failures that + are actually related to something other than a missing file system entry. +- You don't need/want a CLI. +- You need to override the `fs` methods in use. + +## use mkdirp 0.x if: + +- You need to know the first directory that was created. +- You need (or at least, are ok with) a CLI. +- You need to override the `fs` methods in use. +- You're ok with using callbacks instead of promises. +- You are not running on Windows, where the root-level ENOENT errors can + lead to infinite regress. +- You think vinyl just sounds warmer and richer for some weird reason. +- You are supporting truly ancient Node.js versions, before even the advent + of a `Promise` language primitive. (Please don't. You deserve better.) 
+ +# cli + +This package also ships with a `mkdirp` command. + +``` +$ mkdirp -h + +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories + that don't yet exist. + + If the directory already exists, do nothing. + +OPTIONS are: + + -m If a directory needs to be created, set the mode as an octal + --mode= permission string. + + -v --version Print the mkdirp version number + + -h --help Print this helpful banner + + -p --print Print the first directories created for each path provided + + --manual Use manual implementation, even if native is available +``` + +# install + +With [npm](http://npmjs.org) do: + +``` +npm install mkdirp +``` + +to get the library locally, or + +``` +npm install -g mkdirp +``` + +to get the command everywhere, or + +``` +npx mkdirp ... +``` + +to run the command without installing it globally. + +# platform support + +This module works on node v8, but only v10 and above are officially +supported, as Node v8 reached its LTS end of life 2020-01-01, which is in +the past, as of this writing. 
+ +# license + +MIT diff --git a/node_modules/node-addon-api/LICENSE.md b/node_modules/node-addon-api/LICENSE.md new file mode 100644 index 0000000..e2fad66 --- /dev/null +++ b/node_modules/node-addon-api/LICENSE.md @@ -0,0 +1,13 @@ +The MIT License (MIT) +===================== + +Copyright (c) 2017 Node.js API collaborators +----------------------------------- + +*Node.js API collaborators listed at * + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/node-addon-api/README.md b/node_modules/node-addon-api/README.md new file mode 100644 index 0000000..6a79c91 --- /dev/null +++ b/node_modules/node-addon-api/README.md @@ -0,0 +1,317 @@ +NOTE: The default branch has been renamed! 
+master is now named main + +If you have a local clone, you can update it by running: + +```shell +git branch -m master main +git fetch origin +git branch -u origin/main main +``` + +# **node-addon-api module** +This module contains **header-only C++ wrapper classes** which simplify +the use of the C based [Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html) +provided by Node.js when using C++. It provides a C++ object model +and exception handling semantics with low overhead. + +There are three options for implementing addons: Node-API, nan, or direct +use of internal V8, libuv, and Node.js libraries. Unless there is a need for +direct access to functionality that is not exposed by Node-API as outlined +in [C/C++ addons](https://nodejs.org/dist/latest/docs/api/addons.html) +in Node.js core, use Node-API. Refer to +[C/C++ addons with Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html) +for more information on Node-API. + +Node-API is an ABI stable C interface provided by Node.js for building native +addons. It is independent of the underlying JavaScript runtime (e.g. V8 or ChakraCore) +and is maintained as part of Node.js itself. It is intended to insulate +native addons from changes in the underlying JavaScript engine and allow +modules compiled for one version to run on later versions of Node.js without +recompilation. + +The `node-addon-api` module, which is not part of Node.js, preserves the benefits +of the Node-API as it consists only of inline code that depends only on the stable API +provided by Node-API. As such, modules built against one version of Node.js +using node-addon-api should run without having to be rebuilt with newer versions +of Node.js. + +It is important to remember that *other* Node.js interfaces such as +`libuv` (included in a project via `#include `) are not ABI-stable across +Node.js major versions. 
Thus, an addon must use Node-API and/or `node-addon-api` +exclusively and build against a version of Node.js that includes an +implementation of Node-API (meaning an active LTS version of Node.js) in +order to benefit from ABI stability across Node.js major versions. Node.js +provides an [ABI stability guide][] containing a detailed explanation of ABI +stability in general, and the Node-API ABI stability guarantee in particular. + +As new APIs are added to Node-API, node-addon-api must be updated to provide +wrappers for those new APIs. For this reason, node-addon-api provides +methods that allow callers to obtain the underlying Node-API handles so +direct calls to Node-API and the use of the objects/methods provided by +node-addon-api can be used together. For example, in order to be able +to use an API for which the node-addon-api does not yet provide a wrapper. + +APIs exposed by node-addon-api are generally used to create and +manipulate JavaScript values. Concepts and operations generally map +to ideas specified in the **ECMA262 Language Specification**. + +The [Node-API Resource](https://nodejs.github.io/node-addon-examples/) offers an +excellent orientation and tips for developers just getting started with Node-API +and node-addon-api. 
+ +- **[Setup](#setup)** +- **[API Documentation](#api)** +- **[Examples](#examples)** +- **[Tests](#tests)** +- **[More resource and info about native Addons](#resources)** +- **[Badges](#badges)** +- **[Code of Conduct](CODE_OF_CONDUCT.md)** +- **[Contributors](#contributors)** +- **[License](#license)** + +## **Current version: 5.1.0** + +(See [CHANGELOG.md](CHANGELOG.md) for complete Changelog) + +[![NPM](https://nodei.co/npm/node-addon-api.png?downloads=true&downloadRank=true)](https://nodei.co/npm/node-addon-api/) [![NPM](https://nodei.co/npm-dl/node-addon-api.png?months=6&height=1)](https://nodei.co/npm/node-addon-api/) + + + +node-addon-api is based on [Node-API](https://nodejs.org/api/n-api.html) and supports using different Node-API versions. +This allows addons built with it to run with Node.js versions which support the targeted Node-API version. +**However** the node-addon-api support model is to support only the active LTS Node.js versions. This means that +every year there will be a new major which drops support for the Node.js LTS version which has gone out of service. + +The oldest Node.js version supported by the current version of node-addon-api is Node.js 14.x. + +## Setup + - [Installation and usage](doc/setup.md) + - [node-gyp](doc/node-gyp.md) + - [cmake-js](doc/cmake-js.md) + - [Conversion tool](doc/conversion-tool.md) + - [Checker tool](doc/checker-tool.md) + - [Generator](doc/generator.md) + - [Prebuild tools](doc/prebuild_tools.md) + + + +### **API Documentation** + +The following is the documentation for node-addon-api. 
+ + - [Full Class Hierarchy](doc/hierarchy.md) + - [Addon Structure](doc/addon.md) + - Data Types: + - [Env](doc/env.md) + - [CallbackInfo](doc/callbackinfo.md) + - [Reference](doc/reference.md) + - [Value](doc/value.md) + - [Name](doc/name.md) + - [Symbol](doc/symbol.md) + - [String](doc/string.md) + - [Number](doc/number.md) + - [Date](doc/date.md) + - [BigInt](doc/bigint.md) + - [Boolean](doc/boolean.md) + - [External](doc/external.md) + - [Object](doc/object.md) + - [Array](doc/array.md) + - [ObjectReference](doc/object_reference.md) + - [PropertyDescriptor](doc/property_descriptor.md) + - [Function](doc/function.md) + - [FunctionReference](doc/function_reference.md) + - [ObjectWrap](doc/object_wrap.md) + - [ClassPropertyDescriptor](doc/class_property_descriptor.md) + - [Buffer](doc/buffer.md) + - [ArrayBuffer](doc/array_buffer.md) + - [TypedArray](doc/typed_array.md) + - [TypedArrayOf](doc/typed_array_of.md) + - [DataView](doc/dataview.md) + - [Error Handling](doc/error_handling.md) + - [Error](doc/error.md) + - [TypeError](doc/type_error.md) + - [RangeError](doc/range_error.md) + - [Object Lifetime Management](doc/object_lifetime_management.md) + - [HandleScope](doc/handle_scope.md) + - [EscapableHandleScope](doc/escapable_handle_scope.md) + - [Memory Management](doc/memory_management.md) + - [Async Operations](doc/async_operations.md) + - [AsyncWorker](doc/async_worker.md) + - [AsyncContext](doc/async_context.md) + - [AsyncWorker Variants](doc/async_worker_variants.md) + - [Thread-safe Functions](doc/threadsafe.md) + - [ThreadSafeFunction](doc/threadsafe_function.md) + - [TypedThreadSafeFunction](doc/typed_threadsafe_function.md) + - [Promises](doc/promises.md) + - [Version management](doc/version_management.md) + + + +### **Examples** + +Are you new to **node-addon-api**? 
Take a look at our **[examples](https://github.com/nodejs/node-addon-examples)** + +- **[Hello World](https://github.com/nodejs/node-addon-examples/tree/HEAD/1_hello_world/node-addon-api)** +- **[Pass arguments to a function](https://github.com/nodejs/node-addon-examples/tree/HEAD/2_function_arguments/node-addon-api)** +- **[Callbacks](https://github.com/nodejs/node-addon-examples/tree/HEAD/3_callbacks/node-addon-api)** +- **[Object factory](https://github.com/nodejs/node-addon-examples/tree/HEAD/4_object_factory/node-addon-api)** +- **[Function factory](https://github.com/nodejs/node-addon-examples/tree/HEAD/5_function_factory/node-addon-api)** +- **[Wrapping C++ Object](https://github.com/nodejs/node-addon-examples/tree/HEAD/6_object_wrap/node-addon-api)** +- **[Factory of wrapped object](https://github.com/nodejs/node-addon-examples/tree/HEAD/7_factory_wrap/node-addon-api)** +- **[Passing wrapped object around](https://github.com/nodejs/node-addon-examples/tree/HEAD/8_passing_wrapped/node-addon-api)** + + + +### **Tests** + +To run the **node-addon-api** tests do: + +``` +npm install +npm test +``` + +To avoid testing the deprecated portions of the API run +``` +npm install +npm test --disable-deprecated +``` + +To run the tests targeting a specific version of Node-API run +``` +npm install +export NAPI_VERSION=X +npm test --NAPI_VERSION=X +``` + +where X is the version of Node-API you want to target. 
+ +To run a specific unit test, filter conditions are available + +**Example:** + compile and run only tests on objectwrap.cc and objectwrap.js + ``` + npm run unit --filter=objectwrap + ``` + +Multiple unit tests cane be selected with wildcards + +**Example:** +compile and run all test files ending with "reference" -> function_reference.cc, object_reference.cc, reference.cc + ``` + npm run unit --filter=*reference + ``` + +Multiple filter conditions can be joined to broaden the test selection + +**Example:** + compile and run all tests under folders threadsafe_function and typed_threadsafe_function and also the objectwrap.cc file + npm run unit --filter='*function objectwrap' + +### **Debug** + +To run the **node-addon-api** tests with `--debug` option: + +``` +npm run-script dev +``` + +If you want a faster build, you might use the following option: + +``` +npm run-script dev:incremental +``` + +Take a look and get inspired by our **[test suite](https://github.com/nodejs/node-addon-api/tree/HEAD/test)** + +### **Benchmarks** + +You can run the available benchmarks using the following command: + +``` +npm run-script benchmark +``` + +See [benchmark/README.md](benchmark/README.md) for more details about running and adding benchmarks. + + + +### **More resource and info about native Addons** +- **[C++ Addons](https://nodejs.org/dist/latest/docs/api/addons.html)** +- **[Node-API](https://nodejs.org/dist/latest/docs/api/n-api.html)** +- **[Node-API - Next Generation Node API for Native Modules](https://youtu.be/-Oniup60Afs)** +- **[How We Migrated Realm JavaScript From NAN to Node-API](https://developer.mongodb.com/article/realm-javascript-nan-to-n-api)** + +As node-addon-api's core mission is to expose the plain C Node-API as C++ +wrappers, tools that facilitate n-api/node-addon-api providing more +convenient patterns for developing a Node.js add-on with n-api/node-addon-api +can be published to NPM as standalone packages. 
It is also recommended to tag +such packages with `node-addon-api` to provide more visibility to the community. + +Quick links to NPM searches: [keywords:node-addon-api](https://www.npmjs.com/search?q=keywords%3Anode-addon-api). + + + +### **Other bindings** + +- **[napi-rs](https://napi.rs)** - (`Rust`) + + + +### **Badges** + +The use of badges is recommended to indicate the minimum version of Node-API +required for the module. This helps to determine which Node.js major versions are +supported. Addon maintainers can consult the [Node-API support matrix][] to determine +which Node.js versions provide a given Node-API version. The following badges are +available: + +![Node-API v1 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v1%20Badge.svg) +![Node-API v2 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v2%20Badge.svg) +![Node-API v3 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v3%20Badge.svg) +![Node-API v4 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v4%20Badge.svg) +![Node-API v5 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v5%20Badge.svg) +![Node-API v6 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v6%20Badge.svg) +![Node-API v7 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v7%20Badge.svg) +![Node-API v8 Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20v8%20Badge.svg) +![Node-API Experimental Version Badge](https://github.com/nodejs/abi-stable-node/blob/doc/assets/Node-API%20Experimental%20Version%20Badge.svg) + +## **Contributing** + +We love contributions from the community to **node-addon-api**! +See [CONTRIBUTING.md](CONTRIBUTING.md) for more details on our philosophy around extending this module. 
+ + + +## Team members + +### Active +| Name | GitHub Link | +| ------------------- | ----------------------------------------------------- | +| Anna Henningsen | [addaleax](https://github.com/addaleax) | +| Chengzhong Wu | [legendecas](https://github.com/legendecas) | +| Jack Xia | [JckXia](https://github.com/JckXia) | +| Kevin Eady | [KevinEady](https://github.com/KevinEady) | +| Michael Dawson | [mhdawson](https://github.com/mhdawson) | +| Nicola Del Gobbo | [NickNaso](https://github.com/NickNaso) | +| Vladimir Morozov | [vmoroz](https://github.com/vmoroz) | + +### Emeritus +| Name | GitHub Link | +| ------------------- | ----------------------------------------------------- | +| Arunesh Chandra | [aruneshchandra](https://github.com/aruneshchandra) | +| Benjamin Byholm | [kkoopa](https://github.com/kkoopa) | +| Gabriel Schulhof | [gabrielschulhof](https://github.com/gabrielschulhof) | +| Hitesh Kanwathirtha | [digitalinfinity](https://github.com/digitalinfinity) | +| Jason Ginchereau | [jasongin](https://github.com/jasongin) | +| Jim Schlight | [jschlight](https://github.com/jschlight) | +| Sampson Gao | [sampsongao](https://github.com/sampsongao) | +| Taylor Woll | [boingoing](https://github.com/boingoing) | + + + +Licensed under [MIT](./LICENSE.md) + +[ABI stability guide]: https://nodejs.org/en/docs/guides/abi-stability/ +[Node-API support matrix]: https://nodejs.org/dist/latest/docs/api/n-api.html#n_api_n_api_version_matrix diff --git a/node_modules/node-addon-api/common.gypi b/node_modules/node-addon-api/common.gypi new file mode 100644 index 0000000..9be254f --- /dev/null +++ b/node_modules/node-addon-api/common.gypi @@ -0,0 +1,21 @@ +{ + 'variables': { + 'NAPI_VERSION%': " +inline PropertyDescriptor PropertyDescriptor::Accessor( + const char* utf8name, + Getter getter, + napi_property_attributes attributes, + void* /*data*/) { + using CbData = details::CallbackData; + // TODO: Delete when the function is destroyed + auto callbackData = new CbData({getter, 
nullptr}); + + return PropertyDescriptor({utf8name, + nullptr, + nullptr, + CbData::Wrapper, + nullptr, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + const std::string& utf8name, + Getter getter, + napi_property_attributes attributes, + void* data) { + return Accessor(utf8name.c_str(), getter, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + napi_value name, + Getter getter, + napi_property_attributes attributes, + void* /*data*/) { + using CbData = details::CallbackData; + // TODO: Delete when the function is destroyed + auto callbackData = new CbData({getter, nullptr}); + + return PropertyDescriptor({nullptr, + name, + nullptr, + CbData::Wrapper, + nullptr, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Name name, Getter getter, napi_property_attributes attributes, void* data) { + napi_value nameValue = name; + return PropertyDescriptor::Accessor(nameValue, getter, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + const char* utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* /*data*/) { + using CbData = details::AccessorCallbackData; + // TODO: Delete when the function is destroyed + auto callbackData = new CbData({getter, setter, nullptr}); + + return PropertyDescriptor({utf8name, + nullptr, + nullptr, + CbData::GetterWrapper, + CbData::SetterWrapper, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + const std::string& utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* data) { + return Accessor(utf8name.c_str(), getter, setter, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + napi_value name, + Getter getter, + Setter setter, + 
napi_property_attributes attributes, + void* /*data*/) { + using CbData = details::AccessorCallbackData; + // TODO: Delete when the function is destroyed + auto callbackData = new CbData({getter, setter, nullptr}); + + return PropertyDescriptor({nullptr, + name, + nullptr, + CbData::GetterWrapper, + CbData::SetterWrapper, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Name name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* data) { + napi_value nameValue = name; + return PropertyDescriptor::Accessor( + nameValue, getter, setter, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + const char* utf8name, + Callable cb, + napi_property_attributes attributes, + void* /*data*/) { + using ReturnType = decltype(cb(CallbackInfo(nullptr, nullptr))); + using CbData = details::CallbackData; + // TODO: Delete when the function is destroyed + auto callbackData = new CbData({cb, nullptr}); + + return PropertyDescriptor({utf8name, + nullptr, + CbData::Wrapper, + nullptr, + nullptr, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + const std::string& utf8name, + Callable cb, + napi_property_attributes attributes, + void* data) { + return Function(utf8name.c_str(), cb, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + napi_value name, + Callable cb, + napi_property_attributes attributes, + void* /*data*/) { + using ReturnType = decltype(cb(CallbackInfo(nullptr, nullptr))); + using CbData = details::CallbackData; + // TODO: Delete when the function is destroyed + auto callbackData = new CbData({cb, nullptr}); + + return PropertyDescriptor({nullptr, + name, + CbData::Wrapper, + nullptr, + nullptr, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + Name name, Callable 
cb, napi_property_attributes attributes, void* data) { + napi_value nameValue = name; + return PropertyDescriptor::Function(nameValue, cb, attributes, data); +} + +#endif // !SRC_NAPI_INL_DEPRECATED_H_ diff --git a/node_modules/node-addon-api/napi-inl.h b/node_modules/node-addon-api/napi-inl.h new file mode 100644 index 0000000..3ddc1ba --- /dev/null +++ b/node_modules/node-addon-api/napi-inl.h @@ -0,0 +1,6303 @@ +#ifndef SRC_NAPI_INL_H_ +#define SRC_NAPI_INL_H_ + +//////////////////////////////////////////////////////////////////////////////// +// Node-API C++ Wrapper Classes +// +// Inline header-only implementations for "Node-API" ABI-stable C APIs for +// Node.js. +//////////////////////////////////////////////////////////////////////////////// + +// Note: Do not include this file directly! Include "napi.h" instead. + +#include +#include +#include +#include +#include + +namespace Napi { + +#ifdef NAPI_CPP_CUSTOM_NAMESPACE +namespace NAPI_CPP_CUSTOM_NAMESPACE { +#endif + +// Helpers to handle functions exposed from C++. +namespace details { + +// Attach a data item to an object and delete it when the object gets +// garbage-collected. +// TODO: Replace this code with `napi_add_finalizer()` whenever it becomes +// available on all supported versions of Node.js. 
+template +inline napi_status AttachData(napi_env env, + napi_value obj, + FreeType* data, + napi_finalize finalizer = nullptr, + void* hint = nullptr) { + napi_status status; + if (finalizer == nullptr) { + finalizer = [](napi_env /*env*/, void* data, void* /*hint*/) { + delete static_cast(data); + }; + } +#if (NAPI_VERSION < 5) + napi_value symbol, external; + status = napi_create_symbol(env, nullptr, &symbol); + if (status == napi_ok) { + status = napi_create_external(env, data, finalizer, hint, &external); + if (status == napi_ok) { + napi_property_descriptor desc = {nullptr, + symbol, + nullptr, + nullptr, + nullptr, + external, + napi_default, + nullptr}; + status = napi_define_properties(env, obj, 1, &desc); + } + } +#else // NAPI_VERSION >= 5 + status = napi_add_finalizer(env, obj, data, finalizer, hint, nullptr); +#endif + return status; +} + +// For use in JS to C++ callback wrappers to catch any Napi::Error exceptions +// and rethrow them as JavaScript exceptions before returning from the callback. +template +inline napi_value WrapCallback(Callable callback) { +#ifdef NAPI_CPP_EXCEPTIONS + try { + return callback(); + } catch (const Error& e) { + e.ThrowAsJavaScriptException(); + return nullptr; + } +#else // NAPI_CPP_EXCEPTIONS + // When C++ exceptions are disabled, errors are immediately thrown as JS + // exceptions, so there is no need to catch and rethrow them here. + return callback(); +#endif // NAPI_CPP_EXCEPTIONS +} + +// For use in JS to C++ void callback wrappers to catch any Napi::Error +// exceptions and rethrow them as JavaScript exceptions before returning from +// the callback. +template +inline void WrapVoidCallback(Callable callback) { +#ifdef NAPI_CPP_EXCEPTIONS + try { + callback(); + } catch (const Error& e) { + e.ThrowAsJavaScriptException(); + } +#else // NAPI_CPP_EXCEPTIONS + // When C++ exceptions are disabled, errors are immediately thrown as JS + // exceptions, so there is no need to catch and rethrow them here. 
+ callback(); +#endif // NAPI_CPP_EXCEPTIONS +} + +template +struct CallbackData { + static inline napi_value Wrapper(napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + CallbackData* callbackData = + static_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + return callbackData->callback(callbackInfo); + }); + } + + Callable callback; + void* data; +}; + +template +struct CallbackData { + static inline napi_value Wrapper(napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + CallbackData* callbackData = + static_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + callbackData->callback(callbackInfo); + return nullptr; + }); + } + + Callable callback; + void* data; +}; + +template +napi_value TemplatedVoidCallback(napi_env env, + napi_callback_info info) NAPI_NOEXCEPT { + return details::WrapCallback([&] { + CallbackInfo cbInfo(env, info); + Callback(cbInfo); + return nullptr; + }); +} + +template +napi_value TemplatedCallback(napi_env env, + napi_callback_info info) NAPI_NOEXCEPT { + return details::WrapCallback([&] { + CallbackInfo cbInfo(env, info); + return Callback(cbInfo); + }); +} + +template +napi_value TemplatedInstanceCallback(napi_env env, + napi_callback_info info) NAPI_NOEXCEPT { + return details::WrapCallback([&] { + CallbackInfo cbInfo(env, info); + T* instance = T::Unwrap(cbInfo.This().As()); + return (instance->*UnwrapCallback)(cbInfo); + }); +} + +template +napi_value TemplatedInstanceVoidCallback(napi_env env, napi_callback_info info) + NAPI_NOEXCEPT { + return details::WrapCallback([&] { + CallbackInfo cbInfo(env, info); + T* instance = T::Unwrap(cbInfo.This().As()); + (instance->*UnwrapCallback)(cbInfo); + return nullptr; + }); +} + +template +struct FinalizeData { + static inline void Wrapper(napi_env env, + void* data, + void* finalizeHint) NAPI_NOEXCEPT { + 
WrapVoidCallback([&] { + FinalizeData* finalizeData = static_cast(finalizeHint); + finalizeData->callback(Env(env), static_cast(data)); + delete finalizeData; + }); + } + + static inline void WrapperWithHint(napi_env env, + void* data, + void* finalizeHint) NAPI_NOEXCEPT { + WrapVoidCallback([&] { + FinalizeData* finalizeData = static_cast(finalizeHint); + finalizeData->callback( + Env(env), static_cast(data), finalizeData->hint); + delete finalizeData; + }); + } + + Finalizer callback; + Hint* hint; +}; + +#if (NAPI_VERSION > 3 && !defined(__wasm32__)) +template , + typename FinalizerDataType = void> +struct ThreadSafeFinalize { + static inline void Wrapper(napi_env env, + void* rawFinalizeData, + void* /* rawContext */) { + if (rawFinalizeData == nullptr) return; + + ThreadSafeFinalize* finalizeData = + static_cast(rawFinalizeData); + finalizeData->callback(Env(env)); + delete finalizeData; + } + + static inline void FinalizeWrapperWithData(napi_env env, + void* rawFinalizeData, + void* /* rawContext */) { + if (rawFinalizeData == nullptr) return; + + ThreadSafeFinalize* finalizeData = + static_cast(rawFinalizeData); + finalizeData->callback(Env(env), finalizeData->data); + delete finalizeData; + } + + static inline void FinalizeWrapperWithContext(napi_env env, + void* rawFinalizeData, + void* rawContext) { + if (rawFinalizeData == nullptr) return; + + ThreadSafeFinalize* finalizeData = + static_cast(rawFinalizeData); + finalizeData->callback(Env(env), static_cast(rawContext)); + delete finalizeData; + } + + static inline void FinalizeFinalizeWrapperWithDataAndContext( + napi_env env, void* rawFinalizeData, void* rawContext) { + if (rawFinalizeData == nullptr) return; + + ThreadSafeFinalize* finalizeData = + static_cast(rawFinalizeData); + finalizeData->callback( + Env(env), finalizeData->data, static_cast(rawContext)); + delete finalizeData; + } + + FinalizerDataType* data; + Finalizer callback; +}; + +template +inline typename std::enable_if(nullptr)>::type 
+CallJsWrapper(napi_env env, napi_value jsCallback, void* context, void* data) { + call(env, + Function(env, jsCallback), + static_cast(context), + static_cast(data)); +} + +template +inline typename std::enable_if(nullptr)>::type +CallJsWrapper(napi_env env, + napi_value jsCallback, + void* /*context*/, + void* /*data*/) { + if (jsCallback != nullptr) { + Function(env, jsCallback).Call(0, nullptr); + } +} + +#if NAPI_VERSION > 4 + +template +napi_value DefaultCallbackWrapper(napi_env /*env*/, std::nullptr_t /*cb*/) { + return nullptr; +} + +template +napi_value DefaultCallbackWrapper(napi_env /*env*/, Napi::Function cb) { + return cb; +} + +#else +template +napi_value DefaultCallbackWrapper(napi_env env, Napi::Function cb) { + if (cb.IsEmpty()) { + return TSFN::EmptyFunctionFactory(env); + } + return cb; +} +#endif // NAPI_VERSION > 4 +#endif // NAPI_VERSION > 3 && !defined(__wasm32__) + +template +struct AccessorCallbackData { + static inline napi_value GetterWrapper(napi_env env, + napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + AccessorCallbackData* callbackData = + static_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + return callbackData->getterCallback(callbackInfo); + }); + } + + static inline napi_value SetterWrapper(napi_env env, + napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + AccessorCallbackData* callbackData = + static_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + callbackData->setterCallback(callbackInfo); + return nullptr; + }); + } + + Getter getterCallback; + Setter setterCallback; + void* data; +}; + +} // namespace details + +#ifndef NODE_ADDON_API_DISABLE_DEPRECATED +#include "napi-inl.deprecated.h" +#endif // !NODE_ADDON_API_DISABLE_DEPRECATED + +//////////////////////////////////////////////////////////////////////////////// +// Module registration 
+//////////////////////////////////////////////////////////////////////////////// + +// Register an add-on based on an initializer function. +#define NODE_API_MODULE(modname, regfunc) \ + static napi_value __napi_##regfunc(napi_env env, napi_value exports) { \ + return Napi::RegisterModule(env, exports, regfunc); \ + } \ + NAPI_MODULE(modname, __napi_##regfunc) + +// Register an add-on based on a subclass of `Addon` with a custom Node.js +// module name. +#define NODE_API_NAMED_ADDON(modname, classname) \ + static napi_value __napi_##classname(napi_env env, napi_value exports) { \ + return Napi::RegisterModule(env, exports, &classname::Init); \ + } \ + NAPI_MODULE(modname, __napi_##classname) + +// Register an add-on based on a subclass of `Addon` with the Node.js module +// name given by node-gyp from the `target_name` in binding.gyp. +#define NODE_API_ADDON(classname) \ + NODE_API_NAMED_ADDON(NODE_GYP_MODULE_NAME, classname) + +// Adapt the NAPI_MODULE registration function: +// - Wrap the arguments in NAPI wrappers. +// - Catch any NAPI errors and rethrow as JS exceptions. +inline napi_value RegisterModule(napi_env env, + napi_value exports, + ModuleRegisterCallback registerCallback) { + return details::WrapCallback([&] { + return napi_value( + registerCallback(Napi::Env(env), Napi::Object(env, exports))); + }); +} + +//////////////////////////////////////////////////////////////////////////////// +// Maybe class +//////////////////////////////////////////////////////////////////////////////// + +template +bool Maybe::IsNothing() const { + return !_has_value; +} + +template +bool Maybe::IsJust() const { + return _has_value; +} + +template +void Maybe::Check() const { + NAPI_CHECK(IsJust(), "Napi::Maybe::Check", "Maybe value is Nothing."); +} + +template +T Maybe::Unwrap() const { + NAPI_CHECK(IsJust(), "Napi::Maybe::Unwrap", "Maybe value is Nothing."); + return _value; +} + +template +T Maybe::UnwrapOr(const T& default_value) const { + return _has_value ? 
_value : default_value; +} + +template +bool Maybe::UnwrapTo(T* out) const { + if (IsJust()) { + *out = _value; + return true; + }; + return false; +} + +template +bool Maybe::operator==(const Maybe& other) const { + return (IsJust() == other.IsJust()) && + (!IsJust() || Unwrap() == other.Unwrap()); +} + +template +bool Maybe::operator!=(const Maybe& other) const { + return !operator==(other); +} + +template +Maybe::Maybe() : _has_value(false) {} + +template +Maybe::Maybe(const T& t) : _has_value(true), _value(t) {} + +template +inline Maybe Nothing() { + return Maybe(); +} + +template +inline Maybe Just(const T& t) { + return Maybe(t); +} + +//////////////////////////////////////////////////////////////////////////////// +// Env class +//////////////////////////////////////////////////////////////////////////////// + +inline Env::Env(napi_env env) : _env(env) {} + +inline Env::operator napi_env() const { + return _env; +} + +inline Object Env::Global() const { + napi_value value; + napi_status status = napi_get_global(*this, &value); + NAPI_THROW_IF_FAILED(*this, status, Object()); + return Object(*this, value); +} + +inline Value Env::Undefined() const { + napi_value value; + napi_status status = napi_get_undefined(*this, &value); + NAPI_THROW_IF_FAILED(*this, status, Value()); + return Value(*this, value); +} + +inline Value Env::Null() const { + napi_value value; + napi_status status = napi_get_null(*this, &value); + NAPI_THROW_IF_FAILED(*this, status, Value()); + return Value(*this, value); +} + +inline bool Env::IsExceptionPending() const { + bool result; + napi_status status = napi_is_exception_pending(_env, &result); + if (status != napi_ok) + result = false; // Checking for a pending exception shouldn't throw. 
+ return result; +} + +inline Error Env::GetAndClearPendingException() const { + napi_value value; + napi_status status = napi_get_and_clear_last_exception(_env, &value); + if (status != napi_ok) { + // Don't throw another exception when failing to get the exception! + return Error(); + } + return Error(_env, value); +} + +inline MaybeOrValue Env::RunScript(const char* utf8script) const { + String script = String::New(_env, utf8script); + return RunScript(script); +} + +inline MaybeOrValue Env::RunScript(const std::string& utf8script) const { + return RunScript(utf8script.c_str()); +} + +inline MaybeOrValue Env::RunScript(String script) const { + napi_value result; + napi_status status = napi_run_script(_env, script, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Value(_env, result), Napi::Value); +} + +#if NAPI_VERSION > 2 +template +void Env::CleanupHook::Wrapper(void* data) NAPI_NOEXCEPT { + auto* cleanupData = + static_cast::CleanupData*>( + data); + cleanupData->hook(); + delete cleanupData; +} + +template +void Env::CleanupHook::WrapperWithArg(void* data) NAPI_NOEXCEPT { + auto* cleanupData = + static_cast::CleanupData*>( + data); + cleanupData->hook(static_cast(cleanupData->arg)); + delete cleanupData; +} +#endif // NAPI_VERSION > 2 + +#if NAPI_VERSION > 5 +template fini> +inline void Env::SetInstanceData(T* data) const { + napi_status status = napi_set_instance_data( + _env, + data, + [](napi_env env, void* data, void*) { fini(env, static_cast(data)); }, + nullptr); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +template fini> +inline void Env::SetInstanceData(DataType* data, HintType* hint) const { + napi_status status = napi_set_instance_data( + _env, + data, + [](napi_env env, void* data, void* hint) { + fini(env, static_cast(data), static_cast(hint)); + }, + hint); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +template +inline T* Env::GetInstanceData() const { + void* data = nullptr; + + napi_status status = 
napi_get_instance_data(_env, &data); + NAPI_THROW_IF_FAILED(_env, status, nullptr); + + return static_cast(data); +} + +template +void Env::DefaultFini(Env, T* data) { + delete data; +} + +template +void Env::DefaultFiniWithHint(Env, DataType* data, HintType*) { + delete data; +} +#endif // NAPI_VERSION > 5 + +//////////////////////////////////////////////////////////////////////////////// +// Value class +//////////////////////////////////////////////////////////////////////////////// + +inline Value::Value() : _env(nullptr), _value(nullptr) {} + +inline Value::Value(napi_env env, napi_value value) + : _env(env), _value(value) {} + +inline Value::operator napi_value() const { + return _value; +} + +inline bool Value::operator==(const Value& other) const { + return StrictEquals(other); +} + +inline bool Value::operator!=(const Value& other) const { + return !this->operator==(other); +} + +inline bool Value::StrictEquals(const Value& other) const { + bool result; + napi_status status = napi_strict_equals(_env, *this, other, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline Napi::Env Value::Env() const { + return Napi::Env(_env); +} + +inline bool Value::IsEmpty() const { + return _value == nullptr; +} + +inline napi_valuetype Value::Type() const { + if (IsEmpty()) { + return napi_undefined; + } + + napi_valuetype type; + napi_status status = napi_typeof(_env, _value, &type); + NAPI_THROW_IF_FAILED(_env, status, napi_undefined); + return type; +} + +inline bool Value::IsUndefined() const { + return Type() == napi_undefined; +} + +inline bool Value::IsNull() const { + return Type() == napi_null; +} + +inline bool Value::IsBoolean() const { + return Type() == napi_boolean; +} + +inline bool Value::IsNumber() const { + return Type() == napi_number; +} + +#if NAPI_VERSION > 5 +inline bool Value::IsBigInt() const { + return Type() == napi_bigint; +} +#endif // NAPI_VERSION > 5 + +#if (NAPI_VERSION > 4) +inline bool Value::IsDate() const 
{ + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_date(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} +#endif + +inline bool Value::IsString() const { + return Type() == napi_string; +} + +inline bool Value::IsSymbol() const { + return Type() == napi_symbol; +} + +inline bool Value::IsArray() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_array(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline bool Value::IsArrayBuffer() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_arraybuffer(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline bool Value::IsTypedArray() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_typedarray(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline bool Value::IsObject() const { + return Type() == napi_object || IsFunction(); +} + +inline bool Value::IsFunction() const { + return Type() == napi_function; +} + +inline bool Value::IsPromise() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_promise(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline bool Value::IsDataView() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_dataview(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline bool Value::IsBuffer() const { + if (IsEmpty()) { + return false; + } + + bool result; + napi_status status = napi_is_buffer(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +inline bool Value::IsExternal() const { + return Type() == napi_external; +} + +template +inline T Value::As() 
const { + return T(_env, _value); +} + +inline MaybeOrValue Value::ToBoolean() const { + napi_value result; + napi_status status = napi_coerce_to_bool(_env, _value, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Boolean(_env, result), Napi::Boolean); +} + +inline MaybeOrValue Value::ToNumber() const { + napi_value result; + napi_status status = napi_coerce_to_number(_env, _value, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Number(_env, result), Napi::Number); +} + +inline MaybeOrValue Value::ToString() const { + napi_value result; + napi_status status = napi_coerce_to_string(_env, _value, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::String(_env, result), Napi::String); +} + +inline MaybeOrValue Value::ToObject() const { + napi_value result; + napi_status status = napi_coerce_to_object(_env, _value, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Object(_env, result), Napi::Object); +} + +//////////////////////////////////////////////////////////////////////////////// +// Boolean class +//////////////////////////////////////////////////////////////////////////////// + +inline Boolean Boolean::New(napi_env env, bool val) { + napi_value value; + napi_status status = napi_get_boolean(env, val, &value); + NAPI_THROW_IF_FAILED(env, status, Boolean()); + return Boolean(env, value); +} + +inline Boolean::Boolean() : Napi::Value() {} + +inline Boolean::Boolean(napi_env env, napi_value value) + : Napi::Value(env, value) {} + +inline Boolean::operator bool() const { + return Value(); +} + +inline bool Boolean::Value() const { + bool result; + napi_status status = napi_get_value_bool(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, false); + return result; +} + +//////////////////////////////////////////////////////////////////////////////// +// Number class +//////////////////////////////////////////////////////////////////////////////// + +inline Number Number::New(napi_env env, 
double val) { + napi_value value; + napi_status status = napi_create_double(env, val, &value); + NAPI_THROW_IF_FAILED(env, status, Number()); + return Number(env, value); +} + +inline Number::Number() : Value() {} + +inline Number::Number(napi_env env, napi_value value) : Value(env, value) {} + +inline Number::operator int32_t() const { + return Int32Value(); +} + +inline Number::operator uint32_t() const { + return Uint32Value(); +} + +inline Number::operator int64_t() const { + return Int64Value(); +} + +inline Number::operator float() const { + return FloatValue(); +} + +inline Number::operator double() const { + return DoubleValue(); +} + +inline int32_t Number::Int32Value() const { + int32_t result; + napi_status status = napi_get_value_int32(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +inline uint32_t Number::Uint32Value() const { + uint32_t result; + napi_status status = napi_get_value_uint32(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +inline int64_t Number::Int64Value() const { + int64_t result; + napi_status status = napi_get_value_int64(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +inline float Number::FloatValue() const { + return static_cast(DoubleValue()); +} + +inline double Number::DoubleValue() const { + double result; + napi_status status = napi_get_value_double(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +#if NAPI_VERSION > 5 +//////////////////////////////////////////////////////////////////////////////// +// BigInt Class +//////////////////////////////////////////////////////////////////////////////// + +inline BigInt BigInt::New(napi_env env, int64_t val) { + napi_value value; + napi_status status = napi_create_bigint_int64(env, val, &value); + NAPI_THROW_IF_FAILED(env, status, BigInt()); + return BigInt(env, value); +} + +inline BigInt BigInt::New(napi_env env, uint64_t val) 
{ + napi_value value; + napi_status status = napi_create_bigint_uint64(env, val, &value); + NAPI_THROW_IF_FAILED(env, status, BigInt()); + return BigInt(env, value); +} + +inline BigInt BigInt::New(napi_env env, + int sign_bit, + size_t word_count, + const uint64_t* words) { + napi_value value; + napi_status status = + napi_create_bigint_words(env, sign_bit, word_count, words, &value); + NAPI_THROW_IF_FAILED(env, status, BigInt()); + return BigInt(env, value); +} + +inline BigInt::BigInt() : Value() {} + +inline BigInt::BigInt(napi_env env, napi_value value) : Value(env, value) {} + +inline int64_t BigInt::Int64Value(bool* lossless) const { + int64_t result; + napi_status status = + napi_get_value_bigint_int64(_env, _value, &result, lossless); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +inline uint64_t BigInt::Uint64Value(bool* lossless) const { + uint64_t result; + napi_status status = + napi_get_value_bigint_uint64(_env, _value, &result, lossless); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +inline size_t BigInt::WordCount() const { + size_t word_count; + napi_status status = + napi_get_value_bigint_words(_env, _value, nullptr, &word_count, nullptr); + NAPI_THROW_IF_FAILED(_env, status, 0); + return word_count; +} + +inline void BigInt::ToWords(int* sign_bit, + size_t* word_count, + uint64_t* words) { + napi_status status = + napi_get_value_bigint_words(_env, _value, sign_bit, word_count, words); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} +#endif // NAPI_VERSION > 5 + +#if (NAPI_VERSION > 4) +//////////////////////////////////////////////////////////////////////////////// +// Date Class +//////////////////////////////////////////////////////////////////////////////// + +inline Date Date::New(napi_env env, double val) { + napi_value value; + napi_status status = napi_create_date(env, val, &value); + NAPI_THROW_IF_FAILED(env, status, Date()); + return Date(env, value); +} + +inline Date::Date() : Value() {} + +inline 
Date::Date(napi_env env, napi_value value) : Value(env, value) {} + +inline Date::operator double() const { + return ValueOf(); +} + +inline double Date::ValueOf() const { + double result; + napi_status status = napi_get_date_value(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} +#endif + +//////////////////////////////////////////////////////////////////////////////// +// Name class +//////////////////////////////////////////////////////////////////////////////// + +inline Name::Name() : Value() {} + +inline Name::Name(napi_env env, napi_value value) : Value(env, value) {} + +//////////////////////////////////////////////////////////////////////////////// +// String class +//////////////////////////////////////////////////////////////////////////////// + +inline String String::New(napi_env env, const std::string& val) { + return String::New(env, val.c_str(), val.size()); +} + +inline String String::New(napi_env env, const std::u16string& val) { + return String::New(env, val.c_str(), val.size()); +} + +inline String String::New(napi_env env, const char* val) { + // TODO(@gabrielschulhof) Remove if-statement when core's error handling is + // available in all supported versions. + if (val == nullptr) { + // Throw an error that looks like it came from core. + NAPI_THROW_IF_FAILED(env, napi_invalid_arg, String()); + } + napi_value value; + napi_status status = + napi_create_string_utf8(env, val, std::strlen(val), &value); + NAPI_THROW_IF_FAILED(env, status, String()); + return String(env, value); +} + +inline String String::New(napi_env env, const char16_t* val) { + napi_value value; + // TODO(@gabrielschulhof) Remove if-statement when core's error handling is + // available in all supported versions. + if (val == nullptr) { + // Throw an error that looks like it came from core. 
+ NAPI_THROW_IF_FAILED(env, napi_invalid_arg, String()); + } + napi_status status = + napi_create_string_utf16(env, val, std::u16string(val).size(), &value); + NAPI_THROW_IF_FAILED(env, status, String()); + return String(env, value); +} + +inline String String::New(napi_env env, const char* val, size_t length) { + napi_value value; + napi_status status = napi_create_string_utf8(env, val, length, &value); + NAPI_THROW_IF_FAILED(env, status, String()); + return String(env, value); +} + +inline String String::New(napi_env env, const char16_t* val, size_t length) { + napi_value value; + napi_status status = napi_create_string_utf16(env, val, length, &value); + NAPI_THROW_IF_FAILED(env, status, String()); + return String(env, value); +} + +inline String::String() : Name() {} + +inline String::String(napi_env env, napi_value value) : Name(env, value) {} + +inline String::operator std::string() const { + return Utf8Value(); +} + +inline String::operator std::u16string() const { + return Utf16Value(); +} + +inline std::string String::Utf8Value() const { + size_t length; + napi_status status = + napi_get_value_string_utf8(_env, _value, nullptr, 0, &length); + NAPI_THROW_IF_FAILED(_env, status, ""); + + std::string value; + value.reserve(length + 1); + value.resize(length); + status = napi_get_value_string_utf8( + _env, _value, &value[0], value.capacity(), nullptr); + NAPI_THROW_IF_FAILED(_env, status, ""); + return value; +} + +inline std::u16string String::Utf16Value() const { + size_t length; + napi_status status = + napi_get_value_string_utf16(_env, _value, nullptr, 0, &length); + NAPI_THROW_IF_FAILED(_env, status, NAPI_WIDE_TEXT("")); + + std::u16string value; + value.reserve(length + 1); + value.resize(length); + status = napi_get_value_string_utf16( + _env, _value, &value[0], value.capacity(), nullptr); + NAPI_THROW_IF_FAILED(_env, status, NAPI_WIDE_TEXT("")); + return value; +} + +//////////////////////////////////////////////////////////////////////////////// +// 
Symbol class +//////////////////////////////////////////////////////////////////////////////// + +inline Symbol Symbol::New(napi_env env, const char* description) { + napi_value descriptionValue = description != nullptr + ? String::New(env, description) + : static_cast(nullptr); + return Symbol::New(env, descriptionValue); +} + +inline Symbol Symbol::New(napi_env env, const std::string& description) { + napi_value descriptionValue = String::New(env, description); + return Symbol::New(env, descriptionValue); +} + +inline Symbol Symbol::New(napi_env env, String description) { + napi_value descriptionValue = description; + return Symbol::New(env, descriptionValue); +} + +inline Symbol Symbol::New(napi_env env, napi_value description) { + napi_value value; + napi_status status = napi_create_symbol(env, description, &value); + NAPI_THROW_IF_FAILED(env, status, Symbol()); + return Symbol(env, value); +} + +inline MaybeOrValue Symbol::WellKnown(napi_env env, + const std::string& name) { +#if defined(NODE_ADDON_API_ENABLE_MAYBE) + Value symbol_obj; + Value symbol_value; + if (Napi::Env(env).Global().Get("Symbol").UnwrapTo(&symbol_obj) && + symbol_obj.As().Get(name).UnwrapTo(&symbol_value)) { + return Just(symbol_value.As()); + } + return Nothing(); +#else + return Napi::Env(env) + .Global() + .Get("Symbol") + .As() + .Get(name) + .As(); +#endif +} + +inline MaybeOrValue Symbol::For(napi_env env, + const std::string& description) { + napi_value descriptionValue = String::New(env, description); + return Symbol::For(env, descriptionValue); +} + +inline MaybeOrValue Symbol::For(napi_env env, const char* description) { + napi_value descriptionValue = String::New(env, description); + return Symbol::For(env, descriptionValue); +} + +inline MaybeOrValue Symbol::For(napi_env env, String description) { + return Symbol::For(env, static_cast(description)); +} + +inline MaybeOrValue Symbol::For(napi_env env, napi_value description) { +#if defined(NODE_ADDON_API_ENABLE_MAYBE) + Value 
symbol_obj; + Value symbol_for_value; + Value symbol_value; + if (Napi::Env(env).Global().Get("Symbol").UnwrapTo(&symbol_obj) && + symbol_obj.As().Get("for").UnwrapTo(&symbol_for_value) && + symbol_for_value.As() + .Call(symbol_obj, {description}) + .UnwrapTo(&symbol_value)) { + return Just(symbol_value.As()); + } + return Nothing(); +#else + Object symbol_obj = Napi::Env(env).Global().Get("Symbol").As(); + return symbol_obj.Get("for") + .As() + .Call(symbol_obj, {description}) + .As(); +#endif +} + +inline Symbol::Symbol() : Name() {} + +inline Symbol::Symbol(napi_env env, napi_value value) : Name(env, value) {} + +//////////////////////////////////////////////////////////////////////////////// +// Automagic value creation +//////////////////////////////////////////////////////////////////////////////// + +namespace details { +template +struct vf_number { + static Number From(napi_env env, T value) { + return Number::New(env, static_cast(value)); + } +}; + +template <> +struct vf_number { + static Boolean From(napi_env env, bool value) { + return Boolean::New(env, value); + } +}; + +struct vf_utf8_charp { + static String From(napi_env env, const char* value) { + return String::New(env, value); + } +}; + +struct vf_utf16_charp { + static String From(napi_env env, const char16_t* value) { + return String::New(env, value); + } +}; +struct vf_utf8_string { + static String From(napi_env env, const std::string& value) { + return String::New(env, value); + } +}; + +struct vf_utf16_string { + static String From(napi_env env, const std::u16string& value) { + return String::New(env, value); + } +}; + +template +struct vf_fallback { + static Value From(napi_env env, const T& value) { return Value(env, value); } +}; + +template +struct disjunction : std::false_type {}; +template +struct disjunction : B {}; +template +struct disjunction + : std::conditional>::type {}; + +template +struct can_make_string + : disjunction::type, + typename std::is_convertible::type, + typename 
std::is_convertible::type, + typename std::is_convertible::type> {}; +} // namespace details + +template +Value Value::From(napi_env env, const T& value) { + using Helper = typename std::conditional< + std::is_integral::value || std::is_floating_point::value, + details::vf_number, + typename std::conditional::value, + String, + details::vf_fallback>::type>::type; + return Helper::From(env, value); +} + +template +String String::From(napi_env env, const T& value) { + struct Dummy {}; + using Helper = typename std::conditional< + std::is_convertible::value, + details::vf_utf8_charp, + typename std::conditional< + std::is_convertible::value, + details::vf_utf16_charp, + typename std::conditional< + std::is_convertible::value, + details::vf_utf8_string, + typename std::conditional< + std::is_convertible::value, + details::vf_utf16_string, + Dummy>::type>::type>::type>::type; + return Helper::From(env, value); +} + +//////////////////////////////////////////////////////////////////////////////// +// Object class +//////////////////////////////////////////////////////////////////////////////// + +template +inline Object::PropertyLValue::operator Value() const { + MaybeOrValue val = Object(_env, _object).Get(_key); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + return val.Unwrap(); +#else + return val; +#endif +} + +template +template +inline Object::PropertyLValue& Object::PropertyLValue::operator=( + ValueType value) { +#ifdef NODE_ADDON_API_ENABLE_MAYBE + MaybeOrValue result = +#endif + Object(_env, _object).Set(_key, value); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + result.Unwrap(); +#endif + return *this; +} + +template +inline Object::PropertyLValue::PropertyLValue(Object object, Key key) + : _env(object.Env()), _object(object), _key(key) {} + +inline Object Object::New(napi_env env) { + napi_value value; + napi_status status = napi_create_object(env, &value); + NAPI_THROW_IF_FAILED(env, status, Object()); + return Object(env, value); +} + +inline Object::Object() : Value() {} + 
+inline Object::Object(napi_env env, napi_value value) : Value(env, value) {} + +inline Object::PropertyLValue Object::operator[]( + const char* utf8name) { + return PropertyLValue(*this, utf8name); +} + +inline Object::PropertyLValue Object::operator[]( + const std::string& utf8name) { + return PropertyLValue(*this, utf8name); +} + +inline Object::PropertyLValue Object::operator[](uint32_t index) { + return PropertyLValue(*this, index); +} + +inline Object::PropertyLValue Object::operator[](Value index) const { + return PropertyLValue(*this, index); +} + +inline MaybeOrValue Object::operator[](const char* utf8name) const { + return Get(utf8name); +} + +inline MaybeOrValue Object::operator[]( + const std::string& utf8name) const { + return Get(utf8name); +} + +inline MaybeOrValue Object::operator[](uint32_t index) const { + return Get(index); +} + +inline MaybeOrValue Object::Has(napi_value key) const { + bool result; + napi_status status = napi_has_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::Has(Value key) const { + bool result; + napi_status status = napi_has_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::Has(const char* utf8name) const { + bool result; + napi_status status = napi_has_named_property(_env, _value, utf8name, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::Has(const std::string& utf8name) const { + return Has(utf8name.c_str()); +} + +inline MaybeOrValue Object::HasOwnProperty(napi_value key) const { + bool result; + napi_status status = napi_has_own_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::HasOwnProperty(Value key) const { + bool result; + napi_status status = napi_has_own_property(_env, _value, key, &result); + 
NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::HasOwnProperty(const char* utf8name) const { + napi_value key; + napi_status status = + napi_create_string_utf8(_env, utf8name, std::strlen(utf8name), &key); + NAPI_MAYBE_THROW_IF_FAILED(_env, status, bool); + return HasOwnProperty(key); +} + +inline MaybeOrValue Object::HasOwnProperty( + const std::string& utf8name) const { + return HasOwnProperty(utf8name.c_str()); +} + +inline MaybeOrValue Object::Get(napi_value key) const { + napi_value result; + napi_status status = napi_get_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Value(_env, result), Value); +} + +inline MaybeOrValue Object::Get(Value key) const { + napi_value result; + napi_status status = napi_get_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Value(_env, result), Value); +} + +inline MaybeOrValue Object::Get(const char* utf8name) const { + napi_value result; + napi_status status = napi_get_named_property(_env, _value, utf8name, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Value(_env, result), Value); +} + +inline MaybeOrValue Object::Get(const std::string& utf8name) const { + return Get(utf8name.c_str()); +} + +template +inline MaybeOrValue Object::Set(napi_value key, + const ValueType& value) const { + napi_status status = + napi_set_property(_env, _value, key, Value::From(_env, value)); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +template +inline MaybeOrValue Object::Set(Value key, const ValueType& value) const { + napi_status status = + napi_set_property(_env, _value, key, Value::From(_env, value)); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +template +inline MaybeOrValue Object::Set(const char* utf8name, + const ValueType& value) const { + napi_status status = + napi_set_named_property(_env, _value, utf8name, Value::From(_env, value)); + 
NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +template +inline MaybeOrValue Object::Set(const std::string& utf8name, + const ValueType& value) const { + return Set(utf8name.c_str(), value); +} + +inline MaybeOrValue Object::Delete(napi_value key) const { + bool result; + napi_status status = napi_delete_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::Delete(Value key) const { + bool result; + napi_status status = napi_delete_property(_env, _value, key, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::Delete(const char* utf8name) const { + return Delete(String::New(_env, utf8name)); +} + +inline MaybeOrValue Object::Delete(const std::string& utf8name) const { + return Delete(String::New(_env, utf8name)); +} + +inline MaybeOrValue Object::Has(uint32_t index) const { + bool result; + napi_status status = napi_has_element(_env, _value, index, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::Get(uint32_t index) const { + napi_value value; + napi_status status = napi_get_element(_env, _value, index, &value); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, Value(_env, value), Value); +} + +template +inline MaybeOrValue Object::Set(uint32_t index, + const ValueType& value) const { + napi_status status = + napi_set_element(_env, _value, index, Value::From(_env, value)); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +inline MaybeOrValue Object::Delete(uint32_t index) const { + bool result; + napi_status status = napi_delete_element(_env, _value, index, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +inline MaybeOrValue Object::GetPropertyNames() const { + napi_value result; + napi_status status = napi_get_property_names(_env, _value, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, 
status, Array(_env, result), Array); +} + +inline MaybeOrValue Object::DefineProperty( + const PropertyDescriptor& property) const { + napi_status status = napi_define_properties( + _env, + _value, + 1, + reinterpret_cast(&property)); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +inline MaybeOrValue Object::DefineProperties( + const std::initializer_list& properties) const { + napi_status status = napi_define_properties( + _env, + _value, + properties.size(), + reinterpret_cast(properties.begin())); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +inline MaybeOrValue Object::DefineProperties( + const std::vector& properties) const { + napi_status status = napi_define_properties( + _env, + _value, + properties.size(), + reinterpret_cast(properties.data())); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +inline MaybeOrValue Object::InstanceOf( + const Function& constructor) const { + bool result; + napi_status status = napi_instanceof(_env, _value, constructor, &result); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, result, bool); +} + +template +inline void Object::AddFinalizer(Finalizer finalizeCallback, T* data) const { + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), nullptr}); + napi_status status = + details::AttachData(_env, + *this, + data, + details::FinalizeData::Wrapper, + finalizeData); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED_VOID(_env, status); + } +} + +template +inline void Object::AddFinalizer(Finalizer finalizeCallback, + T* data, + Hint* finalizeHint) const { + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), finalizeHint}); + napi_status status = details::AttachData( + _env, + *this, + data, + details::FinalizeData::WrapperWithHint, + finalizeData); + if (status != napi_ok) { + delete finalizeData; + 
NAPI_THROW_IF_FAILED_VOID(_env, status); + } +} + +#ifdef NAPI_CPP_EXCEPTIONS +inline Object::const_iterator::const_iterator(const Object* object, + const Type type) { + _object = object; + _keys = object->GetPropertyNames(); + _index = type == Type::BEGIN ? 0 : _keys.Length(); +} + +inline Object::const_iterator Napi::Object::begin() const { + const_iterator it(this, Object::const_iterator::Type::BEGIN); + return it; +} + +inline Object::const_iterator Napi::Object::end() const { + const_iterator it(this, Object::const_iterator::Type::END); + return it; +} + +inline Object::const_iterator& Object::const_iterator::operator++() { + ++_index; + return *this; +} + +inline bool Object::const_iterator::operator==( + const const_iterator& other) const { + return _index == other._index; +} + +inline bool Object::const_iterator::operator!=( + const const_iterator& other) const { + return _index != other._index; +} + +inline const std::pair> +Object::const_iterator::operator*() const { + const Value key = _keys[_index]; + const PropertyLValue value = (*_object)[key]; + return {key, value}; +} + +inline Object::iterator::iterator(Object* object, const Type type) { + _object = object; + _keys = object->GetPropertyNames(); + _index = type == Type::BEGIN ? 
0 : _keys.Length(); +} + +inline Object::iterator Napi::Object::begin() { + iterator it(this, Object::iterator::Type::BEGIN); + return it; +} + +inline Object::iterator Napi::Object::end() { + iterator it(this, Object::iterator::Type::END); + return it; +} + +inline Object::iterator& Object::iterator::operator++() { + ++_index; + return *this; +} + +inline bool Object::iterator::operator==(const iterator& other) const { + return _index == other._index; +} + +inline bool Object::iterator::operator!=(const iterator& other) const { + return _index != other._index; +} + +inline std::pair> +Object::iterator::operator*() { + Value key = _keys[_index]; + PropertyLValue value = (*_object)[key]; + return {key, value}; +} +#endif // NAPI_CPP_EXCEPTIONS + +#if NAPI_VERSION >= 8 +inline MaybeOrValue Object::Freeze() const { + napi_status status = napi_object_freeze(_env, _value); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} + +inline MaybeOrValue Object::Seal() const { + napi_status status = napi_object_seal(_env, _value); + NAPI_RETURN_OR_THROW_IF_FAILED(_env, status, status == napi_ok, bool); +} +#endif // NAPI_VERSION >= 8 + +//////////////////////////////////////////////////////////////////////////////// +// External class +//////////////////////////////////////////////////////////////////////////////// + +template +inline External External::New(napi_env env, T* data) { + napi_value value; + napi_status status = + napi_create_external(env, data, nullptr, nullptr, &value); + NAPI_THROW_IF_FAILED(env, status, External()); + return External(env, value); +} + +template +template +inline External External::New(napi_env env, + T* data, + Finalizer finalizeCallback) { + napi_value value; + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), nullptr}); + napi_status status = + napi_create_external(env, + data, + details::FinalizeData::Wrapper, + finalizeData, + &value); + if (status != napi_ok) { + 
delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, External()); + } + return External(env, value); +} + +template +template +inline External External::New(napi_env env, + T* data, + Finalizer finalizeCallback, + Hint* finalizeHint) { + napi_value value; + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), finalizeHint}); + napi_status status = napi_create_external( + env, + data, + details::FinalizeData::WrapperWithHint, + finalizeData, + &value); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, External()); + } + return External(env, value); +} + +template +inline External::External() : Value() {} + +template +inline External::External(napi_env env, napi_value value) + : Value(env, value) {} + +template +inline T* External::Data() const { + void* data; + napi_status status = napi_get_value_external(_env, _value, &data); + NAPI_THROW_IF_FAILED(_env, status, nullptr); + return reinterpret_cast(data); +} + +//////////////////////////////////////////////////////////////////////////////// +// Array class +//////////////////////////////////////////////////////////////////////////////// + +inline Array Array::New(napi_env env) { + napi_value value; + napi_status status = napi_create_array(env, &value); + NAPI_THROW_IF_FAILED(env, status, Array()); + return Array(env, value); +} + +inline Array Array::New(napi_env env, size_t length) { + napi_value value; + napi_status status = napi_create_array_with_length(env, length, &value); + NAPI_THROW_IF_FAILED(env, status, Array()); + return Array(env, value); +} + +inline Array::Array() : Object() {} + +inline Array::Array(napi_env env, napi_value value) : Object(env, value) {} + +inline uint32_t Array::Length() const { + uint32_t result; + napi_status status = napi_get_array_length(_env, _value, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + 
+//////////////////////////////////////////////////////////////////////////////// +// ArrayBuffer class +//////////////////////////////////////////////////////////////////////////////// + +inline ArrayBuffer ArrayBuffer::New(napi_env env, size_t byteLength) { + napi_value value; + void* data; + napi_status status = napi_create_arraybuffer(env, byteLength, &data, &value); + NAPI_THROW_IF_FAILED(env, status, ArrayBuffer()); + + return ArrayBuffer(env, value); +} + +inline ArrayBuffer ArrayBuffer::New(napi_env env, + void* externalData, + size_t byteLength) { + napi_value value; + napi_status status = napi_create_external_arraybuffer( + env, externalData, byteLength, nullptr, nullptr, &value); + NAPI_THROW_IF_FAILED(env, status, ArrayBuffer()); + + return ArrayBuffer(env, value); +} + +template +inline ArrayBuffer ArrayBuffer::New(napi_env env, + void* externalData, + size_t byteLength, + Finalizer finalizeCallback) { + napi_value value; + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), nullptr}); + napi_status status = napi_create_external_arraybuffer( + env, + externalData, + byteLength, + details::FinalizeData::Wrapper, + finalizeData, + &value); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, ArrayBuffer()); + } + + return ArrayBuffer(env, value); +} + +template +inline ArrayBuffer ArrayBuffer::New(napi_env env, + void* externalData, + size_t byteLength, + Finalizer finalizeCallback, + Hint* finalizeHint) { + napi_value value; + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), finalizeHint}); + napi_status status = napi_create_external_arraybuffer( + env, + externalData, + byteLength, + details::FinalizeData::WrapperWithHint, + finalizeData, + &value); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, ArrayBuffer()); + } + + return ArrayBuffer(env, value); +} + +inline 
ArrayBuffer::ArrayBuffer() : Object() {} + +inline ArrayBuffer::ArrayBuffer(napi_env env, napi_value value) + : Object(env, value) {} + +inline void* ArrayBuffer::Data() { + void* data; + napi_status status = napi_get_arraybuffer_info(_env, _value, &data, nullptr); + NAPI_THROW_IF_FAILED(_env, status, nullptr); + return data; +} + +inline size_t ArrayBuffer::ByteLength() { + size_t length; + napi_status status = + napi_get_arraybuffer_info(_env, _value, nullptr, &length); + NAPI_THROW_IF_FAILED(_env, status, 0); + return length; +} + +#if NAPI_VERSION >= 7 +inline bool ArrayBuffer::IsDetached() const { + bool detached; + napi_status status = napi_is_detached_arraybuffer(_env, _value, &detached); + NAPI_THROW_IF_FAILED(_env, status, false); + return detached; +} + +inline void ArrayBuffer::Detach() { + napi_status status = napi_detach_arraybuffer(_env, _value); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} +#endif // NAPI_VERSION >= 7 + +//////////////////////////////////////////////////////////////////////////////// +// DataView class +//////////////////////////////////////////////////////////////////////////////// +inline DataView DataView::New(napi_env env, Napi::ArrayBuffer arrayBuffer) { + return New(env, arrayBuffer, 0, arrayBuffer.ByteLength()); +} + +inline DataView DataView::New(napi_env env, + Napi::ArrayBuffer arrayBuffer, + size_t byteOffset) { + if (byteOffset > arrayBuffer.ByteLength()) { + NAPI_THROW(RangeError::New( + env, "Start offset is outside the bounds of the buffer"), + DataView()); + } + return New( + env, arrayBuffer, byteOffset, arrayBuffer.ByteLength() - byteOffset); +} + +inline DataView DataView::New(napi_env env, + Napi::ArrayBuffer arrayBuffer, + size_t byteOffset, + size_t byteLength) { + if (byteOffset + byteLength > arrayBuffer.ByteLength()) { + NAPI_THROW(RangeError::New(env, "Invalid DataView length"), DataView()); + } + napi_value value; + napi_status status = + napi_create_dataview(env, byteLength, arrayBuffer, byteOffset, 
&value); + NAPI_THROW_IF_FAILED(env, status, DataView()); + return DataView(env, value); +} + +inline DataView::DataView() : Object() {} + +inline DataView::DataView(napi_env env, napi_value value) : Object(env, value) { + napi_status status = napi_get_dataview_info(_env, + _value /* dataView */, + &_length /* byteLength */, + &_data /* data */, + nullptr /* arrayBuffer */, + nullptr /* byteOffset */); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline Napi::ArrayBuffer DataView::ArrayBuffer() const { + napi_value arrayBuffer; + napi_status status = napi_get_dataview_info(_env, + _value /* dataView */, + nullptr /* byteLength */, + nullptr /* data */, + &arrayBuffer /* arrayBuffer */, + nullptr /* byteOffset */); + NAPI_THROW_IF_FAILED(_env, status, Napi::ArrayBuffer()); + return Napi::ArrayBuffer(_env, arrayBuffer); +} + +inline size_t DataView::ByteOffset() const { + size_t byteOffset; + napi_status status = napi_get_dataview_info(_env, + _value /* dataView */, + nullptr /* byteLength */, + nullptr /* data */, + nullptr /* arrayBuffer */, + &byteOffset /* byteOffset */); + NAPI_THROW_IF_FAILED(_env, status, 0); + return byteOffset; +} + +inline size_t DataView::ByteLength() const { + return _length; +} + +inline void* DataView::Data() const { + return _data; +} + +inline float DataView::GetFloat32(size_t byteOffset) const { + return ReadData(byteOffset); +} + +inline double DataView::GetFloat64(size_t byteOffset) const { + return ReadData(byteOffset); +} + +inline int8_t DataView::GetInt8(size_t byteOffset) const { + return ReadData(byteOffset); +} + +inline int16_t DataView::GetInt16(size_t byteOffset) const { + return ReadData(byteOffset); +} + +inline int32_t DataView::GetInt32(size_t byteOffset) const { + return ReadData(byteOffset); +} + +inline uint8_t DataView::GetUint8(size_t byteOffset) const { + return ReadData(byteOffset); +} + +inline uint16_t DataView::GetUint16(size_t byteOffset) const { + return ReadData(byteOffset); +} + +inline uint32_t 
DataView::GetUint32(size_t byteOffset) const { + return ReadData(byteOffset); +} + +inline void DataView::SetFloat32(size_t byteOffset, float value) const { + WriteData(byteOffset, value); +} + +inline void DataView::SetFloat64(size_t byteOffset, double value) const { + WriteData(byteOffset, value); +} + +inline void DataView::SetInt8(size_t byteOffset, int8_t value) const { + WriteData(byteOffset, value); +} + +inline void DataView::SetInt16(size_t byteOffset, int16_t value) const { + WriteData(byteOffset, value); +} + +inline void DataView::SetInt32(size_t byteOffset, int32_t value) const { + WriteData(byteOffset, value); +} + +inline void DataView::SetUint8(size_t byteOffset, uint8_t value) const { + WriteData(byteOffset, value); +} + +inline void DataView::SetUint16(size_t byteOffset, uint16_t value) const { + WriteData(byteOffset, value); +} + +inline void DataView::SetUint32(size_t byteOffset, uint32_t value) const { + WriteData(byteOffset, value); +} + +template +inline T DataView::ReadData(size_t byteOffset) const { + if (byteOffset + sizeof(T) > _length || + byteOffset + sizeof(T) < byteOffset) { // overflow + NAPI_THROW( + RangeError::New(_env, "Offset is outside the bounds of the DataView"), + 0); + } + + return *reinterpret_cast(static_cast(_data) + byteOffset); +} + +template +inline void DataView::WriteData(size_t byteOffset, T value) const { + if (byteOffset + sizeof(T) > _length || + byteOffset + sizeof(T) < byteOffset) { // overflow + NAPI_THROW_VOID( + RangeError::New(_env, "Offset is outside the bounds of the DataView")); + } + + *reinterpret_cast(static_cast(_data) + byteOffset) = value; +} + +//////////////////////////////////////////////////////////////////////////////// +// TypedArray class +//////////////////////////////////////////////////////////////////////////////// + +inline TypedArray::TypedArray() + : Object(), _type(napi_typedarray_type::napi_int8_array), _length(0) {} + +inline TypedArray::TypedArray(napi_env env, napi_value value) 
+ : Object(env, value), + _type(napi_typedarray_type::napi_int8_array), + _length(0) { + if (value != nullptr) { + napi_status status = + napi_get_typedarray_info(_env, + _value, + &const_cast(this)->_type, + &const_cast(this)->_length, + nullptr, + nullptr, + nullptr); + NAPI_THROW_IF_FAILED_VOID(_env, status); + } +} + +inline TypedArray::TypedArray(napi_env env, + napi_value value, + napi_typedarray_type type, + size_t length) + : Object(env, value), _type(type), _length(length) {} + +inline napi_typedarray_type TypedArray::TypedArrayType() const { + return _type; +} + +inline uint8_t TypedArray::ElementSize() const { + switch (_type) { + case napi_int8_array: + case napi_uint8_array: + case napi_uint8_clamped_array: + return 1; + case napi_int16_array: + case napi_uint16_array: + return 2; + case napi_int32_array: + case napi_uint32_array: + case napi_float32_array: + return 4; + case napi_float64_array: +#if (NAPI_VERSION > 5) + case napi_bigint64_array: + case napi_biguint64_array: +#endif // (NAPI_VERSION > 5) + return 8; + default: + return 0; + } +} + +inline size_t TypedArray::ElementLength() const { + return _length; +} + +inline size_t TypedArray::ByteOffset() const { + size_t byteOffset; + napi_status status = napi_get_typedarray_info( + _env, _value, nullptr, nullptr, nullptr, nullptr, &byteOffset); + NAPI_THROW_IF_FAILED(_env, status, 0); + return byteOffset; +} + +inline size_t TypedArray::ByteLength() const { + return ElementSize() * ElementLength(); +} + +inline Napi::ArrayBuffer TypedArray::ArrayBuffer() const { + napi_value arrayBuffer; + napi_status status = napi_get_typedarray_info( + _env, _value, nullptr, nullptr, nullptr, &arrayBuffer, nullptr); + NAPI_THROW_IF_FAILED(_env, status, Napi::ArrayBuffer()); + return Napi::ArrayBuffer(_env, arrayBuffer); +} + +//////////////////////////////////////////////////////////////////////////////// +// TypedArrayOf class +//////////////////////////////////////////////////////////////////////////////// + 
+template +inline TypedArrayOf TypedArrayOf::New(napi_env env, + size_t elementLength, + napi_typedarray_type type) { + Napi::ArrayBuffer arrayBuffer = + Napi::ArrayBuffer::New(env, elementLength * sizeof(T)); + return New(env, elementLength, arrayBuffer, 0, type); +} + +template +inline TypedArrayOf TypedArrayOf::New(napi_env env, + size_t elementLength, + Napi::ArrayBuffer arrayBuffer, + size_t bufferOffset, + napi_typedarray_type type) { + napi_value value; + napi_status status = napi_create_typedarray( + env, type, elementLength, arrayBuffer, bufferOffset, &value); + NAPI_THROW_IF_FAILED(env, status, TypedArrayOf()); + + return TypedArrayOf( + env, + value, + type, + elementLength, + reinterpret_cast(reinterpret_cast(arrayBuffer.Data()) + + bufferOffset)); +} + +template +inline TypedArrayOf::TypedArrayOf() : TypedArray(), _data(nullptr) {} + +template +inline TypedArrayOf::TypedArrayOf(napi_env env, napi_value value) + : TypedArray(env, value), _data(nullptr) { + napi_status status = napi_ok; + if (value != nullptr) { + void* data = nullptr; + status = napi_get_typedarray_info( + _env, _value, &_type, &_length, &data, nullptr, nullptr); + _data = static_cast(data); + } else { + _type = TypedArrayTypeForPrimitiveType(); + _length = 0; + } + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +template +inline TypedArrayOf::TypedArrayOf(napi_env env, + napi_value value, + napi_typedarray_type type, + size_t length, + T* data) + : TypedArray(env, value, type, length), _data(data) { + if (!(type == TypedArrayTypeForPrimitiveType() || + (type == napi_uint8_clamped_array && + std::is_same::value))) { + NAPI_THROW_VOID(TypeError::New( + env, + "Array type must match the template parameter. 
" + "(Uint8 arrays may optionally have the \"clamped\" array type.)")); + } +} + +template +inline T& TypedArrayOf::operator[](size_t index) { + return _data[index]; +} + +template +inline const T& TypedArrayOf::operator[](size_t index) const { + return _data[index]; +} + +template +inline T* TypedArrayOf::Data() { + return _data; +} + +template +inline const T* TypedArrayOf::Data() const { + return _data; +} + +//////////////////////////////////////////////////////////////////////////////// +// Function class +//////////////////////////////////////////////////////////////////////////////// + +template +inline napi_status CreateFunction(napi_env env, + const char* utf8name, + napi_callback cb, + CbData* data, + napi_value* result) { + napi_status status = + napi_create_function(env, utf8name, NAPI_AUTO_LENGTH, cb, data, result); + if (status == napi_ok) { + status = Napi::details::AttachData(env, *result, data); + } + + return status; +} + +template +inline Function Function::New(napi_env env, const char* utf8name, void* data) { + napi_value result = nullptr; + napi_status status = napi_create_function(env, + utf8name, + NAPI_AUTO_LENGTH, + details::TemplatedVoidCallback, + data, + &result); + NAPI_THROW_IF_FAILED(env, status, Function()); + return Function(env, result); +} + +template +inline Function Function::New(napi_env env, const char* utf8name, void* data) { + napi_value result = nullptr; + napi_status status = napi_create_function(env, + utf8name, + NAPI_AUTO_LENGTH, + details::TemplatedCallback, + data, + &result); + NAPI_THROW_IF_FAILED(env, status, Function()); + return Function(env, result); +} + +template +inline Function Function::New(napi_env env, + const std::string& utf8name, + void* data) { + return Function::New(env, utf8name.c_str(), data); +} + +template +inline Function Function::New(napi_env env, + const std::string& utf8name, + void* data) { + return Function::New(env, utf8name.c_str(), data); +} + +template +inline Function 
Function::New(napi_env env, + Callable cb, + const char* utf8name, + void* data) { + using ReturnType = decltype(cb(CallbackInfo(nullptr, nullptr))); + using CbData = details::CallbackData; + auto callbackData = new CbData{std::move(cb), data}; + + napi_value value; + napi_status status = + CreateFunction(env, utf8name, CbData::Wrapper, callbackData, &value); + if (status != napi_ok) { + delete callbackData; + NAPI_THROW_IF_FAILED(env, status, Function()); + } + + return Function(env, value); +} + +template +inline Function Function::New(napi_env env, + Callable cb, + const std::string& utf8name, + void* data) { + return New(env, cb, utf8name.c_str(), data); +} + +inline Function::Function() : Object() {} + +inline Function::Function(napi_env env, napi_value value) + : Object(env, value) {} + +inline MaybeOrValue Function::operator()( + const std::initializer_list& args) const { + return Call(Env().Undefined(), args); +} + +inline MaybeOrValue Function::Call( + const std::initializer_list& args) const { + return Call(Env().Undefined(), args); +} + +inline MaybeOrValue Function::Call( + const std::vector& args) const { + return Call(Env().Undefined(), args); +} + +inline MaybeOrValue Function::Call( + const std::vector& args) const { + return Call(Env().Undefined(), args); +} + +inline MaybeOrValue Function::Call(size_t argc, + const napi_value* args) const { + return Call(Env().Undefined(), argc, args); +} + +inline MaybeOrValue Function::Call( + napi_value recv, const std::initializer_list& args) const { + return Call(recv, args.size(), args.begin()); +} + +inline MaybeOrValue Function::Call( + napi_value recv, const std::vector& args) const { + return Call(recv, args.size(), args.data()); +} + +inline MaybeOrValue Function::Call( + napi_value recv, const std::vector& args) const { + const size_t argc = args.size(); + const size_t stackArgsCount = 6; + napi_value stackArgs[stackArgsCount]; + std::vector heapArgs; + napi_value* argv; + if (argc <= stackArgsCount) { 
+ argv = stackArgs; + } else { + heapArgs.resize(argc); + argv = heapArgs.data(); + } + + for (size_t index = 0; index < argc; index++) { + argv[index] = static_cast(args[index]); + } + + return Call(recv, argc, argv); +} + +inline MaybeOrValue Function::Call(napi_value recv, + size_t argc, + const napi_value* args) const { + napi_value result; + napi_status status = + napi_call_function(_env, recv, _value, argc, args, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Value(_env, result), Napi::Value); +} + +inline MaybeOrValue Function::MakeCallback( + napi_value recv, + const std::initializer_list& args, + napi_async_context context) const { + return MakeCallback(recv, args.size(), args.begin(), context); +} + +inline MaybeOrValue Function::MakeCallback( + napi_value recv, + const std::vector& args, + napi_async_context context) const { + return MakeCallback(recv, args.size(), args.data(), context); +} + +inline MaybeOrValue Function::MakeCallback( + napi_value recv, + size_t argc, + const napi_value* args, + napi_async_context context) const { + napi_value result; + napi_status status = + napi_make_callback(_env, context, recv, _value, argc, args, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Value(_env, result), Napi::Value); +} + +inline MaybeOrValue Function::New( + const std::initializer_list& args) const { + return New(args.size(), args.begin()); +} + +inline MaybeOrValue Function::New( + const std::vector& args) const { + return New(args.size(), args.data()); +} + +inline MaybeOrValue Function::New(size_t argc, + const napi_value* args) const { + napi_value result; + napi_status status = napi_new_instance(_env, _value, argc, args, &result); + NAPI_RETURN_OR_THROW_IF_FAILED( + _env, status, Napi::Object(_env, result), Napi::Object); +} + +//////////////////////////////////////////////////////////////////////////////// +// Promise class +//////////////////////////////////////////////////////////////////////////////// + 
+inline Promise::Deferred Promise::Deferred::New(napi_env env) { + return Promise::Deferred(env); +} + +inline Promise::Deferred::Deferred(napi_env env) : _env(env) { + napi_status status = napi_create_promise(_env, &_deferred, &_promise); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline Promise Promise::Deferred::Promise() const { + return Napi::Promise(_env, _promise); +} + +inline Napi::Env Promise::Deferred::Env() const { + return Napi::Env(_env); +} + +inline void Promise::Deferred::Resolve(napi_value value) const { + napi_status status = napi_resolve_deferred(_env, _deferred, value); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline void Promise::Deferred::Reject(napi_value value) const { + napi_status status = napi_reject_deferred(_env, _deferred, value); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline Promise::Promise(napi_env env, napi_value value) : Object(env, value) {} + +//////////////////////////////////////////////////////////////////////////////// +// Buffer class +//////////////////////////////////////////////////////////////////////////////// + +template +inline Buffer Buffer::New(napi_env env, size_t length) { + napi_value value; + void* data; + napi_status status = + napi_create_buffer(env, length * sizeof(T), &data, &value); + NAPI_THROW_IF_FAILED(env, status, Buffer()); + return Buffer(env, value, length, static_cast(data)); +} + +template +inline Buffer Buffer::New(napi_env env, T* data, size_t length) { + napi_value value; + napi_status status = napi_create_external_buffer( + env, length * sizeof(T), data, nullptr, nullptr, &value); + NAPI_THROW_IF_FAILED(env, status, Buffer()); + return Buffer(env, value, length, data); +} + +template +template +inline Buffer Buffer::New(napi_env env, + T* data, + size_t length, + Finalizer finalizeCallback) { + napi_value value; + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), nullptr}); + napi_status status = + 
napi_create_external_buffer(env, + length * sizeof(T), + data, + details::FinalizeData::Wrapper, + finalizeData, + &value); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, Buffer()); + } + return Buffer(env, value, length, data); +} + +template +template +inline Buffer Buffer::New(napi_env env, + T* data, + size_t length, + Finalizer finalizeCallback, + Hint* finalizeHint) { + napi_value value; + details::FinalizeData* finalizeData = + new details::FinalizeData( + {std::move(finalizeCallback), finalizeHint}); + napi_status status = napi_create_external_buffer( + env, + length * sizeof(T), + data, + details::FinalizeData::WrapperWithHint, + finalizeData, + &value); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, Buffer()); + } + return Buffer(env, value, length, data); +} + +template +inline Buffer Buffer::Copy(napi_env env, const T* data, size_t length) { + napi_value value; + napi_status status = + napi_create_buffer_copy(env, length * sizeof(T), data, nullptr, &value); + NAPI_THROW_IF_FAILED(env, status, Buffer()); + return Buffer(env, value); +} + +template +inline Buffer::Buffer() : Uint8Array(), _length(0), _data(nullptr) {} + +template +inline Buffer::Buffer(napi_env env, napi_value value) + : Uint8Array(env, value), _length(0), _data(nullptr) {} + +template +inline Buffer::Buffer(napi_env env, napi_value value, size_t length, T* data) + : Uint8Array(env, value), _length(length), _data(data) {} + +template +inline size_t Buffer::Length() const { + EnsureInfo(); + return _length; +} + +template +inline T* Buffer::Data() const { + EnsureInfo(); + return _data; +} + +template +inline void Buffer::EnsureInfo() const { + // The Buffer instance may have been constructed from a napi_value whose + // length/data are not yet known. Fetch and cache these values just once, + // since they can never change during the lifetime of the Buffer. 
+ if (_data == nullptr) { + size_t byteLength; + void* voidData; + napi_status status = + napi_get_buffer_info(_env, _value, &voidData, &byteLength); + NAPI_THROW_IF_FAILED_VOID(_env, status); + _length = byteLength / sizeof(T); + _data = static_cast(voidData); + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Error class +//////////////////////////////////////////////////////////////////////////////// + +inline Error Error::New(napi_env env) { + napi_status status; + napi_value error = nullptr; + bool is_exception_pending; + napi_extended_error_info last_error_info_copy; + + { + // We must retrieve the last error info before doing anything else because + // doing anything else will replace the last error info. + const napi_extended_error_info* last_error_info; + status = napi_get_last_error_info(env, &last_error_info); + NAPI_FATAL_IF_FAILED(status, "Error::New", "napi_get_last_error_info"); + + // All fields of the `napi_extended_error_info` structure gets reset in + // subsequent Node-API function calls on the same `env`. This includes a + // call to `napi_is_exception_pending()`. So here it is necessary to make a + // copy of the information as the `error_code` field is used later on. + memcpy(&last_error_info_copy, + last_error_info, + sizeof(napi_extended_error_info)); + } + + status = napi_is_exception_pending(env, &is_exception_pending); + NAPI_FATAL_IF_FAILED(status, "Error::New", "napi_is_exception_pending"); + + // A pending exception takes precedence over any internal error status. + if (is_exception_pending) { + status = napi_get_and_clear_last_exception(env, &error); + NAPI_FATAL_IF_FAILED( + status, "Error::New", "napi_get_and_clear_last_exception"); + } else { + const char* error_message = last_error_info_copy.error_message != nullptr + ? 
last_error_info_copy.error_message + : "Error in native callback"; + + napi_value message; + status = napi_create_string_utf8( + env, error_message, std::strlen(error_message), &message); + NAPI_FATAL_IF_FAILED(status, "Error::New", "napi_create_string_utf8"); + + switch (last_error_info_copy.error_code) { + case napi_object_expected: + case napi_string_expected: + case napi_boolean_expected: + case napi_number_expected: + status = napi_create_type_error(env, nullptr, message, &error); + break; + default: + status = napi_create_error(env, nullptr, message, &error); + break; + } + NAPI_FATAL_IF_FAILED(status, "Error::New", "napi_create_error"); + } + + return Error(env, error); +} + +inline Error Error::New(napi_env env, const char* message) { + return Error::New( + env, message, std::strlen(message), napi_create_error); +} + +inline Error Error::New(napi_env env, const std::string& message) { + return Error::New( + env, message.c_str(), message.size(), napi_create_error); +} + +inline NAPI_NO_RETURN void Error::Fatal(const char* location, + const char* message) { + napi_fatal_error(location, NAPI_AUTO_LENGTH, message, NAPI_AUTO_LENGTH); +} + +inline Error::Error() : ObjectReference() {} + +inline Error::Error(napi_env env, napi_value value) + : ObjectReference(env, nullptr) { + if (value != nullptr) { + // Attempting to create a reference on the error object. + // If it's not a Object/Function/Symbol, this call will return an error + // status. 
+ napi_status status = napi_create_reference(env, value, 1, &_ref); + + if (status != napi_ok) { + napi_value wrappedErrorObj; + + // Create an error object + status = napi_create_object(env, &wrappedErrorObj); + NAPI_FATAL_IF_FAILED(status, "Error::Error", "napi_create_object"); + + // property flag that we attach to show the error object is wrapped + napi_property_descriptor wrapObjFlag = { + ERROR_WRAP_VALUE(), // Unique GUID identifier since Symbol isn't a + // viable option + nullptr, + nullptr, + nullptr, + nullptr, + Value::From(env, value), + napi_enumerable, + nullptr}; + + status = napi_define_properties(env, wrappedErrorObj, 1, &wrapObjFlag); + NAPI_FATAL_IF_FAILED(status, "Error::Error", "napi_define_properties"); + + // Create a reference on the newly wrapped object + status = napi_create_reference(env, wrappedErrorObj, 1, &_ref); + } + + // Avoid infinite recursion in the failure case. + NAPI_FATAL_IF_FAILED(status, "Error::Error", "napi_create_reference"); + } +} + +inline Object Error::Value() const { + if (_ref == nullptr) { + return Object(_env, nullptr); + } + + napi_value refValue; + napi_status status = napi_get_reference_value(_env, _ref, &refValue); + NAPI_THROW_IF_FAILED(_env, status, Object()); + + napi_valuetype type; + status = napi_typeof(_env, refValue, &type); + NAPI_THROW_IF_FAILED(_env, status, Object()); + + // If refValue isn't a symbol, then we proceed to whether the refValue has the + // wrapped error flag + if (type != napi_symbol) { + // We are checking if the object is wrapped + bool isWrappedObject = false; + + status = napi_has_property(_env, + refValue, + String::From(_env, ERROR_WRAP_VALUE()), + &isWrappedObject); + + // Don't care about status + if (isWrappedObject) { + napi_value unwrappedValue; + status = napi_get_property(_env, + refValue, + String::From(_env, ERROR_WRAP_VALUE()), + &unwrappedValue); + NAPI_THROW_IF_FAILED(_env, status, Object()); + + return Object(_env, unwrappedValue); + } + } + + return Object(_env, 
refValue); +} + +inline Error::Error(Error&& other) : ObjectReference(std::move(other)) {} + +inline Error& Error::operator=(Error&& other) { + static_cast*>(this)->operator=(std::move(other)); + return *this; +} + +inline Error::Error(const Error& other) : ObjectReference(other) {} + +inline Error& Error::operator=(const Error& other) { + Reset(); + + _env = other.Env(); + HandleScope scope(_env); + + napi_value value = other.Value(); + if (value != nullptr) { + napi_status status = napi_create_reference(_env, value, 1, &_ref); + NAPI_THROW_IF_FAILED(_env, status, *this); + } + + return *this; +} + +inline const std::string& Error::Message() const NAPI_NOEXCEPT { + if (_message.size() == 0 && _env != nullptr) { +#ifdef NAPI_CPP_EXCEPTIONS + try { + _message = Get("message").As(); + } catch (...) { + // Catch all errors here, to include e.g. a std::bad_alloc from + // the std::string::operator=, because this method may not throw. + } +#else // NAPI_CPP_EXCEPTIONS +#if defined(NODE_ADDON_API_ENABLE_MAYBE) + Napi::Value message_val; + if (Get("message").UnwrapTo(&message_val)) { + _message = message_val.As(); + } +#else + _message = Get("message").As(); +#endif +#endif // NAPI_CPP_EXCEPTIONS + } + return _message; +} + +// we created an object on the &_ref +inline void Error::ThrowAsJavaScriptException() const { + HandleScope scope(_env); + if (!IsEmpty()) { +#ifdef NODE_API_SWALLOW_UNTHROWABLE_EXCEPTIONS + bool pendingException = false; + + // check if there is already a pending exception. If so don't try to throw a + // new one as that is not allowed/possible + napi_status status = napi_is_exception_pending(_env, &pendingException); + + if ((status != napi_ok) || + ((status == napi_ok) && (pendingException == false))) { + // We intentionally don't use `NAPI_THROW_*` macros here to ensure + // that there is no possible recursion as `ThrowAsJavaScriptException` + // is part of `NAPI_THROW_*` macro definition for noexcept. 
+ + status = napi_throw(_env, Value()); + + if (status == napi_pending_exception) { + // The environment must be terminating as we checked earlier and there + // was no pending exception. In this case continuing will result + // in a fatal error and there is nothing the author has done incorrectly + // in their code that is worth flagging through a fatal error + return; + } + } else { + status = napi_pending_exception; + } +#else + // We intentionally don't use `NAPI_THROW_*` macros here to ensure + // that there is no possible recursion as `ThrowAsJavaScriptException` + // is part of `NAPI_THROW_*` macro definition for noexcept. + + napi_status status = napi_throw(_env, Value()); +#endif + +#ifdef NAPI_CPP_EXCEPTIONS + if (status != napi_ok) { + throw Error::New(_env); + } +#else // NAPI_CPP_EXCEPTIONS + NAPI_FATAL_IF_FAILED( + status, "Error::ThrowAsJavaScriptException", "napi_throw"); +#endif // NAPI_CPP_EXCEPTIONS + } +} + +#ifdef NAPI_CPP_EXCEPTIONS + +inline const char* Error::what() const NAPI_NOEXCEPT { + return Message().c_str(); +} + +#endif // NAPI_CPP_EXCEPTIONS + +inline const char* Error::ERROR_WRAP_VALUE() NAPI_NOEXCEPT { + return "4bda9e7e-4913-4dbc-95de-891cbf66598e-errorVal"; +} + +template +inline TError Error::New(napi_env env, + const char* message, + size_t length, + create_error_fn create_error) { + napi_value str; + napi_status status = napi_create_string_utf8(env, message, length, &str); + NAPI_THROW_IF_FAILED(env, status, TError()); + + napi_value error; + status = create_error(env, nullptr, str, &error); + NAPI_THROW_IF_FAILED(env, status, TError()); + + return TError(env, error); +} + +inline TypeError TypeError::New(napi_env env, const char* message) { + return Error::New( + env, message, std::strlen(message), napi_create_type_error); +} + +inline TypeError TypeError::New(napi_env env, const std::string& message) { + return Error::New( + env, message.c_str(), message.size(), napi_create_type_error); +} + +inline TypeError::TypeError() : 
Error() {} + +inline TypeError::TypeError(napi_env env, napi_value value) + : Error(env, value) {} + +inline RangeError RangeError::New(napi_env env, const char* message) { + return Error::New( + env, message, std::strlen(message), napi_create_range_error); +} + +inline RangeError RangeError::New(napi_env env, const std::string& message) { + return Error::New( + env, message.c_str(), message.size(), napi_create_range_error); +} + +inline RangeError::RangeError() : Error() {} + +inline RangeError::RangeError(napi_env env, napi_value value) + : Error(env, value) {} + +//////////////////////////////////////////////////////////////////////////////// +// Reference class +//////////////////////////////////////////////////////////////////////////////// + +template +inline Reference Reference::New(const T& value, + uint32_t initialRefcount) { + napi_env env = value.Env(); + napi_value val = value; + + if (val == nullptr) { + return Reference(env, nullptr); + } + + napi_ref ref; + napi_status status = napi_create_reference(env, value, initialRefcount, &ref); + NAPI_THROW_IF_FAILED(env, status, Reference()); + + return Reference(env, ref); +} + +template +inline Reference::Reference() + : _env(nullptr), _ref(nullptr), _suppressDestruct(false) {} + +template +inline Reference::Reference(napi_env env, napi_ref ref) + : _env(env), _ref(ref), _suppressDestruct(false) {} + +template +inline Reference::~Reference() { + if (_ref != nullptr) { + if (!_suppressDestruct) { + napi_delete_reference(_env, _ref); + } + + _ref = nullptr; + } +} + +template +inline Reference::Reference(Reference&& other) + : _env(other._env), + _ref(other._ref), + _suppressDestruct(other._suppressDestruct) { + other._env = nullptr; + other._ref = nullptr; + other._suppressDestruct = false; +} + +template +inline Reference& Reference::operator=(Reference&& other) { + Reset(); + _env = other._env; + _ref = other._ref; + _suppressDestruct = other._suppressDestruct; + other._env = nullptr; + other._ref = 
nullptr; + other._suppressDestruct = false; + return *this; +} + +template +inline Reference::Reference(const Reference& other) + : _env(other._env), _ref(nullptr), _suppressDestruct(false) { + HandleScope scope(_env); + + napi_value value = other.Value(); + if (value != nullptr) { + // Copying is a limited scenario (currently only used for Error object) and + // always creates a strong reference to the given value even if the incoming + // reference is weak. + napi_status status = napi_create_reference(_env, value, 1, &_ref); + NAPI_FATAL_IF_FAILED( + status, "Reference::Reference", "napi_create_reference"); + } +} + +template +inline Reference::operator napi_ref() const { + return _ref; +} + +template +inline bool Reference::operator==(const Reference& other) const { + HandleScope scope(_env); + return this->Value().StrictEquals(other.Value()); +} + +template +inline bool Reference::operator!=(const Reference& other) const { + return !this->operator==(other); +} + +template +inline Napi::Env Reference::Env() const { + return Napi::Env(_env); +} + +template +inline bool Reference::IsEmpty() const { + return _ref == nullptr; +} + +template +inline T Reference::Value() const { + if (_ref == nullptr) { + return T(_env, nullptr); + } + + napi_value value; + napi_status status = napi_get_reference_value(_env, _ref, &value); + NAPI_THROW_IF_FAILED(_env, status, T()); + return T(_env, value); +} + +template +inline uint32_t Reference::Ref() const { + uint32_t result; + napi_status status = napi_reference_ref(_env, _ref, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +template +inline uint32_t Reference::Unref() const { + uint32_t result; + napi_status status = napi_reference_unref(_env, _ref, &result); + NAPI_THROW_IF_FAILED(_env, status, 0); + return result; +} + +template +inline void Reference::Reset() { + if (_ref != nullptr) { + napi_status status = napi_delete_reference(_env, _ref); + NAPI_THROW_IF_FAILED_VOID(_env, status); + _ref = 
nullptr; + } +} + +template +inline void Reference::Reset(const T& value, uint32_t refcount) { + Reset(); + _env = value.Env(); + + napi_value val = value; + if (val != nullptr) { + napi_status status = napi_create_reference(_env, value, refcount, &_ref); + NAPI_THROW_IF_FAILED_VOID(_env, status); + } +} + +template +inline void Reference::SuppressDestruct() { + _suppressDestruct = true; +} + +template +inline Reference Weak(T value) { + return Reference::New(value, 0); +} + +inline ObjectReference Weak(Object value) { + return Reference::New(value, 0); +} + +inline FunctionReference Weak(Function value) { + return Reference::New(value, 0); +} + +template +inline Reference Persistent(T value) { + return Reference::New(value, 1); +} + +inline ObjectReference Persistent(Object value) { + return Reference::New(value, 1); +} + +inline FunctionReference Persistent(Function value) { + return Reference::New(value, 1); +} + +//////////////////////////////////////////////////////////////////////////////// +// ObjectReference class +//////////////////////////////////////////////////////////////////////////////// + +inline ObjectReference::ObjectReference() : Reference() {} + +inline ObjectReference::ObjectReference(napi_env env, napi_ref ref) + : Reference(env, ref) {} + +inline ObjectReference::ObjectReference(Reference&& other) + : Reference(std::move(other)) {} + +inline ObjectReference& ObjectReference::operator=(Reference&& other) { + static_cast*>(this)->operator=(std::move(other)); + return *this; +} + +inline ObjectReference::ObjectReference(ObjectReference&& other) + : Reference(std::move(other)) {} + +inline ObjectReference& ObjectReference::operator=(ObjectReference&& other) { + static_cast*>(this)->operator=(std::move(other)); + return *this; +} + +inline ObjectReference::ObjectReference(const ObjectReference& other) + : Reference(other) {} + +inline MaybeOrValue ObjectReference::Get( + const char* utf8name) const { + EscapableHandleScope scope(_env); + 
MaybeOrValue result = Value().Get(utf8name); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue ObjectReference::Get( + const std::string& utf8name) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Get(utf8name); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue ObjectReference::Set(const char* utf8name, + napi_value value) const { + HandleScope scope(_env); + return Value().Set(utf8name, value); +} + +inline MaybeOrValue ObjectReference::Set(const char* utf8name, + Napi::Value value) const { + HandleScope scope(_env); + return Value().Set(utf8name, value); +} + +inline MaybeOrValue ObjectReference::Set(const char* utf8name, + const char* utf8value) const { + HandleScope scope(_env); + return Value().Set(utf8name, utf8value); +} + +inline MaybeOrValue ObjectReference::Set(const char* utf8name, + bool boolValue) const { + HandleScope scope(_env); + return Value().Set(utf8name, boolValue); +} + +inline MaybeOrValue ObjectReference::Set(const char* utf8name, + double numberValue) const { + HandleScope scope(_env); + return Value().Set(utf8name, numberValue); +} + +inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, + napi_value value) const { + HandleScope scope(_env); + return Value().Set(utf8name, value); +} + +inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, + Napi::Value value) const { + HandleScope scope(_env); + return Value().Set(utf8name, value); +} + +inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, + std::string& utf8value) const { + HandleScope 
scope(_env); + return Value().Set(utf8name, utf8value); +} + +inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, + bool boolValue) const { + HandleScope scope(_env); + return Value().Set(utf8name, boolValue); +} + +inline MaybeOrValue ObjectReference::Set(const std::string& utf8name, + double numberValue) const { + HandleScope scope(_env); + return Value().Set(utf8name, numberValue); +} + +inline MaybeOrValue ObjectReference::Get(uint32_t index) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Get(index); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue ObjectReference::Set(uint32_t index, + napi_value value) const { + HandleScope scope(_env); + return Value().Set(index, value); +} + +inline MaybeOrValue ObjectReference::Set(uint32_t index, + Napi::Value value) const { + HandleScope scope(_env); + return Value().Set(index, value); +} + +inline MaybeOrValue ObjectReference::Set(uint32_t index, + const char* utf8value) const { + HandleScope scope(_env); + return Value().Set(index, utf8value); +} + +inline MaybeOrValue ObjectReference::Set( + uint32_t index, const std::string& utf8value) const { + HandleScope scope(_env); + return Value().Set(index, utf8value); +} + +inline MaybeOrValue ObjectReference::Set(uint32_t index, + bool boolValue) const { + HandleScope scope(_env); + return Value().Set(index, boolValue); +} + +inline MaybeOrValue ObjectReference::Set(uint32_t index, + double numberValue) const { + HandleScope scope(_env); + return Value().Set(index, numberValue); +} + +//////////////////////////////////////////////////////////////////////////////// +// FunctionReference class +//////////////////////////////////////////////////////////////////////////////// + +inline 
FunctionReference::FunctionReference() : Reference() {} + +inline FunctionReference::FunctionReference(napi_env env, napi_ref ref) + : Reference(env, ref) {} + +inline FunctionReference::FunctionReference(Reference&& other) + : Reference(std::move(other)) {} + +inline FunctionReference& FunctionReference::operator=( + Reference&& other) { + static_cast*>(this)->operator=(std::move(other)); + return *this; +} + +inline FunctionReference::FunctionReference(FunctionReference&& other) + : Reference(std::move(other)) {} + +inline FunctionReference& FunctionReference::operator=( + FunctionReference&& other) { + static_cast*>(this)->operator=(std::move(other)); + return *this; +} + +inline MaybeOrValue FunctionReference::operator()( + const std::initializer_list& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value()(args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::Call( + const std::initializer_list& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Call(args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::Call( + const std::vector& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Call(args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::Call( + napi_value recv, const 
std::initializer_list& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Call(recv, args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::Call( + napi_value recv, const std::vector& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Call(recv, args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::Call( + napi_value recv, size_t argc, const napi_value* args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().Call(recv, argc, args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::MakeCallback( + napi_value recv, + const std::initializer_list& args, + napi_async_context context) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().MakeCallback(recv, args, context); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::MakeCallback( + napi_value recv, + const std::vector& args, + napi_async_context context) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().MakeCallback(recv, args, context); +#ifdef 
NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::MakeCallback( + napi_value recv, + size_t argc, + const napi_value* args, + napi_async_context context) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = + Value().MakeCallback(recv, argc, args, context); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap())); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Value(); + } + return scope.Escape(result); +#endif +} + +inline MaybeOrValue FunctionReference::New( + const std::initializer_list& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().New(args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap()).As()); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Object(); + } + return scope.Escape(result).As(); +#endif +} + +inline MaybeOrValue FunctionReference::New( + const std::vector& args) const { + EscapableHandleScope scope(_env); + MaybeOrValue result = Value().New(args); +#ifdef NODE_ADDON_API_ENABLE_MAYBE + if (result.IsJust()) { + return Just(scope.Escape(result.Unwrap()).As()); + } + return result; +#else + if (scope.Env().IsExceptionPending()) { + return Object(); + } + return scope.Escape(result).As(); +#endif +} + +//////////////////////////////////////////////////////////////////////////////// +// CallbackInfo class +//////////////////////////////////////////////////////////////////////////////// + +inline CallbackInfo::CallbackInfo(napi_env env, napi_callback_info info) + : _env(env), + _info(info), + _this(nullptr), + _dynamicArgs(nullptr), + _data(nullptr) { + _argc = _staticArgCount; + _argv = _staticArgs; + napi_status 
status = + napi_get_cb_info(env, info, &_argc, _argv, &_this, &_data); + NAPI_THROW_IF_FAILED_VOID(_env, status); + + if (_argc > _staticArgCount) { + // Use either a fixed-size array (on the stack) or a dynamically-allocated + // array (on the heap) depending on the number of args. + _dynamicArgs = new napi_value[_argc]; + _argv = _dynamicArgs; + + status = napi_get_cb_info(env, info, &_argc, _argv, nullptr, nullptr); + NAPI_THROW_IF_FAILED_VOID(_env, status); + } +} + +inline CallbackInfo::~CallbackInfo() { + if (_dynamicArgs != nullptr) { + delete[] _dynamicArgs; + } +} + +inline CallbackInfo::operator napi_callback_info() const { + return _info; +} + +inline Value CallbackInfo::NewTarget() const { + napi_value newTarget; + napi_status status = napi_get_new_target(_env, _info, &newTarget); + NAPI_THROW_IF_FAILED(_env, status, Value()); + return Value(_env, newTarget); +} + +inline bool CallbackInfo::IsConstructCall() const { + return !NewTarget().IsEmpty(); +} + +inline Napi::Env CallbackInfo::Env() const { + return Napi::Env(_env); +} + +inline size_t CallbackInfo::Length() const { + return _argc; +} + +inline const Value CallbackInfo::operator[](size_t index) const { + return index < _argc ? 
Value(_env, _argv[index]) : Env().Undefined(); +} + +inline Value CallbackInfo::This() const { + if (_this == nullptr) { + return Env().Undefined(); + } + return Object(_env, _this); +} + +inline void* CallbackInfo::Data() const { + return _data; +} + +inline void CallbackInfo::SetData(void* data) { + _data = data; +} + +//////////////////////////////////////////////////////////////////////////////// +// PropertyDescriptor class +//////////////////////////////////////////////////////////////////////////////// + +template +PropertyDescriptor PropertyDescriptor::Accessor( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + + desc.utf8name = utf8name; + desc.getter = details::TemplatedCallback; + desc.attributes = attributes; + desc.data = data; + + return desc; +} + +template +PropertyDescriptor PropertyDescriptor::Accessor( + const std::string& utf8name, + napi_property_attributes attributes, + void* data) { + return Accessor(utf8name.c_str(), attributes, data); +} + +template +PropertyDescriptor PropertyDescriptor::Accessor( + Name name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + + desc.name = name; + desc.getter = details::TemplatedCallback; + desc.attributes = attributes; + desc.data = data; + + return desc; +} + +template +PropertyDescriptor PropertyDescriptor::Accessor( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + + desc.utf8name = utf8name; + desc.getter = details::TemplatedCallback; + desc.setter = details::TemplatedVoidCallback; + desc.attributes = attributes; + desc.data = data; + + return desc; +} + +template +PropertyDescriptor PropertyDescriptor::Accessor( + const std::string& utf8name, + napi_property_attributes attributes, + void* data) { + return Accessor(utf8name.c_str(), attributes, data); +} + 
+template +PropertyDescriptor PropertyDescriptor::Accessor( + Name name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + + desc.name = name; + desc.getter = details::TemplatedCallback; + desc.setter = details::TemplatedVoidCallback; + desc.attributes = attributes; + desc.data = data; + + return desc; +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Napi::Env env, + Napi::Object object, + const char* utf8name, + Getter getter, + napi_property_attributes attributes, + void* data) { + using CbData = details::CallbackData; + auto callbackData = new CbData({getter, data}); + + napi_status status = AttachData(env, object, callbackData); + if (status != napi_ok) { + delete callbackData; + NAPI_THROW_IF_FAILED(env, status, napi_property_descriptor()); + } + + return PropertyDescriptor({utf8name, + nullptr, + nullptr, + CbData::Wrapper, + nullptr, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Napi::Env env, + Napi::Object object, + const std::string& utf8name, + Getter getter, + napi_property_attributes attributes, + void* data) { + return Accessor(env, object, utf8name.c_str(), getter, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Napi::Env env, + Napi::Object object, + Name name, + Getter getter, + napi_property_attributes attributes, + void* data) { + using CbData = details::CallbackData; + auto callbackData = new CbData({getter, data}); + + napi_status status = AttachData(env, object, callbackData); + if (status != napi_ok) { + delete callbackData; + NAPI_THROW_IF_FAILED(env, status, napi_property_descriptor()); + } + + return PropertyDescriptor({nullptr, + name, + nullptr, + CbData::Wrapper, + nullptr, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Napi::Env env, + Napi::Object object, + const 
char* utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* data) { + using CbData = details::AccessorCallbackData; + auto callbackData = new CbData({getter, setter, data}); + + napi_status status = AttachData(env, object, callbackData); + if (status != napi_ok) { + delete callbackData; + NAPI_THROW_IF_FAILED(env, status, napi_property_descriptor()); + } + + return PropertyDescriptor({utf8name, + nullptr, + nullptr, + CbData::GetterWrapper, + CbData::SetterWrapper, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Napi::Env env, + Napi::Object object, + const std::string& utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* data) { + return Accessor( + env, object, utf8name.c_str(), getter, setter, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Accessor( + Napi::Env env, + Napi::Object object, + Name name, + Getter getter, + Setter setter, + napi_property_attributes attributes, + void* data) { + using CbData = details::AccessorCallbackData; + auto callbackData = new CbData({getter, setter, data}); + + napi_status status = AttachData(env, object, callbackData); + if (status != napi_ok) { + delete callbackData; + NAPI_THROW_IF_FAILED(env, status, napi_property_descriptor()); + } + + return PropertyDescriptor({nullptr, + name, + nullptr, + CbData::GetterWrapper, + CbData::SetterWrapper, + nullptr, + attributes, + callbackData}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + Napi::Env env, + Napi::Object /*object*/, + const char* utf8name, + Callable cb, + napi_property_attributes attributes, + void* data) { + return PropertyDescriptor({utf8name, + nullptr, + nullptr, + nullptr, + nullptr, + Napi::Function::New(env, cb, utf8name, data), + attributes, + nullptr}); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + Napi::Env env, + Napi::Object 
object, + const std::string& utf8name, + Callable cb, + napi_property_attributes attributes, + void* data) { + return Function(env, object, utf8name.c_str(), cb, attributes, data); +} + +template +inline PropertyDescriptor PropertyDescriptor::Function( + Napi::Env env, + Napi::Object /*object*/, + Name name, + Callable cb, + napi_property_attributes attributes, + void* data) { + return PropertyDescriptor({nullptr, + name, + nullptr, + nullptr, + nullptr, + Napi::Function::New(env, cb, nullptr, data), + attributes, + nullptr}); +} + +inline PropertyDescriptor PropertyDescriptor::Value( + const char* utf8name, + napi_value value, + napi_property_attributes attributes) { + return PropertyDescriptor({utf8name, + nullptr, + nullptr, + nullptr, + nullptr, + value, + attributes, + nullptr}); +} + +inline PropertyDescriptor PropertyDescriptor::Value( + const std::string& utf8name, + napi_value value, + napi_property_attributes attributes) { + return Value(utf8name.c_str(), value, attributes); +} + +inline PropertyDescriptor PropertyDescriptor::Value( + napi_value name, napi_value value, napi_property_attributes attributes) { + return PropertyDescriptor( + {nullptr, name, nullptr, nullptr, nullptr, value, attributes, nullptr}); +} + +inline PropertyDescriptor PropertyDescriptor::Value( + Name name, Napi::Value value, napi_property_attributes attributes) { + napi_value nameValue = name; + napi_value valueValue = value; + return PropertyDescriptor::Value(nameValue, valueValue, attributes); +} + +inline PropertyDescriptor::PropertyDescriptor(napi_property_descriptor desc) + : _desc(desc) {} + +inline PropertyDescriptor::operator napi_property_descriptor&() { + return _desc; +} + +inline PropertyDescriptor::operator const napi_property_descriptor&() const { + return _desc; +} + +//////////////////////////////////////////////////////////////////////////////// +// InstanceWrap class +//////////////////////////////////////////////////////////////////////////////// + +template 
+inline void InstanceWrap::AttachPropData( + napi_env env, napi_value value, const napi_property_descriptor* prop) { + napi_status status; + if (!(prop->attributes & napi_static)) { + if (prop->method == T::InstanceVoidMethodCallbackWrapper) { + status = Napi::details::AttachData( + env, value, static_cast(prop->data)); + NAPI_THROW_IF_FAILED_VOID(env, status); + } else if (prop->method == T::InstanceMethodCallbackWrapper) { + status = Napi::details::AttachData( + env, value, static_cast(prop->data)); + NAPI_THROW_IF_FAILED_VOID(env, status); + } else if (prop->getter == T::InstanceGetterCallbackWrapper || + prop->setter == T::InstanceSetterCallbackWrapper) { + status = Napi::details::AttachData( + env, value, static_cast(prop->data)); + NAPI_THROW_IF_FAILED_VOID(env, status); + } + } +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + const char* utf8name, + InstanceVoidMethodCallback method, + napi_property_attributes attributes, + void* data) { + InstanceVoidMethodCallbackData* callbackData = + new InstanceVoidMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = T::InstanceVoidMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + const char* utf8name, + InstanceMethodCallback method, + napi_property_attributes attributes, + void* data) { + InstanceMethodCallbackData* callbackData = + new InstanceMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = T::InstanceMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + Symbol name, + InstanceVoidMethodCallback method, + napi_property_attributes attributes, + void* data) { + 
InstanceVoidMethodCallbackData* callbackData = + new InstanceVoidMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = T::InstanceVoidMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + Symbol name, + InstanceMethodCallback method, + napi_property_attributes attributes, + void* data) { + InstanceMethodCallbackData* callbackData = + new InstanceMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = T::InstanceMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = attributes; + return desc; +} + +template +template ::InstanceVoidMethodCallback method> +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = details::TemplatedInstanceVoidCallback; + desc.data = data; + desc.attributes = attributes; + return desc; +} + +template +template ::InstanceMethodCallback method> +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = details::TemplatedInstanceCallback; + desc.data = data; + desc.attributes = attributes; + return desc; +} + +template +template ::InstanceVoidMethodCallback method> +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + Symbol name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = details::TemplatedInstanceVoidCallback; + desc.data = data; + desc.attributes = attributes; + 
return desc; +} + +template +template ::InstanceMethodCallback method> +inline ClassPropertyDescriptor InstanceWrap::InstanceMethod( + Symbol name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = details::TemplatedInstanceCallback; + desc.data = data; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceAccessor( + const char* utf8name, + InstanceGetterCallback getter, + InstanceSetterCallback setter, + napi_property_attributes attributes, + void* data) { + InstanceAccessorCallbackData* callbackData = + new InstanceAccessorCallbackData({getter, setter, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.getter = getter != nullptr ? T::InstanceGetterCallbackWrapper : nullptr; + desc.setter = setter != nullptr ? T::InstanceSetterCallbackWrapper : nullptr; + desc.data = callbackData; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceAccessor( + Symbol name, + InstanceGetterCallback getter, + InstanceSetterCallback setter, + napi_property_attributes attributes, + void* data) { + InstanceAccessorCallbackData* callbackData = + new InstanceAccessorCallbackData({getter, setter, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.getter = getter != nullptr ? T::InstanceGetterCallbackWrapper : nullptr; + desc.setter = setter != nullptr ? 
T::InstanceSetterCallbackWrapper : nullptr; + desc.data = callbackData; + desc.attributes = attributes; + return desc; +} + +template +template ::InstanceGetterCallback getter, + typename InstanceWrap::InstanceSetterCallback setter> +inline ClassPropertyDescriptor InstanceWrap::InstanceAccessor( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.getter = details::TemplatedInstanceCallback; + desc.setter = This::WrapSetter(This::SetterTag()); + desc.data = data; + desc.attributes = attributes; + return desc; +} + +template +template ::InstanceGetterCallback getter, + typename InstanceWrap::InstanceSetterCallback setter> +inline ClassPropertyDescriptor InstanceWrap::InstanceAccessor( + Symbol name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.getter = details::TemplatedInstanceCallback; + desc.setter = This::WrapSetter(This::SetterTag()); + desc.data = data; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceValue( + const char* utf8name, + Napi::Value value, + napi_property_attributes attributes) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.value = value; + desc.attributes = attributes; + return desc; +} + +template +inline ClassPropertyDescriptor InstanceWrap::InstanceValue( + Symbol name, Napi::Value value, napi_property_attributes attributes) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.value = value; + desc.attributes = attributes; + return desc; +} + +template +inline napi_value InstanceWrap::InstanceVoidMethodCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + InstanceVoidMethodCallbackData* 
callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + T* instance = T::Unwrap(callbackInfo.This().As()); + auto cb = callbackData->callback; + (instance->*cb)(callbackInfo); + return nullptr; + }); +} + +template +inline napi_value InstanceWrap::InstanceMethodCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + InstanceMethodCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + T* instance = T::Unwrap(callbackInfo.This().As()); + auto cb = callbackData->callback; + return (instance->*cb)(callbackInfo); + }); +} + +template +inline napi_value InstanceWrap::InstanceGetterCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + InstanceAccessorCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + T* instance = T::Unwrap(callbackInfo.This().As()); + auto cb = callbackData->getterCallback; + return (instance->*cb)(callbackInfo); + }); +} + +template +inline napi_value InstanceWrap::InstanceSetterCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + InstanceAccessorCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + T* instance = T::Unwrap(callbackInfo.This().As()); + auto cb = callbackData->setterCallback; + (instance->*cb)(callbackInfo, callbackInfo[0]); + return nullptr; + }); +} + +template +template ::InstanceSetterCallback method> +inline napi_value InstanceWrap::WrappedMethod( + napi_env env, napi_callback_info info) NAPI_NOEXCEPT { + return details::WrapCallback([&] { + const CallbackInfo cbInfo(env, info); + T* instance = T::Unwrap(cbInfo.This().As()); + (instance->*method)(cbInfo, 
cbInfo[0]); + return nullptr; + }); +} + +//////////////////////////////////////////////////////////////////////////////// +// ObjectWrap class +//////////////////////////////////////////////////////////////////////////////// + +template +inline ObjectWrap::ObjectWrap(const Napi::CallbackInfo& callbackInfo) { + napi_env env = callbackInfo.Env(); + napi_value wrapper = callbackInfo.This(); + napi_status status; + napi_ref ref; + T* instance = static_cast(this); + status = napi_wrap(env, wrapper, instance, FinalizeCallback, nullptr, &ref); + NAPI_THROW_IF_FAILED_VOID(env, status); + + Reference* instanceRef = instance; + *instanceRef = Reference(env, ref); +} + +template +inline ObjectWrap::~ObjectWrap() { + // If the JS object still exists at this point, remove the finalizer added + // through `napi_wrap()`. + if (!IsEmpty()) { + Object object = Value(); + // It is not valid to call `napi_remove_wrap()` with an empty `object`. + // This happens e.g. during garbage collection. + if (!object.IsEmpty() && _construction_failed) { + napi_remove_wrap(Env(), object, nullptr); + } + } +} + +template +inline T* ObjectWrap::Unwrap(Object wrapper) { + void* unwrapped; + napi_status status = napi_unwrap(wrapper.Env(), wrapper, &unwrapped); + NAPI_THROW_IF_FAILED(wrapper.Env(), status, nullptr); + return static_cast(unwrapped); +} + +template +inline Function ObjectWrap::DefineClass( + Napi::Env env, + const char* utf8name, + const size_t props_count, + const napi_property_descriptor* descriptors, + void* data) { + napi_status status; + std::vector props(props_count); + + // We copy the descriptors to a local array because before defining the class + // we must replace static method property descriptors with value property + // descriptors such that the value is a function-valued `napi_value` created + // with `CreateFunction()`. 
+ // + // This replacement could be made for instance methods as well, but V8 aborts + // if we do that, because it expects methods defined on the prototype template + // to have `FunctionTemplate`s. + for (size_t index = 0; index < props_count; index++) { + props[index] = descriptors[index]; + napi_property_descriptor* prop = &props[index]; + if (prop->method == T::StaticMethodCallbackWrapper) { + status = + CreateFunction(env, + utf8name, + prop->method, + static_cast(prop->data), + &(prop->value)); + NAPI_THROW_IF_FAILED(env, status, Function()); + prop->method = nullptr; + prop->data = nullptr; + } else if (prop->method == T::StaticVoidMethodCallbackWrapper) { + status = + CreateFunction(env, + utf8name, + prop->method, + static_cast(prop->data), + &(prop->value)); + NAPI_THROW_IF_FAILED(env, status, Function()); + prop->method = nullptr; + prop->data = nullptr; + } + } + + napi_value value; + status = napi_define_class(env, + utf8name, + NAPI_AUTO_LENGTH, + T::ConstructorCallbackWrapper, + data, + props_count, + props.data(), + &value); + NAPI_THROW_IF_FAILED(env, status, Function()); + + // After defining the class we iterate once more over the property descriptors + // and attach the data associated with accessors and instance methods to the + // newly created JavaScript class. + for (size_t idx = 0; idx < props_count; idx++) { + const napi_property_descriptor* prop = &props[idx]; + + if (prop->getter == T::StaticGetterCallbackWrapper || + prop->setter == T::StaticSetterCallbackWrapper) { + status = Napi::details::AttachData( + env, value, static_cast(prop->data)); + NAPI_THROW_IF_FAILED(env, status, Function()); + } else { + // InstanceWrap::AttachPropData is responsible for attaching the data + // of instance methods and accessors. 
+ T::AttachPropData(env, value, prop); + } + } + + return Function(env, value); +} + +template +inline Function ObjectWrap::DefineClass( + Napi::Env env, + const char* utf8name, + const std::initializer_list>& properties, + void* data) { + return DefineClass( + env, + utf8name, + properties.size(), + reinterpret_cast(properties.begin()), + data); +} + +template +inline Function ObjectWrap::DefineClass( + Napi::Env env, + const char* utf8name, + const std::vector>& properties, + void* data) { + return DefineClass( + env, + utf8name, + properties.size(), + reinterpret_cast(properties.data()), + data); +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + const char* utf8name, + StaticVoidMethodCallback method, + napi_property_attributes attributes, + void* data) { + StaticVoidMethodCallbackData* callbackData = + new StaticVoidMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = T::StaticVoidMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + const char* utf8name, + StaticMethodCallback method, + napi_property_attributes attributes, + void* data) { + StaticMethodCallbackData* callbackData = + new StaticMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = T::StaticMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + Symbol name, + StaticVoidMethodCallback method, + napi_property_attributes attributes, + void* data) { + StaticVoidMethodCallbackData* callbackData = + new StaticVoidMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); 
+ desc.name = name; + desc.method = T::StaticVoidMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + Symbol name, + StaticMethodCallback method, + napi_property_attributes attributes, + void* data) { + StaticMethodCallbackData* callbackData = + new StaticMethodCallbackData({method, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = T::StaticMethodCallbackWrapper; + desc.data = callbackData; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +template ::StaticVoidMethodCallback method> +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = details::TemplatedVoidCallback; + desc.data = data; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +template ::StaticVoidMethodCallback method> +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + Symbol name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = details::TemplatedVoidCallback; + desc.data = data; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +template ::StaticMethodCallback method> +inline ClassPropertyDescriptor ObjectWrap::StaticMethod( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.method = details::TemplatedCallback; + desc.data = data; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +template ::StaticMethodCallback method> +inline 
ClassPropertyDescriptor ObjectWrap::StaticMethod( + Symbol name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.method = details::TemplatedCallback; + desc.data = data; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticAccessor( + const char* utf8name, + StaticGetterCallback getter, + StaticSetterCallback setter, + napi_property_attributes attributes, + void* data) { + StaticAccessorCallbackData* callbackData = + new StaticAccessorCallbackData({getter, setter, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.getter = getter != nullptr ? T::StaticGetterCallbackWrapper : nullptr; + desc.setter = setter != nullptr ? T::StaticSetterCallbackWrapper : nullptr; + desc.data = callbackData; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticAccessor( + Symbol name, + StaticGetterCallback getter, + StaticSetterCallback setter, + napi_property_attributes attributes, + void* data) { + StaticAccessorCallbackData* callbackData = + new StaticAccessorCallbackData({getter, setter, data}); + + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.getter = getter != nullptr ? T::StaticGetterCallbackWrapper : nullptr; + desc.setter = setter != nullptr ? 
T::StaticSetterCallbackWrapper : nullptr; + desc.data = callbackData; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +template ::StaticGetterCallback getter, + typename ObjectWrap::StaticSetterCallback setter> +inline ClassPropertyDescriptor ObjectWrap::StaticAccessor( + const char* utf8name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.getter = details::TemplatedCallback; + desc.setter = This::WrapStaticSetter(This::StaticSetterTag()); + desc.data = data; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +template ::StaticGetterCallback getter, + typename ObjectWrap::StaticSetterCallback setter> +inline ClassPropertyDescriptor ObjectWrap::StaticAccessor( + Symbol name, napi_property_attributes attributes, void* data) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.getter = details::TemplatedCallback; + desc.setter = This::WrapStaticSetter(This::StaticSetterTag()); + desc.data = data; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticValue( + const char* utf8name, + Napi::Value value, + napi_property_attributes attributes) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.utf8name = utf8name; + desc.value = value; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline ClassPropertyDescriptor ObjectWrap::StaticValue( + Symbol name, Napi::Value value, napi_property_attributes attributes) { + napi_property_descriptor desc = napi_property_descriptor(); + desc.name = name; + desc.value = value; + desc.attributes = + static_cast(attributes | napi_static); + return desc; +} + +template +inline Value ObjectWrap::OnCalledAsFunction( + const Napi::CallbackInfo& callbackInfo) { + NAPI_THROW( + 
TypeError::New(callbackInfo.Env(), + "Class constructors cannot be invoked without 'new'"), + Napi::Value()); +} + +template +inline void ObjectWrap::Finalize(Napi::Env /*env*/) {} + +template +inline napi_value ObjectWrap::ConstructorCallbackWrapper( + napi_env env, napi_callback_info info) { + napi_value new_target; + napi_status status = napi_get_new_target(env, info, &new_target); + if (status != napi_ok) return nullptr; + + bool isConstructCall = (new_target != nullptr); + if (!isConstructCall) { + return details::WrapCallback( + [&] { return T::OnCalledAsFunction(CallbackInfo(env, info)); }); + } + + napi_value wrapper = details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + T* instance = new T(callbackInfo); +#ifdef NAPI_CPP_EXCEPTIONS + instance->_construction_failed = false; +#else + if (callbackInfo.Env().IsExceptionPending()) { + // We need to clear the exception so that removing the wrap might work. + Error e = callbackInfo.Env().GetAndClearPendingException(); + delete instance; + e.ThrowAsJavaScriptException(); + } else { + instance->_construction_failed = false; + } +#endif // NAPI_CPP_EXCEPTIONS + return callbackInfo.This(); + }); + + return wrapper; +} + +template +inline napi_value ObjectWrap::StaticVoidMethodCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + StaticVoidMethodCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + callbackData->callback(callbackInfo); + return nullptr; + }); +} + +template +inline napi_value ObjectWrap::StaticMethodCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + StaticMethodCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + return callbackData->callback(callbackInfo); + }); +} + +template +inline 
napi_value ObjectWrap::StaticGetterCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + StaticAccessorCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + return callbackData->getterCallback(callbackInfo); + }); +} + +template +inline napi_value ObjectWrap::StaticSetterCallbackWrapper( + napi_env env, napi_callback_info info) { + return details::WrapCallback([&] { + CallbackInfo callbackInfo(env, info); + StaticAccessorCallbackData* callbackData = + reinterpret_cast(callbackInfo.Data()); + callbackInfo.SetData(callbackData->data); + callbackData->setterCallback(callbackInfo, callbackInfo[0]); + return nullptr; + }); +} + +template +inline void ObjectWrap::FinalizeCallback(napi_env env, + void* data, + void* /*hint*/) { + HandleScope scope(env); + T* instance = static_cast(data); + instance->Finalize(Napi::Env(env)); + delete instance; +} + +template +template ::StaticSetterCallback method> +inline napi_value ObjectWrap::WrappedMethod( + napi_env env, napi_callback_info info) NAPI_NOEXCEPT { + return details::WrapCallback([&] { + const CallbackInfo cbInfo(env, info); + method(cbInfo, cbInfo[0]); + return nullptr; + }); +} + +//////////////////////////////////////////////////////////////////////////////// +// HandleScope class +//////////////////////////////////////////////////////////////////////////////// + +inline HandleScope::HandleScope(napi_env env, napi_handle_scope scope) + : _env(env), _scope(scope) {} + +inline HandleScope::HandleScope(Napi::Env env) : _env(env) { + napi_status status = napi_open_handle_scope(_env, &_scope); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline HandleScope::~HandleScope() { + napi_status status = napi_close_handle_scope(_env, _scope); + NAPI_FATAL_IF_FAILED( + status, "HandleScope::~HandleScope", "napi_close_handle_scope"); +} + +inline HandleScope::operator 
napi_handle_scope() const { + return _scope; +} + +inline Napi::Env HandleScope::Env() const { + return Napi::Env(_env); +} + +//////////////////////////////////////////////////////////////////////////////// +// EscapableHandleScope class +//////////////////////////////////////////////////////////////////////////////// + +inline EscapableHandleScope::EscapableHandleScope( + napi_env env, napi_escapable_handle_scope scope) + : _env(env), _scope(scope) {} + +inline EscapableHandleScope::EscapableHandleScope(Napi::Env env) : _env(env) { + napi_status status = napi_open_escapable_handle_scope(_env, &_scope); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline EscapableHandleScope::~EscapableHandleScope() { + napi_status status = napi_close_escapable_handle_scope(_env, _scope); + NAPI_FATAL_IF_FAILED(status, + "EscapableHandleScope::~EscapableHandleScope", + "napi_close_escapable_handle_scope"); +} + +inline EscapableHandleScope::operator napi_escapable_handle_scope() const { + return _scope; +} + +inline Napi::Env EscapableHandleScope::Env() const { + return Napi::Env(_env); +} + +inline Value EscapableHandleScope::Escape(napi_value escapee) { + napi_value result; + napi_status status = napi_escape_handle(_env, _scope, escapee, &result); + NAPI_THROW_IF_FAILED(_env, status, Value()); + return Value(_env, result); +} + +#if (NAPI_VERSION > 2) +//////////////////////////////////////////////////////////////////////////////// +// CallbackScope class +//////////////////////////////////////////////////////////////////////////////// + +inline CallbackScope::CallbackScope(napi_env env, napi_callback_scope scope) + : _env(env), _scope(scope) {} + +inline CallbackScope::CallbackScope(napi_env env, napi_async_context context) + : _env(env) { + napi_status status = + napi_open_callback_scope(_env, Object::New(env), context, &_scope); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline CallbackScope::~CallbackScope() { + napi_status status = napi_close_callback_scope(_env, 
_scope); + NAPI_FATAL_IF_FAILED( + status, "CallbackScope::~CallbackScope", "napi_close_callback_scope"); +} + +inline CallbackScope::operator napi_callback_scope() const { + return _scope; +} + +inline Napi::Env CallbackScope::Env() const { + return Napi::Env(_env); +} +#endif + +//////////////////////////////////////////////////////////////////////////////// +// AsyncContext class +//////////////////////////////////////////////////////////////////////////////// + +inline AsyncContext::AsyncContext(napi_env env, const char* resource_name) + : AsyncContext(env, resource_name, Object::New(env)) {} + +inline AsyncContext::AsyncContext(napi_env env, + const char* resource_name, + const Object& resource) + : _env(env), _context(nullptr) { + napi_value resource_id; + napi_status status = napi_create_string_utf8( + _env, resource_name, NAPI_AUTO_LENGTH, &resource_id); + NAPI_THROW_IF_FAILED_VOID(_env, status); + + status = napi_async_init(_env, resource, resource_id, &_context); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline AsyncContext::~AsyncContext() { + if (_context != nullptr) { + napi_async_destroy(_env, _context); + _context = nullptr; + } +} + +inline AsyncContext::AsyncContext(AsyncContext&& other) { + _env = other._env; + other._env = nullptr; + _context = other._context; + other._context = nullptr; +} + +inline AsyncContext& AsyncContext::operator=(AsyncContext&& other) { + _env = other._env; + other._env = nullptr; + _context = other._context; + other._context = nullptr; + return *this; +} + +inline AsyncContext::operator napi_async_context() const { + return _context; +} + +inline Napi::Env AsyncContext::Env() const { + return Napi::Env(_env); +} + +//////////////////////////////////////////////////////////////////////////////// +// AsyncWorker class +//////////////////////////////////////////////////////////////////////////////// + +inline AsyncWorker::AsyncWorker(const Function& callback) + : AsyncWorker(callback, "generic") {} + +inline 
AsyncWorker::AsyncWorker(const Function& callback, + const char* resource_name) + : AsyncWorker(callback, resource_name, Object::New(callback.Env())) {} + +inline AsyncWorker::AsyncWorker(const Function& callback, + const char* resource_name, + const Object& resource) + : AsyncWorker( + Object::New(callback.Env()), callback, resource_name, resource) {} + +inline AsyncWorker::AsyncWorker(const Object& receiver, + const Function& callback) + : AsyncWorker(receiver, callback, "generic") {} + +inline AsyncWorker::AsyncWorker(const Object& receiver, + const Function& callback, + const char* resource_name) + : AsyncWorker( + receiver, callback, resource_name, Object::New(callback.Env())) {} + +inline AsyncWorker::AsyncWorker(const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource) + : _env(callback.Env()), + _receiver(Napi::Persistent(receiver)), + _callback(Napi::Persistent(callback)), + _suppress_destruct(false) { + napi_value resource_id; + napi_status status = napi_create_string_latin1( + _env, resource_name, NAPI_AUTO_LENGTH, &resource_id); + NAPI_THROW_IF_FAILED_VOID(_env, status); + + status = napi_create_async_work(_env, + resource, + resource_id, + OnAsyncWorkExecute, + OnAsyncWorkComplete, + this, + &_work); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline AsyncWorker::AsyncWorker(Napi::Env env) : AsyncWorker(env, "generic") {} + +inline AsyncWorker::AsyncWorker(Napi::Env env, const char* resource_name) + : AsyncWorker(env, resource_name, Object::New(env)) {} + +inline AsyncWorker::AsyncWorker(Napi::Env env, + const char* resource_name, + const Object& resource) + : _env(env), _receiver(), _callback(), _suppress_destruct(false) { + napi_value resource_id; + napi_status status = napi_create_string_latin1( + _env, resource_name, NAPI_AUTO_LENGTH, &resource_id); + NAPI_THROW_IF_FAILED_VOID(_env, status); + + status = napi_create_async_work(_env, + resource, + resource_id, + OnAsyncWorkExecute, + 
OnAsyncWorkComplete, + this, + &_work); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline AsyncWorker::~AsyncWorker() { + if (_work != nullptr) { + napi_delete_async_work(_env, _work); + _work = nullptr; + } +} + +inline void AsyncWorker::Destroy() { + delete this; +} + +inline AsyncWorker::AsyncWorker(AsyncWorker&& other) { + _env = other._env; + other._env = nullptr; + _work = other._work; + other._work = nullptr; + _receiver = std::move(other._receiver); + _callback = std::move(other._callback); + _error = std::move(other._error); + _suppress_destruct = other._suppress_destruct; +} + +inline AsyncWorker& AsyncWorker::operator=(AsyncWorker&& other) { + _env = other._env; + other._env = nullptr; + _work = other._work; + other._work = nullptr; + _receiver = std::move(other._receiver); + _callback = std::move(other._callback); + _error = std::move(other._error); + _suppress_destruct = other._suppress_destruct; + return *this; +} + +inline AsyncWorker::operator napi_async_work() const { + return _work; +} + +inline Napi::Env AsyncWorker::Env() const { + return Napi::Env(_env); +} + +inline void AsyncWorker::Queue() { + napi_status status = napi_queue_async_work(_env, _work); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline void AsyncWorker::Cancel() { + napi_status status = napi_cancel_async_work(_env, _work); + NAPI_THROW_IF_FAILED_VOID(_env, status); +} + +inline ObjectReference& AsyncWorker::Receiver() { + return _receiver; +} + +inline FunctionReference& AsyncWorker::Callback() { + return _callback; +} + +inline void AsyncWorker::SuppressDestruct() { + _suppress_destruct = true; +} + +inline void AsyncWorker::OnOK() { + if (!_callback.IsEmpty()) { + _callback.Call(_receiver.Value(), GetResult(_callback.Env())); + } +} + +inline void AsyncWorker::OnError(const Error& e) { + if (!_callback.IsEmpty()) { + _callback.Call(_receiver.Value(), + std::initializer_list{e.Value()}); + } +} + +inline void AsyncWorker::SetError(const std::string& error) { + 
_error = error; +} + +inline std::vector AsyncWorker::GetResult(Napi::Env /*env*/) { + return {}; +} +// The OnAsyncWorkExecute method receives an napi_env argument. However, do NOT +// use it within this method, as it does not run on the JavaScript thread and +// must not run any method that would cause JavaScript to run. In practice, +// this means that almost any use of napi_env will be incorrect. +inline void AsyncWorker::OnAsyncWorkExecute(napi_env env, void* asyncworker) { + AsyncWorker* self = static_cast(asyncworker); + self->OnExecute(env); +} +// The OnExecute method receives an napi_env argument. However, do NOT +// use it within this method, as it does not run on the JavaScript thread and +// must not run any method that would cause JavaScript to run. In practice, +// this means that almost any use of napi_env will be incorrect. +inline void AsyncWorker::OnExecute(Napi::Env /*DO_NOT_USE*/) { +#ifdef NAPI_CPP_EXCEPTIONS + try { + Execute(); + } catch (const std::exception& e) { + SetError(e.what()); + } +#else // NAPI_CPP_EXCEPTIONS + Execute(); +#endif // NAPI_CPP_EXCEPTIONS +} + +inline void AsyncWorker::OnAsyncWorkComplete(napi_env env, + napi_status status, + void* asyncworker) { + AsyncWorker* self = static_cast(asyncworker); + self->OnWorkComplete(env, status); +} +inline void AsyncWorker::OnWorkComplete(Napi::Env /*env*/, napi_status status) { + if (status != napi_cancelled) { + HandleScope scope(_env); + details::WrapCallback([&] { + if (_error.size() == 0) { + OnOK(); + } else { + OnError(Error::New(_env, _error)); + } + return nullptr; + }); + } + if (!_suppress_destruct) { + Destroy(); + } +} + +#if (NAPI_VERSION > 3 && !defined(__wasm32__)) +//////////////////////////////////////////////////////////////////////////////// +// TypedThreadSafeFunction class +//////////////////////////////////////////////////////////////////////////////// + +// Starting with NAPI 5, the JavaScript function `func` parameter of +// `napi_create_threadsafe_function` 
is optional. +#if NAPI_VERSION > 4 +// static, with Callback [missing] Resource [missing] Finalizer [missing] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context) { + TypedThreadSafeFunction tsfn; + + napi_status status = + napi_create_threadsafe_function(env, + nullptr, + nullptr, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + nullptr, + nullptr, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +// static, with Callback [missing] Resource [passed] Finalizer [missing] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context) { + TypedThreadSafeFunction tsfn; + + napi_status status = + napi_create_threadsafe_function(env, + nullptr, + resource, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + nullptr, + nullptr, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +// static, with Callback [missing] Resource [missing] Finalizer [passed] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data) { + TypedThreadSafeFunction tsfn; + + auto* finalizeData = new details:: + ThreadSafeFinalize( + {data, finalizeCallback}); + napi_status status = napi_create_threadsafe_function( + env, + nullptr, + nullptr, + String::From(env, resourceName), + maxQueueSize, + 
initialThreadCount, + finalizeData, + details::ThreadSafeFinalize:: + FinalizeFinalizeWrapperWithDataAndContext, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +// static, with Callback [missing] Resource [passed] Finalizer [passed] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data) { + TypedThreadSafeFunction tsfn; + + auto* finalizeData = new details:: + ThreadSafeFinalize( + {data, finalizeCallback}); + napi_status status = napi_create_threadsafe_function( + env, + nullptr, + resource, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + finalizeData, + details::ThreadSafeFinalize:: + FinalizeFinalizeWrapperWithDataAndContext, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} +#endif + +// static, with Callback [passed] Resource [missing] Finalizer [missing] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context) { + TypedThreadSafeFunction tsfn; + + napi_status status = + napi_create_threadsafe_function(env, + callback, + nullptr, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + nullptr, + nullptr, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +// static, with Callback [passed] Resource [passed] Finalizer [missing] 
+template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context) { + TypedThreadSafeFunction tsfn; + + napi_status status = + napi_create_threadsafe_function(env, + callback, + resource, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + nullptr, + nullptr, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +// static, with Callback [passed] Resource [missing] Finalizer [passed] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data) { + TypedThreadSafeFunction tsfn; + + auto* finalizeData = new details:: + ThreadSafeFinalize( + {data, finalizeCallback}); + napi_status status = napi_create_threadsafe_function( + env, + callback, + nullptr, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + finalizeData, + details::ThreadSafeFinalize:: + FinalizeFinalizeWrapperWithDataAndContext, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +// static, with: Callback [passed] Resource [passed] Finalizer [passed] +template +template +inline TypedThreadSafeFunction +TypedThreadSafeFunction::New( + napi_env env, + CallbackType callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data) { + TypedThreadSafeFunction tsfn; + + 
auto* finalizeData = new details:: + ThreadSafeFinalize( + {data, finalizeCallback}); + napi_status status = napi_create_threadsafe_function( + env, + details::DefaultCallbackWrapper< + CallbackType, + TypedThreadSafeFunction>(env, + callback), + resource, + String::From(env, resourceName), + maxQueueSize, + initialThreadCount, + finalizeData, + details::ThreadSafeFinalize:: + FinalizeFinalizeWrapperWithDataAndContext, + context, + CallJsInternal, + &tsfn._tsfn); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED( + env, status, TypedThreadSafeFunction()); + } + + return tsfn; +} + +template +inline TypedThreadSafeFunction:: + TypedThreadSafeFunction() + : _tsfn() {} + +template +inline TypedThreadSafeFunction:: + TypedThreadSafeFunction(napi_threadsafe_function tsfn) + : _tsfn(tsfn) {} + +template +inline TypedThreadSafeFunction:: +operator napi_threadsafe_function() const { + return _tsfn; +} + +template +inline napi_status +TypedThreadSafeFunction::BlockingCall( + DataType* data) const { + return napi_call_threadsafe_function(_tsfn, data, napi_tsfn_blocking); +} + +template +inline napi_status +TypedThreadSafeFunction::NonBlockingCall( + DataType* data) const { + return napi_call_threadsafe_function(_tsfn, data, napi_tsfn_nonblocking); +} + +template +inline void TypedThreadSafeFunction::Ref( + napi_env env) const { + if (_tsfn != nullptr) { + napi_status status = napi_ref_threadsafe_function(env, _tsfn); + NAPI_THROW_IF_FAILED_VOID(env, status); + } +} + +template +inline void TypedThreadSafeFunction::Unref( + napi_env env) const { + if (_tsfn != nullptr) { + napi_status status = napi_unref_threadsafe_function(env, _tsfn); + NAPI_THROW_IF_FAILED_VOID(env, status); + } +} + +template +inline napi_status +TypedThreadSafeFunction::Acquire() const { + return napi_acquire_threadsafe_function(_tsfn); +} + +template +inline napi_status +TypedThreadSafeFunction::Release() const { + return napi_release_threadsafe_function(_tsfn, napi_tsfn_release); 
+} + +template +inline napi_status +TypedThreadSafeFunction::Abort() const { + return napi_release_threadsafe_function(_tsfn, napi_tsfn_abort); +} + +template +inline ContextType* +TypedThreadSafeFunction::GetContext() const { + void* context; + napi_status status = napi_get_threadsafe_function_context(_tsfn, &context); + NAPI_FATAL_IF_FAILED(status, + "TypedThreadSafeFunction::GetContext", + "napi_get_threadsafe_function_context"); + return static_cast(context); +} + +// static +template +void TypedThreadSafeFunction::CallJsInternal( + napi_env env, napi_value jsCallback, void* context, void* data) { + details::CallJsWrapper( + env, jsCallback, context, data); +} + +#if NAPI_VERSION == 4 +// static +template +Napi::Function +TypedThreadSafeFunction::EmptyFunctionFactory( + Napi::Env env) { + return Napi::Function::New(env, [](const CallbackInfo& cb) {}); +} + +// static +template +Napi::Function +TypedThreadSafeFunction::FunctionOrEmpty( + Napi::Env env, Napi::Function& callback) { + if (callback.IsEmpty()) { + return EmptyFunctionFactory(env); + } + return callback; +} + +#else +// static +template +std::nullptr_t +TypedThreadSafeFunction::EmptyFunctionFactory( + Napi::Env /*env*/) { + return nullptr; +} + +// static +template +Napi::Function +TypedThreadSafeFunction::FunctionOrEmpty( + Napi::Env /*env*/, Napi::Function& callback) { + return callback; +} + +#endif + +//////////////////////////////////////////////////////////////////////////////// +// ThreadSafeFunction class +//////////////////////////////////////////////////////////////////////////////// + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount) { + return New( + env, callback, Object(), resourceName, maxQueueSize, initialThreadCount); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + 
ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context) { + return New(env, + callback, + Object(), + resourceName, + maxQueueSize, + initialThreadCount, + context); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback) { + return New(env, + callback, + Object(), + resourceName, + maxQueueSize, + initialThreadCount, + finalizeCallback); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback, + FinalizerDataType* data) { + return New(env, + callback, + Object(), + resourceName, + maxQueueSize, + initialThreadCount, + finalizeCallback, + data); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback) { + return New(env, + callback, + Object(), + resourceName, + maxQueueSize, + initialThreadCount, + context, + finalizeCallback); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data) { + return New(env, + callback, + Object(), + resourceName, + maxQueueSize, + initialThreadCount, + context, + finalizeCallback, + data); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount) { + return 
New(env, + callback, + resource, + resourceName, + maxQueueSize, + initialThreadCount, + static_cast(nullptr) /* context */); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context) { + return New(env, + callback, + resource, + resourceName, + maxQueueSize, + initialThreadCount, + context, + [](Env, ContextType*) {} /* empty finalizer */); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback) { + return New(env, + callback, + resource, + resourceName, + maxQueueSize, + initialThreadCount, + static_cast(nullptr) /* context */, + finalizeCallback, + static_cast(nullptr) /* data */, + details::ThreadSafeFinalize::Wrapper); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback, + FinalizerDataType* data) { + return New(env, + callback, + resource, + resourceName, + maxQueueSize, + initialThreadCount, + static_cast(nullptr) /* context */, + finalizeCallback, + data, + details::ThreadSafeFinalize:: + FinalizeWrapperWithData); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback) { + return New( + env, + callback, + resource, + resourceName, + maxQueueSize, + initialThreadCount, + context, + finalizeCallback, + static_cast(nullptr) /* data */, + 
details::ThreadSafeFinalize::FinalizeWrapperWithContext); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data) { + return New( + env, + callback, + resource, + resourceName, + maxQueueSize, + initialThreadCount, + context, + finalizeCallback, + data, + details::ThreadSafeFinalize:: + FinalizeFinalizeWrapperWithDataAndContext); +} + +inline ThreadSafeFunction::ThreadSafeFunction() : _tsfn() {} + +inline ThreadSafeFunction::ThreadSafeFunction(napi_threadsafe_function tsfn) + : _tsfn(tsfn) {} + +inline ThreadSafeFunction::operator napi_threadsafe_function() const { + return _tsfn; +} + +inline napi_status ThreadSafeFunction::BlockingCall() const { + return CallInternal(nullptr, napi_tsfn_blocking); +} + +template <> +inline napi_status ThreadSafeFunction::BlockingCall(void* data) const { + return napi_call_threadsafe_function(_tsfn, data, napi_tsfn_blocking); +} + +template +inline napi_status ThreadSafeFunction::BlockingCall(Callback callback) const { + return CallInternal(new CallbackWrapper(callback), napi_tsfn_blocking); +} + +template +inline napi_status ThreadSafeFunction::BlockingCall(DataType* data, + Callback callback) const { + auto wrapper = [data, callback](Env env, Function jsCallback) { + callback(env, jsCallback, data); + }; + return CallInternal(new CallbackWrapper(wrapper), napi_tsfn_blocking); +} + +inline napi_status ThreadSafeFunction::NonBlockingCall() const { + return CallInternal(nullptr, napi_tsfn_nonblocking); +} + +template <> +inline napi_status ThreadSafeFunction::NonBlockingCall(void* data) const { + return napi_call_threadsafe_function(_tsfn, data, napi_tsfn_nonblocking); +} + +template +inline napi_status ThreadSafeFunction::NonBlockingCall( + Callback callback) const { + return 
CallInternal(new CallbackWrapper(callback), napi_tsfn_nonblocking); +} + +template +inline napi_status ThreadSafeFunction::NonBlockingCall( + DataType* data, Callback callback) const { + auto wrapper = [data, callback](Env env, Function jsCallback) { + callback(env, jsCallback, data); + }; + return CallInternal(new CallbackWrapper(wrapper), napi_tsfn_nonblocking); +} + +inline void ThreadSafeFunction::Ref(napi_env env) const { + if (_tsfn != nullptr) { + napi_status status = napi_ref_threadsafe_function(env, _tsfn); + NAPI_THROW_IF_FAILED_VOID(env, status); + } +} + +inline void ThreadSafeFunction::Unref(napi_env env) const { + if (_tsfn != nullptr) { + napi_status status = napi_unref_threadsafe_function(env, _tsfn); + NAPI_THROW_IF_FAILED_VOID(env, status); + } +} + +inline napi_status ThreadSafeFunction::Acquire() const { + return napi_acquire_threadsafe_function(_tsfn); +} + +inline napi_status ThreadSafeFunction::Release() const { + return napi_release_threadsafe_function(_tsfn, napi_tsfn_release); +} + +inline napi_status ThreadSafeFunction::Abort() const { + return napi_release_threadsafe_function(_tsfn, napi_tsfn_abort); +} + +inline ThreadSafeFunction::ConvertibleContext ThreadSafeFunction::GetContext() + const { + void* context; + napi_status status = napi_get_threadsafe_function_context(_tsfn, &context); + NAPI_FATAL_IF_FAILED(status, + "ThreadSafeFunction::GetContext", + "napi_get_threadsafe_function_context"); + return ConvertibleContext({context}); +} + +// static +template +inline ThreadSafeFunction ThreadSafeFunction::New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data, + napi_finalize wrapper) { + static_assert(details::can_make_string::value || + std::is_convertible::value, + "Resource name should be convertible to the string type"); + + ThreadSafeFunction tsfn; + 
auto* finalizeData = new details:: + ThreadSafeFinalize( + {data, finalizeCallback}); + napi_status status = + napi_create_threadsafe_function(env, + callback, + resource, + Value::From(env, resourceName), + maxQueueSize, + initialThreadCount, + finalizeData, + wrapper, + context, + CallJS, + &tsfn._tsfn); + if (status != napi_ok) { + delete finalizeData; + NAPI_THROW_IF_FAILED(env, status, ThreadSafeFunction()); + } + + return tsfn; +} + +inline napi_status ThreadSafeFunction::CallInternal( + CallbackWrapper* callbackWrapper, + napi_threadsafe_function_call_mode mode) const { + napi_status status = + napi_call_threadsafe_function(_tsfn, callbackWrapper, mode); + if (status != napi_ok && callbackWrapper != nullptr) { + delete callbackWrapper; + } + + return status; +} + +// static +inline void ThreadSafeFunction::CallJS(napi_env env, + napi_value jsCallback, + void* /* context */, + void* data) { + if (env == nullptr && jsCallback == nullptr) { + return; + } + + if (data != nullptr) { + auto* callbackWrapper = static_cast(data); + (*callbackWrapper)(env, Function(env, jsCallback)); + delete callbackWrapper; + } else if (jsCallback != nullptr) { + Function(env, jsCallback).Call({}); + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Async Progress Worker Base class +//////////////////////////////////////////////////////////////////////////////// +template +inline AsyncProgressWorkerBase::AsyncProgressWorkerBase( + const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource, + size_t queue_size) + : AsyncWorker(receiver, callback, resource_name, resource) { + // Fill all possible arguments to work around ambiguous + // ThreadSafeFunction::New signatures. 
+ _tsfn = ThreadSafeFunction::New(callback.Env(), + callback, + resource, + resource_name, + queue_size, + /** initialThreadCount */ 1, + /** context */ this, + OnThreadSafeFunctionFinalize, + /** finalizeData */ this); +} + +#if NAPI_VERSION > 4 +template +inline AsyncProgressWorkerBase::AsyncProgressWorkerBase( + Napi::Env env, + const char* resource_name, + const Object& resource, + size_t queue_size) + : AsyncWorker(env, resource_name, resource) { + // TODO: Once the changes to make the callback optional for threadsafe + // functions are available on all versions we can remove the dummy Function + // here. + Function callback; + // Fill all possible arguments to work around ambiguous + // ThreadSafeFunction::New signatures. + _tsfn = ThreadSafeFunction::New(env, + callback, + resource, + resource_name, + queue_size, + /** initialThreadCount */ 1, + /** context */ this, + OnThreadSafeFunctionFinalize, + /** finalizeData */ this); +} +#endif + +template +inline AsyncProgressWorkerBase::~AsyncProgressWorkerBase() { + // Abort pending tsfn call. + // Don't send progress events after we've already completed. + // It's ok to call ThreadSafeFunction::Abort and ThreadSafeFunction::Release + // duplicated. 
+ _tsfn.Abort(); +} + +template +inline void AsyncProgressWorkerBase::OnAsyncWorkProgress( + Napi::Env /* env */, Napi::Function /* jsCallback */, void* data) { + ThreadSafeData* tsd = static_cast(data); + tsd->asyncprogressworker()->OnWorkProgress(tsd->data()); + delete tsd; +} + +template +inline napi_status AsyncProgressWorkerBase::NonBlockingCall( + DataType* data) { + auto tsd = new AsyncProgressWorkerBase::ThreadSafeData(this, data); + auto ret = _tsfn.NonBlockingCall(tsd, OnAsyncWorkProgress); + if (ret != napi_ok) { + delete tsd; + } + return ret; +} + +template +inline void AsyncProgressWorkerBase::OnWorkComplete( + Napi::Env /* env */, napi_status status) { + _work_completed = true; + _complete_status = status; + _tsfn.Release(); +} + +template +inline void AsyncProgressWorkerBase::OnThreadSafeFunctionFinalize( + Napi::Env env, void* /* data */, AsyncProgressWorkerBase* context) { + if (context->_work_completed) { + context->AsyncWorker::OnWorkComplete(env, context->_complete_status); + } +} + +//////////////////////////////////////////////////////////////////////////////// +// Async Progress Worker class +//////////////////////////////////////////////////////////////////////////////// +template +inline AsyncProgressWorker::AsyncProgressWorker(const Function& callback) + : AsyncProgressWorker(callback, "generic") {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(const Function& callback, + const char* resource_name) + : AsyncProgressWorker( + callback, resource_name, Object::New(callback.Env())) {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(const Function& callback, + const char* resource_name, + const Object& resource) + : AsyncProgressWorker( + Object::New(callback.Env()), callback, resource_name, resource) {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(const Object& receiver, + const Function& callback) + : AsyncProgressWorker(receiver, callback, "generic") {} + +template +inline 
AsyncProgressWorker::AsyncProgressWorker(const Object& receiver, + const Function& callback, + const char* resource_name) + : AsyncProgressWorker( + receiver, callback, resource_name, Object::New(callback.Env())) {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource) + : AsyncProgressWorkerBase(receiver, callback, resource_name, resource), + _asyncdata(nullptr), + _asyncsize(0), + _signaled(false) {} + +#if NAPI_VERSION > 4 +template +inline AsyncProgressWorker::AsyncProgressWorker(Napi::Env env) + : AsyncProgressWorker(env, "generic") {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(Napi::Env env, + const char* resource_name) + : AsyncProgressWorker(env, resource_name, Object::New(env)) {} + +template +inline AsyncProgressWorker::AsyncProgressWorker(Napi::Env env, + const char* resource_name, + const Object& resource) + : AsyncProgressWorkerBase(env, resource_name, resource), + _asyncdata(nullptr), + _asyncsize(0) {} +#endif + +template +inline AsyncProgressWorker::~AsyncProgressWorker() { + { + std::lock_guard lock(this->_mutex); + _asyncdata = nullptr; + _asyncsize = 0; + } +} + +template +inline void AsyncProgressWorker::Execute() { + ExecutionProgress progress(this); + Execute(progress); +} + +template +inline void AsyncProgressWorker::OnWorkProgress(void*) { + T* data; + size_t size; + bool signaled; + { + std::lock_guard lock(this->_mutex); + data = this->_asyncdata; + size = this->_asyncsize; + signaled = this->_signaled; + this->_asyncdata = nullptr; + this->_asyncsize = 0; + this->_signaled = false; + } + + /** + * The callback of ThreadSafeFunction is not been invoked immediately on the + * callback of uv_async_t (uv io poll), rather the callback of TSFN is + * invoked on the right next uv idle callback. There are chances that during + * the deferring the signal of uv_async_t is been sent again, i.e. 
potential + * not coalesced two calls of the TSFN callback. + */ + if (data == nullptr && !signaled) { + return; + } + + this->OnProgress(data, size); + delete[] data; +} + +template +inline void AsyncProgressWorker::SendProgress_(const T* data, size_t count) { + T* new_data = new T[count]; + std::copy(data, data + count, new_data); + + T* old_data; + { + std::lock_guard lock(this->_mutex); + old_data = _asyncdata; + _asyncdata = new_data; + _asyncsize = count; + _signaled = false; + } + this->NonBlockingCall(nullptr); + + delete[] old_data; +} + +template +inline void AsyncProgressWorker::Signal() { + { + std::lock_guard lock(this->_mutex); + _signaled = true; + } + this->NonBlockingCall(static_cast(nullptr)); +} + +template +inline void AsyncProgressWorker::ExecutionProgress::Signal() const { + this->_worker->Signal(); +} + +template +inline void AsyncProgressWorker::ExecutionProgress::Send( + const T* data, size_t count) const { + _worker->SendProgress_(data, count); +} + +//////////////////////////////////////////////////////////////////////////////// +// Async Progress Queue Worker class +//////////////////////////////////////////////////////////////////////////////// +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + const Function& callback) + : AsyncProgressQueueWorker(callback, "generic") {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + const Function& callback, const char* resource_name) + : AsyncProgressQueueWorker( + callback, resource_name, Object::New(callback.Env())) {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + const Function& callback, const char* resource_name, const Object& resource) + : AsyncProgressQueueWorker( + Object::New(callback.Env()), callback, resource_name, resource) {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + const Object& receiver, const Function& callback) + : AsyncProgressQueueWorker(receiver, callback, "generic") {} + 
+template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + const Object& receiver, const Function& callback, const char* resource_name) + : AsyncProgressQueueWorker( + receiver, callback, resource_name, Object::New(callback.Env())) {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource) + : AsyncProgressWorkerBase>( + receiver, + callback, + resource_name, + resource, + /** unlimited queue size */ 0) {} + +#if NAPI_VERSION > 4 +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker(Napi::Env env) + : AsyncProgressQueueWorker(env, "generic") {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + Napi::Env env, const char* resource_name) + : AsyncProgressQueueWorker(env, resource_name, Object::New(env)) {} + +template +inline AsyncProgressQueueWorker::AsyncProgressQueueWorker( + Napi::Env env, const char* resource_name, const Object& resource) + : AsyncProgressWorkerBase>( + env, resource_name, resource, /** unlimited queue size */ 0) {} +#endif + +template +inline void AsyncProgressQueueWorker::Execute() { + ExecutionProgress progress(this); + Execute(progress); +} + +template +inline void AsyncProgressQueueWorker::OnWorkProgress( + std::pair* datapair) { + if (datapair == nullptr) { + return; + } + + T* data = datapair->first; + size_t size = datapair->second; + + this->OnProgress(data, size); + delete datapair; + delete[] data; +} + +template +inline void AsyncProgressQueueWorker::SendProgress_(const T* data, + size_t count) { + T* new_data = new T[count]; + std::copy(data, data + count, new_data); + + auto pair = new std::pair(new_data, count); + this->NonBlockingCall(pair); +} + +template +inline void AsyncProgressQueueWorker::Signal() const { + this->SendProgress_(static_cast(nullptr), 0); +} + +template +inline void AsyncProgressQueueWorker::OnWorkComplete(Napi::Env env, + napi_status 
status) { + // Draining queued items in TSFN. + AsyncProgressWorkerBase>::OnWorkComplete(env, status); +} + +template +inline void AsyncProgressQueueWorker::ExecutionProgress::Signal() const { + _worker->SendProgress_(static_cast(nullptr), 0); +} + +template +inline void AsyncProgressQueueWorker::ExecutionProgress::Send( + const T* data, size_t count) const { + _worker->SendProgress_(data, count); +} +#endif // NAPI_VERSION > 3 && !defined(__wasm32__) + +//////////////////////////////////////////////////////////////////////////////// +// Memory Management class +//////////////////////////////////////////////////////////////////////////////// + +inline int64_t MemoryManagement::AdjustExternalMemory(Env env, + int64_t change_in_bytes) { + int64_t result; + napi_status status = + napi_adjust_external_memory(env, change_in_bytes, &result); + NAPI_THROW_IF_FAILED(env, status, 0); + return result; +} + +//////////////////////////////////////////////////////////////////////////////// +// Version Management class +//////////////////////////////////////////////////////////////////////////////// + +inline uint32_t VersionManagement::GetNapiVersion(Env env) { + uint32_t result; + napi_status status = napi_get_version(env, &result); + NAPI_THROW_IF_FAILED(env, status, 0); + return result; +} + +inline const napi_node_version* VersionManagement::GetNodeVersion(Env env) { + const napi_node_version* result; + napi_status status = napi_get_node_version(env, &result); + NAPI_THROW_IF_FAILED(env, status, 0); + return result; +} + +#if NAPI_VERSION > 5 +//////////////////////////////////////////////////////////////////////////////// +// Addon class +//////////////////////////////////////////////////////////////////////////////// + +template +inline Object Addon::Init(Env env, Object exports) { + T* addon = new T(env, exports); + env.SetInstanceData(addon); + return addon->entry_point_; +} + +template +inline T* Addon::Unwrap(Object wrapper) { + return wrapper.Env().GetInstanceData(); 
+} + +template +inline void Addon::DefineAddon( + Object exports, const std::initializer_list& props) { + DefineProperties(exports, props); + entry_point_ = exports; +} + +template +inline Napi::Object Addon::DefineProperties( + Object object, const std::initializer_list& props) { + const napi_property_descriptor* properties = + reinterpret_cast(props.begin()); + size_t size = props.size(); + napi_status status = + napi_define_properties(object.Env(), object, size, properties); + NAPI_THROW_IF_FAILED(object.Env(), status, object); + for (size_t idx = 0; idx < size; idx++) + T::AttachPropData(object.Env(), object, &properties[idx]); + return object; +} +#endif // NAPI_VERSION > 5 + +#if NAPI_VERSION > 2 +template +Env::CleanupHook Env::AddCleanupHook(Hook hook, Arg* arg) { + return CleanupHook(*this, hook, arg); +} + +template +Env::CleanupHook Env::AddCleanupHook(Hook hook) { + return CleanupHook(*this, hook); +} + +template +Env::CleanupHook::CleanupHook() { + data = nullptr; +} + +template +Env::CleanupHook::CleanupHook(Napi::Env env, Hook hook) + : wrapper(Env::CleanupHook::Wrapper) { + data = new CleanupData{std::move(hook), nullptr}; + napi_status status = napi_add_env_cleanup_hook(env, wrapper, data); + if (status != napi_ok) { + delete data; + data = nullptr; + } +} + +template +Env::CleanupHook::CleanupHook(Napi::Env env, Hook hook, Arg* arg) + : wrapper(Env::CleanupHook::WrapperWithArg) { + data = new CleanupData{std::move(hook), arg}; + napi_status status = napi_add_env_cleanup_hook(env, wrapper, data); + if (status != napi_ok) { + delete data; + data = nullptr; + } +} + +template +bool Env::CleanupHook::Remove(Env env) { + napi_status status = napi_remove_env_cleanup_hook(env, wrapper, data); + delete data; + data = nullptr; + return status == napi_ok; +} + +template +bool Env::CleanupHook::IsEmpty() const { + return data == nullptr; +} +#endif // NAPI_VERSION > 2 + +#ifdef NAPI_CPP_CUSTOM_NAMESPACE +} // namespace NAPI_CPP_CUSTOM_NAMESPACE +#endif + +} 
// namespace Napi + +#endif // SRC_NAPI_INL_H_ diff --git a/node_modules/node-addon-api/napi.h b/node_modules/node-addon-api/napi.h new file mode 100644 index 0000000..831b3b6 --- /dev/null +++ b/node_modules/node-addon-api/napi.h @@ -0,0 +1,3114 @@ +#ifndef SRC_NAPI_H_ +#define SRC_NAPI_H_ + +#include +#include +#include +#include +#include +#include +#include + +// VS2015 RTM has bugs with constexpr, so require min of VS2015 Update 3 (known +// good version) +#if !defined(_MSC_VER) || _MSC_FULL_VER >= 190024210 +#define NAPI_HAS_CONSTEXPR 1 +#endif + +// VS2013 does not support char16_t literal strings, so we'll work around it +// using wchar_t strings and casting them. This is safe as long as the character +// sizes are the same. +#if defined(_MSC_VER) && _MSC_VER <= 1800 +static_assert(sizeof(char16_t) == sizeof(wchar_t), + "Size mismatch between char16_t and wchar_t"); +#define NAPI_WIDE_TEXT(x) reinterpret_cast(L##x) +#else +#define NAPI_WIDE_TEXT(x) u##x +#endif + +// If C++ exceptions are not explicitly enabled or disabled, enable them +// if exceptions were enabled in the compiler settings. +#if !defined(NAPI_CPP_EXCEPTIONS) && !defined(NAPI_DISABLE_CPP_EXCEPTIONS) +#if defined(_CPPUNWIND) || defined(__EXCEPTIONS) +#define NAPI_CPP_EXCEPTIONS +#else +#error Exception support not detected. \ + Define either NAPI_CPP_EXCEPTIONS or NAPI_DISABLE_CPP_EXCEPTIONS. +#endif +#endif + +// If C++ NAPI_CPP_EXCEPTIONS are enabled, NODE_ADDON_API_ENABLE_MAYBE should +// not be set +#if defined(NAPI_CPP_EXCEPTIONS) && defined(NODE_ADDON_API_ENABLE_MAYBE) +#error NODE_ADDON_API_ENABLE_MAYBE should not be set when \ + NAPI_CPP_EXCEPTIONS is defined. +#endif + +#ifdef _NOEXCEPT +#define NAPI_NOEXCEPT _NOEXCEPT +#else +#define NAPI_NOEXCEPT noexcept +#endif + +#ifdef NAPI_CPP_EXCEPTIONS + +// When C++ exceptions are enabled, Errors are thrown directly. There is no need +// to return anything after the throw statements. 
The variadic parameter is an +// optional return value that is ignored. +// We need _VOID versions of the macros to avoid warnings resulting from +// leaving the NAPI_THROW_* `...` argument empty. + +#define NAPI_THROW(e, ...) throw e +#define NAPI_THROW_VOID(e) throw e + +#define NAPI_THROW_IF_FAILED(env, status, ...) \ + if ((status) != napi_ok) throw Napi::Error::New(env); + +#define NAPI_THROW_IF_FAILED_VOID(env, status) \ + if ((status) != napi_ok) throw Napi::Error::New(env); + +#else // NAPI_CPP_EXCEPTIONS + +// When C++ exceptions are disabled, Errors are thrown as JavaScript exceptions, +// which are pending until the callback returns to JS. The variadic parameter +// is an optional return value; usually it is an empty result. +// We need _VOID versions of the macros to avoid warnings resulting from +// leaving the NAPI_THROW_* `...` argument empty. + +#define NAPI_THROW(e, ...) \ + do { \ + (e).ThrowAsJavaScriptException(); \ + return __VA_ARGS__; \ + } while (0) + +#define NAPI_THROW_VOID(e) \ + do { \ + (e).ThrowAsJavaScriptException(); \ + return; \ + } while (0) + +#define NAPI_THROW_IF_FAILED(env, status, ...) 
\ + if ((status) != napi_ok) { \ + Napi::Error::New(env).ThrowAsJavaScriptException(); \ + return __VA_ARGS__; \ + } + +#define NAPI_THROW_IF_FAILED_VOID(env, status) \ + if ((status) != napi_ok) { \ + Napi::Error::New(env).ThrowAsJavaScriptException(); \ + return; \ + } + +#endif // NAPI_CPP_EXCEPTIONS + +#ifdef NODE_ADDON_API_ENABLE_MAYBE +#define NAPI_MAYBE_THROW_IF_FAILED(env, status, type) \ + NAPI_THROW_IF_FAILED(env, status, Napi::Nothing()) + +#define NAPI_RETURN_OR_THROW_IF_FAILED(env, status, result, type) \ + NAPI_MAYBE_THROW_IF_FAILED(env, status, type); \ + return Napi::Just(result); +#else +#define NAPI_MAYBE_THROW_IF_FAILED(env, status, type) \ + NAPI_THROW_IF_FAILED(env, status, type()) + +#define NAPI_RETURN_OR_THROW_IF_FAILED(env, status, result, type) \ + NAPI_MAYBE_THROW_IF_FAILED(env, status, type); \ + return result; +#endif + +#define NAPI_DISALLOW_ASSIGN(CLASS) void operator=(const CLASS&) = delete; +#define NAPI_DISALLOW_COPY(CLASS) CLASS(const CLASS&) = delete; + +#define NAPI_DISALLOW_ASSIGN_COPY(CLASS) \ + NAPI_DISALLOW_ASSIGN(CLASS) \ + NAPI_DISALLOW_COPY(CLASS) + +#define NAPI_CHECK(condition, location, message) \ + do { \ + if (!(condition)) { \ + Napi::Error::Fatal((location), (message)); \ + } \ + } while (0) + +#define NAPI_FATAL_IF_FAILED(status, location, message) \ + NAPI_CHECK((status) == napi_ok, location, message) + +//////////////////////////////////////////////////////////////////////////////// +/// Node-API C++ Wrapper Classes +/// +/// These classes wrap the "Node-API" ABI-stable C APIs for Node.js, providing a +/// C++ object model and C++ exception-handling semantics with low overhead. +/// The wrappers are all header-only so that they do not affect the ABI. 
+//////////////////////////////////////////////////////////////////////////////// +namespace Napi { + +#ifdef NAPI_CPP_CUSTOM_NAMESPACE +// NAPI_CPP_CUSTOM_NAMESPACE can be #define'd per-addon to avoid symbol +// conflicts between different instances of node-addon-api + +// First dummy definition of the namespace to make sure that Napi::(name) still +// refers to the right things inside this file. +namespace NAPI_CPP_CUSTOM_NAMESPACE {} +using namespace NAPI_CPP_CUSTOM_NAMESPACE; + +namespace NAPI_CPP_CUSTOM_NAMESPACE { +#endif + +// Forward declarations +class Env; +class Value; +class Boolean; +class Number; +#if NAPI_VERSION > 5 +class BigInt; +#endif // NAPI_VERSION > 5 +#if (NAPI_VERSION > 4) +class Date; +#endif +class String; +class Object; +class Array; +class ArrayBuffer; +class Function; +class Error; +class PropertyDescriptor; +class CallbackInfo; +class TypedArray; +template +class TypedArrayOf; + +using Int8Array = + TypedArrayOf; ///< Typed-array of signed 8-bit integers +using Uint8Array = + TypedArrayOf; ///< Typed-array of unsigned 8-bit integers +using Int16Array = + TypedArrayOf; ///< Typed-array of signed 16-bit integers +using Uint16Array = + TypedArrayOf; ///< Typed-array of unsigned 16-bit integers +using Int32Array = + TypedArrayOf; ///< Typed-array of signed 32-bit integers +using Uint32Array = + TypedArrayOf; ///< Typed-array of unsigned 32-bit integers +using Float32Array = + TypedArrayOf; ///< Typed-array of 32-bit floating-point values +using Float64Array = + TypedArrayOf; ///< Typed-array of 64-bit floating-point values +#if NAPI_VERSION > 5 +using BigInt64Array = + TypedArrayOf; ///< Typed array of signed 64-bit integers +using BigUint64Array = + TypedArrayOf; ///< Typed array of unsigned 64-bit integers +#endif // NAPI_VERSION > 5 + +/// Defines the signature of a Node-API C++ module's registration callback +/// (init) function. 
+using ModuleRegisterCallback = Object (*)(Env env, Object exports); + +class MemoryManagement; + +/// A simple Maybe type, representing an object which may or may not have a +/// value. +/// +/// If an API method returns a Maybe<>, the API method can potentially fail +/// either because an exception is thrown, or because an exception is pending, +/// e.g. because a previous API call threw an exception that hasn't been +/// caught yet. In that case, a "Nothing" value is returned. +template +class Maybe { + public: + bool IsNothing() const; + bool IsJust() const; + + /// Short-hand for Unwrap(), which doesn't return a value. Could be used + /// where the actual value of the Maybe is not needed like Object::Set. + /// If this Maybe is nothing (empty), node-addon-api will crash the + /// process. + void Check() const; + + /// Return the value of type T contained in the Maybe. If this Maybe is + /// nothing (empty), node-addon-api will crash the process. + T Unwrap() const; + + /// Return the value of type T contained in the Maybe, or using a default + /// value if this Maybe is nothing (empty). + T UnwrapOr(const T& default_value) const; + + /// Converts this Maybe to a value of type T in the out. If this Maybe is + /// nothing (empty), `false` is returned and `out` is left untouched. + bool UnwrapTo(T* out) const; + + bool operator==(const Maybe& other) const; + bool operator!=(const Maybe& other) const; + + private: + Maybe(); + explicit Maybe(const T& t); + + bool _has_value; + T _value; + + template + friend Maybe Nothing(); + template + friend Maybe Just(const U& u); +}; + +template +inline Maybe Nothing(); + +template +inline Maybe Just(const T& t); + +#if defined(NODE_ADDON_API_ENABLE_MAYBE) +template +using MaybeOrValue = Maybe; +#else +template +using MaybeOrValue = T; +#endif + +/// Environment for Node-API values and operations. +/// +/// All Node-API values and operations must be associated with an environment. 
+/// An environment instance is always provided to callback functions; that +/// environment must then be used for any creation of Node-API values or other +/// Node-API operations within the callback. (Many methods infer the +/// environment from the `this` instance that the method is called on.) +/// +/// In the future, multiple environments per process may be supported, +/// although current implementations only support one environment per process. +/// +/// In the V8 JavaScript engine, a Node-API environment approximately +/// corresponds to an Isolate. +class Env { + private: + napi_env _env; +#if NAPI_VERSION > 5 + template + static void DefaultFini(Env, T* data); + template + static void DefaultFiniWithHint(Env, DataType* data, HintType* hint); +#endif // NAPI_VERSION > 5 + public: + Env(napi_env env); + + operator napi_env() const; + + Object Global() const; + Value Undefined() const; + Value Null() const; + + bool IsExceptionPending() const; + Error GetAndClearPendingException() const; + + MaybeOrValue RunScript(const char* utf8script) const; + MaybeOrValue RunScript(const std::string& utf8script) const; + MaybeOrValue RunScript(String script) const; + +#if NAPI_VERSION > 2 + template + class CleanupHook; + + template + CleanupHook AddCleanupHook(Hook hook); + + template + CleanupHook AddCleanupHook(Hook hook, Arg* arg); +#endif // NAPI_VERSION > 2 + +#if NAPI_VERSION > 5 + template + T* GetInstanceData() const; + + template + using Finalizer = void (*)(Env, T*); + template fini = Env::DefaultFini> + void SetInstanceData(T* data) const; + + template + using FinalizerWithHint = void (*)(Env, DataType*, HintType*); + template fini = + Env::DefaultFiniWithHint> + void SetInstanceData(DataType* data, HintType* hint) const; +#endif // NAPI_VERSION > 5 + +#if NAPI_VERSION > 2 + template + class CleanupHook { + public: + CleanupHook(); + CleanupHook(Env env, Hook hook, Arg* arg); + CleanupHook(Env env, Hook hook); + bool Remove(Env env); + bool IsEmpty() const; + 
+ private: + static inline void Wrapper(void* data) NAPI_NOEXCEPT; + static inline void WrapperWithArg(void* data) NAPI_NOEXCEPT; + + void (*wrapper)(void* arg); + struct CleanupData { + Hook hook; + Arg* arg; + } * data; + }; +#endif // NAPI_VERSION > 2 +}; + +/// A JavaScript value of unknown type. +/// +/// For type-specific operations, convert to one of the Value subclasses using a +/// `To*` or `As()` method. The `To*` methods do type coercion; the `As()` +/// method does not. +/// +/// Napi::Value value = ... +/// if (!value.IsString()) throw Napi::TypeError::New(env, "Invalid +/// arg..."); Napi::String str = value.As(); // Cast to a +/// string value +/// +/// Napi::Value anotherValue = ... +/// bool isTruthy = anotherValue.ToBoolean(); // Coerce to a boolean value +class Value { + public: + Value(); ///< Creates a new _empty_ Value instance. + Value(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + /// Creates a JS value from a C++ primitive. + /// + /// `value` may be any of: + /// - bool + /// - Any integer type + /// - Any floating point type + /// - const char* (encoded using UTF-8, null-terminated) + /// - const char16_t* (encoded using UTF-16-LE, null-terminated) + /// - std::string (encoded using UTF-8) + /// - std::u16string + /// - napi::Value + /// - napi_value + template + static Value From(napi_env env, const T& value); + + /// Converts to a Node-API value primitive. + /// + /// If the instance is _empty_, this returns `nullptr`. + operator napi_value() const; + + /// Tests if this value strictly equals another value. + bool operator==(const Value& other) const; + + /// Tests if this value does not strictly equal another value. + bool operator!=(const Value& other) const; + + /// Tests if this value strictly equals another value. + bool StrictEquals(const Value& other) const; + + /// Gets the environment the value is associated with. + Napi::Env Env() const; + + /// Checks if the value is empty (uninitialized). 
+ /// + /// An empty value is invalid, and most attempts to perform an operation on an + /// empty value will result in an exception. Note an empty value is distinct + /// from JavaScript `null` or `undefined`, which are valid values. + /// + /// When C++ exceptions are disabled at compile time, a method with a `Value` + /// return type may return an empty value to indicate a pending exception. So + /// when not using C++ exceptions, callers should check whether the value is + /// empty before attempting to use it. + bool IsEmpty() const; + + napi_valuetype Type() const; ///< Gets the type of the value. + + bool IsUndefined() + const; ///< Tests if a value is an undefined JavaScript value. + bool IsNull() const; ///< Tests if a value is a null JavaScript value. + bool IsBoolean() const; ///< Tests if a value is a JavaScript boolean. + bool IsNumber() const; ///< Tests if a value is a JavaScript number. +#if NAPI_VERSION > 5 + bool IsBigInt() const; ///< Tests if a value is a JavaScript bigint. +#endif // NAPI_VERSION > 5 +#if (NAPI_VERSION > 4) + bool IsDate() const; ///< Tests if a value is a JavaScript date. +#endif + bool IsString() const; ///< Tests if a value is a JavaScript string. + bool IsSymbol() const; ///< Tests if a value is a JavaScript symbol. + bool IsArray() const; ///< Tests if a value is a JavaScript array. + bool IsArrayBuffer() + const; ///< Tests if a value is a JavaScript array buffer. + bool IsTypedArray() const; ///< Tests if a value is a JavaScript typed array. + bool IsObject() const; ///< Tests if a value is a JavaScript object. + bool IsFunction() const; ///< Tests if a value is a JavaScript function. + bool IsPromise() const; ///< Tests if a value is a JavaScript promise. + bool IsDataView() const; ///< Tests if a value is a JavaScript data view. + bool IsBuffer() const; ///< Tests if a value is a Node buffer. + bool IsExternal() const; ///< Tests if a value is a pointer to external data. 
+ + /// Casts to another type of `Napi::Value`, when the actual type is known or + /// assumed. + /// + /// This conversion does NOT coerce the type. Calling any methods + /// inappropriate for the actual value type will throw `Napi::Error`. + template + T As() const; + + MaybeOrValue ToBoolean() + const; ///< Coerces a value to a JavaScript boolean. + MaybeOrValue ToNumber() + const; ///< Coerces a value to a JavaScript number. + MaybeOrValue ToString() + const; ///< Coerces a value to a JavaScript string. + MaybeOrValue ToObject() + const; ///< Coerces a value to a JavaScript object. + + protected: + /// !cond INTERNAL + napi_env _env; + napi_value _value; + /// !endcond +}; + +/// A JavaScript boolean value. +class Boolean : public Value { + public: + static Boolean New(napi_env env, ///< Node-API environment + bool value ///< Boolean value + ); + + Boolean(); ///< Creates a new _empty_ Boolean instance. + Boolean(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + operator bool() const; ///< Converts a Boolean value to a boolean primitive. + bool Value() const; ///< Converts a Boolean value to a boolean primitive. +}; + +/// A JavaScript number value. +class Number : public Value { + public: + static Number New(napi_env env, ///< Node-API environment + double value ///< Number value + ); + + Number(); ///< Creates a new _empty_ Number instance. + Number(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + operator int32_t() + const; ///< Converts a Number value to a 32-bit signed integer value. + operator uint32_t() + const; ///< Converts a Number value to a 32-bit unsigned integer value. + operator int64_t() + const; ///< Converts a Number value to a 64-bit signed integer value. + operator float() + const; ///< Converts a Number value to a 32-bit floating-point value. + operator double() + const; ///< Converts a Number value to a 64-bit floating-point value. 
+ + int32_t Int32Value() + const; ///< Converts a Number value to a 32-bit signed integer value. + uint32_t Uint32Value() + const; ///< Converts a Number value to a 32-bit unsigned integer value. + int64_t Int64Value() + const; ///< Converts a Number value to a 64-bit signed integer value. + float FloatValue() + const; ///< Converts a Number value to a 32-bit floating-point value. + double DoubleValue() + const; ///< Converts a Number value to a 64-bit floating-point value. +}; + +#if NAPI_VERSION > 5 +/// A JavaScript bigint value. +class BigInt : public Value { + public: + static BigInt New(napi_env env, ///< Node-API environment + int64_t value ///< Number value + ); + static BigInt New(napi_env env, ///< Node-API environment + uint64_t value ///< Number value + ); + + /// Creates a new BigInt object using a specified sign bit and a + /// specified list of digits/words. + /// The resulting number is calculated as: + /// (-1)^sign_bit * (words[0] * (2^64)^0 + words[1] * (2^64)^1 + ...) + static BigInt New(napi_env env, ///< Node-API environment + int sign_bit, ///< Sign bit. 1 if negative. + size_t word_count, ///< Number of words in array + const uint64_t* words ///< Array of words + ); + + BigInt(); ///< Creates a new _empty_ BigInt instance. + BigInt(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + int64_t Int64Value(bool* lossless) + const; ///< Converts a BigInt value to a 64-bit signed integer value. + uint64_t Uint64Value(bool* lossless) + const; ///< Converts a BigInt value to a 64-bit unsigned integer value. + + size_t WordCount() const; ///< The number of 64-bit words needed to store + ///< the result of ToWords(). + + /// Writes the contents of this BigInt to a specified memory location. + /// `sign_bit` must be provided and will be set to 1 if this BigInt is + /// negative. + /// `*word_count` has to be initialized to the length of the `words` array. 
+ /// Upon return, it will be set to the actual number of words that would + /// be needed to store this BigInt (i.e. the return value of `WordCount()`). + void ToWords(int* sign_bit, size_t* word_count, uint64_t* words); +}; +#endif // NAPI_VERSION > 5 + +#if (NAPI_VERSION > 4) +/// A JavaScript date value. +class Date : public Value { + public: + /// Creates a new Date value from a double primitive. + static Date New(napi_env env, ///< Node-API environment + double value ///< Number value + ); + + Date(); ///< Creates a new _empty_ Date instance. + Date(napi_env env, napi_value value); ///< Wraps a Node-API value primitive. + operator double() const; ///< Converts a Date value to double primitive + + double ValueOf() const; ///< Converts a Date value to a double primitive. +}; +#endif + +/// A JavaScript string or symbol value (that can be used as a property name). +class Name : public Value { + public: + Name(); ///< Creates a new _empty_ Name instance. + Name(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. +}; + +/// A JavaScript string value. +class String : public Name { + public: + /// Creates a new String value from a UTF-8 encoded C++ string. + static String New(napi_env env, ///< Node-API environment + const std::string& value ///< UTF-8 encoded C++ string + ); + + /// Creates a new String value from a UTF-16 encoded C++ string. + static String New(napi_env env, ///< Node-API environment + const std::u16string& value ///< UTF-16 encoded C++ string + ); + + /// Creates a new String value from a UTF-8 encoded C string. + static String New( + napi_env env, ///< Node-API environment + const char* value ///< UTF-8 encoded null-terminated C string + ); + + /// Creates a new String value from a UTF-16 encoded C string. 
+ static String New( + napi_env env, ///< Node-API environment + const char16_t* value ///< UTF-16 encoded null-terminated C string + ); + + /// Creates a new String value from a UTF-8 encoded C string with specified + /// length. + static String New(napi_env env, ///< Node-API environment + const char* value, ///< UTF-8 encoded C string (not + ///< necessarily null-terminated) + size_t length ///< length of the string in bytes + ); + + /// Creates a new String value from a UTF-16 encoded C string with specified + /// length. + static String New( + napi_env env, ///< Node-API environment + const char16_t* value, ///< UTF-16 encoded C string (not necessarily + ///< null-terminated) + size_t length ///< Length of the string in 2-byte code units + ); + + /// Creates a new String based on the original object's type. + /// + /// `value` may be any of: + /// - const char* (encoded using UTF-8, null-terminated) + /// - const char16_t* (encoded using UTF-16-LE, null-terminated) + /// - std::string (encoded using UTF-8) + /// - std::u16string + template + static String From(napi_env env, const T& value); + + String(); ///< Creates a new _empty_ String instance. + String(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + operator std::string() + const; ///< Converts a String value to a UTF-8 encoded C++ string. + operator std::u16string() + const; ///< Converts a String value to a UTF-16 encoded C++ string. + std::string Utf8Value() + const; ///< Converts a String value to a UTF-8 encoded C++ string. + std::u16string Utf16Value() + const; ///< Converts a String value to a UTF-16 encoded C++ string. +}; + +/// A JavaScript symbol value. +class Symbol : public Name { + public: + /// Creates a new Symbol value with an optional description. 
+ static Symbol New( + napi_env env, ///< Node-API environment + const char* description = + nullptr ///< Optional UTF-8 encoded null-terminated C string + /// describing the symbol + ); + + /// Creates a new Symbol value with a description. + static Symbol New( + napi_env env, ///< Node-API environment + const std::string& + description ///< UTF-8 encoded C++ string describing the symbol + ); + + /// Creates a new Symbol value with a description. + static Symbol New(napi_env env, ///< Node-API environment + String description ///< String value describing the symbol + ); + + /// Creates a new Symbol value with a description. + static Symbol New( + napi_env env, ///< Node-API environment + napi_value description ///< String value describing the symbol + ); + + /// Get a public Symbol (e.g. Symbol.iterator). + static MaybeOrValue WellKnown(napi_env, const std::string& name); + + // Create a symbol in the global registry, UTF-8 Encoded cpp string + static MaybeOrValue For(napi_env env, const std::string& description); + + // Create a symbol in the global registry, C style string (null terminated) + static MaybeOrValue For(napi_env env, const char* description); + + // Create a symbol in the global registry, String value describing the symbol + static MaybeOrValue For(napi_env env, String description); + + // Create a symbol in the global registry, napi_value describing the symbol + static MaybeOrValue For(napi_env env, napi_value description); + + Symbol(); ///< Creates a new _empty_ Symbol instance. + Symbol(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. +}; + +/// A JavaScript object value. +class Object : public Value { + public: + /// Enables property and element assignments using indexing syntax. + /// + /// This is a convenient helper to get and set object properties. As + /// getting and setting object properties may throw with JavaScript + /// exceptions, it is notable that these operations may fail. 
+ /// When NODE_ADDON_API_ENABLE_MAYBE is defined, the process will abort + /// on JavaScript exceptions. + /// + /// Example: + /// + /// Napi::Value propertyValue = object1['A']; + /// object2['A'] = propertyValue; + /// Napi::Value elementValue = array[0]; + /// array[1] = elementValue; + template + class PropertyLValue { + public: + /// Converts an L-value to a value. + operator Value() const; + + /// Assigns a value to the property. The type of value can be + /// anything supported by `Object::Set`. + template + PropertyLValue& operator=(ValueType value); + + private: + PropertyLValue() = delete; + PropertyLValue(Object object, Key key); + napi_env _env; + napi_value _object; + Key _key; + + friend class Napi::Object; + }; + + /// Creates a new Object value. + static Object New(napi_env env ///< Node-API environment + ); + + Object(); ///< Creates a new _empty_ Object instance. + Object(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + /// Gets or sets a named property. + PropertyLValue operator[]( + const char* utf8name ///< UTF-8 encoded null-terminated property name + ); + + /// Gets or sets a named property. + PropertyLValue operator[]( + const std::string& utf8name ///< UTF-8 encoded property name + ); + + /// Gets or sets an indexed property or array element. + PropertyLValue operator[]( + uint32_t index /// Property / element index + ); + + /// Gets or sets an indexed property or array element. + PropertyLValue operator[](Value index /// Property / element index + ) const; + + /// Gets a named property. + MaybeOrValue operator[]( + const char* utf8name ///< UTF-8 encoded null-terminated property name + ) const; + + /// Gets a named property. + MaybeOrValue operator[]( + const std::string& utf8name ///< UTF-8 encoded property name + ) const; + + /// Gets an indexed property or array element. + MaybeOrValue operator[](uint32_t index ///< Property / element index + ) const; + + /// Checks whether a property is present. 
+ MaybeOrValue Has(napi_value key ///< Property key primitive + ) const; + + /// Checks whether a property is present. + MaybeOrValue Has(Value key ///< Property key + ) const; + + /// Checks whether a named property is present. + MaybeOrValue Has( + const char* utf8name ///< UTF-8 encoded null-terminated property name + ) const; + + /// Checks whether a named property is present. + MaybeOrValue Has( + const std::string& utf8name ///< UTF-8 encoded property name + ) const; + + /// Checks whether a own property is present. + MaybeOrValue HasOwnProperty(napi_value key ///< Property key primitive + ) const; + + /// Checks whether a own property is present. + MaybeOrValue HasOwnProperty(Value key ///< Property key + ) const; + + /// Checks whether a own property is present. + MaybeOrValue HasOwnProperty( + const char* utf8name ///< UTF-8 encoded null-terminated property name + ) const; + + /// Checks whether a own property is present. + MaybeOrValue HasOwnProperty( + const std::string& utf8name ///< UTF-8 encoded property name + ) const; + + /// Gets a property. + MaybeOrValue Get(napi_value key ///< Property key primitive + ) const; + + /// Gets a property. + MaybeOrValue Get(Value key ///< Property key + ) const; + + /// Gets a named property. + MaybeOrValue Get( + const char* utf8name ///< UTF-8 encoded null-terminated property name + ) const; + + /// Gets a named property. + MaybeOrValue Get( + const std::string& utf8name ///< UTF-8 encoded property name + ) const; + + /// Sets a property. + template + MaybeOrValue Set(napi_value key, ///< Property key primitive + const ValueType& value ///< Property value primitive + ) const; + + /// Sets a property. + template + MaybeOrValue Set(Value key, ///< Property key + const ValueType& value ///< Property value + ) const; + + /// Sets a named property. + template + MaybeOrValue Set( + const char* utf8name, ///< UTF-8 encoded null-terminated property name + const ValueType& value) const; + + /// Sets a named property. 
+ template + MaybeOrValue Set( + const std::string& utf8name, ///< UTF-8 encoded property name + const ValueType& value ///< Property value primitive + ) const; + + /// Delete property. + MaybeOrValue Delete(napi_value key ///< Property key primitive + ) const; + + /// Delete property. + MaybeOrValue Delete(Value key ///< Property key + ) const; + + /// Delete property. + MaybeOrValue Delete( + const char* utf8name ///< UTF-8 encoded null-terminated property name + ) const; + + /// Delete property. + MaybeOrValue Delete( + const std::string& utf8name ///< UTF-8 encoded property name + ) const; + + /// Checks whether an indexed property is present. + MaybeOrValue Has(uint32_t index ///< Property / element index + ) const; + + /// Gets an indexed property or array element. + MaybeOrValue Get(uint32_t index ///< Property / element index + ) const; + + /// Sets an indexed property or array element. + template + MaybeOrValue Set(uint32_t index, ///< Property / element index + const ValueType& value ///< Property value primitive + ) const; + + /// Deletes an indexed property or array element. + MaybeOrValue Delete(uint32_t index ///< Property / element index + ) const; + + /// This operation can fail in case of Proxy.[[OwnPropertyKeys]] and + /// Proxy.[[GetOwnProperty]] calling into JavaScript. See: + /// - + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-ownpropertykeys + /// - + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-getownproperty-p + MaybeOrValue GetPropertyNames() const; ///< Get all property names + + /// Defines a property on the object. + /// + /// This operation can fail in case of Proxy.[[DefineOwnProperty]] calling + /// into JavaScript. 
See + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-defineownproperty-p-desc + MaybeOrValue DefineProperty( + const PropertyDescriptor& + property ///< Descriptor for the property to be defined + ) const; + + /// Defines properties on the object. + /// + /// This operation can fail in case of Proxy.[[DefineOwnProperty]] calling + /// into JavaScript. See + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-defineownproperty-p-desc + MaybeOrValue DefineProperties( + const std::initializer_list& properties + ///< List of descriptors for the properties to be defined + ) const; + + /// Defines properties on the object. + /// + /// This operation can fail in case of Proxy.[[DefineOwnProperty]] calling + /// into JavaScript. See + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-defineownproperty-p-desc + MaybeOrValue DefineProperties( + const std::vector& properties + ///< Vector of descriptors for the properties to be defined + ) const; + + /// Checks if an object is an instance created by a constructor function. + /// + /// This is equivalent to the JavaScript `instanceof` operator. + /// + /// This operation can fail in case of Proxy.[[GetPrototypeOf]] calling into + /// JavaScript. 
+ /// See + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-getprototypeof + MaybeOrValue InstanceOf( + const Function& constructor ///< Constructor function + ) const; + + template + inline void AddFinalizer(Finalizer finalizeCallback, T* data) const; + + template + inline void AddFinalizer(Finalizer finalizeCallback, + T* data, + Hint* finalizeHint) const; + +#ifdef NAPI_CPP_EXCEPTIONS + class const_iterator; + + inline const_iterator begin() const; + + inline const_iterator end() const; + + class iterator; + + inline iterator begin(); + + inline iterator end(); +#endif // NAPI_CPP_EXCEPTIONS + +#if NAPI_VERSION >= 8 + /// This operation can fail in case of Proxy.[[GetPrototypeOf]] calling into + /// JavaScript. + /// See + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-getprototypeof + MaybeOrValue Freeze() const; + /// This operation can fail in case of Proxy.[[GetPrototypeOf]] calling into + /// JavaScript. + /// See + /// https://tc39.es/ecma262/#sec-proxy-object-internal-methods-and-internal-slots-getprototypeof + MaybeOrValue Seal() const; +#endif // NAPI_VERSION >= 8 +}; + +template +class External : public Value { + public: + static External New(napi_env env, T* data); + + // Finalizer must implement `void operator()(Env env, T* data)`. + template + static External New(napi_env env, T* data, Finalizer finalizeCallback); + // Finalizer must implement `void operator()(Env env, T* data, Hint* hint)`. 
+ template + static External New(napi_env env, + T* data, + Finalizer finalizeCallback, + Hint* finalizeHint); + + External(); + External(napi_env env, napi_value value); + + T* Data() const; +}; + +class Array : public Object { + public: + static Array New(napi_env env); + static Array New(napi_env env, size_t length); + + Array(); + Array(napi_env env, napi_value value); + + uint32_t Length() const; +}; + +#ifdef NAPI_CPP_EXCEPTIONS +class Object::const_iterator { + private: + enum class Type { BEGIN, END }; + + inline const_iterator(const Object* object, const Type type); + + public: + inline const_iterator& operator++(); + + inline bool operator==(const const_iterator& other) const; + + inline bool operator!=(const const_iterator& other) const; + + inline const std::pair> operator*() + const; + + private: + const Napi::Object* _object; + Array _keys; + uint32_t _index; + + friend class Object; +}; + +class Object::iterator { + private: + enum class Type { BEGIN, END }; + + inline iterator(Object* object, const Type type); + + public: + inline iterator& operator++(); + + inline bool operator==(const iterator& other) const; + + inline bool operator!=(const iterator& other) const; + + inline std::pair> operator*(); + + private: + Napi::Object* _object; + Array _keys; + uint32_t _index; + + friend class Object; +}; +#endif // NAPI_CPP_EXCEPTIONS + +/// A JavaScript array buffer value. +class ArrayBuffer : public Object { + public: + /// Creates a new ArrayBuffer instance over a new automatically-allocated + /// buffer. + static ArrayBuffer New( + napi_env env, ///< Node-API environment + size_t byteLength ///< Length of the buffer to be allocated, in bytes + ); + + /// Creates a new ArrayBuffer instance, using an external buffer with + /// specified byte length. 
+ static ArrayBuffer New( + napi_env env, ///< Node-API environment + void* externalData, ///< Pointer to the external buffer to be used by + ///< the array + size_t byteLength ///< Length of the external buffer to be used by the + ///< array, in bytes + ); + + /// Creates a new ArrayBuffer instance, using an external buffer with + /// specified byte length. + template + static ArrayBuffer New( + napi_env env, ///< Node-API environment + void* externalData, ///< Pointer to the external buffer to be used by + ///< the array + size_t byteLength, ///< Length of the external buffer to be used by the + ///< array, + /// in bytes + Finalizer finalizeCallback ///< Function to be called when the array + ///< buffer is destroyed; + /// must implement `void operator()(Env env, + /// void* externalData)` + ); + + /// Creates a new ArrayBuffer instance, using an external buffer with + /// specified byte length. + template + static ArrayBuffer New( + napi_env env, ///< Node-API environment + void* externalData, ///< Pointer to the external buffer to be used by + ///< the array + size_t byteLength, ///< Length of the external buffer to be used by the + ///< array, + /// in bytes + Finalizer finalizeCallback, ///< Function to be called when the array + ///< buffer is destroyed; + /// must implement `void operator()(Env + /// env, void* externalData, Hint* hint)` + Hint* finalizeHint ///< Hint (second parameter) to be passed to the + ///< finalize callback + ); + + ArrayBuffer(); ///< Creates a new _empty_ ArrayBuffer instance. + ArrayBuffer(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + void* Data(); ///< Gets a pointer to the data buffer. + size_t ByteLength(); ///< Gets the length of the array buffer in bytes. + +#if NAPI_VERSION >= 7 + bool IsDetached() const; + void Detach(); +#endif // NAPI_VERSION >= 7 +}; + +/// A JavaScript typed-array value with unknown array type. 
+/// +/// For type-specific operations, cast to a `TypedArrayOf` instance using the +/// `As()` method: +/// +/// Napi::TypedArray array = ... +/// if (t.TypedArrayType() == napi_int32_array) { +/// Napi::Int32Array int32Array = t.As(); +/// } +class TypedArray : public Object { + public: + TypedArray(); ///< Creates a new _empty_ TypedArray instance. + TypedArray(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + napi_typedarray_type TypedArrayType() + const; ///< Gets the type of this typed-array. + Napi::ArrayBuffer ArrayBuffer() const; ///< Gets the backing array buffer. + + uint8_t ElementSize() + const; ///< Gets the size in bytes of one element in the array. + size_t ElementLength() const; ///< Gets the number of elements in the array. + size_t ByteOffset() + const; ///< Gets the offset into the buffer where the array starts. + size_t ByteLength() const; ///< Gets the length of the array in bytes. + + protected: + /// !cond INTERNAL + napi_typedarray_type _type; + size_t _length; + + TypedArray(napi_env env, + napi_value value, + napi_typedarray_type type, + size_t length); + + template + static +#if defined(NAPI_HAS_CONSTEXPR) + constexpr +#endif + napi_typedarray_type + TypedArrayTypeForPrimitiveType() { + return std::is_same::value ? napi_int8_array + : std::is_same::value ? napi_uint8_array + : std::is_same::value ? napi_int16_array + : std::is_same::value ? napi_uint16_array + : std::is_same::value ? napi_int32_array + : std::is_same::value ? napi_uint32_array + : std::is_same::value ? napi_float32_array + : std::is_same::value ? napi_float64_array +#if NAPI_VERSION > 5 + : std::is_same::value ? napi_bigint64_array + : std::is_same::value ? napi_biguint64_array +#endif // NAPI_VERSION > 5 + : napi_int8_array; + } + /// !endcond +}; + +/// A JavaScript typed-array value with known array type. 
+/// +/// Note while it is possible to create and access Uint8 "clamped" arrays using +/// this class, the _clamping_ behavior is only applied in JavaScript. +template +class TypedArrayOf : public TypedArray { + public: + /// Creates a new TypedArray instance over a new automatically-allocated array + /// buffer. + /// + /// The array type parameter can normally be omitted (because it is inferred + /// from the template parameter T), except when creating a "clamped" array: + /// + /// Uint8Array::New(env, length, napi_uint8_clamped_array) + static TypedArrayOf New( + napi_env env, ///< Node-API environment + size_t elementLength, ///< Length of the created array, as a number of + ///< elements +#if defined(NAPI_HAS_CONSTEXPR) + napi_typedarray_type type = + TypedArray::TypedArrayTypeForPrimitiveType() +#else + napi_typedarray_type type +#endif + ///< Type of array, if different from the default array type for the + ///< template parameter T. + ); + + /// Creates a new TypedArray instance over a provided array buffer. + /// + /// The array type parameter can normally be omitted (because it is inferred + /// from the template parameter T), except when creating a "clamped" array: + /// + /// Uint8Array::New(env, length, buffer, 0, napi_uint8_clamped_array) + static TypedArrayOf New( + napi_env env, ///< Node-API environment + size_t elementLength, ///< Length of the created array, as a number of + ///< elements + Napi::ArrayBuffer arrayBuffer, ///< Backing array buffer instance to use + size_t bufferOffset, ///< Offset into the array buffer where the + ///< typed-array starts +#if defined(NAPI_HAS_CONSTEXPR) + napi_typedarray_type type = + TypedArray::TypedArrayTypeForPrimitiveType() +#else + napi_typedarray_type type +#endif + ///< Type of array, if different from the default array type for the + ///< template parameter T. + ); + + TypedArrayOf(); ///< Creates a new _empty_ TypedArrayOf instance. 
+ TypedArrayOf(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + T& operator[](size_t index); ///< Gets or sets an element in the array. + const T& operator[](size_t index) const; ///< Gets an element in the array. + + /// Gets a pointer to the array's backing buffer. + /// + /// This is not necessarily the same as the `ArrayBuffer::Data()` pointer, + /// because the typed-array may have a non-zero `ByteOffset()` into the + /// `ArrayBuffer`. + T* Data(); + + /// Gets a pointer to the array's backing buffer. + /// + /// This is not necessarily the same as the `ArrayBuffer::Data()` pointer, + /// because the typed-array may have a non-zero `ByteOffset()` into the + /// `ArrayBuffer`. + const T* Data() const; + + private: + T* _data; + + TypedArrayOf(napi_env env, + napi_value value, + napi_typedarray_type type, + size_t length, + T* data); +}; + +/// The DataView provides a low-level interface for reading/writing multiple +/// number types in an ArrayBuffer irrespective of the platform's endianness. +class DataView : public Object { + public: + static DataView New(napi_env env, Napi::ArrayBuffer arrayBuffer); + static DataView New(napi_env env, + Napi::ArrayBuffer arrayBuffer, + size_t byteOffset); + static DataView New(napi_env env, + Napi::ArrayBuffer arrayBuffer, + size_t byteOffset, + size_t byteLength); + + DataView(); ///< Creates a new _empty_ DataView instance. + DataView(napi_env env, + napi_value value); ///< Wraps a Node-API value primitive. + + Napi::ArrayBuffer ArrayBuffer() const; ///< Gets the backing array buffer. + size_t ByteOffset() + const; ///< Gets the offset into the buffer where the array starts. + size_t ByteLength() const; ///< Gets the length of the array in bytes. 
+ + void* Data() const; + + float GetFloat32(size_t byteOffset) const; + double GetFloat64(size_t byteOffset) const; + int8_t GetInt8(size_t byteOffset) const; + int16_t GetInt16(size_t byteOffset) const; + int32_t GetInt32(size_t byteOffset) const; + uint8_t GetUint8(size_t byteOffset) const; + uint16_t GetUint16(size_t byteOffset) const; + uint32_t GetUint32(size_t byteOffset) const; + + void SetFloat32(size_t byteOffset, float value) const; + void SetFloat64(size_t byteOffset, double value) const; + void SetInt8(size_t byteOffset, int8_t value) const; + void SetInt16(size_t byteOffset, int16_t value) const; + void SetInt32(size_t byteOffset, int32_t value) const; + void SetUint8(size_t byteOffset, uint8_t value) const; + void SetUint16(size_t byteOffset, uint16_t value) const; + void SetUint32(size_t byteOffset, uint32_t value) const; + + private: + template + T ReadData(size_t byteOffset) const; + + template + void WriteData(size_t byteOffset, T value) const; + + void* _data; + size_t _length; +}; + +class Function : public Object { + public: + using VoidCallback = void (*)(const CallbackInfo& info); + using Callback = Value (*)(const CallbackInfo& info); + + template + static Function New(napi_env env, + const char* utf8name = nullptr, + void* data = nullptr); + + template + static Function New(napi_env env, + const char* utf8name = nullptr, + void* data = nullptr); + + template + static Function New(napi_env env, + const std::string& utf8name, + void* data = nullptr); + + template + static Function New(napi_env env, + const std::string& utf8name, + void* data = nullptr); + + /// Callable must implement operator() accepting a const CallbackInfo& + /// and return either void or Value. + template + static Function New(napi_env env, + Callable cb, + const char* utf8name = nullptr, + void* data = nullptr); + /// Callable must implement operator() accepting a const CallbackInfo& + /// and return either void or Value. 
+ template + static Function New(napi_env env, + Callable cb, + const std::string& utf8name, + void* data = nullptr); + + Function(); + Function(napi_env env, napi_value value); + + MaybeOrValue operator()( + const std::initializer_list& args) const; + + MaybeOrValue Call(const std::initializer_list& args) const; + MaybeOrValue Call(const std::vector& args) const; + MaybeOrValue Call(const std::vector& args) const; + MaybeOrValue Call(size_t argc, const napi_value* args) const; + MaybeOrValue Call(napi_value recv, + const std::initializer_list& args) const; + MaybeOrValue Call(napi_value recv, + const std::vector& args) const; + MaybeOrValue Call(napi_value recv, + const std::vector& args) const; + MaybeOrValue Call(napi_value recv, + size_t argc, + const napi_value* args) const; + + MaybeOrValue MakeCallback( + napi_value recv, + const std::initializer_list& args, + napi_async_context context = nullptr) const; + MaybeOrValue MakeCallback(napi_value recv, + const std::vector& args, + napi_async_context context = nullptr) const; + MaybeOrValue MakeCallback(napi_value recv, + size_t argc, + const napi_value* args, + napi_async_context context = nullptr) const; + + MaybeOrValue New(const std::initializer_list& args) const; + MaybeOrValue New(const std::vector& args) const; + MaybeOrValue New(size_t argc, const napi_value* args) const; +}; + +class Promise : public Object { + public: + class Deferred { + public: + static Deferred New(napi_env env); + Deferred(napi_env env); + + Napi::Promise Promise() const; + Napi::Env Env() const; + + void Resolve(napi_value value) const; + void Reject(napi_value value) const; + + private: + napi_env _env; + napi_deferred _deferred; + napi_value _promise; + }; + + Promise(napi_env env, napi_value value); +}; + +template +class Buffer : public Uint8Array { + public: + static Buffer New(napi_env env, size_t length); + static Buffer New(napi_env env, T* data, size_t length); + + // Finalizer must implement `void operator()(Env env, T* 
data)`. + template + static Buffer New(napi_env env, + T* data, + size_t length, + Finalizer finalizeCallback); + // Finalizer must implement `void operator()(Env env, T* data, Hint* hint)`. + template + static Buffer New(napi_env env, + T* data, + size_t length, + Finalizer finalizeCallback, + Hint* finalizeHint); + + static Buffer Copy(napi_env env, const T* data, size_t length); + + Buffer(); + Buffer(napi_env env, napi_value value); + size_t Length() const; + T* Data() const; + + private: + mutable size_t _length; + mutable T* _data; + + Buffer(napi_env env, napi_value value, size_t length, T* data); + void EnsureInfo() const; +}; + +/// Holds a counted reference to a value; initially a weak reference unless +/// otherwise specified, may be changed to/from a strong reference by adjusting +/// the refcount. +/// +/// The referenced value is not immediately destroyed when the reference count +/// is zero; it is merely then eligible for garbage-collection if there are no +/// other references to the value. +template +class Reference { + public: + static Reference New(const T& value, uint32_t initialRefcount = 0); + + Reference(); + Reference(napi_env env, napi_ref ref); + ~Reference(); + + // A reference can be moved but cannot be copied. + Reference(Reference&& other); + Reference& operator=(Reference&& other); + NAPI_DISALLOW_ASSIGN(Reference) + + operator napi_ref() const; + bool operator==(const Reference& other) const; + bool operator!=(const Reference& other) const; + + Napi::Env Env() const; + bool IsEmpty() const; + + // Note when getting the value of a Reference it is usually correct to do so + // within a HandleScope so that the value handle gets cleaned up efficiently. 
+ T Value() const; + + uint32_t Ref() const; + uint32_t Unref() const; + void Reset(); + void Reset(const T& value, uint32_t refcount = 0); + + // Call this on a reference that is declared as static data, to prevent its + // destructor from running at program shutdown time, which would attempt to + // reset the reference when the environment is no longer valid. Avoid using + // this if at all possible. If you do need to use static data, MAKE SURE to + // warn your users that your addon is NOT threadsafe. + void SuppressDestruct(); + + protected: + Reference(const Reference&); + + /// !cond INTERNAL + napi_env _env; + napi_ref _ref; + /// !endcond + + private: + bool _suppressDestruct; +}; + +class ObjectReference : public Reference { + public: + ObjectReference(); + ObjectReference(napi_env env, napi_ref ref); + + // A reference can be moved but cannot be copied. + ObjectReference(Reference&& other); + ObjectReference& operator=(Reference&& other); + ObjectReference(ObjectReference&& other); + ObjectReference& operator=(ObjectReference&& other); + NAPI_DISALLOW_ASSIGN(ObjectReference) + + MaybeOrValue Get(const char* utf8name) const; + MaybeOrValue Get(const std::string& utf8name) const; + MaybeOrValue Set(const char* utf8name, napi_value value) const; + MaybeOrValue Set(const char* utf8name, Napi::Value value) const; + MaybeOrValue Set(const char* utf8name, const char* utf8value) const; + MaybeOrValue Set(const char* utf8name, bool boolValue) const; + MaybeOrValue Set(const char* utf8name, double numberValue) const; + MaybeOrValue Set(const std::string& utf8name, napi_value value) const; + MaybeOrValue Set(const std::string& utf8name, Napi::Value value) const; + MaybeOrValue Set(const std::string& utf8name, + std::string& utf8value) const; + MaybeOrValue Set(const std::string& utf8name, bool boolValue) const; + MaybeOrValue Set(const std::string& utf8name, double numberValue) const; + + MaybeOrValue Get(uint32_t index) const; + MaybeOrValue Set(uint32_t index, 
const napi_value value) const; + MaybeOrValue Set(uint32_t index, const Napi::Value value) const; + MaybeOrValue Set(uint32_t index, const char* utf8value) const; + MaybeOrValue Set(uint32_t index, const std::string& utf8value) const; + MaybeOrValue Set(uint32_t index, bool boolValue) const; + MaybeOrValue Set(uint32_t index, double numberValue) const; + + protected: + ObjectReference(const ObjectReference&); +}; + +class FunctionReference : public Reference { + public: + FunctionReference(); + FunctionReference(napi_env env, napi_ref ref); + + // A reference can be moved but cannot be copied. + FunctionReference(Reference&& other); + FunctionReference& operator=(Reference&& other); + FunctionReference(FunctionReference&& other); + FunctionReference& operator=(FunctionReference&& other); + NAPI_DISALLOW_ASSIGN_COPY(FunctionReference) + + MaybeOrValue operator()( + const std::initializer_list& args) const; + + MaybeOrValue Call( + const std::initializer_list& args) const; + MaybeOrValue Call(const std::vector& args) const; + MaybeOrValue Call( + napi_value recv, const std::initializer_list& args) const; + MaybeOrValue Call(napi_value recv, + const std::vector& args) const; + MaybeOrValue Call(napi_value recv, + size_t argc, + const napi_value* args) const; + + MaybeOrValue MakeCallback( + napi_value recv, + const std::initializer_list& args, + napi_async_context context = nullptr) const; + MaybeOrValue MakeCallback( + napi_value recv, + const std::vector& args, + napi_async_context context = nullptr) const; + MaybeOrValue MakeCallback( + napi_value recv, + size_t argc, + const napi_value* args, + napi_async_context context = nullptr) const; + + MaybeOrValue New(const std::initializer_list& args) const; + MaybeOrValue New(const std::vector& args) const; +}; + +// Shortcuts to creating a new reference with inferred type and refcount = 0. 
+template +Reference Weak(T value); +ObjectReference Weak(Object value); +FunctionReference Weak(Function value); + +// Shortcuts to creating a new reference with inferred type and refcount = 1. +template +Reference Persistent(T value); +ObjectReference Persistent(Object value); +FunctionReference Persistent(Function value); + +/// A persistent reference to a JavaScript error object. Use of this class +/// depends somewhat on whether C++ exceptions are enabled at compile time. +/// +/// ### Handling Errors With C++ Exceptions +/// +/// If C++ exceptions are enabled, then the `Error` class extends +/// `std::exception` and enables integrated error-handling for C++ exceptions +/// and JavaScript exceptions. +/// +/// If a Node-API call fails without executing any JavaScript code (for +/// example due to an invalid argument), then the Node-API wrapper +/// automatically converts and throws the error as a C++ exception of type +/// `Napi::Error`. Or if a JavaScript function called by C++ code via Node-API +/// throws a JavaScript exception, then the Node-API wrapper automatically +/// converts and throws it as a C++ exception of type `Napi::Error`. +/// +/// If a C++ exception of type `Napi::Error` escapes from a Node-API C++ +/// callback, then the Node-API wrapper automatically converts and throws it +/// as a JavaScript exception. Therefore, catching a C++ exception of type +/// `Napi::Error` prevents a JavaScript exception from being thrown. +/// +/// #### Example 1A - Throwing a C++ exception: +/// +/// Napi::Env env = ... +/// throw Napi::Error::New(env, "Example exception"); +/// +/// Following C++ statements will not be executed. The exception will bubble +/// up as a C++ exception of type `Napi::Error`, until it is either caught +/// while still in C++, or else automatically propataged as a JavaScript +/// exception when the callback returns to JavaScript. 
+/// +/// #### Example 2A - Propagating a Node-API C++ exception: +/// +/// Napi::Function jsFunctionThatThrows = someObj.As(); +/// Napi::Value result = jsFunctionThatThrows({ arg1, arg2 }); +/// +/// Following C++ statements will not be executed. The exception will bubble +/// up as a C++ exception of type `Napi::Error`, until it is either caught +/// while still in C++, or else automatically propagated as a JavaScript +/// exception when the callback returns to JavaScript. +/// +/// #### Example 3A - Handling a Node-API C++ exception: +/// +/// Napi::Function jsFunctionThatThrows = someObj.As(); +/// Napi::Value result; +/// try { +/// result = jsFunctionThatThrows({ arg1, arg2 }); +/// } catch (const Napi::Error& e) { +/// cerr << "Caught JavaScript exception: " + e.what(); +/// } +/// +/// Since the exception was caught here, it will not be propagated as a +/// JavaScript exception. +/// +/// ### Handling Errors Without C++ Exceptions +/// +/// If C++ exceptions are disabled (by defining `NAPI_DISABLE_CPP_EXCEPTIONS`) +/// then this class does not extend `std::exception`, and APIs in the `Napi` +/// namespace do not throw C++ exceptions when they fail. Instead, they raise +/// _pending_ JavaScript exceptions and return _empty_ `Value`s. Calling code +/// should check `Value::IsEmpty()` before attempting to use a returned value, +/// and may use methods on the `Env` class to check for, get, and clear a +/// pending JavaScript exception. If the pending exception is not cleared, it +/// will be thrown when the native callback returns to JavaScript. +/// +/// #### Example 1B - Throwing a JS exception +/// +/// Napi::Env env = ... +/// Napi::Error::New(env, "Example +/// exception").ThrowAsJavaScriptException(); return; +/// +/// After throwing a JS exception, the code should generally return +/// immediately from the native callback, after performing any necessary +/// cleanup. 
+/// +/// #### Example 2B - Propagating a Node-API JS exception: +/// +/// Napi::Function jsFunctionThatThrows = someObj.As(); +/// Napi::Value result = jsFunctionThatThrows({ arg1, arg2 }); +/// if (result.IsEmpty()) return; +/// +/// An empty value result from a Node-API call indicates an error occurred, +/// and a JavaScript exception is pending. To let the exception propagate, the +/// code should generally return immediately from the native callback, after +/// performing any necessary cleanup. +/// +/// #### Example 3B - Handling a Node-API JS exception: +/// +/// Napi::Function jsFunctionThatThrows = someObj.As(); +/// Napi::Value result = jsFunctionThatThrows({ arg1, arg2 }); +/// if (result.IsEmpty()) { +/// Napi::Error e = env.GetAndClearPendingException(); +/// cerr << "Caught JavaScript exception: " + e.Message(); +/// } +/// +/// Since the exception was cleared here, it will not be propagated as a +/// JavaScript exception after the native callback returns. +class Error : public ObjectReference +#ifdef NAPI_CPP_EXCEPTIONS + , + public std::exception +#endif // NAPI_CPP_EXCEPTIONS +{ + public: + static Error New(napi_env env); + static Error New(napi_env env, const char* message); + static Error New(napi_env env, const std::string& message); + + static NAPI_NO_RETURN void Fatal(const char* location, const char* message); + + Error(); + Error(napi_env env, napi_value value); + + // An error can be moved or copied. 
+ Error(Error&& other); + Error& operator=(Error&& other); + Error(const Error&); + Error& operator=(const Error&); + + const std::string& Message() const NAPI_NOEXCEPT; + void ThrowAsJavaScriptException() const; + + Object Value() const; + +#ifdef NAPI_CPP_EXCEPTIONS + const char* what() const NAPI_NOEXCEPT override; +#endif // NAPI_CPP_EXCEPTIONS + + protected: + /// !cond INTERNAL + using create_error_fn = napi_status (*)(napi_env envb, + napi_value code, + napi_value msg, + napi_value* result); + + template + static TError New(napi_env env, + const char* message, + size_t length, + create_error_fn create_error); + /// !endcond + + private: + static inline const char* ERROR_WRAP_VALUE() NAPI_NOEXCEPT; + mutable std::string _message; +}; + +class TypeError : public Error { + public: + static TypeError New(napi_env env, const char* message); + static TypeError New(napi_env env, const std::string& message); + + TypeError(); + TypeError(napi_env env, napi_value value); +}; + +class RangeError : public Error { + public: + static RangeError New(napi_env env, const char* message); + static RangeError New(napi_env env, const std::string& message); + + RangeError(); + RangeError(napi_env env, napi_value value); +}; + +class CallbackInfo { + public: + CallbackInfo(napi_env env, napi_callback_info info); + ~CallbackInfo(); + + // Disallow copying to prevent multiple free of _dynamicArgs + NAPI_DISALLOW_ASSIGN_COPY(CallbackInfo) + + Napi::Env Env() const; + Value NewTarget() const; + bool IsConstructCall() const; + size_t Length() const; + const Value operator[](size_t index) const; + Value This() const; + void* Data() const; + void SetData(void* data); + operator napi_callback_info() const; + + private: + const size_t _staticArgCount = 6; + napi_env _env; + napi_callback_info _info; + napi_value _this; + size_t _argc; + napi_value* _argv; + napi_value _staticArgs[6]; + napi_value* _dynamicArgs; + void* _data; +}; + +class PropertyDescriptor { + public: + using 
GetterCallback = Napi::Value (*)(const Napi::CallbackInfo& info); + using SetterCallback = void (*)(const Napi::CallbackInfo& info); + +#ifndef NODE_ADDON_API_DISABLE_DEPRECATED + template + static PropertyDescriptor Accessor( + const char* utf8name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + const std::string& utf8name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + napi_value name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Name name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + const char* utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + const std::string& utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + napi_value name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Name name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + const char* utf8name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + const std::string& utf8name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + napi_value name, + Callable 
cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + Name name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); +#endif // !NODE_ADDON_API_DISABLE_DEPRECATED + + template + static PropertyDescriptor Accessor( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + + template + static PropertyDescriptor Accessor( + const std::string& utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + + template + static PropertyDescriptor Accessor( + Name name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + + template + static PropertyDescriptor Accessor( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + + template + static PropertyDescriptor Accessor( + const std::string& utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + + template + static PropertyDescriptor Accessor( + Name name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + + template + static PropertyDescriptor Accessor( + Napi::Env env, + Napi::Object object, + const char* utf8name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Napi::Env env, + Napi::Object object, + const std::string& utf8name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Napi::Env env, + Napi::Object object, + Name name, + Getter getter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Napi::Env env, + Napi::Object object, + const char* utf8name, + Getter getter, + Setter setter, + napi_property_attributes 
attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Napi::Env env, + Napi::Object object, + const std::string& utf8name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Accessor( + Napi::Env env, + Napi::Object object, + Name name, + Getter getter, + Setter setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + Napi::Env env, + Napi::Object object, + const char* utf8name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + Napi::Env env, + Napi::Object object, + const std::string& utf8name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor Function( + Napi::Env env, + Napi::Object object, + Name name, + Callable cb, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor Value( + const char* utf8name, + napi_value value, + napi_property_attributes attributes = napi_default); + static PropertyDescriptor Value( + const std::string& utf8name, + napi_value value, + napi_property_attributes attributes = napi_default); + static PropertyDescriptor Value( + napi_value name, + napi_value value, + napi_property_attributes attributes = napi_default); + static PropertyDescriptor Value( + Name name, + Napi::Value value, + napi_property_attributes attributes = napi_default); + + PropertyDescriptor(napi_property_descriptor desc); + + operator napi_property_descriptor&(); + operator const napi_property_descriptor&() const; + + private: + napi_property_descriptor _desc; +}; + +/// Property descriptor for use with `ObjectWrap::DefineClass()`. 
+/// +/// This is different from the standalone `PropertyDescriptor` because it is +/// specific to each `ObjectWrap` subclass. This prevents using descriptors +/// from a different class when defining a new class (preventing the callbacks +/// from having incorrect `this` pointers). +template +class ClassPropertyDescriptor { + public: + ClassPropertyDescriptor(napi_property_descriptor desc) : _desc(desc) {} + + operator napi_property_descriptor&() { return _desc; } + operator const napi_property_descriptor&() const { return _desc; } + + private: + napi_property_descriptor _desc; +}; + +template +struct MethodCallbackData { + TCallback callback; + void* data; +}; + +template +struct AccessorCallbackData { + TGetterCallback getterCallback; + TSetterCallback setterCallback; + void* data; +}; + +template +class InstanceWrap { + public: + using InstanceVoidMethodCallback = void (T::*)(const CallbackInfo& info); + using InstanceMethodCallback = Napi::Value (T::*)(const CallbackInfo& info); + using InstanceGetterCallback = Napi::Value (T::*)(const CallbackInfo& info); + using InstanceSetterCallback = void (T::*)(const CallbackInfo& info, + const Napi::Value& value); + + using PropertyDescriptor = ClassPropertyDescriptor; + + static PropertyDescriptor InstanceMethod( + const char* utf8name, + InstanceVoidMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor InstanceMethod( + const char* utf8name, + InstanceMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor InstanceMethod( + Symbol name, + InstanceVoidMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor InstanceMethod( + Symbol name, + InstanceMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor 
InstanceMethod( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor InstanceMethod( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor InstanceMethod( + Symbol name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor InstanceMethod( + Symbol name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor InstanceAccessor( + const char* utf8name, + InstanceGetterCallback getter, + InstanceSetterCallback setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor InstanceAccessor( + Symbol name, + InstanceGetterCallback getter, + InstanceSetterCallback setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor InstanceAccessor( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor InstanceAccessor( + Symbol name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor InstanceValue( + const char* utf8name, + Napi::Value value, + napi_property_attributes attributes = napi_default); + static PropertyDescriptor InstanceValue( + Symbol name, + Napi::Value value, + napi_property_attributes attributes = napi_default); + + protected: + static void AttachPropData(napi_env env, + napi_value value, + const napi_property_descriptor* prop); + + private: + using This = InstanceWrap; + + using InstanceVoidMethodCallbackData = + MethodCallbackData; + using InstanceMethodCallbackData = + MethodCallbackData; + using InstanceAccessorCallbackData = + AccessorCallbackData; + + static napi_value InstanceVoidMethodCallbackWrapper(napi_env 
env, + napi_callback_info info); + static napi_value InstanceMethodCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value InstanceGetterCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value InstanceSetterCallbackWrapper(napi_env env, + napi_callback_info info); + + template + static napi_value WrappedMethod(napi_env env, + napi_callback_info info) NAPI_NOEXCEPT; + + template + struct SetterTag {}; + + template + static napi_callback WrapSetter(SetterTag) NAPI_NOEXCEPT { + return &This::WrappedMethod; + } + static napi_callback WrapSetter(SetterTag) NAPI_NOEXCEPT { + return nullptr; + } +}; + +/// Base class to be extended by C++ classes exposed to JavaScript; each C++ +/// class instance gets "wrapped" by a JavaScript object that is managed by this +/// class. +/// +/// At initialization time, the `DefineClass()` method must be used to +/// hook up the accessor and method callbacks. It takes a list of +/// property descriptors, which can be constructed via the various +/// static methods on the base class. 
+/// +/// #### Example: +/// +/// class Example: public Napi::ObjectWrap { +/// public: +/// static void Initialize(Napi::Env& env, Napi::Object& target) { +/// Napi::Function constructor = DefineClass(env, "Example", { +/// InstanceAccessor<&Example::GetSomething, +/// &Example::SetSomething>("value"), +/// InstanceMethod<&Example::DoSomething>("doSomething"), +/// }); +/// target.Set("Example", constructor); +/// } +/// +/// Example(const Napi::CallbackInfo& info); // Constructor +/// Napi::Value GetSomething(const Napi::CallbackInfo& info); +/// void SetSomething(const Napi::CallbackInfo& info, const Napi::Value& +/// value); Napi::Value DoSomething(const Napi::CallbackInfo& info); +/// } +template +class ObjectWrap : public InstanceWrap, public Reference { + public: + ObjectWrap(const CallbackInfo& callbackInfo); + virtual ~ObjectWrap(); + + static T* Unwrap(Object wrapper); + + // Methods exposed to JavaScript must conform to one of these callback + // signatures. + using StaticVoidMethodCallback = void (*)(const CallbackInfo& info); + using StaticMethodCallback = Napi::Value (*)(const CallbackInfo& info); + using StaticGetterCallback = Napi::Value (*)(const CallbackInfo& info); + using StaticSetterCallback = void (*)(const CallbackInfo& info, + const Napi::Value& value); + + using PropertyDescriptor = ClassPropertyDescriptor; + + static Function DefineClass( + Napi::Env env, + const char* utf8name, + const std::initializer_list& properties, + void* data = nullptr); + static Function DefineClass(Napi::Env env, + const char* utf8name, + const std::vector& properties, + void* data = nullptr); + static PropertyDescriptor StaticMethod( + const char* utf8name, + StaticVoidMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor StaticMethod( + const char* utf8name, + StaticMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static 
PropertyDescriptor StaticMethod( + Symbol name, + StaticVoidMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor StaticMethod( + Symbol name, + StaticMethodCallback method, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor StaticMethod( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor StaticMethod( + Symbol name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor StaticMethod( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor StaticMethod( + Symbol name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor StaticAccessor( + const char* utf8name, + StaticGetterCallback getter, + StaticSetterCallback setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor StaticAccessor( + Symbol name, + StaticGetterCallback getter, + StaticSetterCallback setter, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor StaticAccessor( + const char* utf8name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + template + static PropertyDescriptor StaticAccessor( + Symbol name, + napi_property_attributes attributes = napi_default, + void* data = nullptr); + static PropertyDescriptor StaticValue( + const char* utf8name, + Napi::Value value, + napi_property_attributes attributes = napi_default); + static PropertyDescriptor StaticValue( + Symbol name, + Napi::Value value, + napi_property_attributes attributes = napi_default); + static Napi::Value OnCalledAsFunction(const Napi::CallbackInfo& 
callbackInfo); + virtual void Finalize(Napi::Env env); + + private: + using This = ObjectWrap; + + static napi_value ConstructorCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value StaticVoidMethodCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value StaticMethodCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value StaticGetterCallbackWrapper(napi_env env, + napi_callback_info info); + static napi_value StaticSetterCallbackWrapper(napi_env env, + napi_callback_info info); + static void FinalizeCallback(napi_env env, void* data, void* hint); + static Function DefineClass(Napi::Env env, + const char* utf8name, + const size_t props_count, + const napi_property_descriptor* props, + void* data = nullptr); + + using StaticVoidMethodCallbackData = + MethodCallbackData; + using StaticMethodCallbackData = MethodCallbackData; + + using StaticAccessorCallbackData = + AccessorCallbackData; + + template + static napi_value WrappedMethod(napi_env env, + napi_callback_info info) NAPI_NOEXCEPT; + + template + struct StaticSetterTag {}; + + template + static napi_callback WrapStaticSetter(StaticSetterTag) NAPI_NOEXCEPT { + return &This::WrappedMethod; + } + static napi_callback WrapStaticSetter(StaticSetterTag) + NAPI_NOEXCEPT { + return nullptr; + } + + bool _construction_failed = true; +}; + +class HandleScope { + public: + HandleScope(napi_env env, napi_handle_scope scope); + explicit HandleScope(Napi::Env env); + ~HandleScope(); + + // Disallow copying to prevent double close of napi_handle_scope + NAPI_DISALLOW_ASSIGN_COPY(HandleScope) + + operator napi_handle_scope() const; + + Napi::Env Env() const; + + private: + napi_env _env; + napi_handle_scope _scope; +}; + +class EscapableHandleScope { + public: + EscapableHandleScope(napi_env env, napi_escapable_handle_scope scope); + explicit EscapableHandleScope(Napi::Env env); + ~EscapableHandleScope(); + + // Disallow copying to prevent double close of 
napi_escapable_handle_scope + NAPI_DISALLOW_ASSIGN_COPY(EscapableHandleScope) + + operator napi_escapable_handle_scope() const; + + Napi::Env Env() const; + Value Escape(napi_value escapee); + + private: + napi_env _env; + napi_escapable_handle_scope _scope; +}; + +#if (NAPI_VERSION > 2) +class CallbackScope { + public: + CallbackScope(napi_env env, napi_callback_scope scope); + CallbackScope(napi_env env, napi_async_context context); + virtual ~CallbackScope(); + + // Disallow copying to prevent double close of napi_callback_scope + NAPI_DISALLOW_ASSIGN_COPY(CallbackScope) + + operator napi_callback_scope() const; + + Napi::Env Env() const; + + private: + napi_env _env; + napi_callback_scope _scope; +}; +#endif + +class AsyncContext { + public: + explicit AsyncContext(napi_env env, const char* resource_name); + explicit AsyncContext(napi_env env, + const char* resource_name, + const Object& resource); + virtual ~AsyncContext(); + + AsyncContext(AsyncContext&& other); + AsyncContext& operator=(AsyncContext&& other); + NAPI_DISALLOW_ASSIGN_COPY(AsyncContext) + + operator napi_async_context() const; + + Napi::Env Env() const; + + private: + napi_env _env; + napi_async_context _context; +}; + +class AsyncWorker { + public: + virtual ~AsyncWorker(); + + // An async worker can be moved but cannot be copied. 
+ AsyncWorker(AsyncWorker&& other); + AsyncWorker& operator=(AsyncWorker&& other); + NAPI_DISALLOW_ASSIGN_COPY(AsyncWorker) + + operator napi_async_work() const; + + Napi::Env Env() const; + + void Queue(); + void Cancel(); + void SuppressDestruct(); + + ObjectReference& Receiver(); + FunctionReference& Callback(); + + virtual void OnExecute(Napi::Env env); + virtual void OnWorkComplete(Napi::Env env, napi_status status); + + protected: + explicit AsyncWorker(const Function& callback); + explicit AsyncWorker(const Function& callback, const char* resource_name); + explicit AsyncWorker(const Function& callback, + const char* resource_name, + const Object& resource); + explicit AsyncWorker(const Object& receiver, const Function& callback); + explicit AsyncWorker(const Object& receiver, + const Function& callback, + const char* resource_name); + explicit AsyncWorker(const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource); + + explicit AsyncWorker(Napi::Env env); + explicit AsyncWorker(Napi::Env env, const char* resource_name); + explicit AsyncWorker(Napi::Env env, + const char* resource_name, + const Object& resource); + + virtual void Execute() = 0; + virtual void OnOK(); + virtual void OnError(const Error& e); + virtual void Destroy(); + virtual std::vector GetResult(Napi::Env env); + + void SetError(const std::string& error); + + private: + static inline void OnAsyncWorkExecute(napi_env env, void* asyncworker); + static inline void OnAsyncWorkComplete(napi_env env, + napi_status status, + void* asyncworker); + + napi_env _env; + napi_async_work _work; + ObjectReference _receiver; + FunctionReference _callback; + std::string _error; + bool _suppress_destruct; +}; + +#if (NAPI_VERSION > 3 && !defined(__wasm32__)) +class ThreadSafeFunction { + public: + // This API may only be called from the main thread. 
+ template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback, + FinalizerDataType* data); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount); + + // This API may only be called from the main thread. 
+ template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + Finalizer finalizeCallback, + FinalizerDataType* data); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback); + + // This API may only be called from the main thread. + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data); + + ThreadSafeFunction(); + ThreadSafeFunction(napi_threadsafe_function tsFunctionValue); + + operator napi_threadsafe_function() const; + + // This API may be called from any thread. + napi_status BlockingCall() const; + + // This API may be called from any thread. + template + napi_status BlockingCall(Callback callback) const; + + // This API may be called from any thread. + template + napi_status BlockingCall(DataType* data, Callback callback) const; + + // This API may be called from any thread. 
+ napi_status NonBlockingCall() const; + + // This API may be called from any thread. + template + napi_status NonBlockingCall(Callback callback) const; + + // This API may be called from any thread. + template + napi_status NonBlockingCall(DataType* data, Callback callback) const; + + // This API may only be called from the main thread. + void Ref(napi_env env) const; + + // This API may only be called from the main thread. + void Unref(napi_env env) const; + + // This API may be called from any thread. + napi_status Acquire() const; + + // This API may be called from any thread. + napi_status Release() const; + + // This API may be called from any thread. + napi_status Abort() const; + + struct ConvertibleContext { + template + operator T*() { + return static_cast(context); + } + void* context; + }; + + // This API may be called from any thread. + ConvertibleContext GetContext() const; + + private: + using CallbackWrapper = std::function; + + template + static ThreadSafeFunction New(napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data, + napi_finalize wrapper); + + napi_status CallInternal(CallbackWrapper* callbackWrapper, + napi_threadsafe_function_call_mode mode) const; + + static void CallJS(napi_env env, + napi_value jsCallback, + void* context, + void* data); + + napi_threadsafe_function _tsfn; +}; + +// A TypedThreadSafeFunction by default has no context (nullptr) and can +// accept any type (void) to its CallJs. +template +class TypedThreadSafeFunction { + public: + // This API may only be called from the main thread. + // Helper function that returns nullptr if running Node-API 5+, otherwise a + // non-empty, no-op Function. This provides the ability to specify at + // compile-time a callback parameter to `New` that safely does no action + // when targeting _any_ Node-API version. 
+#if NAPI_VERSION > 4 + static std::nullptr_t EmptyFunctionFactory(Napi::Env env); +#else + static Napi::Function EmptyFunctionFactory(Napi::Env env); +#endif + static Napi::Function FunctionOrEmpty(Napi::Env env, + Napi::Function& callback); + +#if NAPI_VERSION > 4 + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [missing] Resource [missing] Finalizer [missing] + template + static TypedThreadSafeFunction New( + napi_env env, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context = nullptr); + + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [missing] Resource [passed] Finalizer [missing] + template + static TypedThreadSafeFunction New( + napi_env env, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context = nullptr); + + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [missing] Resource [missing] Finalizer [passed] + template + static TypedThreadSafeFunction New( + napi_env env, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data = nullptr); + + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [missing] Resource [passed] Finalizer [passed] + template + static TypedThreadSafeFunction New( + napi_env env, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data = nullptr); +#endif + + // This API may only be called from the main thread. 
+ // Creates a new threadsafe function with: + // Callback [passed] Resource [missing] Finalizer [missing] + template + static TypedThreadSafeFunction New( + napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context = nullptr); + + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [passed] Resource [passed] Finalizer [missing] + template + static TypedThreadSafeFunction New( + napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context = nullptr); + + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [passed] Resource [missing] Finalizer [passed] + template + static TypedThreadSafeFunction New( + napi_env env, + const Function& callback, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data = nullptr); + + // This API may only be called from the main thread. + // Creates a new threadsafe function with: + // Callback [passed] Resource [passed] Finalizer [passed] + template + static TypedThreadSafeFunction New( + napi_env env, + CallbackType callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data = nullptr); + + TypedThreadSafeFunction(); + TypedThreadSafeFunction(napi_threadsafe_function tsFunctionValue); + + operator napi_threadsafe_function() const; + + // This API may be called from any thread. + napi_status BlockingCall(DataType* data = nullptr) const; + + // This API may be called from any thread. 
+ napi_status NonBlockingCall(DataType* data = nullptr) const; + + // This API may only be called from the main thread. + void Ref(napi_env env) const; + + // This API may only be called from the main thread. + void Unref(napi_env env) const; + + // This API may be called from any thread. + napi_status Acquire() const; + + // This API may be called from any thread. + napi_status Release() const; + + // This API may be called from any thread. + napi_status Abort() const; + + // This API may be called from any thread. + ContextType* GetContext() const; + + private: + template + static TypedThreadSafeFunction New( + napi_env env, + const Function& callback, + const Object& resource, + ResourceString resourceName, + size_t maxQueueSize, + size_t initialThreadCount, + ContextType* context, + Finalizer finalizeCallback, + FinalizerDataType* data, + napi_finalize wrapper); + + static void CallJsInternal(napi_env env, + napi_value jsCallback, + void* context, + void* data); + + protected: + napi_threadsafe_function _tsfn; +}; +template +class AsyncProgressWorkerBase : public AsyncWorker { + public: + virtual void OnWorkProgress(DataType* data) = 0; + class ThreadSafeData { + public: + ThreadSafeData(AsyncProgressWorkerBase* asyncprogressworker, DataType* data) + : _asyncprogressworker(asyncprogressworker), _data(data) {} + + AsyncProgressWorkerBase* asyncprogressworker() { + return _asyncprogressworker; + }; + DataType* data() { return _data; }; + + private: + AsyncProgressWorkerBase* _asyncprogressworker; + DataType* _data; + }; + void OnWorkComplete(Napi::Env env, napi_status status) override; + + protected: + explicit AsyncProgressWorkerBase(const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource, + size_t queue_size = 1); + virtual ~AsyncProgressWorkerBase(); + +// Optional callback of Napi::ThreadSafeFunction only available after +// NAPI_VERSION 4. 
Refs: https://github.com/nodejs/node/pull/27791 +#if NAPI_VERSION > 4 + explicit AsyncProgressWorkerBase(Napi::Env env, + const char* resource_name, + const Object& resource, + size_t queue_size = 1); +#endif + + static inline void OnAsyncWorkProgress(Napi::Env env, + Napi::Function jsCallback, + void* data); + + napi_status NonBlockingCall(DataType* data); + + private: + ThreadSafeFunction _tsfn; + bool _work_completed = false; + napi_status _complete_status; + static inline void OnThreadSafeFunctionFinalize( + Napi::Env env, void* data, AsyncProgressWorkerBase* context); +}; + +template +class AsyncProgressWorker : public AsyncProgressWorkerBase { + public: + virtual ~AsyncProgressWorker(); + + class ExecutionProgress { + friend class AsyncProgressWorker; + + public: + void Signal() const; + void Send(const T* data, size_t count) const; + + private: + explicit ExecutionProgress(AsyncProgressWorker* worker) : _worker(worker) {} + AsyncProgressWorker* const _worker; + }; + + void OnWorkProgress(void*) override; + + protected: + explicit AsyncProgressWorker(const Function& callback); + explicit AsyncProgressWorker(const Function& callback, + const char* resource_name); + explicit AsyncProgressWorker(const Function& callback, + const char* resource_name, + const Object& resource); + explicit AsyncProgressWorker(const Object& receiver, + const Function& callback); + explicit AsyncProgressWorker(const Object& receiver, + const Function& callback, + const char* resource_name); + explicit AsyncProgressWorker(const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource); + +// Optional callback of Napi::ThreadSafeFunction only available after +// NAPI_VERSION 4. 
Refs: https://github.com/nodejs/node/pull/27791 +#if NAPI_VERSION > 4 + explicit AsyncProgressWorker(Napi::Env env); + explicit AsyncProgressWorker(Napi::Env env, const char* resource_name); + explicit AsyncProgressWorker(Napi::Env env, + const char* resource_name, + const Object& resource); +#endif + virtual void Execute(const ExecutionProgress& progress) = 0; + virtual void OnProgress(const T* data, size_t count) = 0; + + private: + void Execute() override; + void Signal(); + void SendProgress_(const T* data, size_t count); + + std::mutex _mutex; + T* _asyncdata; + size_t _asyncsize; + bool _signaled; +}; + +template +class AsyncProgressQueueWorker + : public AsyncProgressWorkerBase> { + public: + virtual ~AsyncProgressQueueWorker(){}; + + class ExecutionProgress { + friend class AsyncProgressQueueWorker; + + public: + void Signal() const; + void Send(const T* data, size_t count) const; + + private: + explicit ExecutionProgress(AsyncProgressQueueWorker* worker) + : _worker(worker) {} + AsyncProgressQueueWorker* const _worker; + }; + + void OnWorkComplete(Napi::Env env, napi_status status) override; + void OnWorkProgress(std::pair*) override; + + protected: + explicit AsyncProgressQueueWorker(const Function& callback); + explicit AsyncProgressQueueWorker(const Function& callback, + const char* resource_name); + explicit AsyncProgressQueueWorker(const Function& callback, + const char* resource_name, + const Object& resource); + explicit AsyncProgressQueueWorker(const Object& receiver, + const Function& callback); + explicit AsyncProgressQueueWorker(const Object& receiver, + const Function& callback, + const char* resource_name); + explicit AsyncProgressQueueWorker(const Object& receiver, + const Function& callback, + const char* resource_name, + const Object& resource); + +// Optional callback of Napi::ThreadSafeFunction only available after +// NAPI_VERSION 4. 
Refs: https://github.com/nodejs/node/pull/27791 +#if NAPI_VERSION > 4 + explicit AsyncProgressQueueWorker(Napi::Env env); + explicit AsyncProgressQueueWorker(Napi::Env env, const char* resource_name); + explicit AsyncProgressQueueWorker(Napi::Env env, + const char* resource_name, + const Object& resource); +#endif + virtual void Execute(const ExecutionProgress& progress) = 0; + virtual void OnProgress(const T* data, size_t count) = 0; + + private: + void Execute() override; + void Signal() const; + void SendProgress_(const T* data, size_t count); +}; +#endif // NAPI_VERSION > 3 && !defined(__wasm32__) + +// Memory management. +class MemoryManagement { + public: + static int64_t AdjustExternalMemory(Env env, int64_t change_in_bytes); +}; + +// Version management +class VersionManagement { + public: + static uint32_t GetNapiVersion(Env env); + static const napi_node_version* GetNodeVersion(Env env); +}; + +#if NAPI_VERSION > 5 +template +class Addon : public InstanceWrap { + public: + static inline Object Init(Env env, Object exports); + static T* Unwrap(Object wrapper); + + protected: + using AddonProp = ClassPropertyDescriptor; + void DefineAddon(Object exports, + const std::initializer_list& props); + Napi::Object DefineProperties(Object object, + const std::initializer_list& props); + + private: + Object entry_point_; +}; +#endif // NAPI_VERSION > 5 + +#ifdef NAPI_CPP_CUSTOM_NAMESPACE +} // namespace NAPI_CPP_CUSTOM_NAMESPACE +#endif + +} // namespace Napi + +// Inline implementations of all the above class methods are included here. 
+#include "napi-inl.h" + +#endif // SRC_NAPI_H_ diff --git a/node_modules/node-addon-api/node_api.gyp b/node_modules/node-addon-api/node_api.gyp new file mode 100644 index 0000000..4ff0ae7 --- /dev/null +++ b/node_modules/node-addon-api/node_api.gyp @@ -0,0 +1,9 @@ +{ + 'targets': [ + { + 'target_name': 'nothing', + 'type': 'static_library', + 'sources': [ 'nothing.c' ] + } + ] +} diff --git a/node_modules/node-addon-api/noexcept.gypi b/node_modules/node-addon-api/noexcept.gypi new file mode 100644 index 0000000..404a05f --- /dev/null +++ b/node_modules/node-addon-api/noexcept.gypi @@ -0,0 +1,26 @@ +{ + 'defines': [ 'NAPI_DISABLE_CPP_EXCEPTIONS' ], + 'cflags': [ '-fno-exceptions' ], + 'cflags_cc': [ '-fno-exceptions' ], + 'conditions': [ + ["OS=='win'", { + # _HAS_EXCEPTIONS is already defined and set to 0 in common.gypi + #"defines": [ + # "_HAS_EXCEPTIONS=0" + #], + "msvs_settings": { + "VCCLCompilerTool": { + 'ExceptionHandling': 0, + 'EnablePREfast': 'true', + }, + }, + }], + ["OS=='mac'", { + 'xcode_settings': { + 'CLANG_CXX_LIBRARY': 'libc++', + 'MACOSX_DEPLOYMENT_TARGET': '10.7', + 'GCC_ENABLE_CPP_EXCEPTIONS': 'NO', + }, + }], + ], +} diff --git a/node_modules/node-addon-api/nothing.c b/node_modules/node-addon-api/nothing.c new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/node-addon-api/package-support.json b/node_modules/node-addon-api/package-support.json new file mode 100644 index 0000000..10d3607 --- /dev/null +++ b/node_modules/node-addon-api/package-support.json @@ -0,0 +1,21 @@ +{ + "versions": [ + { + "version": "*", + "target": { + "node": "active" + }, + "response": { + "type": "time-permitting", + "paid": false, + "contact": { + "name": "node-addon-api team", + "url": "https://github.com/nodejs/node-addon-api/issues" + } + }, + "backing": [ { "project": "https://github.com/nodejs" }, + { "foundation": "https://openjsf.org/" } + ] + } + ] +} diff --git a/node_modules/node-addon-api/package.json 
b/node_modules/node-addon-api/package.json new file mode 100644 index 0000000..3ec3776 --- /dev/null +++ b/node_modules/node-addon-api/package.json @@ -0,0 +1,456 @@ +{ + "bugs": { + "url": "https://github.com/nodejs/node-addon-api/issues" + }, + "contributors": [ + { + "name": "Abhishek Kumar Singh", + "url": "https://github.com/abhi11210646" + }, + { + "name": "Alba Mendez", + "url": "https://github.com/jmendeth" + }, + { + "name": "Alexander Floh", + "url": "https://github.com/alexanderfloh" + }, + { + "name": "Ammar Faizi", + "url": "https://github.com/ammarfaizi2" + }, + { + "name": "András Timár, Dr", + "url": "https://github.com/timarandras" + }, + { + "name": "Andrew Petersen", + "url": "https://github.com/kirbysayshi" + }, + { + "name": "Anisha Rohra", + "url": "https://github.com/anisha-rohra" + }, + { + "name": "Anna Henningsen", + "url": "https://github.com/addaleax" + }, + { + "name": "Arnaud Botella", + "url": "https://github.com/BotellaA" + }, + { + "name": "Arunesh Chandra", + "url": "https://github.com/aruneshchandra" + }, + { + "name": "Azlan Mukhtar", + "url": "https://github.com/azlan" + }, + { + "name": "Ben Berman", + "url": "https://github.com/rivertam" + }, + { + "name": "Benjamin Byholm", + "url": "https://github.com/kkoopa" + }, + { + "name": "Bill Gallafent", + "url": "https://github.com/gallafent" + }, + { + "name": "blagoev", + "url": "https://github.com/blagoev" + }, + { + "name": "Bruce A. 
MacNaughton", + "url": "https://github.com/bmacnaughton" + }, + { + "name": "Cory Mickelson", + "url": "https://github.com/corymickelson" + }, + { + "name": "Daniel Bevenius", + "url": "https://github.com/danbev" + }, + { + "name": "Dante Calderón", + "url": "https://github.com/dantehemerson" + }, + { + "name": "Darshan Sen", + "url": "https://github.com/RaisinTen" + }, + { + "name": "David Halls", + "url": "https://github.com/davedoesdev" + }, + { + "name": "Deepak Rajamohan", + "url": "https://github.com/deepakrkris" + }, + { + "name": "Dmitry Ashkadov", + "url": "https://github.com/dmitryash" + }, + { + "name": "Dongjin Na", + "url": "https://github.com/nadongguri" + }, + { + "name": "Doni Rubiagatra", + "url": "https://github.com/rubiagatra" + }, + { + "name": "Eric Bickle", + "url": "https://github.com/ebickle" + }, + { + "name": "extremeheat", + "url": "https://github.com/extremeheat" + }, + { + "name": "Feng Yu", + "url": "https://github.com/F3n67u" + }, + { + "name": "Ferdinand Holzer", + "url": "https://github.com/fholzer" + }, + { + "name": "Gabriel Schulhof", + "url": "https://github.com/gabrielschulhof" + }, + { + "name": "Guenter Sandner", + "url": "https://github.com/gms1" + }, + { + "name": "Gus Caplan", + "url": "https://github.com/devsnek" + }, + { + "name": "Helio Frota", + "url": "https://github.com/helio-frota" + }, + { + "name": "Hitesh Kanwathirtha", + "url": "https://github.com/digitalinfinity" + }, + { + "name": "ikokostya", + "url": "https://github.com/ikokostya" + }, + { + "name": "Jack Xia", + "url": "https://github.com/JckXia" + }, + { + "name": "Jake Barnes", + "url": "https://github.com/DuBistKomisch" + }, + { + "name": "Jake Yoon", + "url": "https://github.com/yjaeseok" + }, + { + "name": "Jason Ginchereau", + "url": "https://github.com/jasongin" + }, + { + "name": "Jenny", + "url": "https://github.com/egg-bread" + }, + { + "name": "Jeroen Janssen", + "url": "https://github.com/japj" + }, + { + "name": "Jim Schlight", + "url": 
"https://github.com/jschlight" + }, + { + "name": "Jinho Bang", + "url": "https://github.com/romandev" + }, + { + "name": "José Expósito", + "url": "https://github.com/JoseExposito" + }, + { + "name": "joshgarde", + "url": "https://github.com/joshgarde" + }, + { + "name": "Julian Mesa", + "url": "https://github.com/julianmesa-gitkraken" + }, + { + "name": "Kasumi Hanazuki", + "url": "https://github.com/hanazuki" + }, + { + "name": "Kelvin", + "url": "https://github.com/kelvinhammond" + }, + { + "name": "Kevin Eady", + "url": "https://github.com/KevinEady" + }, + { + "name": "Kévin VOYER", + "url": "https://github.com/kecsou" + }, + { + "name": "kidneysolo", + "url": "https://github.com/kidneysolo" + }, + { + "name": "Koki Nishihara", + "url": "https://github.com/Nishikoh" + }, + { + "name": "Konstantin Tarkus", + "url": "https://github.com/koistya" + }, + { + "name": "Kyle Farnung", + "url": "https://github.com/kfarnung" + }, + { + "name": "Kyle Kovacs", + "url": "https://github.com/nullromo" + }, + { + "name": "legendecas", + "url": "https://github.com/legendecas" + }, + { + "name": "LongYinan", + "url": "https://github.com/Brooooooklyn" + }, + { + "name": "Lovell Fuller", + "url": "https://github.com/lovell" + }, + { + "name": "Luciano Martorella", + "url": "https://github.com/lmartorella" + }, + { + "name": "mastergberry", + "url": "https://github.com/mastergberry" + }, + { + "name": "Mathias Küsel", + "url": "https://github.com/mathiask88" + }, + { + "name": "Matteo Collina", + "url": "https://github.com/mcollina" + }, + { + "name": "Michael Dawson", + "url": "https://github.com/mhdawson" + }, + { + "name": "Michael Price", + "url": "https://github.com/mikepricedev" + }, + { + "name": "Michele Campus", + "url": "https://github.com/kYroL01" + }, + { + "name": "Mikhail Cheshkov", + "url": "https://github.com/mcheshkov" + }, + { + "name": "nempoBu4", + "url": "https://github.com/nempoBu4" + }, + { + "name": "Nicola Del Gobbo", + "url": 
"https://github.com/NickNaso" + }, + { + "name": "Nick Soggin", + "url": "https://github.com/iSkore" + }, + { + "name": "Nikolai Vavilov", + "url": "https://github.com/seishun" + }, + { + "name": "Nurbol Alpysbayev", + "url": "https://github.com/anurbol" + }, + { + "name": "pacop", + "url": "https://github.com/pacop" + }, + { + "name": "Peter Šándor", + "url": "https://github.com/petersandor" + }, + { + "name": "Philipp Renoth", + "url": "https://github.com/DaAitch" + }, + { + "name": "rgerd", + "url": "https://github.com/rgerd" + }, + { + "name": "Richard Lau", + "url": "https://github.com/richardlau" + }, + { + "name": "Rolf Timmermans", + "url": "https://github.com/rolftimmermans" + }, + { + "name": "Ross Weir", + "url": "https://github.com/ross-weir" + }, + { + "name": "Ryuichi Okumura", + "url": "https://github.com/okuryu" + }, + { + "name": "Saint Gabriel", + "url": "https://github.com/chineduG" + }, + { + "name": "Sampson Gao", + "url": "https://github.com/sampsongao" + }, + { + "name": "Sam Roberts", + "url": "https://github.com/sam-github" + }, + { + "name": "strager", + "url": "https://github.com/strager" + }, + { + "name": "Taylor Woll", + "url": "https://github.com/boingoing" + }, + { + "name": "Thomas Gentilhomme", + "url": "https://github.com/fraxken" + }, + { + "name": "Tim Rach", + "url": "https://github.com/timrach" + }, + { + "name": "Tobias Nießen", + "url": "https://github.com/tniessen" + }, + { + "name": "todoroff", + "url": "https://github.com/todoroff" + }, + { + "name": "Tux3", + "url": "https://github.com/tux3" + }, + { + "name": "Vlad Velmisov", + "url": "https://github.com/Velmisov" + }, + { + "name": "Vladimir Morozov", + "url": "https://github.com/vmoroz" + + }, + { + "name": "WenheLI", + "url": "https://github.com/WenheLI" + }, + { + "name": "Xuguang Mei", + "url": "https://github.com/meixg" + }, + { + "name": "Yohei Kishimoto", + "url": "https://github.com/morokosi" + }, + { + "name": "Yulong Wang", + "url": 
"https://github.com/fs-eire" + }, + { + "name": "Ziqiu Zhao", + "url": "https://github.com/ZzqiZQute" + }, + { + "name": "Feng Yu", + "url": "https://github.com/F3n67u" + } + ], + "description": "Node.js API (Node-API)", + "devDependencies": { + "benchmark": "^2.1.4", + "bindings": "^1.5.0", + "clang-format": "^1.4.0", + "eslint": "^7.32.0", + "eslint-config-semistandard": "^16.0.0", + "eslint-config-standard": "^16.0.3", + "eslint-plugin-import": "^2.24.2", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.0", + "fs-extra": "^9.0.1", + "path": "^0.12.7", + "pre-commit": "^1.2.2", + "safe-buffer": "^5.1.1" + }, + "directories": {}, + "gypfile": false, + "homepage": "https://github.com/nodejs/node-addon-api", + "keywords": [ + "n-api", + "napi", + "addon", + "native", + "bindings", + "c", + "c++", + "nan", + "node-addon-api" + ], + "license": "MIT", + "main": "index.js", + "name": "node-addon-api", + "readme": "README.md", + "repository": { + "type": "git", + "url": "git://github.com/nodejs/node-addon-api.git" + }, + "files": [ + "*.{c,h,gyp,gypi}", + "package-support.json", + "tools/" + ], + "scripts": { + "prebenchmark": "node-gyp rebuild -C benchmark", + "benchmark": "node benchmark", + "pretest": "node-gyp rebuild -C test", + "test": "node test", + "test:debug": "node-gyp rebuild -C test --debug && NODE_API_BUILD_CONFIG=Debug node ./test/index.js", + "predev": "node-gyp rebuild -C test --debug", + "dev": "node test", + "predev:incremental": "node-gyp configure build -C test --debug", + "dev:incremental": "node test", + "doc": "doxygen doc/Doxyfile", + "lint": "node tools/eslint-format && node tools/clang-format", + "lint:fix": "node tools/clang-format --fix && node tools/eslint-format --fix" + }, + "pre-commit": "lint", + "version": "5.1.0", + "support": true +} diff --git a/node_modules/node-addon-api/tools/README.md b/node_modules/node-addon-api/tools/README.md new file mode 100644 index 0000000..6b80e94 --- /dev/null +++ 
b/node_modules/node-addon-api/tools/README.md @@ -0,0 +1,73 @@ +# Tools + +## clang-format + +The clang-format checking tools is designed to check changed lines of code compared to given git-refs. + +## Migration Script + +The migration tool is designed to reduce repetitive work in the migration process. However, the script is not aiming to convert every thing for you. There are usually some small fixes and major reconstruction required. + +### How To Use + +To run the conversion script, first make sure you have the latest `node-addon-api` in your `node_modules` directory. +``` +npm install node-addon-api +``` + +Then run the script passing your project directory +``` +node ./node_modules/node-addon-api/tools/conversion.js ./ +``` + +After finish, recompile and debug things that are missed by the script. + + +### Quick Fixes +Here is the list of things that can be fixed easily. + 1. Change your methods' return value to void if it doesn't return value to JavaScript. + 2. Use `.` to access attribute or to invoke member function in Napi::Object instead of `->`. + 3. `Napi::New(env, value);` to `Napi::[Type]::New(env, value); + + +### Major Reconstructions +The implementation of `Napi::ObjectWrap` is significantly different from NAN's. `Napi::ObjectWrap` takes a pointer to the wrapped object and creates a reference to the wrapped object inside ObjectWrap constructor. `Napi::ObjectWrap` also associates wrapped object's instance methods to Javascript module instead of static methods like NAN. + +So if you use Nan::ObjectWrap in your module, you will need to execute the following steps. + + 1. Convert your [ClassName]::New function to a constructor function that takes a `Napi::CallbackInfo`. Declare it as +``` +[ClassName](const Napi::CallbackInfo& info); +``` +and define it as +``` +[ClassName]::[ClassName](const Napi::CallbackInfo& info) : Napi::ObjectWrap<[ClassName]>(info){ + ... 
+} +``` +This way, the `Napi::ObjectWrap` constructor will be invoked after the object has been instantiated and `Napi::ObjectWrap` can use the `this` pointer to create a reference to the wrapped object. + + 2. Move your original constructor code into the new constructor. Delete your original constructor. + 3. In your class initialization function, associate native methods in the following way. +``` +Napi::FunctionReference constructor; + +void [ClassName]::Init(Napi::Env env, Napi::Object exports, Napi::Object module) { + Napi::HandleScope scope(env); + Napi::Function ctor = DefineClass(env, "Canvas", { + InstanceMethod<&[ClassName]::Func1>("Func1"), + InstanceMethod<&[ClassName]::Func2>("Func2"), + InstanceAccessor<&[ClassName]::ValueGetter>("Value"), + StaticMethod<&[ClassName]::StaticMethod>("MethodName"), + InstanceValue("Value", Napi::[Type]::New(env, value)), + }); + + constructor = Napi::Persistent(ctor); + constructor .SuppressDestruct(); + exports.Set("[ClassName]", ctor); +} +``` + 4. In function where you need to Unwrap the ObjectWrap in NAN like `[ClassName]* native = Nan::ObjectWrap::Unwrap<[ClassName]>(info.This());`, use `this` pointer directly as the unwrapped object as each ObjectWrap instance is associated with a unique object instance. + + +If you still find issues after following this guide, please leave us an issue describing your problem and we will try to resolve it. diff --git a/node_modules/node-addon-api/tools/check-napi.js b/node_modules/node-addon-api/tools/check-napi.js new file mode 100644 index 0000000..9199af3 --- /dev/null +++ b/node_modules/node-addon-api/tools/check-napi.js @@ -0,0 +1,99 @@ +'use strict'; +// Descend into a directory structure and, for each file matching *.node, output +// based on the imports found in the file whether it's an N-API module or not. 
+ +const fs = require('fs'); +const path = require('path'); + +// Read the output of the command, break it into lines, and use the reducer to +// decide whether the file is an N-API module or not. +function checkFile (file, command, argv, reducer) { + const child = require('child_process').spawn(command, argv, { + stdio: ['inherit', 'pipe', 'inherit'] + }); + let leftover = ''; + let isNapi; + child.stdout.on('data', (chunk) => { + if (isNapi === undefined) { + chunk = (leftover + chunk.toString()).split(/[\r\n]+/); + leftover = chunk.pop(); + isNapi = chunk.reduce(reducer, isNapi); + if (isNapi !== undefined) { + child.kill(); + } + } + }); + child.on('close', (code, signal) => { + if ((code === null && signal !== null) || (code !== 0)) { + console.log( + command + ' exited with code: ' + code + ' and signal: ' + signal); + } else { + // Green if it's a N-API module, red otherwise. + console.log( + '\x1b[' + (isNapi ? '42' : '41') + 'm' + + (isNapi ? ' N-API' : 'Not N-API') + + '\x1b[0m: ' + file); + } + }); +} + +// Use nm -a to list symbols. +function checkFileUNIX (file) { + checkFile(file, 'nm', ['-a', file], (soFar, line) => { + if (soFar === undefined) { + line = line.match(/([0-9a-f]*)? ([a-zA-Z]) (.*$)/); + if (line[2] === 'U') { + if (/^napi/.test(line[3])) { + soFar = true; + } + } + } + return soFar; + }); +} + +// Use dumpbin /imports to list symbols. +function checkFileWin32 (file) { + checkFile(file, 'dumpbin', ['/imports', file], (soFar, line) => { + if (soFar === undefined) { + line = line.match(/([0-9a-f]*)? +([a-zA-Z0-9]) (.*$)/); + if (line && /^napi/.test(line[line.length - 1])) { + soFar = true; + } + } + return soFar; + }); +} + +// Descend into a directory structure and pass each file ending in '.node' to +// one of the above checks, depending on the OS. 
+function recurse (top) { + fs.readdir(top, (error, items) => { + if (error) { + throw new Error('error reading directory ' + top + ': ' + error); + } + items.forEach((item) => { + item = path.join(top, item); + fs.stat(item, ((item) => (error, stats) => { + if (error) { + throw new Error('error about ' + item + ': ' + error); + } + if (stats.isDirectory()) { + recurse(item); + } else if (/[.]node$/.test(item) && + // Explicitly ignore files called 'nothing.node' because they are + // artefacts of node-addon-api having identified a version of + // Node.js that ships with a correct implementation of N-API. + path.basename(item) !== 'nothing.node') { + process.platform === 'win32' + ? checkFileWin32(item) + : checkFileUNIX(item); + } + })(item)); + }); + }); +} + +// Start with the directory given on the command line or the current directory +// if nothing was given. +recurse(process.argv.length > 3 ? process.argv[2] : '.'); diff --git a/node_modules/node-addon-api/tools/clang-format.js b/node_modules/node-addon-api/tools/clang-format.js new file mode 100644 index 0000000..e4bb4f5 --- /dev/null +++ b/node_modules/node-addon-api/tools/clang-format.js @@ -0,0 +1,71 @@ +#!/usr/bin/env node + +const spawn = require('child_process').spawnSync; +const path = require('path'); + +const filesToCheck = ['*.h', '*.cc']; +const FORMAT_START = process.env.FORMAT_START || 'main'; + +function main (args) { + let fix = false; + while (args.length > 0) { + switch (args[0]) { + case '-f': + case '--fix': + fix = true; + break; + default: + } + args.shift(); + } + + const clangFormatPath = path.dirname(require.resolve('clang-format')); + const binary = process.platform === 'win32' + ? 
'node_modules\\.bin\\clang-format.cmd' + : 'node_modules/.bin/clang-format'; + const options = ['--binary=' + binary, '--style=file']; + if (fix) { + options.push(FORMAT_START); + } else { + options.push('--diff', FORMAT_START); + } + + const gitClangFormatPath = path.join(clangFormatPath, 'bin/git-clang-format'); + const result = spawn( + 'python', + [gitClangFormatPath, ...options, '--', ...filesToCheck], + { encoding: 'utf-8' } + ); + + if (result.stderr) { + console.error('Error running git-clang-format:', result.stderr); + return 2; + } + + const clangFormatOutput = result.stdout.trim(); + // Bail fast if in fix mode. + if (fix) { + console.log(clangFormatOutput); + return 0; + } + // Detect if there is any complains from clang-format + if ( + clangFormatOutput !== '' && + clangFormatOutput !== 'no modified files to format' && + clangFormatOutput !== 'clang-format did not modify any files' + ) { + console.error(clangFormatOutput); + const fixCmd = 'npm run lint:fix'; + console.error(` + ERROR: please run "${fixCmd}" to format changes in your commit + Note that when running the command locally, please keep your local + main branch and working branch up to date with nodejs/node-addon-api + to exclude un-related complains. + Or you can run "env FORMAT_START=upstream/main ${fixCmd}".`); + return 1; + } +} + +if (require.main === module) { + process.exitCode = main(process.argv.slice(2)); +} diff --git a/node_modules/node-addon-api/tools/conversion.js b/node_modules/node-addon-api/tools/conversion.js new file mode 100644 index 0000000..f89245a --- /dev/null +++ b/node_modules/node-addon-api/tools/conversion.js @@ -0,0 +1,301 @@ +#! 
/usr/bin/env node + +'use strict'; + +const fs = require('fs'); +const path = require('path'); + +const args = process.argv.slice(2); +const dir = args[0]; +if (!dir) { + console.log('Usage: node ' + path.basename(__filename) + ' '); + process.exit(1); +} + +const NodeApiVersion = require('../package.json').version; + +const disable = args[1]; +let ConfigFileOperations; +if (disable !== '--disable' && dir !== '--disable') { + ConfigFileOperations = { + 'package.json': [ + [/([ ]*)"dependencies": {/g, '$1"dependencies": {\n$1 "node-addon-api": "' + NodeApiVersion + '",'], + [/[ ]*"nan": *"[^"]+"(,|)[\n\r]/g, ''] + ], + 'binding.gyp': [ + [/([ ]*)'include_dirs': \[/g, '$1\'include_dirs\': [\n$1 \'\s+(\w+)\s*=\s*Nan::New\([\w\d:]+\);(?:\w+->Reset\(\1\))?\s+\1->SetClassName\(Nan::String::New\("(\w+)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$2", {'], + [/Local\s+(\w+)\s*=\s*Nan::New\([\w\d:]+\);\s+(\w+)\.Reset\((\1)\);\s+\1->SetClassName\((Nan::String::New|Nan::New<(v8::)*String>)\("(.+?)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$6", {'], + [/Local\s+(\w+)\s*=\s*Nan::New\([\w\d:]+\);(?:\w+->Reset\(\1\))?\s+\1->SetClassName\(Nan::String::New\("(\w+)"\)\);/g, 'Napi::Function $1 = DefineClass(env, "$2", {'], + [/Nan::New\(([\w\d:]+)\)->GetFunction\(\)/g, 'Napi::Function::New(env, $1)'], + [/Nan::New\(([\w\d:]+)\)->GetFunction()/g, 'Napi::Function::New(env, $1);'], + [/Nan::New\(([\w\d:]+)\)/g, 'Napi::Function::New(env, $1)'], + [/Nan::New\(([\w\d:]+)\)/g, 'Napi::Function::New(env, $1)'], + + // FunctionTemplate to FunctionReference + [/Nan::Persistent<(v8::)*FunctionTemplate>/g, 'Napi::FunctionReference'], + [/Nan::Persistent<(v8::)*Function>/g, 'Napi::FunctionReference'], + [/v8::Local/g, 'Napi::FunctionReference'], + [/Local/g, 'Napi::FunctionReference'], + [/v8::FunctionTemplate/g, 'Napi::FunctionReference'], + [/FunctionTemplate/g, 'Napi::FunctionReference'], + + [/([ ]*)Nan::SetPrototypeMethod\(\w+, "(\w+)", (\w+)\);/g, '$1InstanceMethod("$2", &$3),'], + 
[/([ ]*)(?:\w+\.Reset\(\w+\);\s+)?\(target\)\.Set\("(\w+)",\s*Nan::GetFunction\((\w+)\)\);/gm, + '});\n\n' + + '$1constructor = Napi::Persistent($3);\n' + + '$1constructor.SuppressDestruct();\n' + + '$1target.Set("$2", $3);'], + + // TODO: Other attribute combinations + [/static_cast\(ReadOnly\s*\|\s*DontDelete\)/gm, + 'static_cast(napi_enumerable | napi_configurable)'], + + [/([\w\d:<>]+?)::Cast\((.+?)\)/g, '$2.As<$1>()'], + + [/\*Nan::Utf8String\(([^)]+)\)/g, '$1->As().Utf8Value().c_str()'], + [/Nan::Utf8String +(\w+)\(([^)]+)\)/g, 'std::string $1 = $2.As()'], + [/Nan::Utf8String/g, 'std::string'], + + [/v8::String::Utf8Value (.+?)\((.+?)\)/g, 'Napi::String $1(env, $2)'], + [/String::Utf8Value (.+?)\((.+?)\)/g, 'Napi::String $1(env, $2)'], + [/\.length\(\)/g, '.Length()'], + + [/Nan::MakeCallback\(([^,]+),[\s\\]+([^,]+),/gm, '$2.MakeCallback($1,'], + + [/class\s+(\w+)\s*:\s*public\s+Nan::ObjectWrap/g, 'class $1 : public Napi::ObjectWrap<$1>'], + [/(\w+)\(([^)]*)\)\s*:\s*Nan::ObjectWrap\(\)\s*(,)?/gm, '$1($2) : Napi::ObjectWrap<$1>()$3'], + + // HandleOKCallback to OnOK + [/HandleOKCallback/g, 'OnOK'], + // HandleErrorCallback to OnError + [/HandleErrorCallback/g, 'OnError'], + + // ex. .As() to .As() + [/\.As\(\)/g, '.As()'], + [/\.As<(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>\(\)/g, '.As()'], + + // ex. 
Nan::New(info[0]) to Napi::Number::New(info[0]) + [/Nan::New<(v8::)*Integer>\((.+?)\)/g, 'Napi::Number::New(env, $2)'], + [/Nan::New\(([0-9.]+)\)/g, 'Napi::Number::New(env, $1)'], + [/Nan::New<(v8::)*String>\("(.+?)"\)/g, 'Napi::String::New(env, "$2")'], + [/Nan::New\("(.+?)"\)/g, 'Napi::String::New(env, "$1")'], + [/Nan::New<(v8::)*(.+?)>\(\)/g, 'Napi::$2::New(env)'], + [/Nan::New<(.+?)>\(\)/g, 'Napi::$1::New(env)'], + [/Nan::New<(v8::)*(.+?)>\(/g, 'Napi::$2::New(env, '], + [/Nan::New<(.+?)>\(/g, 'Napi::$1::New(env, '], + [/Nan::NewBuffer\(/g, 'Napi::Buffer::New(env, '], + // TODO: Properly handle this + [/Nan::New\(/g, 'Napi::New(env, '], + + [/\.IsInt32\(\)/g, '.IsNumber()'], + [/->IsInt32\(\)/g, '.IsNumber()'], + + [/(.+?)->BooleanValue\(\)/g, '$1.As().Value()'], + [/(.+?)->Int32Value\(\)/g, '$1.As().Int32Value()'], + [/(.+?)->Uint32Value\(\)/g, '$1.As().Uint32Value()'], + [/(.+?)->IntegerValue\(\)/g, '$1.As().Int64Value()'], + [/(.+?)->NumberValue\(\)/g, '$1.As().DoubleValue()'], + + // ex. Nan::To(info[0]) to info[0].Value() + [/Nan::To\((.+?)\)/g, '$2.To()'], + [/Nan::To<(Boolean|String|Number|Object|Array|Symbol|Function)>\((.+?)\)/g, '$2.To()'], + // ex. Nan::To(info[0]) to info[0].As().Value() + [/Nan::To\((.+?)\)/g, '$1.As().Value()'], + // ex. Nan::To(info[0]) to info[0].As().Int32Value() + [/Nan::To\((.+?)\)/g, '$1.As().Int32Value()'], + // ex. Nan::To(info[0]) to info[0].As().Int32Value() + [/Nan::To\((.+?)\)/g, '$1.As().Int32Value()'], + // ex. Nan::To(info[0]) to info[0].As().Uint32Value() + [/Nan::To\((.+?)\)/g, '$1.As().Uint32Value()'], + // ex. Nan::To(info[0]) to info[0].As().Int64Value() + [/Nan::To\((.+?)\)/g, '$1.As().Int64Value()'], + // ex. Nan::To(info[0]) to info[0].As().FloatValue() + [/Nan::To\((.+?)\)/g, '$1.As().FloatValue()'], + // ex. 
Nan::To(info[0]) to info[0].As().DoubleValue() + [/Nan::To\((.+?)\)/g, '$1.As().DoubleValue()'], + + [/Nan::New\((\w+)\)->HasInstance\((\w+)\)/g, '$2.InstanceOf($1.Value())'], + + [/Nan::Has\(([^,]+),\s*/gm, '($1).Has('], + [/\.Has\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\)/gm, '.Has($1)'], + [/\.Has\([\s|\\]*Nan::New\(([^)]+)\)\)/gm, '.Has($1)'], + + [/Nan::Get\(([^,]+),\s*/gm, '($1).Get('], + [/\.Get\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\)/gm, '.Get($1)'], + [/\.Get\([\s|\\]*Nan::New\(([^)]+)\)\)/gm, '.Get($1)'], + + [/Nan::Set\(([^,]+),\s*/gm, '($1).Set('], + [/\.Set\([\s|\\]*Nan::New<(v8::)*String>\(([^)]+)\)\s*,/gm, '.Set($1,'], + [/\.Set\([\s|\\]*Nan::New\(([^)]+)\)\s*,/gm, '.Set($1,'], + + // ex. node::Buffer::HasInstance(info[0]) to info[0].IsBuffer() + [/node::Buffer::HasInstance\((.+?)\)/g, '$1.IsBuffer()'], + // ex. node::Buffer::Length(info[0]) to info[0].Length() + [/node::Buffer::Length\((.+?)\)/g, '$1.As>().Length()'], + // ex. node::Buffer::Data(info[0]) to info[0].Data() + [/node::Buffer::Data\((.+?)\)/g, '$1.As>().Data()'], + [/Nan::CopyBuffer\(/g, 'Napi::Buffer::Copy(env, '], + + // Nan::AsyncQueueWorker(worker) + [/Nan::AsyncQueueWorker\((.+)\);/g, '$1.Queue();'], + [/Nan::(Undefined|Null|True|False)\(\)/g, 'env.$1()'], + + // Nan::ThrowError(error) to Napi::Error::New(env, error).ThrowAsJavaScriptException() + [/([ ]*)return Nan::Throw(\w*?)Error\((.+?)\);/g, '$1Napi::$2Error::New(env, $3).ThrowAsJavaScriptException();\n$1return env.Null();'], + [/Nan::Throw(\w*?)Error\((.+?)\);\n(\s*)return;/g, 'Napi::$1Error::New(env, $2).ThrowAsJavaScriptException();\n$3return env.Null();'], + [/Nan::Throw(\w*?)Error\((.+?)\);/g, 'Napi::$1Error::New(env, $2).ThrowAsJavaScriptException();\n'], + // Nan::RangeError(error) to Napi::RangeError::New(env, error) + [/Nan::(\w*?)Error\((.+)\)/g, 'Napi::$1Error::New(env, $2)'], + + [/Nan::Set\((.+?),\n* *(.+?),\n* *(.+?),\n* *(.+?)\)/g, '$1.Set($2, $3, $4)'], + + [/Nan::(Escapable)?HandleScope\s+(\w+)\s*;/g, 
'Napi::$1HandleScope $2(env);'], + [/Nan::(Escapable)?HandleScope/g, 'Napi::$1HandleScope'], + [/Nan::ForceSet\(([^,]+), ?/g, '$1->DefineProperty('], + [/\.ForceSet\(Napi::String::New\(env, "(\w+)"\),\s*?/g, '.DefineProperty("$1", '], + // [ /Nan::GetPropertyNames\(([^,]+)\)/, '$1->GetPropertyNames()' ], + [/Nan::Equals\(([^,]+),/g, '$1.StrictEquals('], + + [/(.+)->Set\(/g, '$1.Set('], + + [/Nan::Callback/g, 'Napi::FunctionReference'], + + [/Nan::Persistent/g, 'Napi::ObjectReference'], + [/Nan::ADDON_REGISTER_FUNCTION_ARGS_TYPE target/g, 'Napi::Env& env, Napi::Object& target'], + + [/(\w+)\*\s+(\w+)\s*=\s*Nan::ObjectWrap::Unwrap<\w+>\(info\.This\(\)\);/g, '$1* $2 = this;'], + [/Nan::ObjectWrap::Unwrap<(\w+)>\((.*)\);/g, '$2.Unwrap<$1>();'], + + [/Nan::NAN_METHOD_RETURN_TYPE/g, 'void'], + [/NAN_INLINE/g, 'inline'], + + [/Nan::NAN_METHOD_ARGS_TYPE/g, 'const Napi::CallbackInfo&'], + [/NAN_METHOD\(([\w\d:]+?)\)/g, 'Napi::Value $1(const Napi::CallbackInfo& info)'], + [/static\s*NAN_GETTER\(([\w\d:]+?)\)/g, 'Napi::Value $1(const Napi::CallbackInfo& info)'], + [/NAN_GETTER\(([\w\d:]+?)\)/g, 'Napi::Value $1(const Napi::CallbackInfo& info)'], + [/static\s*NAN_SETTER\(([\w\d:]+?)\)/g, 'void $1(const Napi::CallbackInfo& info, const Napi::Value& value)'], + [/NAN_SETTER\(([\w\d:]+?)\)/g, 'void $1(const Napi::CallbackInfo& info, const Napi::Value& value)'], + [/void Init\((v8::)*Local<(v8::)*Object> exports\)/g, 'Napi::Object Init(Napi::Env env, Napi::Object exports)'], + [/NAN_MODULE_INIT\(([\w\d:]+?)\);/g, 'Napi::Object $1(Napi::Env env, Napi::Object exports);'], + [/NAN_MODULE_INIT\(([\w\d:]+?)\)/g, 'Napi::Object $1(Napi::Env env, Napi::Object exports)'], + + [/::(Init(?:ialize)?)\(target\)/g, '::$1(env, target, module)'], + [/constructor_template/g, 'constructor'], + + [/Nan::FunctionCallbackInfo<(v8::)?Value>[ ]*& [ ]*info\)[ ]*{\n*([ ]*)/gm, 'Napi::CallbackInfo& info) {\n$2Napi::Env env = info.Env();\n$2'], + [/Nan::FunctionCallbackInfo<(v8::)*Value>\s*&\s*info\);/g, 
'Napi::CallbackInfo& info);'], + [/Nan::FunctionCallbackInfo<(v8::)*Value>\s*&/g, 'Napi::CallbackInfo&'], + + [/Buffer::HasInstance\(([^)]+)\)/g, '$1.IsBuffer()'], + + [/info\[(\d+)\]->/g, 'info[$1].'], + [/info\[([\w\d]+)\]->/g, 'info[$1].'], + [/info\.This\(\)->/g, 'info.This().'], + [/->Is(Object|String|Int32|Number)\(\)/g, '.Is$1()'], + [/info.GetReturnValue\(\).SetUndefined\(\)/g, 'return env.Undefined()'], + [/info\.GetReturnValue\(\)\.Set\(((\n|.)+?)\);/g, 'return $1;'], + + // ex. Local to Napi::Value + [/v8::Local/g, 'Napi::$1'], + [/Local<(Value|Boolean|String|Number|Object|Array|Symbol|External|Function)>/g, 'Napi::$1'], + + // Declare an env in helper functions that take a Napi::Value + [/(\w+)\(Napi::Value (\w+)(,\s*[^()]+)?\)\s*{\n*([ ]*)/gm, '$1(Napi::Value $2$3) {\n$4Napi::Env env = $2.Env();\n$4'], + + // delete #include and/or + [/#include +(<|")(?:node|nan).h("|>)/g, '#include $1napi.h$2\n#include $1uv.h$2'], + // NODE_MODULE to NODE_API_MODULE + [/NODE_MODULE/g, 'NODE_API_MODULE'], + [/Nan::/g, 'Napi::'], + [/nan.h/g, 'napi.h'], + + // delete .FromJust() + [/\.FromJust\(\)/g, ''], + // delete .ToLocalCheck() + [/\.ToLocalChecked\(\)/g, ''], + [/^.*->SetInternalFieldCount\(.*$/gm, ''], + + // replace using node; and/or using v8; to using Napi; + [/using (node|v8);/g, 'using Napi;'], + [/using namespace (node|Nan|v8);/g, 'using namespace Napi;'], + // delete using v8::Local; + [/using v8::Local;\n/g, ''], + // replace using v8::XXX; with using Napi::XXX + [/using v8::([A-Za-z]+);/g, 'using Napi::$1;'] + +]; + +const paths = listFiles(dir); +paths.forEach(function (dirEntry) { + const filename = dirEntry.split('\\').pop().split('/').pop(); + + // Check whether the file is a source file or a config file + // then execute function accordingly + const sourcePattern = /.+\.h|.+\.cc|.+\.cpp/; + if (sourcePattern.test(filename)) { + convertFile(dirEntry, SourceFileOperations); + } else if (ConfigFileOperations[filename] != null) { + convertFile(dirEntry, 
ConfigFileOperations[filename]); + } +}); + +function listFiles (dir, filelist) { + const files = fs.readdirSync(dir); + filelist = filelist || []; + files.forEach(function (file) { + if (file === 'node_modules') { + return; + } + + if (fs.statSync(path.join(dir, file)).isDirectory()) { + filelist = listFiles(path.join(dir, file), filelist); + } else { + filelist.push(path.join(dir, file)); + } + }); + return filelist; +} + +function convert (content, operations) { + for (let i = 0; i < operations.length; i++) { + const operation = operations[i]; + content = content.replace(operation[0], operation[1]); + } + return content; +} + +function convertFile (fileName, operations) { + fs.readFile(fileName, 'utf-8', function (err, file) { + if (err) throw err; + + file = convert(file, operations); + + fs.writeFile(fileName, file, function (err) { + if (err) throw err; + }); + }); +} diff --git a/node_modules/node-addon-api/tools/eslint-format.js b/node_modules/node-addon-api/tools/eslint-format.js new file mode 100644 index 0000000..1dda444 --- /dev/null +++ b/node_modules/node-addon-api/tools/eslint-format.js @@ -0,0 +1,79 @@ +#!/usr/bin/env node + +const spawn = require('child_process').spawnSync; + +const filesToCheck = '*.js'; +const FORMAT_START = process.env.FORMAT_START || 'main'; +const IS_WIN = process.platform === 'win32'; +const ESLINT_PATH = IS_WIN ? 
'node_modules\\.bin\\eslint.cmd' : 'node_modules/.bin/eslint'; + +function main (args) { + let fix = false; + while (args.length > 0) { + switch (args[0]) { + case '-f': + case '--fix': + fix = true; + break; + default: + } + args.shift(); + } + + // Check js files that change on unstaged file + const fileUnStaged = spawn( + 'git', + ['diff', '--name-only', FORMAT_START, filesToCheck], + { + encoding: 'utf-8' + } + ); + + // Check js files that change on staged file + const fileStaged = spawn( + 'git', + ['diff', '--name-only', '--cached', FORMAT_START, filesToCheck], + { + encoding: 'utf-8' + } + ); + + const options = [ + ...fileStaged.stdout.split('\n').filter((f) => f !== ''), + ...fileUnStaged.stdout.split('\n').filter((f) => f !== '') + ]; + + if (fix) { + options.push('--fix'); + } + + const result = spawn(ESLINT_PATH, [...options], { + encoding: 'utf-8' + }); + + if (result.error && result.error.errno === 'ENOENT') { + console.error('Eslint not found! Eslint is supposed to be found at ', ESLINT_PATH); + return 2; + } + + if (result.status === 1) { + console.error('Eslint error:', result.stdout); + const fixCmd = 'npm run lint:fix'; + console.error(`ERROR: please run "${fixCmd}" to format changes in your commit + Note that when running the command locally, please keep your local + main branch and working branch up to date with nodejs/node-addon-api + to exclude un-related complains. + Or you can run "env FORMAT_START=upstream/main ${fixCmd}". 
+ Also fix JS files by yourself if necessary.`); + return 1; + } + + if (result.stderr) { + console.error('Error running eslint:', result.stderr); + return 2; + } +} + +if (require.main === module) { + process.exitCode = main(process.argv.slice(2)); +} diff --git a/node_modules/node-fetch/LICENSE.md b/node_modules/node-fetch/LICENSE.md new file mode 100644 index 0000000..660ffec --- /dev/null +++ b/node_modules/node-fetch/LICENSE.md @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+ diff --git a/node_modules/node-fetch/README.md b/node_modules/node-fetch/README.md new file mode 100644 index 0000000..55f09b7 --- /dev/null +++ b/node_modules/node-fetch/README.md @@ -0,0 +1,634 @@ +node-fetch +========== + +[![npm version][npm-image]][npm-url] +[![build status][travis-image]][travis-url] +[![coverage status][codecov-image]][codecov-url] +[![install size][install-size-image]][install-size-url] +[![Discord][discord-image]][discord-url] + +A light-weight module that brings `window.fetch` to Node.js + +(We are looking for [v2 maintainers and collaborators](https://github.com/bitinn/node-fetch/issues/567)) + +[![Backers][opencollective-image]][opencollective-url] + + + +- [Motivation](#motivation) +- [Features](#features) +- [Difference from client-side fetch](#difference-from-client-side-fetch) +- [Installation](#installation) +- [Loading and configuring the module](#loading-and-configuring-the-module) +- [Common Usage](#common-usage) + - [Plain text or HTML](#plain-text-or-html) + - [JSON](#json) + - [Simple Post](#simple-post) + - [Post with JSON](#post-with-json) + - [Post with form parameters](#post-with-form-parameters) + - [Handling exceptions](#handling-exceptions) + - [Handling client and server errors](#handling-client-and-server-errors) +- [Advanced Usage](#advanced-usage) + - [Streams](#streams) + - [Buffer](#buffer) + - [Accessing Headers and other Meta data](#accessing-headers-and-other-meta-data) + - [Extract Set-Cookie Header](#extract-set-cookie-header) + - [Post data using a file stream](#post-data-using-a-file-stream) + - [Post with form-data (detect multipart)](#post-with-form-data-detect-multipart) + - [Request cancellation with AbortSignal](#request-cancellation-with-abortsignal) +- [API](#api) + - [fetch(url[, options])](#fetchurl-options) + - [Options](#options) + - [Class: Request](#class-request) + - [Class: Response](#class-response) + - [Class: Headers](#class-headers) + - [Interface: Body](#interface-body) + - [Class: 
FetchError](#class-fetcherror) +- [License](#license) +- [Acknowledgement](#acknowledgement) + + + +## Motivation + +Instead of implementing `XMLHttpRequest` in Node.js to run browser-specific [Fetch polyfill](https://github.com/github/fetch), why not go from native `http` to `fetch` API directly? Hence, `node-fetch`, minimal code for a `window.fetch` compatible API on Node.js runtime. + +See Matt Andrews' [isomorphic-fetch](https://github.com/matthew-andrews/isomorphic-fetch) or Leonardo Quixada's [cross-fetch](https://github.com/lquixada/cross-fetch) for isomorphic usage (exports `node-fetch` for server-side, `whatwg-fetch` for client-side). + +## Features + +- Stay consistent with `window.fetch` API. +- Make conscious trade-off when following [WHATWG fetch spec][whatwg-fetch] and [stream spec](https://streams.spec.whatwg.org/) implementation details, document known differences. +- Use native promise but allow substituting it with [insert your favorite promise library]. +- Use native Node streams for body on both request and response. +- Decode content encoding (gzip/deflate) properly and convert string output (such as `res.text()` and `res.json()`) to UTF-8 automatically. +- Useful extensions such as timeout, redirect limit, response size limit, [explicit errors](ERROR-HANDLING.md) for troubleshooting. + +## Difference from client-side fetch + +- See [Known Differences](LIMITS.md) for details. +- If you happen to use a missing feature that `window.fetch` offers, feel free to open an issue. +- Pull requests are welcomed too! 
+ +## Installation + +Current stable release (`2.x`) + +```sh +$ npm install node-fetch +``` + +## Loading and configuring the module +We suggest you load the module via `require` until the stabilization of ES modules in node: +```js +const fetch = require('node-fetch'); +``` + +If you are using a Promise library other than native, set it through `fetch.Promise`: +```js +const Bluebird = require('bluebird'); + +fetch.Promise = Bluebird; +``` + +## Common Usage + +NOTE: The documentation below is up-to-date with `2.x` releases; see the [`1.x` readme](https://github.com/bitinn/node-fetch/blob/1.x/README.md), [changelog](https://github.com/bitinn/node-fetch/blob/1.x/CHANGELOG.md) and [2.x upgrade guide](UPGRADE-GUIDE.md) for the differences. + +#### Plain text or HTML +```js +fetch('https://github.com/') + .then(res => res.text()) + .then(body => console.log(body)); +``` + +#### JSON + +```js + +fetch('https://api.github.com/users/github') + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Simple Post +```js +fetch('https://httpbin.org/post', { method: 'POST', body: 'a=1' }) + .then(res => res.json()) // expecting a json response + .then(json => console.log(json)); +``` + +#### Post with JSON + +```js +const body = { a: 1 }; + +fetch('https://httpbin.org/post', { + method: 'post', + body: JSON.stringify(body), + headers: { 'Content-Type': 'application/json' }, + }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Post with form parameters +`URLSearchParams` is available in Node.js as of v7.5.0. See [official documentation](https://nodejs.org/api/url.html#url_class_urlsearchparams) for more usage methods. 
+ +NOTE: The `Content-Type` header is only set automatically to `x-www-form-urlencoded` when an instance of `URLSearchParams` is given as such: + +```js +const { URLSearchParams } = require('url'); + +const params = new URLSearchParams(); +params.append('a', 1); + +fetch('https://httpbin.org/post', { method: 'POST', body: params }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Handling exceptions +NOTE: 3xx-5xx responses are *NOT* exceptions and should be handled in `then()`; see the next section for more information. + +Adding a catch to the fetch promise chain will catch *all* exceptions, such as errors originating from node core libraries, network errors and operational errors, which are instances of FetchError. See the [error handling document](ERROR-HANDLING.md) for more details. + +```js +fetch('https://domain.invalid/') + .catch(err => console.error(err)); +``` + +#### Handling client and server errors +It is common to create a helper function to check that the response contains no client (4xx) or server (5xx) error responses: + +```js +function checkStatus(res) { + if (res.ok) { // res.status >= 200 && res.status < 300 + return res; + } else { + throw MyCustomError(res.statusText); + } +} + +fetch('https://httpbin.org/status/400') + .then(checkStatus) + .then(res => console.log('will not get here...')) +``` + +## Advanced Usage + +#### Streams +The "Node.js way" is to use streams when possible: + +```js +fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') + .then(res => { + const dest = fs.createWriteStream('./octocat.png'); + res.body.pipe(dest); + }); +``` + +In Node.js 14 you can also use async iterators to read `body`; however, be careful to catch +errors -- the longer a response runs, the more likely it is to encounter an error. 
+ +```js +const fetch = require('node-fetch'); +const response = await fetch('https://httpbin.org/stream/3'); +try { + for await (const chunk of response.body) { + console.dir(JSON.parse(chunk.toString())); + } +} catch (err) { + console.error(err.stack); +} +``` + +In Node.js 12 you can also use async iterators to read `body`; however, async iterators with streams +did not mature until Node.js 14, so you need to do some extra work to ensure you handle errors +directly from the stream and wait on it response to fully close. + +```js +const fetch = require('node-fetch'); +const read = async body => { + let error; + body.on('error', err => { + error = err; + }); + for await (const chunk of body) { + console.dir(JSON.parse(chunk.toString())); + } + return new Promise((resolve, reject) => { + body.on('close', () => { + error ? reject(error) : resolve(); + }); + }); +}; +try { + const response = await fetch('https://httpbin.org/stream/3'); + await read(response.body); +} catch (err) { + console.error(err.stack); +} +``` + +#### Buffer +If you prefer to cache binary data in full, use buffer(). (NOTE: `buffer()` is a `node-fetch`-only API) + +```js +const fileType = require('file-type'); + +fetch('https://assets-cdn.github.com/images/modules/logos_page/Octocat.png') + .then(res => res.buffer()) + .then(buffer => fileType(buffer)) + .then(type => { /* ... */ }); +``` + +#### Accessing Headers and other Meta data +```js +fetch('https://github.com/') + .then(res => { + console.log(res.ok); + console.log(res.status); + console.log(res.statusText); + console.log(res.headers.raw()); + console.log(res.headers.get('content-type')); + }); +``` + +#### Extract Set-Cookie Header + +Unlike browsers, you can access raw `Set-Cookie` headers manually using `Headers.raw()`. This is a `node-fetch` only API. 
+ +```js +fetch(url).then(res => { + // returns an array of values, instead of a string of comma-separated values + console.log(res.headers.raw()['set-cookie']); +}); +``` + +#### Post data using a file stream + +```js +const { createReadStream } = require('fs'); + +const stream = createReadStream('input.txt'); + +fetch('https://httpbin.org/post', { method: 'POST', body: stream }) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Post with form-data (detect multipart) + +```js +const FormData = require('form-data'); + +const form = new FormData(); +form.append('a', 1); + +fetch('https://httpbin.org/post', { method: 'POST', body: form }) + .then(res => res.json()) + .then(json => console.log(json)); + +// OR, using custom headers +// NOTE: getHeaders() is non-standard API + +const form = new FormData(); +form.append('a', 1); + +const options = { + method: 'POST', + body: form, + headers: form.getHeaders() +} + +fetch('https://httpbin.org/post', options) + .then(res => res.json()) + .then(json => console.log(json)); +``` + +#### Request cancellation with AbortSignal + +> NOTE: You may cancel streamed requests only on Node >= v8.0.0 + +You may cancel requests with `AbortController`. A suggested implementation is [`abort-controller`](https://www.npmjs.com/package/abort-controller). + +An example of timing out a request after 150ms could be achieved as the following: + +```js +import AbortController from 'abort-controller'; + +const controller = new AbortController(); +const timeout = setTimeout( + () => { controller.abort(); }, + 150, +); + +fetch(url, { signal: controller.signal }) + .then(res => res.json()) + .then( + data => { + useData(data) + }, + err => { + if (err.name === 'AbortError') { + // request was aborted + } + }, + ) + .finally(() => { + clearTimeout(timeout); + }); +``` + +See [test cases](https://github.com/bitinn/node-fetch/blob/master/test/test.js) for more examples. 
+ + +## API + +### fetch(url[, options]) + +- `url` A string representing the URL for fetching +- `options` [Options](#fetch-options) for the HTTP(S) request +- Returns: Promise<[Response](#class-response)> + +Perform an HTTP(S) fetch. + +`url` should be an absolute url, such as `https://example.com/`. A path-relative URL (`/file/under/root`) or protocol-relative URL (`//can-be-http-or-https.com/`) will result in a rejected `Promise`. + + +### Options + +The default values are shown after each option key. + +```js +{ + // These properties are part of the Fetch Standard + method: 'GET', + headers: {}, // request headers. format is the identical to that accepted by the Headers constructor (see below) + body: null, // request body. can be null, a string, a Buffer, a Blob, or a Node.js Readable stream + redirect: 'follow', // set to `manual` to extract redirect headers, `error` to reject redirect + signal: null, // pass an instance of AbortSignal to optionally abort requests + + // The following properties are node-fetch extensions + follow: 20, // maximum redirect count. 0 to not follow redirect + timeout: 0, // req/res timeout in ms, it resets on redirect. 0 to disable (OS limit applies). Signal is recommended instead. + compress: true, // support gzip/deflate content encoding. false to disable + size: 0, // maximum response body size in bytes. 
0 to disable + agent: null // http(s).Agent instance or function that returns an instance (see below) +} +``` + +##### Default Headers + +If no values are set, the following request headers will be sent automatically: + +Header | Value +------------------- | -------------------------------------------------------- +`Accept-Encoding` | `gzip,deflate` _(when `options.compress === true`)_ +`Accept` | `*/*` +`Content-Length` | _(automatically calculated, if possible)_ +`Transfer-Encoding` | `chunked` _(when `req.body` is a stream)_ +`User-Agent` | `node-fetch/1.0 (+https://github.com/bitinn/node-fetch)` + +Note: when `body` is a `Stream`, `Content-Length` is not set automatically. + +##### Custom Agent + +The `agent` option allows you to specify networking related options which are out of the scope of Fetch, including and not limited to the following: + +- Support self-signed certificate +- Use only IPv4 or IPv6 +- Custom DNS Lookup + +See [`http.Agent`](https://nodejs.org/api/http.html#http_new_agent_options) for more information. + +If no agent is specified, the default agent provided by Node.js is used. Note that [this changed in Node.js 19](https://github.com/nodejs/node/blob/4267b92604ad78584244488e7f7508a690cb80d0/lib/_http_agent.js#L564) to have `keepalive` true by default. If you wish to enable `keepalive` in an earlier version of Node.js, you can override the agent as per the following code sample. + +In addition, the `agent` option accepts a function that returns `http`(s)`.Agent` instance given current [URL](https://nodejs.org/api/url.html), this is useful during a redirection chain across HTTP and HTTPS protocol. 
+ +```js +const httpAgent = new http.Agent({ + keepAlive: true +}); +const httpsAgent = new https.Agent({ + keepAlive: true +}); + +const options = { + agent: function (_parsedURL) { + if (_parsedURL.protocol == 'http:') { + return httpAgent; + } else { + return httpsAgent; + } + } +} +``` + + +### Class: Request + +An HTTP(S) request containing information about URL, method, headers, and the body. This class implements the [Body](#iface-body) interface. + +Due to the nature of Node.js, the following properties are not implemented at this moment: + +- `type` +- `destination` +- `referrer` +- `referrerPolicy` +- `mode` +- `credentials` +- `cache` +- `integrity` +- `keepalive` + +The following node-fetch extension properties are provided: + +- `follow` +- `compress` +- `counter` +- `agent` + +See [options](#fetch-options) for exact meaning of these extensions. + +#### new Request(input[, options]) + +*(spec-compliant)* + +- `input` A string representing a URL, or another `Request` (which will be cloned) +- `options` [Options][#fetch-options] for the HTTP(S) request + +Constructs a new `Request` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Request/Request). + +In most cases, directly `fetch(url, options)` is simpler than creating a `Request` object. + + +### Class: Response + +An HTTP(S) response. This class implements the [Body](#iface-body) interface. + +The following properties are not implemented in node-fetch at this moment: + +- `Response.error()` +- `Response.redirect()` +- `type` +- `trailer` + +#### new Response([body[, options]]) + +*(spec-compliant)* + +- `body` A `String` or [`Readable` stream][node-readable] +- `options` A [`ResponseInit`][response-init] options dictionary + +Constructs a new `Response` object. The constructor is identical to that in the [browser](https://developer.mozilla.org/en-US/docs/Web/API/Response/Response). 
+ +Because Node.js does not implement service workers (for which this class was designed), one rarely has to construct a `Response` directly. + +#### response.ok + +*(spec-compliant)* + +Convenience property representing if the request ended normally. Will evaluate to true if the response status was greater than or equal to 200 but smaller than 300. + +#### response.redirected + +*(spec-compliant)* + +Convenience property representing if the request has been redirected at least once. Will evaluate to true if the internal redirect counter is greater than 0. + + +### Class: Headers + +This class allows manipulating and iterating over a set of HTTP headers. All methods specified in the [Fetch Standard][whatwg-fetch] are implemented. + +#### new Headers([init]) + +*(spec-compliant)* + +- `init` Optional argument to pre-fill the `Headers` object + +Construct a new `Headers` object. `init` can be either `null`, a `Headers` object, an key-value map object or any iterable object. + +```js +// Example adapted from https://fetch.spec.whatwg.org/#example-headers-class + +const meta = { + 'Content-Type': 'text/xml', + 'Breaking-Bad': '<3' +}; +const headers = new Headers(meta); + +// The above is equivalent to +const meta = [ + [ 'Content-Type', 'text/xml' ], + [ 'Breaking-Bad', '<3' ] +]; +const headers = new Headers(meta); + +// You can in fact use any iterable objects, like a Map or even another Headers +const meta = new Map(); +meta.set('Content-Type', 'text/xml'); +meta.set('Breaking-Bad', '<3'); +const headers = new Headers(meta); +const copyOfHeaders = new Headers(headers); +``` + + +### Interface: Body + +`Body` is an abstract interface with methods that are applicable to both `Request` and `Response` classes. + +The following methods are not yet implemented in node-fetch at this moment: + +- `formData()` + +#### body.body + +*(deviation from spec)* + +* Node.js [`Readable` stream][node-readable] + +Data are encapsulated in the `Body` object. 
Note that while the [Fetch Standard][whatwg-fetch] requires the property to always be a WHATWG `ReadableStream`, in node-fetch it is a Node.js [`Readable` stream][node-readable]. + +#### body.bodyUsed + +*(spec-compliant)* + +* `Boolean` + +A boolean property for if this body has been consumed. Per the specs, a consumed body cannot be used again. + +#### body.arrayBuffer() +#### body.blob() +#### body.json() +#### body.text() + +*(spec-compliant)* + +* Returns: Promise + +Consume the body and return a promise that will resolve to one of these formats. + +#### body.buffer() + +*(node-fetch extension)* + +* Returns: Promise<Buffer> + +Consume the body and return a promise that will resolve to a Buffer. + +#### body.textConverted() + +*(node-fetch extension)* + +* Returns: Promise<String> + +Identical to `body.text()`, except instead of always converting to UTF-8, encoding sniffing will be performed and text converted to UTF-8 if possible. + +(This API requires an optional dependency of the npm package [encoding](https://www.npmjs.com/package/encoding), which you need to install manually. `webpack` users may see [a warning message](https://github.com/bitinn/node-fetch/issues/412#issuecomment-379007792) due to this optional dependency.) + + +### Class: FetchError + +*(node-fetch extension)* + +An operational error in the fetching process. See [ERROR-HANDLING.md][] for more info. + + +### Class: AbortError + +*(node-fetch extension)* + +An Error thrown when the request is aborted in response to an `AbortSignal`'s `abort` event. It has a `name` property of `AbortError`. See [ERROR-HANDLING.MD][] for more info. + +## Acknowledgement + +Thanks to [github/fetch](https://github.com/github/fetch) for providing a solid implementation reference. 
+ +`node-fetch` v1 was maintained by [@bitinn](https://github.com/bitinn); v2 was maintained by [@TimothyGu](https://github.com/timothygu), [@bitinn](https://github.com/bitinn) and [@jimmywarting](https://github.com/jimmywarting); v2 readme is written by [@jkantr](https://github.com/jkantr). + +## License + +MIT + +[npm-image]: https://flat.badgen.net/npm/v/node-fetch +[npm-url]: https://www.npmjs.com/package/node-fetch +[travis-image]: https://flat.badgen.net/travis/bitinn/node-fetch +[travis-url]: https://travis-ci.org/bitinn/node-fetch +[codecov-image]: https://flat.badgen.net/codecov/c/github/bitinn/node-fetch/master +[codecov-url]: https://codecov.io/gh/bitinn/node-fetch +[install-size-image]: https://flat.badgen.net/packagephobia/install/node-fetch +[install-size-url]: https://packagephobia.now.sh/result?p=node-fetch +[discord-image]: https://img.shields.io/discord/619915844268326952?color=%237289DA&label=Discord&style=flat-square +[discord-url]: https://discord.gg/Zxbndcm +[opencollective-image]: https://opencollective.com/node-fetch/backers.svg +[opencollective-url]: https://opencollective.com/node-fetch +[whatwg-fetch]: https://fetch.spec.whatwg.org/ +[response-init]: https://fetch.spec.whatwg.org/#responseinit +[node-readable]: https://nodejs.org/api/stream.html#stream_readable_streams +[mdn-headers]: https://developer.mozilla.org/en-US/docs/Web/API/Headers +[LIMITS.md]: https://github.com/bitinn/node-fetch/blob/master/LIMITS.md +[ERROR-HANDLING.md]: https://github.com/bitinn/node-fetch/blob/master/ERROR-HANDLING.md +[UPGRADE-GUIDE.md]: https://github.com/bitinn/node-fetch/blob/master/UPGRADE-GUIDE.md diff --git a/node_modules/node-fetch/browser.js b/node_modules/node-fetch/browser.js new file mode 100644 index 0000000..ee86265 --- /dev/null +++ b/node_modules/node-fetch/browser.js @@ -0,0 +1,25 @@ +"use strict"; + +// ref: https://github.com/tc39/proposal-global +var getGlobal = function () { + // the only reliable means to get the global object is + // 
`Function('return this')()` + // However, this causes CSP violations in Chrome apps. + if (typeof self !== 'undefined') { return self; } + if (typeof window !== 'undefined') { return window; } + if (typeof global !== 'undefined') { return global; } + throw new Error('unable to locate global object'); +} + +var globalObject = getGlobal(); + +module.exports = exports = globalObject.fetch; + +// Needed for TypeScript and Webpack. +if (globalObject.fetch) { + exports.default = globalObject.fetch.bind(globalObject); +} + +exports.Headers = globalObject.Headers; +exports.Request = globalObject.Request; +exports.Response = globalObject.Response; diff --git a/node_modules/node-fetch/lib/index.es.js b/node_modules/node-fetch/lib/index.es.js new file mode 100644 index 0000000..aae9799 --- /dev/null +++ b/node_modules/node-fetch/lib/index.es.js @@ -0,0 +1,1777 @@ +process.emitWarning("The .es.js file is deprecated. Use .mjs instead."); + +import Stream from 'stream'; +import http from 'http'; +import Url from 'url'; +import whatwgUrl from 'whatwg-url'; +import https from 'https'; +import zlib from 'zlib'; + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + 
buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface 
for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return 
Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + 
accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. 
+ if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. 
+ * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + 
+Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} 
urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? 
init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create 
AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. + * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? 
https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. 
+ req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. 
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new 
PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
+ const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is 
still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +export default fetch; +export { Headers, Request, Response, FetchError, AbortError }; diff --git a/node_modules/node-fetch/lib/index.js b/node_modules/node-fetch/lib/index.js new file mode 100644 index 0000000..567ff5d --- /dev/null +++ b/node_modules/node-fetch/lib/index.js @@ -0,0 +1,1787 @@ +'use strict'; + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var Stream = _interopDefault(require('stream')); +var http = _interopDefault(require('http')); +var Url = _interopDefault(require('url')); +var whatwgUrl = _interopDefault(require('whatwg-url')); +var https = _interopDefault(require('https')); +var zlib = _interopDefault(require('zlib')); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String 
message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return 
Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + 
accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. 
+ if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. 
+ * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + 
+Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} 
urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? 
init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create 
AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. + * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? 
https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. 
+ req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. 
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new 
PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
+ const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is 
still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; +exports.AbortError = AbortError; diff --git a/node_modules/node-fetch/lib/index.mjs b/node_modules/node-fetch/lib/index.mjs new file mode 100644 index 0000000..2863dd9 --- /dev/null +++ b/node_modules/node-fetch/lib/index.mjs @@ -0,0 +1,1775 @@ +import Stream from 'stream'; +import http from 'http'; +import Url from 'url'; +import whatwgUrl from 'whatwg-url'; +import https from 'https'; +import zlib from 'zlib'; + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element 
instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { 
enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = require('encoding').convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return 
Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + 
accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. 
+ if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. 
+ * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + 
+Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); +const URL = Url.URL || whatwgUrl.URL; + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +/** + * Wrapper around `new URL` to handle arbitrary URLs + * + * @param {string} 
urlStr + * @return {void} + */ +function parseURL(urlStr) { + /* + Check whether the URL is absolute or not + Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 + Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 + */ + if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { + urlStr = new URL(urlStr).toString(); + } + + // Fallback to old implementation for arbitrary URLs + return parse_url(urlStr); +} + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parseURL(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parseURL(`${input}`); + } + input = {}; + } else { + parsedURL = parseURL(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? 
init.body : isRequest(input) && input.body !== null ? clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create 
AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +const URL$1 = Url.URL || whatwgUrl.URL; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; + +const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { + const orig = new URL$1(original).hostname; + const dest = new URL$1(destination).hostname; + + return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); +}; + +/** + * isSameProtocol reports whether the two provided URLs use the same protocol. + * + * Both domains must already be in canonical form. + * @param {string|URL} original + * @param {string|URL} destination + */ +const isSameProtocol = function isSameProtocol(destination, original) { + const orig = new URL$1(original).protocol; + const dest = new URL$1(destination).protocol; + + return orig === dest; +}; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? 
https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + destroyStream(request.body, error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + + if (response && response.body) { + destroyStream(response.body, err); + } + + finalize(); + }); + + fixResponseChunkedTransferBadEnding(req, function (err) { + if (signal && signal.aborted) { + return; + } + + if (response && response.body) { + destroyStream(response.body, err); + } + }); + + /* c8 ignore next 18 */ + if (parseInt(process.version.substring(1)) < 14) { + // Before Node.js 14, pipeline() does not fully support async iterators and does not always + // properly handle when the socket close/end events are out of order. 
+ req.on('socket', function (s) { + s.addListener('close', function (hadError) { + // if a data listener is still present we didn't end cleanly + const hasDataListener = s.listenerCount('data') > 0; + + // if end happened before close but the socket didn't emit an error, do it now + if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + response.body.emit('error', err); + } + }); + }); + } + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + let locationURL = null; + try { + locationURL = location === null ? null : new URL$1(location, request.url).toString(); + } catch (err) { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); + finalize(); + return; + } + } + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. 
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { + for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { + requestOpts.headers.delete(name); + } + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new 
PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
+ const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + raw.on('end', function () { + // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. + if (!response) { + response = new Response(body, response_options); + resolve(response); + } + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +function fixResponseChunkedTransferBadEnding(request, errorCallback) { + let socket; + + request.on('socket', function (s) { + socket = s; + }); + + request.on('response', function (response) { + const headers = response.headers; + + if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { + response.once('close', function (hadError) { + // tests for socket presence, as in some situations the + // the 'socket' event is not triggered for the request + // (happens in deno), avoids `TypeError` + // if a data listener is 
still present we didn't end cleanly + const hasDataListener = socket && socket.listenerCount('data') > 0; + + if (hasDataListener && !hadError) { + const err = new Error('Premature close'); + err.code = 'ERR_STREAM_PREMATURE_CLOSE'; + errorCallback(err); + } + }); + } + }); +} + +function destroyStream(stream, err) { + if (stream.destroy) { + stream.destroy(err); + } else { + // node < 8 + stream.emit('error', err); + stream.end(); + } +} + +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +export default fetch; +export { Headers, Request, Response, FetchError, AbortError }; diff --git a/node_modules/node-fetch/package.json b/node_modules/node-fetch/package.json new file mode 100644 index 0000000..e0be176 --- /dev/null +++ b/node_modules/node-fetch/package.json @@ -0,0 +1,89 @@ +{ + "name": "node-fetch", + "version": "2.7.0", + "description": "A light-weight module that brings window.fetch to node.js", + "main": "lib/index.js", + "browser": "./browser.js", + "module": "lib/index.mjs", + "files": [ + "lib/index.js", + "lib/index.mjs", + "lib/index.es.js", + "browser.js" + ], + "engines": { + "node": "4.x || >=6.0.0" + }, + "scripts": { + "build": "cross-env BABEL_ENV=rollup rollup -c", + "prepare": "npm run build", + "test": "cross-env BABEL_ENV=test mocha --require babel-register --throw-deprecation test/test.js", + "report": "cross-env BABEL_ENV=coverage nyc --reporter lcov --reporter text mocha -R spec test/test.js", + "coverage": "cross-env BABEL_ENV=coverage nyc --reporter json --reporter text mocha -R spec test/test.js && codecov -f coverage/coverage-final.json" + }, + "repository": { + "type": "git", + "url": "https://github.com/bitinn/node-fetch.git" + }, + "keywords": [ + "fetch", + "http", + "promise" + ], + "author": "David 
Frank", + "license": "MIT", + "bugs": { + "url": "https://github.com/bitinn/node-fetch/issues" + }, + "homepage": "https://github.com/bitinn/node-fetch", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + }, + "devDependencies": { + "@ungap/url-search-params": "^0.1.2", + "abort-controller": "^1.1.0", + "abortcontroller-polyfill": "^1.3.0", + "babel-core": "^6.26.3", + "babel-plugin-istanbul": "^4.1.6", + "babel-plugin-transform-async-generator-functions": "^6.24.1", + "babel-polyfill": "^6.26.0", + "babel-preset-env": "1.4.0", + "babel-register": "^6.16.3", + "chai": "^3.5.0", + "chai-as-promised": "^7.1.1", + "chai-iterator": "^1.1.1", + "chai-string": "~1.3.0", + "codecov": "3.3.0", + "cross-env": "^5.2.0", + "form-data": "^2.3.3", + "is-builtin-module": "^1.0.0", + "mocha": "^5.0.0", + "nyc": "11.9.0", + "parted": "^0.1.1", + "promise": "^8.0.3", + "resumer": "0.0.0", + "rollup": "^0.63.4", + "rollup-plugin-babel": "^3.0.7", + "string-to-arraybuffer": "^1.0.2", + "teeny-request": "3.7.0" + }, + "release": { + "branches": [ + "+([0-9]).x", + "main", + "next", + { + "name": "beta", + "prerelease": true + } + ] + } +} diff --git a/node_modules/nopt/CHANGELOG.md b/node_modules/nopt/CHANGELOG.md new file mode 100644 index 0000000..82a09fb --- /dev/null +++ b/node_modules/nopt/CHANGELOG.md @@ -0,0 +1,58 @@ +### v4.0.1 (2016-12-14) + +#### WHOOPS + +* [`fb9b1ce`](https://github.com/npm/nopt/commit/fb9b1ce57b3c69b4f7819015be87719204f77ef6) + Merged so many patches at once that the code fencing + ([@adius](https://github.com/adius)) added got broken. Sorry, + ([@adius](https://github.com/adius))! 
+ ([@othiym23](https://github.com/othiym23)) + +### v4.0.0 (2016-12-13) + +#### BREAKING CHANGES + +* [`651d447`](https://github.com/npm/nopt/commit/651d4473946096d341a480bbe56793de3fc706aa) + When parsing String-typed arguments, if the next value is `""`, don't simply + swallow it. ([@samjonester](https://github.com/samjonester)) + +#### PERFORMANCE TWEAKS + +* [`3370ce8`](https://github.com/npm/nopt/commit/3370ce87a7618ba228883861db84ddbcdff252a9) + Simplify initialization. ([@elidoran](https://github.com/elidoran)) +* [`356e58e`](https://github.com/npm/nopt/commit/356e58e3b3b431a4b1af7fd7bdee44c2c0526a09) + Store `Array.isArray(types[arg])` for reuse. + ([@elidoran](https://github.com/elidoran)) +* [`0d95e90`](https://github.com/npm/nopt/commit/0d95e90515844f266015b56d2c80b94e5d14a07e) + Interpret single-item type arrays as a single type. + ([@samjonester](https://github.com/samjonester)) +* [`07c69d3`](https://github.com/npm/nopt/commit/07c69d38b5186450941fbb505550becb78a0e925) + Simplify key-value extraction. ([@elidoran](https://github.com/elidoran)) +* [`39b6e5c`](https://github.com/npm/nopt/commit/39b6e5c65ac47f60cd43a1fbeece5cd4c834c254) + Only call `Date.parse(val)` once. ([@elidoran](https://github.com/elidoran)) +* [`934943d`](https://github.com/npm/nopt/commit/934943dffecb55123a2b15959fe2a359319a5dbd) + Use `osenv.home()` to find a user's home directory instead of assuming it's + always `$HOME`. ([@othiym23](https://github.com/othiym23)) + +#### TEST & CI IMPROVEMENTS + +* [`326ffff`](https://github.com/npm/nopt/commit/326ffff7f78a00bcd316adecf69075f8a8093619) + Fix `/tmp` test to work on Windows. + ([@elidoran](https://github.com/elidoran)) +* [`c89d31a`](https://github.com/npm/nopt/commit/c89d31a49d14f2238bc6672db08da697bbc57f1b) + Only run Windows tests on Windows, only run Unix tests on a Unix. 
+ ([@elidoran](https://github.com/elidoran)) +* [`affd3d1`](https://github.com/npm/nopt/commit/affd3d1d0addffa93006397b2013b18447339366) + Refresh Travis to run the tests against the currently-supported batch of npm + versions. ([@helio](https://github.com/helio)-frota) +* [`55f9449`](https://github.com/npm/nopt/commit/55f94497d163ed4d16dd55fd6c4fb95cc440e66d) + `tap@8.0.1` ([@othiym23](https://github.com/othiym23)) + +#### DOC TWEAKS + +* [`5271229`](https://github.com/npm/nopt/commit/5271229ee7c810217dd51616c086f5d9ab224581) + Use JavaScript code block for syntax highlighting. + ([@adius](https://github.com/adius)) +* [`c0d156f`](https://github.com/npm/nopt/commit/c0d156f229f9994c5dfcec4a8886eceff7a07682) + The code sample in the README had `many2: [ oneThing ]`, and now it has + `many2: [ two, things ]`. ([@silkentrance](https://github.com/silkentrance)) diff --git a/node_modules/nopt/LICENSE b/node_modules/nopt/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/nopt/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/nopt/README.md b/node_modules/nopt/README.md new file mode 100644 index 0000000..a99531c --- /dev/null +++ b/node_modules/nopt/README.md @@ -0,0 +1,213 @@ +If you want to write an option parser, and have it be good, there are +two ways to do it. The Right Way, and the Wrong Way. + +The Wrong Way is to sit down and write an option parser. We've all done +that. + +The Right Way is to write some complex configurable program with so many +options that you hit the limit of your frustration just trying to +manage them all, and defer it with duct-tape solutions until you see +exactly to the core of the problem, and finally snap and write an +awesome option parser. + +If you want to write an option parser, don't write an option parser. +Write a package manager, or a source control system, or a service +restarter, or an operating system. You probably won't end up with a +good one of those, but if you don't give up, and you are relentless and +diligent enough in your procrastination, you may just end up with a very +nice option parser. + +## USAGE + +```javascript +// my-program.js +var nopt = require("nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + , "many1" : [String, Array] + , "many2" : [path, Array] + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag"] + } + // everything is optional. 
+ // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) +console.log(parsed) +``` + +This would give you support for any of the following: + +```console +$ node my-program.js --foo "blerp" --no-flag +{ "foo" : "blerp", "flag" : false } + +$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag +{ bar: 7, foo: "Mr. Hand", flag: true } + +$ node my-program.js --foo "blerp" -f -----p +{ foo: "blerp", flag: true, pick: true } + +$ node my-program.js -fp --foofoo +{ foo: "Mr. Foo", flag: true, pick: true } + +$ node my-program.js --foofoo -- -fp # -- stops the flag parsing. +{ foo: "Mr. Foo", argv: { remain: ["-fp"] } } + +$ node my-program.js --blatzk -fp # unknown opts are ok. +{ blatzk: true, flag: true, pick: true } + +$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value +{ blatzk: 1000, flag: true, pick: true } + +$ node my-program.js --no-blatzk -fp # unless they start with "no-" +{ blatzk: false, flag: true, pick: true } + +$ node my-program.js --baz b/a/z # known paths are resolved. +{ baz: "/Users/isaacs/b/a/z" } + +# if Array is one of the types, then it can take many +# values, and will always be an array. The other types provided +# specify what types are allowed in the list. + +$ node my-program.js --many1 5 --many1 null --many1 foo +{ many1: ["5", "null", "foo"] } + +$ node my-program.js --many2 foo --many2 bar +{ many2: ["/path/to/foo", "path/to/bar"] } +``` + +Read the tests at the bottom of `lib/nopt.js` for more examples of +what this puppy can do. + +## Types + +The following types are supported, and defined on `nopt.typeDefs` + +* String: A normal string. No parsing is done. +* path: A file system path. Gets resolved against cwd if not absolute. +* url: A url. If it doesn't parse, it isn't accepted. +* Number: Must be numeric. +* Date: Must parse as a date. 
If it does, and `Date` is one of the options, + then it will return a Date object, not a string. +* Boolean: Must be either `true` or `false`. If an option is a boolean, + then it does not need a value, and its presence will imply `true` as + the value. To negate boolean flags, do `--no-whatever` or `--whatever + false` +* NaN: Means that the option is strictly not allowed. Any value will + fail. +* Stream: An object matching the "Stream" class in node. Valuable + for use when validating programmatically. (npm uses this to let you + supply any WriteStream on the `outfd` and `logfd` config options.) +* Array: If `Array` is specified as one of the types, then the value + will be parsed as a list of options. This means that multiple values + can be specified, and that the value will always be an array. + +If a type is an array of values not on this list, then those are +considered valid values. For instance, in the example above, the +`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`, +and any other value will be rejected. + +When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be +interpreted as their JavaScript equivalents. + +You can also mix types and values, or multiple types, in a list. For +instance `{ blah: [Number, null] }` would allow a value to be set to +either a Number or null. When types are ordered, this implies a +preference, and the first type that can be used to properly interpret +the value will be used. + +To define a new type, add it to `nopt.typeDefs`. Each item in that +hash is an object with a `type` member and a `validate` method. The +`type` member is an object that matches what goes in the type list. The +`validate` method is a function that gets called with `validate(data, +key, val)`. Validate methods should assign `data[key]` to the valid +value of `val` if it can be handled properly, or return boolean +`false` if it cannot. 
+ +You can also call `nopt.clean(data, types, typeDefs)` to clean up a +config object and remove its invalid properties. + +## Error Handling + +By default, nopt outputs a warning to standard error when invalid values for +known options are found. You can change this behavior by assigning a method +to `nopt.invalidHandler`. This method will be called with +the offending `nopt.invalidHandler(key, val, types)`. + +If no `nopt.invalidHandler` is assigned, then it will console.error +its whining. If it is assigned to boolean `false` then the warning is +suppressed. + +## Abbreviations + +Yes, they are supported. If you define options like this: + +```javascript +{ "foolhardyelephants" : Boolean +, "pileofmonkeys" : Boolean } +``` + +Then this will work: + +```bash +node program.js --foolhar --pil +node program.js --no-f --pileofmon +# etc. +``` + +## Shorthands + +Shorthands are a hash of shorter option names to a snippet of args that +they expand to. + +If multiple one-character shorthands are all combined, and the +combination does not unambiguously match any other option or shorthand, +then they will be broken up into their constituent parts. For example: + +```json +{ "s" : ["--loglevel", "silent"] +, "g" : "--global" +, "f" : "--force" +, "p" : "--parseable" +, "l" : "--long" +} +``` + +```bash +npm ls -sgflp +# just like doing this: +npm ls --loglevel silent --global --force --long --parseable +``` + +## The Rest of the args + +The config object returned by nopt is given a special member called +`argv`, which is an object with the following fields: + +* `remain`: The remaining args after all the parsing has occurred. +* `original`: The args as they originally appeared. +* `cooked`: The args after flags and shorthands are expanded. + +## Slicing + +Node programs are called with more or less the exact argv as it appears +in C land, after the v8 and node-specific options have been plucked off. 
+As such, `argv[0]` is always `node` and `argv[1]` is always the +JavaScript program being run. + +That's usually not very useful to you. So they're sliced off by +default. If you want them, then you can pass in `0` as the last +argument, or any other number that you'd like to slice off the start of +the list. diff --git a/node_modules/nopt/bin/nopt.js b/node_modules/nopt/bin/nopt.js new file mode 100644 index 0000000..3232d4c --- /dev/null +++ b/node_modules/nopt/bin/nopt.js @@ -0,0 +1,54 @@ +#!/usr/bin/env node +var nopt = require("../lib/nopt") + , path = require("path") + , types = { num: Number + , bool: Boolean + , help: Boolean + , list: Array + , "num-list": [Number, Array] + , "str-list": [String, Array] + , "bool-list": [Boolean, Array] + , str: String + , clear: Boolean + , config: Boolean + , length: Number + , file: path + } + , shorthands = { s: [ "--str", "astring" ] + , b: [ "--bool" ] + , nb: [ "--no-bool" ] + , tft: [ "--bool-list", "--no-bool-list", "--bool-list", "true" ] + , "?": ["--help"] + , h: ["--help"] + , H: ["--help"] + , n: [ "--num", "125" ] + , c: ["--config"] + , l: ["--length"] + , f: ["--file"] + } + , parsed = nopt( types + , shorthands + , process.argv + , 2 ) + +console.log("parsed", parsed) + +if (parsed.help) { + console.log("") + console.log("nopt cli tester") + console.log("") + console.log("types") + console.log(Object.keys(types).map(function M (t) { + var type = types[t] + if (Array.isArray(type)) { + return [t, type.map(function (type) { return type.name })] + } + return [t, type && type.name] + }).reduce(function (s, i) { + s[i[0]] = i[1] + return s + }, {})) + console.log("") + console.log("shorthands") + console.log(shorthands) +} diff --git a/node_modules/nopt/lib/nopt.js b/node_modules/nopt/lib/nopt.js new file mode 100644 index 0000000..ecfa5da --- /dev/null +++ b/node_modules/nopt/lib/nopt.js @@ -0,0 +1,441 @@ +// info about each config option. + +var debug = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG + ? 
function () { console.error.apply(console, arguments) } + : function () {} + +var url = require("url") + , path = require("path") + , Stream = require("stream").Stream + , abbrev = require("abbrev") + , os = require("os") + +module.exports = exports = nopt +exports.clean = clean + +exports.typeDefs = + { String : { type: String, validate: validateString } + , Boolean : { type: Boolean, validate: validateBoolean } + , url : { type: url, validate: validateUrl } + , Number : { type: Number, validate: validateNumber } + , path : { type: path, validate: validatePath } + , Stream : { type: Stream, validate: validateStream } + , Date : { type: Date, validate: validateDate } + } + +function nopt (types, shorthands, args, slice) { + args = args || process.argv + types = types || {} + shorthands = shorthands || {} + if (typeof slice !== "number") slice = 2 + + debug(types, shorthands, args, slice) + + args = args.slice(slice) + var data = {} + , key + , argv = { + remain: [], + cooked: args, + original: args.slice(0) + } + + parse(args, data, argv.remain, types, shorthands) + // now data is full + clean(data, types, exports.typeDefs) + data.argv = argv + Object.defineProperty(data.argv, 'toString', { value: function () { + return this.original.map(JSON.stringify).join(" ") + }, enumerable: false }) + return data +} + +function clean (data, types, typeDefs) { + typeDefs = typeDefs || exports.typeDefs + var remove = {} + , typeDefault = [false, true, null, String, Array] + + Object.keys(data).forEach(function (k) { + if (k === "argv") return + var val = data[k] + , isArray = Array.isArray(val) + , type = types[k] + if (!isArray) val = [val] + if (!type) type = typeDefault + if (type === Array) type = typeDefault.concat(Array) + if (!Array.isArray(type)) type = [type] + + debug("val=%j", val) + debug("types=", type) + val = val.map(function (val) { + // if it's an unknown value, then parse false/true/null/numbers/dates + if (typeof val === "string") { + debug("string %j", val) 
+ val = val.trim() + if ((val === "null" && ~type.indexOf(null)) + || (val === "true" && + (~type.indexOf(true) || ~type.indexOf(Boolean))) + || (val === "false" && + (~type.indexOf(false) || ~type.indexOf(Boolean)))) { + val = JSON.parse(val) + debug("jsonable %j", val) + } else if (~type.indexOf(Number) && !isNaN(val)) { + debug("convert to number", val) + val = +val + } else if (~type.indexOf(Date) && !isNaN(Date.parse(val))) { + debug("convert to date", val) + val = new Date(val) + } + } + + if (!types.hasOwnProperty(k)) { + return val + } + + // allow `--no-blah` to set 'blah' to null if null is allowed + if (val === false && ~type.indexOf(null) && + !(~type.indexOf(false) || ~type.indexOf(Boolean))) { + val = null + } + + var d = {} + d[k] = val + debug("prevalidated val", d, val, types[k]) + if (!validate(d, k, val, types[k], typeDefs)) { + if (exports.invalidHandler) { + exports.invalidHandler(k, val, types[k], data) + } else if (exports.invalidHandler !== false) { + debug("invalid: "+k+"="+val, types[k]) + } + return remove + } + debug("validated val", d, val, types[k]) + return d[k] + }).filter(function (val) { return val !== remove }) + + // if we allow Array specifically, then an empty array is how we + // express 'no value here', not null. Allow it. + if (!val.length && type.indexOf(Array) === -1) { + debug('VAL HAS NO LENGTH, DELETE IT', val, k, type.indexOf(Array)) + delete data[k] + } + else if (isArray) { + debug(isArray, data[k], val) + data[k] = val + } else data[k] = val[0] + + debug("k=%s val=%j", k, val, data[k]) + }) +} + +function validateString (data, k, val) { + data[k] = String(val) +} + +function validatePath (data, k, val) { + if (val === true) return false + if (val === null) return true + + val = String(val) + + var isWin = process.platform === 'win32' + , homePattern = isWin ? 
/^~(\/|\\)/ : /^~\// + , home = os.homedir() + + if (home && val.match(homePattern)) { + data[k] = path.resolve(home, val.substr(2)) + } else { + data[k] = path.resolve(val) + } + return true +} + +function validateNumber (data, k, val) { + debug("validate Number %j %j %j", k, val, isNaN(val)) + if (isNaN(val)) return false + data[k] = +val +} + +function validateDate (data, k, val) { + var s = Date.parse(val) + debug("validate Date %j %j %j", k, val, s) + if (isNaN(s)) return false + data[k] = new Date(val) +} + +function validateBoolean (data, k, val) { + if (val instanceof Boolean) val = val.valueOf() + else if (typeof val === "string") { + if (!isNaN(val)) val = !!(+val) + else if (val === "null" || val === "false") val = false + else val = true + } else val = !!val + data[k] = val +} + +function validateUrl (data, k, val) { + val = url.parse(String(val)) + if (!val.host) return false + data[k] = val.href +} + +function validateStream (data, k, val) { + if (!(val instanceof Stream)) return false + data[k] = val +} + +function validate (data, k, val, type, typeDefs) { + // arrays are lists of types. + if (Array.isArray(type)) { + for (var i = 0, l = type.length; i < l; i ++) { + if (type[i] === Array) continue + if (validate(data, k, val, type[i], typeDefs)) return true + } + delete data[k] + return false + } + + // an array of anything? + if (type === Array) return true + + // NaN is poisonous. Means that something is not allowed. + if (type !== type) { + debug("Poison NaN", k, val, type) + delete data[k] + return false + } + + // explicit list of values + if (val === type) { + debug("Explicitly allowed %j", val) + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + return true + } + + // now go through the list of typeDefs, validate against each one. 
+ var ok = false + , types = Object.keys(typeDefs) + for (var i = 0, l = types.length; i < l; i ++) { + debug("test type %j %j %j", k, val, types[i]) + var t = typeDefs[types[i]] + if (t && + ((type && type.name && t.type && t.type.name) ? (type.name === t.type.name) : (type === t.type))) { + var d = {} + ok = false !== t.validate(d, k, val) + val = d[k] + if (ok) { + // if (isArray) (data[k] = data[k] || []).push(val) + // else data[k] = val + data[k] = val + break + } + } + } + debug("OK? %j (%j %j %j)", ok, k, val, types[i]) + + if (!ok) delete data[k] + return ok +} + +function parse (args, data, remain, types, shorthands) { + debug("parse", args, data, remain) + + var key = null + , abbrevs = abbrev(Object.keys(types)) + , shortAbbr = abbrev(Object.keys(shorthands)) + + for (var i = 0; i < args.length; i ++) { + var arg = args[i] + debug("arg", arg) + + if (arg.match(/^-{2,}$/)) { + // done with keys. + // the rest are args. + remain.push.apply(remain, args.slice(i + 1)) + args[i] = "--" + break + } + var hadEq = false + if (arg.charAt(0) === "-" && arg.length > 1) { + var at = arg.indexOf('=') + if (at > -1) { + hadEq = true + var v = arg.substr(at + 1) + arg = arg.substr(0, at) + args.splice(i, 1, arg, v) + } + + // see if it's a shorthand + // if so, splice and back up to re-parse it. 
+ var shRes = resolveShort(arg, shorthands, shortAbbr, abbrevs) + debug("arg=%j shRes=%j", arg, shRes) + if (shRes) { + debug(arg, shRes) + args.splice.apply(args, [i, 1].concat(shRes)) + if (arg !== shRes[0]) { + i -- + continue + } + } + arg = arg.replace(/^-+/, "") + var no = null + while (arg.toLowerCase().indexOf("no-") === 0) { + no = !no + arg = arg.substr(3) + } + + if (abbrevs[arg]) arg = abbrevs[arg] + + var argType = types[arg] + var isTypeArray = Array.isArray(argType) + if (isTypeArray && argType.length === 1) { + isTypeArray = false + argType = argType[0] + } + + var isArray = argType === Array || + isTypeArray && argType.indexOf(Array) !== -1 + + // allow unknown things to be arrays if specified multiple times. + if (!types.hasOwnProperty(arg) && data.hasOwnProperty(arg)) { + if (!Array.isArray(data[arg])) + data[arg] = [data[arg]] + isArray = true + } + + var val + , la = args[i + 1] + + var isBool = typeof no === 'boolean' || + argType === Boolean || + isTypeArray && argType.indexOf(Boolean) !== -1 || + (typeof argType === 'undefined' && !hadEq) || + (la === "false" && + (argType === null || + isTypeArray && ~argType.indexOf(null))) + + if (isBool) { + // just set and move along + val = !no + // however, also support --bool true or --bool false + if (la === "true" || la === "false") { + val = JSON.parse(la) + la = null + if (no) val = !val + i ++ + } + + // also support "foo":[Boolean, "bar"] and "--foo bar" + if (isTypeArray && la) { + if (~argType.indexOf(la)) { + // an explicit type + val = la + i ++ + } else if ( la === "null" && ~argType.indexOf(null) ) { + // null allowed + val = null + i ++ + } else if ( !la.match(/^-{2,}[^-]/) && + !isNaN(la) && + ~argType.indexOf(Number) ) { + // number + val = +la + i ++ + } else if ( !la.match(/^-[^-]/) && ~argType.indexOf(String) ) { + // string + val = la + i ++ + } + } + + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + continue + } + + if (argType === String) { + if 
(la === undefined) { + la = "" + } else if (la.match(/^-{1,2}[^-]+/)) { + la = "" + i -- + } + } + + if (la && la.match(/^-{2,}$/)) { + la = undefined + i -- + } + + val = la === undefined ? true : la + if (isArray) (data[arg] = data[arg] || []).push(val) + else data[arg] = val + + i ++ + continue + } + remain.push(arg) + } +} + +function resolveShort (arg, shorthands, shortAbbr, abbrevs) { + // handle single-char shorthands glommed together, like + // npm ls -glp, but only if there is one dash, and only if + // all of the chars are single-char shorthands, and it's + // not a match to some other abbrev. + arg = arg.replace(/^-+/, '') + + // if it's an exact known option, then don't go any further + if (abbrevs[arg] === arg) + return null + + // if it's an exact known shortopt, same deal + if (shorthands[arg]) { + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] + } + + // first check to see if this arg is a set of single-char shorthands + var singles = shorthands.___singles + if (!singles) { + singles = Object.keys(shorthands).filter(function (s) { + return s.length === 1 + }).reduce(function (l,r) { + l[r] = true + return l + }, {}) + shorthands.___singles = singles + debug('shorthand singles', singles) + } + + var chrs = arg.split("").filter(function (c) { + return singles[c] + }) + + if (chrs.join("") === arg) return chrs.map(function (c) { + return shorthands[c] + }).reduce(function (l, r) { + return l.concat(r) + }, []) + + + // if it's an arg abbrev, and not a literal shorthand, then prefer the arg + if (abbrevs[arg] && !shorthands[arg]) + return null + + // if it's an abbr for a shorthand, then use that + if (shortAbbr[arg]) + arg = shortAbbr[arg] + + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) + shorthands[arg] = shorthands[arg].split(/\s+/) + + return shorthands[arg] +} 
diff --git a/node_modules/nopt/package.json b/node_modules/nopt/package.json new file mode 100644 index 0000000..12ed02d --- /dev/null +++ b/node_modules/nopt/package.json @@ -0,0 +1,34 @@ +{ + "name": "nopt", + "version": "5.0.0", + "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.", + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "main": "lib/nopt.js", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "test": "tap test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/nopt.git" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "license": "ISC", + "dependencies": { + "abbrev": "1" + }, + "devDependencies": { + "tap": "^14.10.6" + }, + "files": [ + "bin", + "lib" + ], + "engines": { + "node": ">=6" + } +} diff --git a/node_modules/npmlog/LICENSE b/node_modules/npmlog/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/npmlog/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/npmlog/README.md b/node_modules/npmlog/README.md new file mode 100644 index 0000000..268a4af --- /dev/null +++ b/node_modules/npmlog/README.md @@ -0,0 +1,216 @@ +# npmlog + +The logger util that npm uses. + +This logger is very basic. It does the logging for npm. It supports +custom levels and colored output. + +By default, logs are written to stderr. If you want to send log messages +to outputs other than streams, then you can change the `log.stream` +member, or you can just listen to the events that it emits, and do +whatever you want with them. + +# Installation + +```console +npm install npmlog --save +``` + +# Basic Usage + +```javascript +var log = require('npmlog') + +// additional stuff ---------------------------+ +// message ----------+ | +// prefix ----+ | | +// level -+ | | | +// v v v v + log.info('fyi', 'I have a kitty cat: %j', myKittyCat) +``` + +## log.level + +* {String} + +The level to display logs at. Any logs at or above this level will be +displayed. The special level `silent` will prevent anything from being +displayed ever. + +## log.record + +* {Array} + +An array of all the log messages that have been entered. + +## log.maxRecordSize + +* {Number} + +The maximum number of records to keep. If log.record gets bigger than +10% over this value, then it is sliced down to 90% of this value. + +The reason for the 10% window is so that it doesn't have to resize a +large array on every log entry. + +## log.prefixStyle + +* {Object} + +A style object that specifies how prefixes are styled. (See below) + +## log.headingStyle + +* {Object} + +A style object that specifies how the heading is styled. (See below) + +## log.heading + +* {String} Default: "" + +If set, a heading that is printed at the start of every line. + +## log.stream + +* {Stream} Default: `process.stderr` + +The stream where output is written. + +## log.enableColor() + +Force colors to be used on all messages, regardless of the output +stream. 
+ +## log.disableColor() + +Disable colors on all messages. + +## log.enableProgress() + +Enable the display of log activity spinner and progress bar + +## log.disableProgress() + +Disable the display of a progress bar + +## log.enableUnicode() + +Force the unicode theme to be used for the progress bar. + +## log.disableUnicode() + +Disable the use of unicode in the progress bar. + +## log.setGaugeTemplate(template) + +Set a template for outputting the progress bar. See the [gauge documentation] for details. + +[gauge documentation]: https://npmjs.com/package/gauge + +## log.setGaugeThemeset(themes) + +Select a themeset to pick themes from for the progress bar. See the [gauge documentation] for details. + +## log.pause() + +Stop emitting messages to the stream, but do not drop them. + +## log.resume() + +Emit all buffered messages that were written while paused. + +## log.log(level, prefix, message, ...) + +* `level` {String} The level to emit the message at +* `prefix` {String} A string prefix. Set to "" to skip. +* `message...` Arguments to `util.format` + +Emit a log message at the specified level. + +## log\[level](prefix, message, ...) + +For example, + +* log.silly(prefix, message, ...) +* log.verbose(prefix, message, ...) +* log.info(prefix, message, ...) +* log.http(prefix, message, ...) +* log.warn(prefix, message, ...) +* log.error(prefix, message, ...) + +Like `log.log(level, prefix, message, ...)`. In this way, each level is +given a shorthand, so you can do `log.info(prefix, message)`. + +## log.addLevel(level, n, style, disp) + +* `level` {String} Level indicator +* `n` {Number} The numeric level +* `style` {Object} Object with fg, bg, inverse, etc. +* `disp` {String} Optional replacement for `level` in the output. + +Sets up a new level with a shorthand function and so forth. + +Note that if the number is `Infinity`, then setting the level to that +will cause all log messages to be suppressed. 
If the number is +`-Infinity`, then the only way to show it is to enable all log messages. + +## log.newItem(name, todo, weight) + +* `name` {String} Optional; progress item name. +* `todo` {Number} Optional; total amount of work to be done. Default 0. +* `weight` {Number} Optional; the weight of this item relative to others. Default 1. + +This adds a new `are-we-there-yet` item tracker to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `Tracker` object. + +## log.newStream(name, todo, weight) + +This adds a new `are-we-there-yet` stream tracker to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `TrackerStream` object. + +## log.newGroup(name, weight) + +This adds a new `are-we-there-yet` tracker group to the progress tracker. The +object returned has the `log[level]` methods but is otherwise an +`are-we-there-yet` `TrackerGroup` object. + +# Events + +Events are all emitted with the message object. + +* `log` Emitted for all messages +* `log.` Emitted for all messages with the `` level. +* `` Messages with prefixes also emit their prefix as an event. + +# Style Objects + +Style objects can have the following fields: + +* `fg` {String} Color for the foreground text +* `bg` {String} Color for the background +* `bold`, `inverse`, `underline` {Boolean} Set the associated property +* `bell` {Boolean} Make a noise (This is pretty annoying, probably.) + +# Message Objects + +Every log event is emitted with a message object, and the `log.record` +list contains all of them that have been created. 
They have the +following fields: + +* `id` {Number} +* `level` {String} +* `prefix` {String} +* `message` {String} Result of `util.format()` +* `messageRaw` {Array} Arguments to `util.format()` + +# Blocking TTYs + +We use [`set-blocking`](https://npmjs.com/package/set-blocking) to set +stderr and stdout blocking if they are tty's and have the setBlocking call. +This is a work around for an issue in early versions of Node.js 6.x, which +made stderr and stdout non-blocking on OSX. (They are always blocking +Windows and were never blocking on Linux.) `npmlog` needs them to be blocking +so that it can allow output to stdout and stderr to be interlaced. diff --git a/node_modules/npmlog/log.js b/node_modules/npmlog/log.js new file mode 100644 index 0000000..85ab8a4 --- /dev/null +++ b/node_modules/npmlog/log.js @@ -0,0 +1,403 @@ +'use strict' +var Progress = require('are-we-there-yet') +var Gauge = require('gauge') +var EE = require('events').EventEmitter +var log = exports = module.exports = new EE() +var util = require('util') + +var setBlocking = require('set-blocking') +var consoleControl = require('console-control-strings') + +setBlocking(true) +var stream = process.stderr +Object.defineProperty(log, 'stream', { + set: function (newStream) { + stream = newStream + if (this.gauge) { + this.gauge.setWriteTo(stream, stream) + } + }, + get: function () { + return stream + }, +}) + +// by default, decide based on tty-ness. +var colorEnabled +log.useColor = function () { + return colorEnabled != null ? 
colorEnabled : stream.isTTY +} + +log.enableColor = function () { + colorEnabled = true + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} +log.disableColor = function () { + colorEnabled = false + this.gauge.setTheme({hasColor: colorEnabled, hasUnicode: unicodeEnabled}) +} + +// default level +log.level = 'info' + +log.gauge = new Gauge(stream, { + enabled: false, // no progress bars unless asked + theme: {hasColor: log.useColor()}, + template: [ + {type: 'progressbar', length: 20}, + {type: 'activityIndicator', kerning: 1, length: 1}, + {type: 'section', default: ''}, + ':', + {type: 'logline', kerning: 1, default: ''}, + ], +}) + +log.tracker = new Progress.TrackerGroup() + +// we track this separately as we may need to temporarily disable the +// display of the status bar for our own loggy purposes. +log.progressEnabled = log.gauge.isEnabled() + +var unicodeEnabled + +log.enableUnicode = function () { + unicodeEnabled = true + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} + +log.disableUnicode = function () { + unicodeEnabled = false + this.gauge.setTheme({hasColor: this.useColor(), hasUnicode: unicodeEnabled}) +} + +log.setGaugeThemeset = function (themes) { + this.gauge.setThemeset(themes) +} + +log.setGaugeTemplate = function (template) { + this.gauge.setTemplate(template) +} + +log.enableProgress = function () { + if (this.progressEnabled) { + return + } + + this.progressEnabled = true + this.tracker.on('change', this.showProgress) + if (this._paused) { + return + } + + this.gauge.enable() +} + +log.disableProgress = function () { + if (!this.progressEnabled) { + return + } + this.progressEnabled = false + this.tracker.removeListener('change', this.showProgress) + this.gauge.disable() +} + +var trackerConstructors = ['newGroup', 'newItem', 'newStream'] + +var mixinLog = function (tracker) { + // mixin the public methods from log into the tracker + // (except: conflicts and one's we handle 
specially) + Object.keys(log).forEach(function (P) { + if (P[0] === '_') { + return + } + + if (trackerConstructors.filter(function (C) { + return C === P + }).length) { + return + } + + if (tracker[P]) { + return + } + + if (typeof log[P] !== 'function') { + return + } + + var func = log[P] + tracker[P] = function () { + return func.apply(log, arguments) + } + }) + // if the new tracker is a group, make sure any subtrackers get + // mixed in too + if (tracker instanceof Progress.TrackerGroup) { + trackerConstructors.forEach(function (C) { + var func = tracker[C] + tracker[C] = function () { + return mixinLog(func.apply(tracker, arguments)) + } + }) + } + return tracker +} + +// Add tracker constructors to the top level log object +trackerConstructors.forEach(function (C) { + log[C] = function () { + return mixinLog(this.tracker[C].apply(this.tracker, arguments)) + } +}) + +log.clearProgress = function (cb) { + if (!this.progressEnabled) { + return cb && process.nextTick(cb) + } + + this.gauge.hide(cb) +} + +log.showProgress = function (name, completed) { + if (!this.progressEnabled) { + return + } + + var values = {} + if (name) { + values.section = name + } + + var last = log.record[log.record.length - 1] + if (last) { + values.subsection = last.prefix + var disp = log.disp[last.level] || last.level + var logline = this._format(disp, log.style[last.level]) + if (last.prefix) { + logline += ' ' + this._format(last.prefix, this.prefixStyle) + } + + logline += ' ' + last.message.split(/\r?\n/)[0] + values.logline = logline + } + values.completed = completed || this.tracker.completed() + this.gauge.show(values) +}.bind(log) // bind for use in tracker's on-change listener + +// temporarily stop emitting, but don't drop +log.pause = function () { + this._paused = true + if (this.progressEnabled) { + this.gauge.disable() + } +} + +log.resume = function () { + if (!this._paused) { + return + } + + this._paused = false + + var b = this._buffer + this._buffer = [] + 
b.forEach(function (m) { + this.emitLog(m) + }, this) + if (this.progressEnabled) { + this.gauge.enable() + } +} + +log._buffer = [] + +var id = 0 +log.record = [] +log.maxRecordSize = 10000 +log.log = function (lvl, prefix, message) { + var l = this.levels[lvl] + if (l === undefined) { + return this.emit('error', new Error(util.format( + 'Undefined log level: %j', lvl))) + } + + var a = new Array(arguments.length - 2) + var stack = null + for (var i = 2; i < arguments.length; i++) { + var arg = a[i - 2] = arguments[i] + + // resolve stack traces to a plain string. + if (typeof arg === 'object' && arg instanceof Error && arg.stack) { + Object.defineProperty(arg, 'stack', { + value: stack = arg.stack + '', + enumerable: true, + writable: true, + }) + } + } + if (stack) { + a.unshift(stack + '\n') + } + message = util.format.apply(util, a) + + var m = { + id: id++, + level: lvl, + prefix: String(prefix || ''), + message: message, + messageRaw: a, + } + + this.emit('log', m) + this.emit('log.' + lvl, m) + if (m.prefix) { + this.emit(m.prefix, m) + } + + this.record.push(m) + var mrs = this.maxRecordSize + var n = this.record.length - mrs + if (n > mrs / 10) { + var newSize = Math.floor(mrs * 0.9) + this.record = this.record.slice(-1 * newSize) + } + + this.emitLog(m) +}.bind(log) + +log.emitLog = function (m) { + if (this._paused) { + this._buffer.push(m) + return + } + if (this.progressEnabled) { + this.gauge.pulse(m.prefix) + } + + var l = this.levels[m.level] + if (l === undefined) { + return + } + + if (l < this.levels[this.level]) { + return + } + + if (l > 0 && !isFinite(l)) { + return + } + + // If 'disp' is null or undefined, use the lvl as a default + // Allows: '', 0 as valid disp + var disp = log.disp[m.level] != null ? 
log.disp[m.level] : m.level + this.clearProgress() + m.message.split(/\r?\n/).forEach(function (line) { + if (this.heading) { + this.write(this.heading, this.headingStyle) + this.write(' ') + } + this.write(disp, log.style[m.level]) + var p = m.prefix || '' + if (p) { + this.write(' ') + } + + this.write(p, this.prefixStyle) + this.write(' ' + line + '\n') + }, this) + this.showProgress() +} + +log._format = function (msg, style) { + if (!stream) { + return + } + + var output = '' + if (this.useColor()) { + style = style || {} + var settings = [] + if (style.fg) { + settings.push(style.fg) + } + + if (style.bg) { + settings.push('bg' + style.bg[0].toUpperCase() + style.bg.slice(1)) + } + + if (style.bold) { + settings.push('bold') + } + + if (style.underline) { + settings.push('underline') + } + + if (style.inverse) { + settings.push('inverse') + } + + if (settings.length) { + output += consoleControl.color(settings) + } + + if (style.beep) { + output += consoleControl.beep() + } + } + output += msg + if (this.useColor()) { + output += consoleControl.color('reset') + } + + return output +} + +log.write = function (msg, style) { + if (!stream) { + return + } + + stream.write(this._format(msg, style)) +} + +log.addLevel = function (lvl, n, style, disp) { + // If 'disp' is null or undefined, use the lvl as a default + if (disp == null) { + disp = lvl + } + + this.levels[lvl] = n + this.style[lvl] = style + if (!this[lvl]) { + this[lvl] = function () { + var a = new Array(arguments.length + 1) + a[0] = lvl + for (var i = 0; i < arguments.length; i++) { + a[i + 1] = arguments[i] + } + + return this.log.apply(this, a) + }.bind(this) + } + this.disp[lvl] = disp +} + +log.prefixStyle = { fg: 'magenta' } +log.headingStyle = { fg: 'white', bg: 'black' } + +log.style = {} +log.levels = {} +log.disp = {} +log.addLevel('silly', -Infinity, { inverse: true }, 'sill') +log.addLevel('verbose', 1000, { fg: 'blue', bg: 'black' }, 'verb') +log.addLevel('info', 2000, { fg: 'green' }) 
+log.addLevel('timing', 2500, { fg: 'green', bg: 'black' }) +log.addLevel('http', 3000, { fg: 'green', bg: 'black' }) +log.addLevel('notice', 3500, { fg: 'blue', bg: 'black' }) +log.addLevel('warn', 4000, { fg: 'black', bg: 'yellow' }, 'WARN') +log.addLevel('error', 5000, { fg: 'red', bg: 'black' }, 'ERR!') +log.addLevel('silent', Infinity) + +// allow 'error' prefix +log.on('error', function () {}) diff --git a/node_modules/npmlog/package.json b/node_modules/npmlog/package.json new file mode 100644 index 0000000..960ea92 --- /dev/null +++ b/node_modules/npmlog/package.json @@ -0,0 +1,33 @@ +{ + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "name": "npmlog", + "description": "logger for npm", + "version": "5.0.1", + "repository": { + "type": "git", + "url": "https://github.com/npm/npmlog.git" + }, + "main": "log.js", + "files": [ + "log.js" + ], + "scripts": { + "test": "tap test/*.js --branches=95", + "npmclilint": "npmcli-lint", + "lint": "npm run npmclilint -- \"*.*js\" \"test/**/*.*js\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint --", + "postsnap": "npm run lintfix --" + }, + "dependencies": { + "are-we-there-yet": "^2.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^3.0.0", + "set-blocking": "^2.0.0" + }, + "devDependencies": { + "@npmcli/lint": "^1.0.1", + "tap": "^15.0.9" + }, + "license": "ISC" +} diff --git a/node_modules/on-headers/HISTORY.md b/node_modules/on-headers/HISTORY.md new file mode 100644 index 0000000..a77a6aa --- /dev/null +++ b/node_modules/on-headers/HISTORY.md @@ -0,0 +1,26 @@ +1.1.0 / 2025-07-17 +================== + * - Fix [CVE-2025-7339](https://www.cve.org/CVERecord?id=CVE-2025-7339) ([GHSA-76c9-3jph-rj3q](https://github.com/expressjs/multer/security/advisories/GHSA-76c9-3jph-rj3q)) + + +1.0.2 / 2019-02-21 +================== + + * Fix `res.writeHead` patch missing return value + +1.0.1 / 2015-09-29 +================== + + * perf: enable strict mode + +1.0.0 / 2014-08-10 +================== + 
+ * Honor `res.statusCode` change in `listener` + * Move to `jshttp` organization + * Prevent `arguments`-related de-opt + +0.0.0 / 2014-05-13 +================== + + * Initial implementation diff --git a/node_modules/on-headers/LICENSE b/node_modules/on-headers/LICENSE new file mode 100644 index 0000000..b7dce6c --- /dev/null +++ b/node_modules/on-headers/LICENSE @@ -0,0 +1,22 @@ +(The MIT License) + +Copyright (c) 2014 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/on-headers/README.md b/node_modules/on-headers/README.md new file mode 100644 index 0000000..865fe4b --- /dev/null +++ b/node_modules/on-headers/README.md @@ -0,0 +1,81 @@ +# on-headers + +[![NPM Version][npm-version-image]][npm-url] +[![NPM Downloads][npm-downloads-image]][npm-url] +[![Node.js Version][node-image]][node-url] +[![Build Status][ci-image]][ci-url] +[![Coverage Status][coveralls-image]][coveralls-url] + +Execute a listener when a response is about to write headers. + +## Installation + +This is a [Node.js](https://nodejs.org/en/) module available through the +[npm registry](https://www.npmjs.com/). Installation is done using the +[`npm install` command](https://docs.npmjs.com/getting-started/installing-npm-packages-locally): + +```sh +$ npm install on-headers +``` + +## API + + + +```js +var onHeaders = require('on-headers') +``` + +### onHeaders(res, listener) + +This will add the listener `listener` to fire when headers are emitted for `res`. +The listener is passed the `response` object as it's context (`this`). Headers are +considered to be emitted only once, right before they are sent to the client. + +When this is called multiple times on the same `res`, the `listener`s are fired +in the reverse order they were added. 
+ +## Examples + +```js +var http = require('http') +var onHeaders = require('on-headers') + +http + .createServer(onRequest) + .listen(3000) + +function addPoweredBy () { + // set if not set by end of request + if (!this.getHeader('X-Powered-By')) { + this.setHeader('X-Powered-By', 'Node.js') + } +} + +function onRequest (req, res) { + onHeaders(res, addPoweredBy) + + res.setHeader('Content-Type', 'text/plain') + res.end('hello!') +} +``` + +## Testing + +```sh +$ npm test +``` + +## License + +[MIT](LICENSE) + +[ci-image]: https://badgen.net/github/checks/jshttp/on-headers/master?label=ci +[ci-url]: https://github.com/jshttp/on-headers/actions/workflows/ci.yml +[coveralls-image]: https://badgen.net/coveralls/c/github/jshttp/on-headers/master +[coveralls-url]: https://coveralls.io/r/jshttp/on-headers?branch=master +[node-image]: https://badgen.net/npm/node/on-headers +[node-url]: https://nodejs.org/en/download +[npm-downloads-image]: https://badgen.net/npm/dm/on-headers +[npm-url]: https://npmjs.org/package/on-headers +[npm-version-image]: https://badgen.net/npm/v/on-headers diff --git a/node_modules/on-headers/index.js b/node_modules/on-headers/index.js new file mode 100644 index 0000000..d691cc7 --- /dev/null +++ b/node_modules/on-headers/index.js @@ -0,0 +1,180 @@ +/*! + * on-headers + * Copyright(c) 2014 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module exports. + * @public + */ + +module.exports = onHeaders + +var http = require('http') + +// older node versions don't have appendHeader +var isAppendHeaderSupported = typeof http.ServerResponse.prototype.appendHeader === 'function' +var set1dArray = isAppendHeaderSupported ? set1dArrayWithAppend : set1dArrayWithSet + +/** + * Create a replacement writeHead method. 
+ * + * @param {function} prevWriteHead + * @param {function} listener + * @private + */ + +function createWriteHead (prevWriteHead, listener) { + var fired = false + + // return function with core name and argument list + return function writeHead (statusCode) { + // set headers from arguments + var args = setWriteHeadHeaders.apply(this, arguments) + + // fire listener + if (!fired) { + fired = true + listener.call(this) + + // pass-along an updated status code + if (typeof args[0] === 'number' && this.statusCode !== args[0]) { + args[0] = this.statusCode + args.length = 1 + } + } + + return prevWriteHead.apply(this, args) + } +} + +/** + * Execute a listener when a response is about to write headers. + * + * @param {object} res + * @return {function} listener + * @public + */ + +function onHeaders (res, listener) { + if (!res) { + throw new TypeError('argument res is required') + } + + if (typeof listener !== 'function') { + throw new TypeError('argument listener must be a function') + } + + res.writeHead = createWriteHead(res.writeHead, listener) +} + +/** + * Set headers contained in array on the response object. + * + * @param {object} res + * @param {array} headers + * @private + */ + +function setHeadersFromArray (res, headers) { + if (headers.length && Array.isArray(headers[0])) { + // 2D + set2dArray(res, headers) + } else { + // 1D + if (headers.length % 2 !== 0) { + throw new TypeError('headers array is malformed') + } + + set1dArray(res, headers) + } +} + +/** + * Set headers contained in object on the response object. + * + * @param {object} res + * @param {object} headers + * @private + */ + +function setHeadersFromObject (res, headers) { + var keys = Object.keys(headers) + for (var i = 0; i < keys.length; i++) { + var k = keys[i] + if (k) res.setHeader(k, headers[k]) + } +} + +/** + * Set headers and other properties on the response object. 
+ * + * @param {number} statusCode + * @private + */ + +function setWriteHeadHeaders (statusCode) { + var length = arguments.length + var headerIndex = length > 1 && typeof arguments[1] === 'string' + ? 2 + : 1 + + var headers = length >= headerIndex + 1 + ? arguments[headerIndex] + : undefined + + this.statusCode = statusCode + + if (Array.isArray(headers)) { + // handle array case + setHeadersFromArray(this, headers) + } else if (headers) { + // handle object case + setHeadersFromObject(this, headers) + } + + // copy leading arguments + var args = new Array(Math.min(length, headerIndex)) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + + return args +} + +function set2dArray (res, headers) { + var key + for (var i = 0; i < headers.length; i++) { + key = headers[i][0] + if (key) { + res.setHeader(key, headers[i][1]) + } + } +} + +function set1dArrayWithAppend (res, headers) { + for (var i = 0; i < headers.length; i += 2) { + res.removeHeader(headers[i]) + } + + var key + for (var j = 0; j < headers.length; j += 2) { + key = headers[j] + if (key) { + res.appendHeader(key, headers[j + 1]) + } + } +} + +function set1dArrayWithSet (res, headers) { + var key + for (var i = 0; i < headers.length; i += 2) { + key = headers[i] + if (key) { + res.setHeader(key, headers[i + 1]) + } + } +} diff --git a/node_modules/on-headers/package.json b/node_modules/on-headers/package.json new file mode 100644 index 0000000..341ce2f --- /dev/null +++ b/node_modules/on-headers/package.json @@ -0,0 +1,44 @@ +{ + "name": "on-headers", + "description": "Execute a listener when a response is about to write headers", + "version": "1.1.0", + "author": "Douglas Christopher Wilson ", + "license": "MIT", + "keywords": [ + "event", + "headers", + "http", + "onheaders" + ], + "repository": "jshttp/on-headers", + "devDependencies": { + "eslint": "6.8.0", + "eslint-config-standard": "14.1.1", + "eslint-plugin-import": "2.21.2", + "eslint-plugin-markdown": "1.0.2", + 
"eslint-plugin-node": "11.1.0", + "eslint-plugin-promise": "4.2.1", + "eslint-plugin-standard": "4.0.1", + "mocha": "10.2.0", + "nyc": "15.1.0", + "supertest": "4.0.2" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "lint": "eslint --plugin markdown --ext js,md .", + "test": "mocha --reporter spec --check-leaks test/test.js", + "test-ci": "nyc --reporter=lcov --reporter=text npm test", + "test-cov": "nyc --reporter=html --reporter=text npm test", + "update-upstream-hashes": "node scripts/update-upstream-hashes.js", + "upstream": "mocha --reporter spec --check-leaks test/upstream.js", + "version": "node scripts/version-history.js && git add HISTORY.md" + } +} diff --git a/node_modules/once/LICENSE b/node_modules/once/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/once/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/once/README.md b/node_modules/once/README.md new file mode 100644 index 0000000..1f1ffca --- /dev/null +++ b/node_modules/once/README.md @@ -0,0 +1,79 @@ +# once + +Only call a function once. 
+ +## usage + +```javascript +var once = require('once') + +function load (file, cb) { + cb = once(cb) + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Or add to the Function.prototype in a responsible way: + +```javascript +// only has to be done once +require('once').proto() + +function load (file, cb) { + cb = cb.once() + loader.load('file') + loader.once('load', cb) + loader.once('error', cb) +} +``` + +Ironically, the prototype feature makes this module twice as +complicated as necessary. + +To check whether you function has been called, use `fn.called`. Once the +function is called for the first time the return value of the original +function is saved in `fn.value` and subsequent calls will continue to +return this value. + +```javascript +var once = require('once') + +function load (cb) { + cb = once(cb) + var stream = createStream() + stream.once('data', cb) + stream.once('end', function () { + if (!cb.called) cb(new Error('not found')) + }) +} +``` + +## `once.strict(func)` + +Throw an error if the function is called twice. + +Some functions are expected to be called only once. Using `once` for them would +potentially hide logical errors. 
+ +In the example below, the `greet` function has to call the callback only once: + +```javascript +function greet (name, cb) { + // return is missing from the if statement + // when no name is passed, the callback is called twice + if (!name) cb('Hello anonymous') + cb('Hello ' + name) +} + +function log (msg) { + console.log(msg) +} + +// this will print 'Hello anonymous' but the logical error will be missed +greet(null, once(msg)) + +// once.strict will print 'Hello anonymous' and throw an error when the callback will be called the second time +greet(null, once.strict(msg)) +``` diff --git a/node_modules/once/once.js b/node_modules/once/once.js new file mode 100644 index 0000000..2354067 --- /dev/null +++ b/node_modules/once/once.js @@ -0,0 +1,42 @@ +var wrappy = require('wrappy') +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true + }) +}) + +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} diff --git a/node_modules/once/package.json b/node_modules/once/package.json new file mode 100644 index 0000000..16815b2 --- /dev/null +++ b/node_modules/once/package.json @@ -0,0 +1,33 @@ +{ + "name": "once", + "version": "1.4.0", + "description": "Run a function exactly one time", + "main": "once.js", + "directories": { 
+ "test": "test" + }, + "dependencies": { + "wrappy": "1" + }, + "devDependencies": { + "tap": "^7.0.1" + }, + "scripts": { + "test": "tap test/*.js" + }, + "files": [ + "once.js" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/once" + }, + "keywords": [ + "once", + "function", + "one", + "single" + ], + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/path-is-absolute/index.js b/node_modules/path-is-absolute/index.js new file mode 100644 index 0000000..22aa6c3 --- /dev/null +++ b/node_modules/path-is-absolute/index.js @@ -0,0 +1,20 @@ +'use strict'; + +function posix(path) { + return path.charAt(0) === '/'; +} + +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); + + // UNC paths are always absolute + return Boolean(result[2] || isUnc); +} + +module.exports = process.platform === 'win32' ? 
win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; diff --git a/node_modules/path-is-absolute/license b/node_modules/path-is-absolute/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/path-is-absolute/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/path-is-absolute/package.json b/node_modules/path-is-absolute/package.json new file mode 100644 index 0000000..91196d5 --- /dev/null +++ b/node_modules/path-is-absolute/package.json @@ -0,0 +1,43 @@ +{ + "name": "path-is-absolute", + "version": "1.0.1", + "description": "Node.js 0.12 path.isAbsolute() ponyfill", + "license": "MIT", + "repository": "sindresorhus/path-is-absolute", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && node test.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "path", + "paths", + "file", + "dir", + "absolute", + "isabsolute", + "is-absolute", + "built-in", + "util", + "utils", + "core", + "ponyfill", + "polyfill", + "shim", + "is", + "detect", + "check" + ], + "devDependencies": { + "xo": "^0.16.0" + } +} diff --git a/node_modules/path-is-absolute/readme.md b/node_modules/path-is-absolute/readme.md new file mode 100644 index 0000000..8dbdf5f --- /dev/null +++ b/node_modules/path-is-absolute/readme.md @@ -0,0 +1,59 @@ +# path-is-absolute [![Build Status](https://travis-ci.org/sindresorhus/path-is-absolute.svg?branch=master)](https://travis-ci.org/sindresorhus/path-is-absolute) + +> Node.js 0.12 [`path.isAbsolute()`](http://nodejs.org/api/path.html#path_path_isabsolute_path) [ponyfill](https://ponyfill.com) + + +## Install + +``` +$ npm install --save path-is-absolute +``` + + +## Usage + +```js +const pathIsAbsolute = require('path-is-absolute'); + +// Running on Linux +pathIsAbsolute('/home/foo'); +//=> true +pathIsAbsolute('C:/Users/foo'); +//=> false + +// Running on Windows +pathIsAbsolute('C:/Users/foo'); +//=> true +pathIsAbsolute('/home/foo'); +//=> false + +// Running on any OS +pathIsAbsolute.posix('/home/foo'); +//=> true +pathIsAbsolute.posix('C:/Users/foo'); +//=> false +pathIsAbsolute.win32('C:/Users/foo'); +//=> true +pathIsAbsolute.win32('/home/foo'); +//=> 
false +``` + + +## API + +See the [`path.isAbsolute()` docs](http://nodejs.org/api/path.html#path_path_isabsolute_path). + +### pathIsAbsolute(path) + +### pathIsAbsolute.posix(path) + +POSIX specific version. + +### pathIsAbsolute.win32(path) + +Windows specific version. + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/random-bytes/HISTORY.md b/node_modules/random-bytes/HISTORY.md new file mode 100644 index 0000000..8cabd9d --- /dev/null +++ b/node_modules/random-bytes/HISTORY.md @@ -0,0 +1,4 @@ +1.0.0 / 2016-01-17 +================== + + * Initial release diff --git a/node_modules/random-bytes/LICENSE b/node_modules/random-bytes/LICENSE new file mode 100644 index 0000000..c24dbe3 --- /dev/null +++ b/node_modules/random-bytes/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/random-bytes/README.md b/node_modules/random-bytes/README.md new file mode 100644 index 0000000..df5aacc --- /dev/null +++ b/node_modules/random-bytes/README.md @@ -0,0 +1,77 @@ +# random-bytes + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +Generate strong pseudo-random bytes. + +This module is a simple wrapper around the Node.js core `crypto.randomBytes` API, +with the following additions: + + * A `Promise` interface for environments with promises. + * For Node.js versions that do not wait for the PRNG to be seeded, this module + will wait a bit. + +## Installation + +```sh +$ npm install random-bytes +``` + +## API + +```js +var randomBytes = require('random-bytes') +``` + +### randomBytes(size, callback) + +Generates strong pseudo-random bytes. The `size` argument is a number indicating +the number of bytes to generate. + +```js +randomBytes(12, function (error, bytes) { + if (error) throw error + // do something with the bytes +}) +``` + +### randomBytes(size) + +Generates strong pseudo-random bytes and return a `Promise`. The `size` argument is +a number indicating the number of bytes to generate. + +**Note**: To use promises in Node.js _prior to 0.12_, promises must be +"polyfilled" using `global.Promise = require('bluebird')`. + +```js +randomBytes(18).then(function (string) { + // do something with the string +}) +``` + +### randomBytes.sync(size) + +A synchronous version of above. 
+ +```js +var bytes = randomBytes.sync(18) +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/random-bytes.svg +[npm-url]: https://npmjs.org/package/random-bytes +[node-version-image]: https://img.shields.io/node/v/random-bytes.svg +[node-version-url]: http://nodejs.org/download/ +[travis-image]: https://img.shields.io/travis/crypto-utils/random-bytes/master.svg +[travis-url]: https://travis-ci.org/crypto-utils/random-bytes +[coveralls-image]: https://img.shields.io/coveralls/crypto-utils/random-bytes/master.svg +[coveralls-url]: https://coveralls.io/r/crypto-utils/random-bytes?branch=master +[downloads-image]: https://img.shields.io/npm/dm/random-bytes.svg +[downloads-url]: https://npmjs.org/package/random-bytes diff --git a/node_modules/random-bytes/index.js b/node_modules/random-bytes/index.js new file mode 100644 index 0000000..9ad930f --- /dev/null +++ b/node_modules/random-bytes/index.js @@ -0,0 +1,101 @@ +/*! + * random-bytes + * Copyright(c) 2016 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var crypto = require('crypto') + +/** + * Module variables. + * @private + */ + +var generateAttempts = crypto.randomBytes === crypto.pseudoRandomBytes ? 1 : 3 + +/** + * Module exports. + * @public + */ + +module.exports = randomBytes +module.exports.sync = randomBytesSync + +/** + * Generates strong pseudo-random bytes. 
+ * + * @param {number} size + * @param {function} [callback] + * @return {Promise} + * @public + */ + +function randomBytes(size, callback) { + // validate callback is a function, if provided + if (callback !== undefined && typeof callback !== 'function') { + throw new TypeError('argument callback must be a function') + } + + // require the callback without promises + if (!callback && !global.Promise) { + throw new TypeError('argument callback is required') + } + + if (callback) { + // classic callback style + return generateRandomBytes(size, generateAttempts, callback) + } + + return new Promise(function executor(resolve, reject) { + generateRandomBytes(size, generateAttempts, function onRandomBytes(err, str) { + if (err) return reject(err) + resolve(str) + }) + }) +} + +/** + * Generates strong pseudo-random bytes sync. + * + * @param {number} size + * @return {Buffer} + * @public + */ + +function randomBytesSync(size) { + var err = null + + for (var i = 0; i < generateAttempts; i++) { + try { + return crypto.randomBytes(size) + } catch (e) { + err = e + } + } + + throw err +} + +/** + * Generates strong pseudo-random bytes. 
+ * + * @param {number} size + * @param {number} attempts + * @param {function} callback + * @private + */ + +function generateRandomBytes(size, attempts, callback) { + crypto.randomBytes(size, function onRandomBytes(err, buf) { + if (!err) return callback(null, buf) + if (!--attempts) return callback(err) + setTimeout(generateRandomBytes.bind(null, size, attempts, callback), 10) + }) +} diff --git a/node_modules/random-bytes/package.json b/node_modules/random-bytes/package.json new file mode 100644 index 0000000..c67e0e8 --- /dev/null +++ b/node_modules/random-bytes/package.json @@ -0,0 +1,36 @@ +{ + "name": "random-bytes", + "description": "URL and cookie safe UIDs", + "version": "1.0.0", + "contributors": [ + "Douglas Christopher Wilson " + ], + "license": "MIT", + "repository": "crypto-utils/random-bytes", + "devDependencies": { + "bluebird": "3.1.1", + "istanbul": "0.4.2", + "mocha": "2.3.4", + "proxyquire": "1.2.0" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "test": "mocha --trace-deprecation --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --trace-deprecation --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --trace-deprecation --reporter spec --check-leaks test/" + }, + "keywords": [ + "bytes", + "generator", + "random", + "safe" + ] +} diff --git a/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000..f478d58 --- /dev/null +++ b/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The 
contribution is based upon previous work that, to the best + of my knowledge, is covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/readable-stream/GOVERNANCE.md b/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000..16ffb93 --- /dev/null +++ b/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). 
+ +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. + +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. 
+ +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. 
Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. diff --git a/node_modules/readable-stream/LICENSE b/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..2873b3b --- /dev/null +++ b/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/readable-stream/README.md b/node_modules/readable-stream/README.md new file mode 100644 index 0000000..19117c1 --- /dev/null +++ b/node_modules/readable-stream/README.md @@ -0,0 +1,106 @@ +# readable-stream + +***Node.js core streams for userland*** [![Build Status](https://travis-ci.com/nodejs/readable-stream.svg?branch=master)](https://travis-ci.com/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readabe-stream.svg)](https://saucelabs.com/u/readabe-stream) + +```bash +npm install --save readable-stream +``` + +This package is a mirror of the streams implementations in Node.js. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v10.18.1/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +## Version 3.x.x + +v3.x.x of `readable-stream` is a cut from Node 10. 
This version supports Node 6, 8, and 10, as well as evergreen browsers, IE 11 and latest Safari. The breaking changes introduced by v3 are composed by the combined breaking changes in [Node v9](https://nodejs.org/en/blog/release/v9.0.0/) and [Node v10](https://nodejs.org/en/blog/release/v10.0.0/), as follows: + +1. Error codes: https://github.com/nodejs/node/pull/13310, + https://github.com/nodejs/node/pull/13291, + https://github.com/nodejs/node/pull/16589, + https://github.com/nodejs/node/pull/15042, + https://github.com/nodejs/node/pull/15665, + https://github.com/nodejs/readable-stream/pull/344 +2. 'readable' have precedence over flowing + https://github.com/nodejs/node/pull/18994 +3. make virtual methods errors consistent + https://github.com/nodejs/node/pull/18813 +4. updated streams error handling + https://github.com/nodejs/node/pull/18438 +5. writable.end should return this. + https://github.com/nodejs/node/pull/18780 +6. readable continues to read when push('') + https://github.com/nodejs/node/pull/18211 +7. add custom inspect to BufferList + https://github.com/nodejs/node/pull/17907 +8. always defer 'readable' with nextTick + https://github.com/nodejs/node/pull/17979 + +## Version 2.x.x +v2.x.x of `readable-stream` is a cut of the stream module from Node 8 (there have been no semver-major changes from Node 4 to 8). This version supports all Node.js versions from 0.8, as well as evergreen browsers and IE 10 & 11. + +### Big Thanks + +Cross-browser Testing Platform and Open Source <3 Provided by [Sauce Labs][sauce] + +# Usage + +You can swap your `require('stream')` with `require('readable-stream')` +without any changes, if you are just using one of the main classes and +functions. + +```js +const { + Readable, + Writable, + Transform, + Duplex, + pipeline, + finished +} = require('readable-stream') +```` + +Note that `require('stream')` will return `Stream`, while +`require('readable-stream')` will return `Readable`. 
We discourage using +whatever is exported directly, but rather use one of the properties as +shown in the example above. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + + +## Team Members + +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> +* **Yoshua Wyuts** ([@yoshuawuyts](https://github.com/yoshuawuyts)) <yoshuawuyts@gmail.com> + +[sauce]: https://saucelabs.com diff --git a/node_modules/readable-stream/errors-browser.js b/node_modules/readable-stream/errors-browser.js new file mode 100644 index 0000000..fb8e73e --- /dev/null +++ b/node_modules/readable-stream/errors-browser.js @@ -0,0 +1,127 @@ +'use strict'; + +function _inheritsLoose(subClass, superClass) { subClass.prototype = Object.create(superClass.prototype); subClass.prototype.constructor = subClass; subClass.__proto__ = superClass; } + +var codes = {}; + +function 
createErrorType(code, message, Base) { + if (!Base) { + Base = Error; + } + + function getMessage(arg1, arg2, arg3) { + if (typeof message === 'string') { + return message; + } else { + return message(arg1, arg2, arg3); + } + } + + var NodeError = + /*#__PURE__*/ + function (_Base) { + _inheritsLoose(NodeError, _Base); + + function NodeError(arg1, arg2, arg3) { + return _Base.call(this, getMessage(arg1, arg2, arg3)) || this; + } + + return NodeError; + }(Base); + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + codes[code] = NodeError; +} // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js + + +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + var len = expected.length; + expected = expected.map(function (i) { + return String(i); + }); + + if (len > 2) { + return "one of ".concat(thing, " ").concat(expected.slice(0, len - 1).join(', '), ", or ") + expected[len - 1]; + } else if (len === 2) { + return "one of ".concat(thing, " ").concat(expected[0], " or ").concat(expected[1]); + } else { + return "of ".concat(thing, " ").concat(expected[0]); + } + } else { + return "of ".concat(thing, " ").concat(String(expected)); + } +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith + + +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith + + +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + + return str.substring(this_len - search.length, this_len) === search; +} // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes + + +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"'; +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + var determiner; + + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + var msg; + + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = "The ".concat(name, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } else { + var type = includes(name, '.') ? 'property' : 'argument'; + msg = "The \"".concat(name, "\" ").concat(type, " ").concat(determiner, " ").concat(oneOf(expected, 'type')); + } + + msg += ". 
Received type ".concat(typeof actual); + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented'; +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg; +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); +module.exports.codes = codes; diff --git a/node_modules/readable-stream/errors.js b/node_modules/readable-stream/errors.js new file mode 100644 index 0000000..8471526 --- /dev/null +++ b/node_modules/readable-stream/errors.js @@ -0,0 +1,116 @@ +'use strict'; + +const codes = {}; + +function createErrorType(code, message, Base) { + if (!Base) { + Base = Error + } + + function getMessage (arg1, arg2, arg3) { + if (typeof message === 'string') { + return message + } else { + return message(arg1, arg2, arg3) + } + } + + class NodeError extends Base { + constructor (arg1, arg2, arg3) { + super(getMessage(arg1, arg2, arg3)); + } + } + + NodeError.prototype.name = Base.name; + NodeError.prototype.code = code; + + codes[code] = NodeError; +} + +// https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js +function oneOf(expected, thing) { + if (Array.isArray(expected)) { + const len = expected.length; + expected = expected.map((i) => String(i)); + if (len > 2) { + return `one of ${thing} 
${expected.slice(0, len - 1).join(', ')}, or ` + + expected[len - 1]; + } else if (len === 2) { + return `one of ${thing} ${expected[0]} or ${expected[1]}`; + } else { + return `of ${thing} ${expected[0]}`; + } + } else { + return `of ${thing} ${String(expected)}`; + } +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith +function startsWith(str, search, pos) { + return str.substr(!pos || pos < 0 ? 0 : +pos, search.length) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith +function endsWith(str, search, this_len) { + if (this_len === undefined || this_len > str.length) { + this_len = str.length; + } + return str.substring(this_len - search.length, this_len) === search; +} + +// https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes +function includes(str, search, start) { + if (typeof start !== 'number') { + start = 0; + } + + if (start + search.length > str.length) { + return false; + } else { + return str.indexOf(search, start) !== -1; + } +} + +createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { + return 'The value "' + value + '" is invalid for option "' + name + '"' +}, TypeError); +createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { + // determiner: 'must be' or 'must not be' + let determiner; + if (typeof expected === 'string' && startsWith(expected, 'not ')) { + determiner = 'must not be'; + expected = expected.replace(/^not /, ''); + } else { + determiner = 'must be'; + } + + let msg; + if (endsWith(name, ' argument')) { + // For cases like 'first argument' + msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; + } else { + const type = includes(name, '.') ? 'property' : 'argument'; + msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; + } + + msg += `. 
Received type ${typeof actual}`; + return msg; +}, TypeError); +createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); +createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { + return 'The ' + name + ' method is not implemented' +}); +createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); +createErrorType('ERR_STREAM_DESTROYED', function (name) { + return 'Cannot call ' + name + ' after a stream was destroyed'; +}); +createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); +createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); +createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); +createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); +createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { + return 'Unknown encoding: ' + arg +}, TypeError); +createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); + +module.exports.codes = codes; diff --git a/node_modules/readable-stream/experimentalWarning.js b/node_modules/readable-stream/experimentalWarning.js new file mode 100644 index 0000000..78e8414 --- /dev/null +++ b/node_modules/readable-stream/experimentalWarning.js @@ -0,0 +1,17 @@ +'use strict' + +var experimentalWarnings = new Set(); + +function emitExperimentalWarning(feature) { + if (experimentalWarnings.has(feature)) return; + var msg = feature + ' is an experimental feature. This feature could ' + + 'change at any time'; + experimentalWarnings.add(feature); + process.emitWarning(msg, 'ExperimentalWarning'); +} + +function noop() {} + +module.exports.emitExperimentalWarning = process.emitWarning + ? emitExperimentalWarning + : noop; diff --git a/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..19abfa6 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,126 @@ +// Copyright Joyent, Inc. 
and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. +// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) keys.push(key); + return keys; +}; +/**/ + +module.exports = Duplex; +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); +require('inherits')(Duplex, Readable); +{ + // Allow the keys array to be GC'ed. 
+ var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + Readable.call(this, options); + Writable.call(this, options); + this.allowHalfOpen = true; + if (options) { + if (options.readable === false) this.readable = false; + if (options.writable === false) this.writable = false; + if (options.allowHalfOpen === false) { + this.allowHalfOpen = false; + this.once('end', onend); + } + } +} +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); +Object.defineProperty(Duplex.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +Object.defineProperty(Duplex.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); + +// the no-half-open enforcer +function onend() { + // If the writable side ended, then we're ok. + if (this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. 
+ process.nextTick(onEndNT, this); +} +function onEndNT(self) { + self.end(); +} +Object.defineProperty(Duplex.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..24a6bdd --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,37 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; +var Transform = require('./_stream_transform'); +require('inherits')(PassThrough, Transform); +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + Transform.call(this, options); +} +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..df1f608 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1027 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. 
+// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +'use strict'; + +module.exports = Readable; + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; +var EElistenerCount = function EElistenerCount(emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ +var debugUtil = require('util'); +var debug; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function debug() {}; +} +/**/ + +var BufferList = require('./internal/streams/buffer_list'); +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; + +// Lazy loaded to improve the startup performance. +var StringDecoder; +var createReadableStreamAsyncIterator; +var from; +require('inherits')(Readable, Stream); +var errorOrDestroy = destroyImpl.errorOrDestroy; +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. 
+ if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} +function ReadableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. 
+ this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + this.paused = true; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'end' (and potentially 'finish') + this.autoDestroy = !!options.autoDestroy; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + if (!(this instanceof Readable)) return new Readable(options); + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the ReadableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + this._readableState = new ReadableState(options, this, isDuplex); + + // legacy + this.readable = true; + if (options) { + if (typeof options.read === 'function') this._read = options.read; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + Stream.call(this); +} +Object.defineProperty(Readable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._readableState === 
undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. +Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + debug('readableAddChunk', chunk); + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + errorOrDestroy(stream, er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (addToFront) { + if (state.endEmitted) 
errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); + } else if (state.ended) { + errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); + } else if (state.destroyed) { + return false; + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + maybeReadMore(stream, state); + } + } + + // We can push more data if we are below the highWaterMark. + // Also, if we have no data yet, we can stand some more bytes. + // This is to work around cases where hwm=0, such as the repl. + return !state.ended && (state.length < state.highWaterMark || state.length === 0); +} +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + state.awaitDrain = 0; + stream.emit('data', chunk); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); + } + return er; +} +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + var decoder = new StringDecoder(enc); + this._readableState.decoder = decoder; + // If setEncoding(null), decoder.encoding equals utf8 + this._readableState.encoding = this._readableState.decoder.encoding; + + // Iterate over current buffer to convert already stored Buffers: + var p = this._readableState.buffer.head; + var content = ''; + while (p !== null) { + content += decoder.write(p.data); + p = p.next; + } + this._readableState.buffer.clear(); + if (content !== '') this._readableState.buffer.push(content); + this._readableState.length = content.length; + return this; +}; + +// Don't raise the hwm > 1GB +var MAX_HWM = 0x40000000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. 
+Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + if (ret === null) { + state.needReadable = state.length <= state.highWaterMark; + n = 0; + } else { + state.length -= n; + state.awaitDrain = 0; + } + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + if (ret !== null) this.emit('data', ret); + return ret; +}; +function onEofChunk(stream, state) { + debug('onEofChunk'); + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + if (state.sync) { + // if we are sync, wait until next tick to emit the data. 
+ // Otherwise we risk emitting data in the flow() + // the readable code triggers during a read() call + emitReadable(stream); + } else { + // emit 'readable' now to make sure it gets picked up. + state.needReadable = false; + if (!state.emittedReadable) { + state.emittedReadable = true; + emitReadable_(stream); + } + } +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + debug('emitReadable', state.needReadable, state.emittedReadable); + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + process.nextTick(emitReadable_, stream); + } +} +function emitReadable_(stream) { + var state = stream._readableState; + debug('emitReadable_', state.destroyed, state.length, state.ended); + if (!state.destroyed && (state.length || state.ended)) { + stream.emit('readable'); + state.emittedReadable = false; + } + + // The stream needs another readable event if + // 1. It is not flowing, as the flow mechanism will take + // care of it. + // 2. It is not ended. + // 3. It is below the highWaterMark, so we can schedule + // another readable later. + state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. 
+function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + process.nextTick(maybeReadMore_, stream, state); + } +} +function maybeReadMore_(stream, state) { + // Attempt to read more data if we should. + // + // The conditions for reading more data are (one of): + // - Not enough data buffered (state.length < state.highWaterMark). The loop + // is responsible for filling the buffer with enough data if such data + // is available. If highWaterMark is 0 and we are not in the flowing mode + // we should _not_ attempt to buffer any extra data. We'll get more data + // when the stream consumer calls read() instead. + // - No data in the buffer, and the stream is in flowing mode. In this mode + // the loop below is responsible for ensuring read() is called. Failing to + // call read here would abort the flow and there's no other mechanism for + // continuing the flow if the stream consumer has just subscribed to the + // 'data' event. + // + // In addition to the above conditions to keep reading data, the following + // conditions prevent the data from being read: + // - The stream has ended (state.ended). + // - There is already a pending 'read' operation (state.reading). This is a + // case where the the stream has called the implementation defined _read() + // method, but they are processing the call asynchronously and have _not_ + // called push() with new data. In this case we skip performing more + // read()s. The execution ends in this method again after the _read() ends + // up calling push() with more data. + while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { + var len = state.length; + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. 
+// call cb(er, data) where data is <= n in length. +// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); +}; +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. 
+ var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + var ret = dest.write(chunk); + debug('dest.write', ret); + if (ret === false) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. + function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. 
+ function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + return dest; +}; +function pipeOnDrain(src) { + return function pipeOnDrainFunctionResult() { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { + hasUnpiped: false + }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { + hasUnpiped: false + }); + return this; + } + + // try to find the right one. 
+ var index = indexOf(state.pipes, dest); + if (index === -1) return this; + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + dest.emit('unpipe', this, unpipeInfo); + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + var state = this._readableState; + if (ev === 'data') { + // update readableListening so that resume() may be a no-op + // a few lines down. This is needed to support once('readable'). + state.readableListening = this.listenerCount('readable') > 0; + + // Try start flowing on next tick if stream isn't explicitly paused + if (state.flowing !== false) this.resume(); + } else if (ev === 'readable') { + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.flowing = false; + state.emittedReadable = false; + debug('on readable', state.length, state.reading); + if (state.length) { + emitReadable(this); + } else if (!state.reading) { + process.nextTick(nReadingNextTick, this); + } + } + } + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; +Readable.prototype.removeListener = function (ev, fn) { + var res = Stream.prototype.removeListener.call(this, ev, fn); + if (ev === 'readable') { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. 
+ process.nextTick(updateReadableListening, this); + } + return res; +}; +Readable.prototype.removeAllListeners = function (ev) { + var res = Stream.prototype.removeAllListeners.apply(this, arguments); + if (ev === 'readable' || ev === undefined) { + // We need to check if there is someone still listening to + // readable and reset the state. However this needs to happen + // after readable has been emitted but before I/O (nextTick) to + // support once('readable', fn) cycles. This means that calling + // resume within the same tick will have no + // effect. + process.nextTick(updateReadableListening, this); + } + return res; +}; +function updateReadableListening(self) { + var state = self._readableState; + state.readableListening = self.listenerCount('readable') > 0; + if (state.resumeScheduled && !state.paused) { + // flowing needs to be set to true now, otherwise + // the upcoming resume will not flow. + state.flowing = true; + + // crude way to check if we should resume + } else if (self.listenerCount('data') > 0) { + self.resume(); + } +} +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + // we flow only if there is no one listening + // for readable, but we still have to call + // resume() + state.flowing = !state.readableListening; + resume(this, state); + } + state.paused = false; + return this; +}; +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + process.nextTick(resume_, stream, state); + } +} +function resume_(stream, state) { + debug('resume', state.reading); + if (!state.reading) { + stream.read(0); + } + state.resumeScheduled = false; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (this._readableState.flowing !== false) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + this._readableState.paused = true; + return this; +}; +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null); +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. 
+Readable.prototype.wrap = function (stream) { + var _this = this; + var state = this._readableState; + var paused = false; + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + _this.push(null); + }); + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function methodWrap(method) { + return function methodWrapReturnFunction() { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. 
+ this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + return this; +}; +if (typeof Symbol === 'function') { + Readable.prototype[Symbol.asyncIterator] = function () { + if (createReadableStreamAsyncIterator === undefined) { + createReadableStreamAsyncIterator = require('./internal/streams/async_iterator'); + } + return createReadableStreamAsyncIterator(this); + }; +} +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.highWaterMark; + } +}); +Object.defineProperty(Readable.prototype, 'readableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState && this._readableState.buffer; + } +}); +Object.defineProperty(Readable.prototype, 'readableFlowing', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.flowing; + }, + set: function set(state) { + if (this._readableState) { + this._readableState.flowing = state; + } + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; +Object.defineProperty(Readable.prototype, 'readableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._readableState.length; + } +}); + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. 
+// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = state.buffer.consume(n, state.decoder); + } + return ret; +} +function endReadable(stream) { + var state = stream._readableState; + debug('endReadable', state.endEmitted); + if (!state.endEmitted) { + state.ended = true; + process.nextTick(endReadableNT, state, stream); + } +} +function endReadableNT(state, stream) { + debug('endReadableNT', state.endEmitted, state.length); + + // Check that we didn't get one last unshift. + if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the writable side is ready for autoDestroy as well + var wState = stream._writableState; + if (!wState || wState.autoDestroy && wState.finished) { + stream.destroy(); + } + } + } +} +if (typeof Symbol === 'function') { + Readable.from = function (iterable, opts) { + if (from === undefined) { + from = require('./internal/streams/from'); + } + return from(Readable, iterable, opts); + }; +} +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..1ccb715 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 
+1,190 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. 
When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. 
+ +'use strict'; + +module.exports = Transform; +var _require$codes = require('../errors').codes, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, + ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; +var Duplex = require('./_stream_duplex'); +require('inherits')(Transform, Duplex); +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + var cb = ts.writecb; + if (cb === null) { + return this.emit('error', new ERR_MULTIPLE_CALLBACK()); + } + ts.writechunk = null; + ts.writecb = null; + if (data != null) + // single equals check for both `null` and `undefined` + this.push(data); + cb(er); + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + Duplex.call(this, options); + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. 
+ this.on('prefinish', prefinish); +} +function prefinish() { + var _this = this; + if (typeof this._flush === 'function' && !this._readableState.destroyed) { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. +Transform.prototype._transform = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); +}; +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + if (ts.writechunk !== null && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. 
+ ts.needTransform = true; + } +}; +Transform.prototype._destroy = function (err, cb) { + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + }); +}; +function done(stream, er, data) { + if (er) return stream.emit('error', er); + if (data != null) + // single equals check for both `null` and `undefined` + stream.push(data); + + // TODO(BridgeAR): Write a test for these two error cases + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); + if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..292415e --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,641 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. + +'use strict'; + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +var Buffer = require('buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} +var destroyImpl = require('./internal/streams/destroy'); +var _require = require('./internal/streams/state'), + getHighWaterMark = _require.getHighWaterMark; +var _require$codes = require('../errors').codes, + ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, + ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, + ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, + ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, + ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, + ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, + ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; +var errorOrDestroy = destroyImpl.errorOrDestroy; +require('inherits')(Writable, Stream); +function nop() {} +function WritableState(options, stream, isDuplex) { + Duplex = Duplex || require('./_stream_duplex'); + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream, + // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. + if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. 
+ this.objectMode = !!options.objectMode; + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. 
+ this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // Should close be emitted on destroy. Defaults to true. + this.emitClose = options.emitClose !== false; + + // Should .destroy() be called after 'finish' (and potentially 'end') + this.autoDestroy = !!options.autoDestroy; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function writableStateBufferGetter() { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. 
Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function value(object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function realHasInstance(object) { + return object instanceof this; + }; +} +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. + + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + + // Checking for a Stream.Duplex instance is faster here instead of inside + // the WritableState constructor, at least with V8 6.5 + var isDuplex = this instanceof Duplex; + if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); + this._writableState = new WritableState(options, this, isDuplex); + + // legacy. 
+ this.writable = true; + if (options) { + if (typeof options.write === 'function') this._write = options.write; + if (typeof options.writev === 'function') this._writev = options.writev; + if (typeof options.destroy === 'function') this._destroy = options.destroy; + if (typeof options.final === 'function') this._final = options.final; + } + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); +}; +function writeAfterEnd(stream, cb) { + var er = new ERR_STREAM_WRITE_AFTER_END(); + // TODO: defer error events consistently everywhere, not just the cb + errorOrDestroy(stream, er); + process.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var er; + if (chunk === null) { + er = new ERR_STREAM_NULL_VALUES(); + } else if (typeof chunk !== 'string' && !state.objectMode) { + er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); + } + if (er) { + errorOrDestroy(stream, er); + process.nextTick(cb, er); + return false; + } + return true; +} +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + if (typeof cb !== 'function') cb = nop; + if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + 
return ret; +}; +Writable.prototype.cork = function () { + this._writableState.corked++; +}; +Writable.prototype.uncork = function () { + var state = this._writableState; + if (state.corked) { + state.corked--; + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. + if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; +Object.defineProperty(Writable.prototype, 'writableBuffer', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState && this._writableState.getBuffer(); + } +}); +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. 
+function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + state.length += len; + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + return ret; +} +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + process.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + process.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + errorOrDestroy(stream, er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} +function onwriteStateUpdate(state) { + 
state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); + onwriteStateUpdate(state); + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state) || stream.destroyed; + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + if (sync) { + process.nextTick(afterWrite, stream, state, finished, cb); + } else { + afterWrite(stream, state, finished, cb); + } + } +} +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. 
+function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. 
+ if (state.writing) { + break; + } + } + if (entry === null) state.lastBufferedRequest = null; + } + state.bufferedRequest = entry; + state.bufferProcessing = false; +} +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); +}; +Writable.prototype._writev = null; +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. + if (!state.ending) endWritable(this, state, cb); + return this; +}; +Object.defineProperty(Writable.prototype, 'writableLength', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + return this._writableState.length; + } +}); +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + errorOrDestroy(stream, err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function' && !state.destroyed) { + state.pendingcb++; + state.finalCalled = true; + process.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 
0) { + state.finished = true; + stream.emit('finish'); + if (state.autoDestroy) { + // In case of duplex streams we need a way to detect + // if the readable side is ready for autoDestroy as well + var rState = stream._readableState; + if (!rState || rState.autoDestroy && rState.endEmitted) { + stream.destroy(); + } + } + } + } + return need; +} +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) process.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. + state.corkedRequestsFree.next = corkReq; +} +Object.defineProperty(Writable.prototype, 'destroyed', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function get() { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function set(value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + cb(err); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/async_iterator.js b/node_modules/readable-stream/lib/internal/streams/async_iterator.js new file mode 100644 index 0000000..742c5a4 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/async_iterator.js @@ -0,0 +1,180 @@ +'use 
strict'; + +var _Object$setPrototypeO; +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } +var finished = require('./end-of-stream'); +var kLastResolve = Symbol('lastResolve'); +var kLastReject = Symbol('lastReject'); +var kError = Symbol('error'); +var kEnded = Symbol('ended'); +var kLastPromise = Symbol('lastPromise'); +var kHandlePromise = Symbol('handlePromise'); +var kStream = Symbol('stream'); +function createIterResult(value, done) { + return { + value: value, + done: done + }; +} +function readAndResolve(iter) { + var resolve = iter[kLastResolve]; + if (resolve !== null) { + var data = iter[kStream].read(); + // we defer if data is null + // we can be expecting either 'end' or + // 'error' + if (data !== null) { + iter[kLastPromise] = null; + iter[kLastResolve] = null; + iter[kLastReject] = null; + resolve(createIterResult(data, false)); + } + } +} +function onReadable(iter) { + // we wait for the next tick, because it might + // emit an error with process.nextTick + process.nextTick(readAndResolve, iter); +} +function wrapForNext(lastPromise, iter) { + return function (resolve, reject) { + lastPromise.then(function () { + if (iter[kEnded]) { + resolve(createIterResult(undefined, true)); + return; + } + iter[kHandlePromise](resolve, reject); + }, reject); + }; +} +var 
AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); +var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { + get stream() { + return this[kStream]; + }, + next: function next() { + var _this = this; + // if we have detected an error in the meanwhile + // reject straight away + var error = this[kError]; + if (error !== null) { + return Promise.reject(error); + } + if (this[kEnded]) { + return Promise.resolve(createIterResult(undefined, true)); + } + if (this[kStream].destroyed) { + // We need to defer via nextTick because if .destroy(err) is + // called, the error will be emitted via nextTick, and + // we cannot guarantee that there is no error lingering around + // waiting to be emitted. + return new Promise(function (resolve, reject) { + process.nextTick(function () { + if (_this[kError]) { + reject(_this[kError]); + } else { + resolve(createIterResult(undefined, true)); + } + }); + }); + } + + // if we have multiple next() calls + // we will wait for the previous Promise to finish + // this logic is optimized to support for await loops, + // where next() is only called once at a time + var lastPromise = this[kLastPromise]; + var promise; + if (lastPromise) { + promise = new Promise(wrapForNext(lastPromise, this)); + } else { + // fast path needed to support multiple this.push() + // without triggering the next() queue + var data = this[kStream].read(); + if (data !== null) { + return Promise.resolve(createIterResult(data, false)); + } + promise = new Promise(this[kHandlePromise]); + } + this[kLastPromise] = promise; + return promise; + } +}, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { + return this; +}), _defineProperty(_Object$setPrototypeO, "return", function _return() { + var _this2 = this; + // destroy(err, cb) is a private API + // we can guarantee we have that here, because we control the + // Readable class this is attached to + return new Promise(function (resolve, reject) { 
+ _this2[kStream].destroy(null, function (err) { + if (err) { + reject(err); + return; + } + resolve(createIterResult(undefined, true)); + }); + }); +}), _Object$setPrototypeO), AsyncIteratorPrototype); +var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { + var _Object$create; + var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { + value: stream, + writable: true + }), _defineProperty(_Object$create, kLastResolve, { + value: null, + writable: true + }), _defineProperty(_Object$create, kLastReject, { + value: null, + writable: true + }), _defineProperty(_Object$create, kError, { + value: null, + writable: true + }), _defineProperty(_Object$create, kEnded, { + value: stream._readableState.endEmitted, + writable: true + }), _defineProperty(_Object$create, kHandlePromise, { + value: function value(resolve, reject) { + var data = iterator[kStream].read(); + if (data) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(data, false)); + } else { + iterator[kLastResolve] = resolve; + iterator[kLastReject] = reject; + } + }, + writable: true + }), _Object$create)); + iterator[kLastPromise] = null; + finished(stream, function (err) { + if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { + var reject = iterator[kLastReject]; + // reject if we are waiting for data in the Promise + // returned by next() and store the error + if (reject !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + reject(err); + } + iterator[kError] = err; + return; + } + var resolve = iterator[kLastResolve]; + if (resolve !== null) { + iterator[kLastPromise] = null; + iterator[kLastResolve] = null; + iterator[kLastReject] = null; + resolve(createIterResult(undefined, true)); + } + iterator[kEnded] = true; + }); + stream.on('readable', 
onReadable.bind(null, iterator)); + return iterator; +}; +module.exports = createReadableStreamAsyncIterator; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/buffer_list.js b/node_modules/readable-stream/lib/internal/streams/buffer_list.js new file mode 100644 index 0000000..69bda49 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/buffer_list.js @@ -0,0 +1,183 @@ +'use strict'; + +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? 
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } +function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } +function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var _require = require('buffer'), + Buffer = _require.Buffer; +var _require2 = require('util'), + inspect = _require2.inspect; +var custom = inspect && inspect.custom || 'inspect'; +function copyBuffer(src, target, offset) { + Buffer.prototype.copy.call(src, target, offset); +} +module.exports = /*#__PURE__*/function () { + function BufferList() { + _classCallCheck(this, BufferList); + this.head = null; + this.tail = null; + this.length = 0; + } + _createClass(BufferList, [{ + key: "push", + value: function push(v) { + var entry = { + data: v, + next: null + }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + } + }, { + key: "unshift", + value: function unshift(v) { + var entry = { + data: v, + next: this.head + }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + } + }, { + key: "shift", + value: function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return ret; + } + }, { + key: "clear", + value: function clear() { + this.head = this.tail = null; + this.length = 0; + } + }, { + key: "join", + value: function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) ret += s + p.data; + return ret; + } + }, { + key: "concat", + value: function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + } + + // Consumes a specified amount of bytes or characters from the buffered data. + }, { + key: "consume", + value: function consume(n, hasStrings) { + var ret; + if (n < this.head.data.length) { + // `slice` is the same for buffers and strings. 
+ ret = this.head.data.slice(0, n); + this.head.data = this.head.data.slice(n); + } else if (n === this.head.data.length) { + // First chunk is a perfect match. + ret = this.shift(); + } else { + // Result spans more than one buffer. + ret = hasStrings ? this._getString(n) : this._getBuffer(n); + } + return ret; + } + }, { + key: "first", + value: function first() { + return this.head.data; + } + + // Consumes a specified amount of characters from the buffered data. + }, { + key: "_getString", + value: function _getString(n) { + var p = this.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Consumes a specified amount of bytes from the buffered data. + }, { + key: "_getBuffer", + value: function _getBuffer(n) { + var ret = Buffer.allocUnsafe(n); + var p = this.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) this.head = p.next;else this.head = this.tail = null; + } else { + this.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + this.length -= c; + return ret; + } + + // Make sure the linked list only shows the minimal necessary information. + }, { + key: custom, + value: function value(_, options) { + return inspect(this, _objectSpread(_objectSpread({}, options), {}, { + // Only inspect one level. + depth: 0, + // It should not recurse. 
+ customInspect: false + })); + } + }]); + return BufferList; +}(); \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000..31a17c4 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,96 @@ +'use strict'; + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + process.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + process.nextTick(emitErrorNT, this, err); + } + } + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + process.nextTick(emitErrorAndCloseNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + process.nextTick(emitErrorAndCloseNT, _this, err); + } else { + process.nextTick(emitCloseNT, _this); + } + } else if (cb) { + process.nextTick(emitCloseNT, _this); + cb(err); + } else { + process.nextTick(emitCloseNT, _this); + } + }); + return this; +} +function emitErrorAndCloseNT(self, err) { + emitErrorNT(self, err); + emitCloseNT(self); +} +function emitCloseNT(self) { + if 
(self._writableState && !self._writableState.emitClose) return; + if (self._readableState && !self._readableState.emitClose) return; + self.emit('close'); +} +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} +function emitErrorNT(self, err) { + self.emit('error', err); +} +function errorOrDestroy(stream, err) { + // We have tests that rely on errors being emitted + // in the same tick, so changing this is semver major. + // For now when you opt-in to autoDestroy we allow + // the error to be emitted nextTick. In a future + // semver major update we should change the default to this. + + var rState = stream._readableState; + var wState = stream._writableState; + if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); +} +module.exports = { + destroy: destroy, + undestroy: undestroy, + errorOrDestroy: errorOrDestroy +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/end-of-stream.js b/node_modules/readable-stream/lib/internal/streams/end-of-stream.js new file mode 100644 index 0000000..59c671b --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/end-of-stream.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/end-of-stream with +// permission from the author, Mathias Buus (@mafintosh). 
+ +'use strict'; + +var ERR_STREAM_PREMATURE_CLOSE = require('../../../errors').codes.ERR_STREAM_PREMATURE_CLOSE; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { + args[_key] = arguments[_key]; + } + callback.apply(this, args); + }; +} +function noop() {} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function eos(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + callback = once(callback || noop); + var readable = opts.readable || opts.readable !== false && stream.readable; + var writable = opts.writable || opts.writable !== false && stream.writable; + var onlegacyfinish = function onlegacyfinish() { + if (!stream.writable) onfinish(); + }; + var writableEnded = stream._writableState && stream._writableState.finished; + var onfinish = function onfinish() { + writable = false; + writableEnded = true; + if (!readable) callback.call(stream); + }; + var readableEnded = stream._readableState && stream._readableState.endEmitted; + var onend = function onend() { + readable = false; + readableEnded = true; + if (!writable) callback.call(stream); + }; + var onerror = function onerror(err) { + callback.call(stream, err); + }; + var onclose = function onclose() { + var err; + if (readable && !readableEnded) { + if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + if (writable && !writableEnded) { + if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); + return callback.call(stream, err); + } + }; + var onrequest = function onrequest() { + stream.req.on('finish', onfinish); + }; + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + 
if (stream.req) onrequest();else stream.on('request', onrequest); + } else if (writable && !stream._writableState) { + // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + return function () { + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +} +module.exports = eos; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/from-browser.js b/node_modules/readable-stream/lib/internal/streams/from-browser.js new file mode 100644 index 0000000..a4ce56f --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/from-browser.js @@ -0,0 +1,3 @@ +module.exports = function () { + throw new Error('Readable.from is not available in the browser') +}; diff --git a/node_modules/readable-stream/lib/internal/streams/from.js b/node_modules/readable-stream/lib/internal/streams/from.js new file mode 100644 index 0000000..0a34ee9 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/from.js @@ -0,0 +1,52 @@ +'use strict'; + +function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } +function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, 
reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } +function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } +function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } +function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } +function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } +function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } +var ERR_INVALID_ARG_TYPE = require('../../../errors').codes.ERR_INVALID_ARG_TYPE; +function from(Readable, iterable, opts) { + var iterator; + if (iterable && typeof iterable.next === 'function') { + iterator = iterable; + } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); + var readable = new Readable(_objectSpread({ + objectMode: true + }, opts)); + // Reading boolean to protect against _read + // being called before last iteration completion. + var reading = false; + readable._read = function () { + if (!reading) { + reading = true; + next(); + } + }; + function next() { + return _next2.apply(this, arguments); + } + function _next2() { + _next2 = _asyncToGenerator(function* () { + try { + var _yield$iterator$next = yield iterator.next(), + value = _yield$iterator$next.value, + done = _yield$iterator$next.done; + if (done) { + readable.push(null); + } else if (readable.push(yield value)) { + next(); + } else { + reading = false; + } + } catch (err) { + readable.destroy(err); + } + }); + return _next2.apply(this, arguments); + } + return readable; +} +module.exports = from; diff --git a/node_modules/readable-stream/lib/internal/streams/pipeline.js b/node_modules/readable-stream/lib/internal/streams/pipeline.js new file mode 100644 index 0000000..e6f3924 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/pipeline.js @@ -0,0 +1,86 @@ +// Ported from https://github.com/mafintosh/pump with +// permission from the author, Mathias Buus (@mafintosh). 
+ +'use strict'; + +var eos; +function once(callback) { + var called = false; + return function () { + if (called) return; + called = true; + callback.apply(void 0, arguments); + }; +} +var _require$codes = require('../../../errors').codes, + ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, + ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; +function noop(err) { + // Rethrow the error if it exists to avoid swallowing it + if (err) throw err; +} +function isRequest(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +} +function destroyer(stream, reading, writing, callback) { + callback = once(callback); + var closed = false; + stream.on('close', function () { + closed = true; + }); + if (eos === undefined) eos = require('./end-of-stream'); + eos(stream, { + readable: reading, + writable: writing + }, function (err) { + if (err) return callback(err); + closed = true; + callback(); + }); + var destroyed = false; + return function (err) { + if (closed) return; + if (destroyed) return; + destroyed = true; + + // request.destroy just do .end - .abort is what we want + if (isRequest(stream)) return stream.abort(); + if (typeof stream.destroy === 'function') return stream.destroy(); + callback(err || new ERR_STREAM_DESTROYED('pipe')); + }; +} +function call(fn) { + fn(); +} +function pipe(from, to) { + return from.pipe(to); +} +function popCallback(streams) { + if (!streams.length) return noop; + if (typeof streams[streams.length - 1] !== 'function') return noop; + return streams.pop(); +} +function pipeline() { + for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { + streams[_key] = arguments[_key]; + } + var callback = popCallback(streams); + if (Array.isArray(streams[0])) streams = streams[0]; + if (streams.length < 2) { + throw new ERR_MISSING_ARGS('streams'); + } + var error; + var destroys = streams.map(function (stream, i) { + var reading = i < streams.length - 1; + var writing = i > 0; + 
return destroyer(stream, reading, writing, function (err) { + if (!error) error = err; + if (err) destroys.forEach(call); + if (reading) return; + destroys.forEach(call); + callback(error); + }); + }); + return streams.reduce(pipe); +} +module.exports = pipeline; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/state.js b/node_modules/readable-stream/lib/internal/streams/state.js new file mode 100644 index 0000000..3fbf892 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/state.js @@ -0,0 +1,22 @@ +'use strict'; + +var ERR_INVALID_OPT_VALUE = require('../../../errors').codes.ERR_INVALID_OPT_VALUE; +function highWaterMarkFrom(options, isDuplex, duplexKey) { + return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; +} +function getHighWaterMark(state, options, duplexKey, isDuplex) { + var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); + if (hwm != null) { + if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { + var name = isDuplex ? duplexKey : 'highWaterMark'; + throw new ERR_INVALID_OPT_VALUE(name, hwm); + } + return Math.floor(hwm); + } + + // Default value + return state.objectMode ? 
16 : 16 * 1024; +} +module.exports = { + getHighWaterMark: getHighWaterMark +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000..9332a3f --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ +module.exports = require('events').EventEmitter; diff --git a/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000..ce2ad5b --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/readable-stream/package.json b/node_modules/readable-stream/package.json new file mode 100644 index 0000000..ade59e7 --- /dev/null +++ b/node_modules/readable-stream/package.json @@ -0,0 +1,68 @@ +{ + "name": "readable-stream", + "version": "3.6.2", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "engines": { + "node": ">= 6" + }, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "devDependencies": { + "@babel/cli": "^7.2.0", + "@babel/core": "^7.2.0", + "@babel/polyfill": "^7.0.0", + "@babel/preset-env": "^7.2.0", + "airtap": "0.0.9", + "assert": "^1.4.0", + "bl": "^2.0.0", + "deep-strict-equal": "^0.2.0", + "events.once": "^2.0.2", + "glob": "^7.1.2", + "gunzip-maybe": "^1.4.1", + "hyperquest": "^2.1.3", + "lolex": "^2.6.0", + "nyc": "^11.0.0", + "pump": "^3.0.0", + "rimraf": "^2.6.2", + "tap": "^12.0.0", + "tape": "^4.9.0", + "tar-fs": "^1.16.2", + "util-promisify": "^2.1.0" + }, + "scripts": { + "test": "tap -J --no-esm test/parallel/*.js test/ours/*.js", + "ci": "TAP=1 tap --no-esm test/parallel/*.js test/ours/*.js | tee test.tap", + "test-browsers": "airtap 
--sauce-connect --loopback airtap.local -- test/browser.js", + "test-browser-local": "airtap --open --local -- test/browser.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov", + "update-browser-errors": "babel -o errors-browser.js errors.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "worker_threads": false, + "./errors": "./errors-browser.js", + "./readable.js": "./readable-browser.js", + "./lib/internal/streams/from.js": "./lib/internal/streams/from-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/node_modules/readable-stream/readable-browser.js b/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000..adbf60d --- /dev/null +++ b/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,9 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); +exports.finished = require('./lib/internal/streams/end-of-stream.js'); +exports.pipeline = require('./lib/internal/streams/pipeline.js'); diff --git a/node_modules/readable-stream/readable.js b/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..9e0ca12 --- /dev/null +++ b/node_modules/readable-stream/readable.js @@ -0,0 +1,16 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream.Readable; + Object.assign(module.exports, Stream); + module.exports.Stream = Stream; +} else { + exports = module.exports = 
require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); + exports.finished = require('./lib/internal/streams/end-of-stream.js'); + exports.pipeline = require('./lib/internal/streams/pipeline.js'); +} diff --git a/node_modules/redis/LICENSE b/node_modules/redis/LICENSE new file mode 100644 index 0000000..8509ccd --- /dev/null +++ b/node_modules/redis/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022-2023, Redis, inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/node_modules/redis/README.md b/node_modules/redis/README.md new file mode 100644 index 0000000..a590372 --- /dev/null +++ b/node_modules/redis/README.md @@ -0,0 +1,384 @@ +# Node-Redis + +[![Tests](https://img.shields.io/github/actions/workflow/status/redis/node-redis/tests.yml?branch=master)](https://github.com/redis/node-redis/actions/workflows/tests.yml) +[![Coverage](https://codecov.io/gh/redis/node-redis/branch/master/graph/badge.svg?token=xcfqHhJC37)](https://codecov.io/gh/redis/node-redis) +[![License](https://img.shields.io/github/license/redis/node-redis.svg)](https://github.com/redis/node-redis/blob/master/LICENSE) + +[![Discord](https://img.shields.io/discord/697882427875393627.svg?style=social&logo=discord)](https://discord.gg/redis) +[![Twitch](https://img.shields.io/twitch/status/redisinc?style=social)](https://www.twitch.tv/redisinc) +[![YouTube](https://img.shields.io/youtube/channel/views/UCD78lHSwYqMlyetR0_P4Vig?style=social)](https://www.youtube.com/redisinc) +[![Twitter](https://img.shields.io/twitter/follow/redisinc?style=social)](https://twitter.com/redisinc) + +node-redis is a modern, high performance [Redis](https://redis.io) client for Node.js. + +## How do I Redis? 
+ +[Learn for free at Redis University](https://university.redis.com/) + +[Build faster with the Redis Launchpad](https://launchpad.redis.com/) + +[Try the Redis Cloud](https://redis.com/try-free/) + +[Dive in developer tutorials](https://developer.redis.com/) + +[Join the Redis community](https://redis.com/community/) + +[Work at Redis](https://redis.com/company/careers/jobs/) + +## Packages + +| Name | Description | +|----------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| [redis](./) | [![Downloads](https://img.shields.io/npm/dm/redis.svg)](https://www.npmjs.com/package/redis) [![Version](https://img.shields.io/npm/v/redis.svg)](https://www.npmjs.com/package/redis) | +| [@redis/client](./packages/client) | [![Downloads](https://img.shields.io/npm/dm/@redis/client.svg)](https://www.npmjs.com/package/@redis/client) [![Version](https://img.shields.io/npm/v/@redis/client.svg)](https://www.npmjs.com/package/@redis/client) [![Docs](https://img.shields.io/badge/-documentation-dc382c)](https://redis.js.org/documentation/client/) | +| [@redis/bloom](./packages/bloom) | [![Downloads](https://img.shields.io/npm/dm/@redis/bloom.svg)](https://www.npmjs.com/package/@redis/bloom) [![Version](https://img.shields.io/npm/v/@redis/bloom.svg)](https://www.npmjs.com/package/@redis/bloom) [![Docs](https://img.shields.io/badge/-documentation-dc382c)](https://redis.js.org/documentation/bloom/) [Redis Bloom](https://oss.redis.com/redisbloom/) commands | +| [@redis/graph](./packages/graph) | 
[![Downloads](https://img.shields.io/npm/dm/@redis/graph.svg)](https://www.npmjs.com/package/@redis/graph) [![Version](https://img.shields.io/npm/v/@redis/graph.svg)](https://www.npmjs.com/package/@redis/graph) [![Docs](https://img.shields.io/badge/-documentation-dc382c)](https://redis.js.org/documentation/graph/) [Redis Graph](https://oss.redis.com/redisgraph/) commands | +| [@redis/json](./packages/json) | [![Downloads](https://img.shields.io/npm/dm/@redis/json.svg)](https://www.npmjs.com/package/@redis/json) [![Version](https://img.shields.io/npm/v/@redis/json.svg)](https://www.npmjs.com/package/@redis/json) [![Docs](https://img.shields.io/badge/-documentation-dc382c)](https://redis.js.org/documentation/json/) [Redis JSON](https://oss.redis.com/redisjson/) commands | +| [@redis/search](./packages/search) | [![Downloads](https://img.shields.io/npm/dm/@redis/search.svg)](https://www.npmjs.com/package/@redis/search) [![Version](https://img.shields.io/npm/v/@redis/search.svg)](https://www.npmjs.com/package/@redis/search) [![Docs](https://img.shields.io/badge/-documentation-dc382c)](https://redis.js.org/documentation/search/) [RediSearch](https://oss.redis.com/redisearch/) commands | +| [@redis/time-series](./packages/time-series) | [![Downloads](https://img.shields.io/npm/dm/@redis/time-series.svg)](https://www.npmjs.com/package/@redis/time-series) [![Version](https://img.shields.io/npm/v/@redis/time-series.svg)](https://www.npmjs.com/package/@redis/time-series) [![Docs](https://img.shields.io/badge/-documentation-dc382c)](https://redis.js.org/documentation/time-series/) [Redis Time-Series](https://oss.redis.com/redistimeseries/) commands | + +> :warning: In version 4.1.0 we moved our subpackages from `@node-redis` to `@redis`. If you're just using `npm install redis`, you don't need to do anything—it'll upgrade automatically. If you're using the subpackages directly, you'll need to point to the new scope (e.g. `@redis/client` instead of `@node-redis/client`). 
+ +## Installation + +Start a redis via docker: + +``` bash +docker run -p 6379:6379 -it redis/redis-stack-server:latest +``` + +To install node-redis, simply: + +```bash +npm install redis +``` + +> :warning: The new interface is clean and cool, but if you have an existing codebase, you'll want to read the [migration guide](./docs/v3-to-v4.md). + +Looking for a high-level library to handle object mapping? See [redis-om-node](https://github.com/redis/redis-om-node)! + +## Usage + +### Basic Example + +```typescript +import { createClient } from 'redis'; + +const client = await createClient() + .on('error', err => console.log('Redis Client Error', err)) + .connect(); + +await client.set('key', 'value'); +const value = await client.get('key'); +await client.disconnect(); +``` + +The above code connects to localhost on port 6379. To connect to a different host or port, use a connection string in the format `redis[s]://[[username][:password]@][host][:port][/db-number]`: + +```typescript +createClient({ + url: 'redis://alice:foobared@awesome.redis.server:6380' +}); +``` + +You can also use discrete parameters, UNIX sockets, and even TLS to connect. Details can be found in the [client configuration guide](./docs/client-configuration.md). + +To check if the the client is connected and ready to send commands, use `client.isReady` which returns a boolean. `client.isOpen` is also available. This returns `true` when the client's underlying socket is open, and `false` when it isn't (for example when the client is still connecting or reconnecting after a network error). + +### Redis Commands + +There is built-in support for all of the [out-of-the-box Redis commands](https://redis.io/commands). They are exposed using the raw Redis command names (`HSET`, `HGETALL`, etc.) 
and a friendlier camel-cased version (`hSet`, `hGetAll`, etc.): + +```typescript +// raw Redis commands +await client.HSET('key', 'field', 'value'); +await client.HGETALL('key'); + +// friendly JavaScript commands +await client.hSet('key', 'field', 'value'); +await client.hGetAll('key'); +``` + +Modifiers to commands are specified using a JavaScript object: + +```typescript +await client.set('key', 'value', { + EX: 10, + NX: true +}); +``` + +Replies will be transformed into useful data structures: + +```typescript +await client.hGetAll('key'); // { field1: 'value1', field2: 'value2' } +await client.hVals('key'); // ['value1', 'value2'] +``` + +`Buffer`s are supported as well: + +```typescript +await client.hSet('key', 'field', Buffer.from('value')); // 'OK' +await client.hGetAll( + commandOptions({ returnBuffers: true }), + 'key' +); // { field: } +``` + +### Unsupported Redis Commands + +If you want to run commands and/or use arguments that Node Redis doesn't know about (yet!) use `.sendCommand()`: + +```typescript +await client.sendCommand(['SET', 'key', 'value', 'NX']); // 'OK' + +await client.sendCommand(['HGETALL', 'key']); // ['key1', 'field1', 'key2', 'field2'] +``` + +### Transactions (Multi/Exec) + +Start a [transaction](https://redis.io/topics/transactions) by calling `.multi()`, then chaining your commands. When you're done, call `.exec()` and you'll get an array back with your results: + +```typescript +await client.set('another-key', 'another-value'); + +const [setKeyReply, otherKeyValue] = await client + .multi() + .set('key', 'value') + .get('another-key') + .exec(); // ['OK', 'another-value'] +``` + +You can also [watch](https://redis.io/topics/transactions#optimistic-locking-using-check-and-set) keys by calling `.watch()`. Your transaction will abort if any of the watched keys change. + +To dig deeper into transactions, check out the [Isolated Execution Guide](./docs/isolated-execution.md). 
+ +### Blocking Commands + +Any command can be run on a new connection by specifying the `isolated` option. The newly created connection is closed when the command's `Promise` is fulfilled. + +This pattern works especially well for blocking commands—such as `BLPOP` and `BLMOVE`: + +```typescript +import { commandOptions } from 'redis'; + +const blPopPromise = client.blPop( + commandOptions({ isolated: true }), + 'key', + 0 +); + +await client.lPush('key', ['1', '2']); + +await blPopPromise; // '2' +``` + +To learn more about isolated execution, check out the [guide](./docs/isolated-execution.md). + +### Pub/Sub + +See the [Pub/Sub overview](./docs/pub-sub.md). + +### Scan Iterator + +[`SCAN`](https://redis.io/commands/scan) results can be looped over using [async iterators](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Symbol/asyncIterator): + +```typescript +for await (const key of client.scanIterator()) { + // use the key! + await client.get(key); +} +``` + +This works with `HSCAN`, `SSCAN`, and `ZSCAN` too: + +```typescript +for await (const { field, value } of client.hScanIterator('hash')) {} +for await (const member of client.sScanIterator('set')) {} +for await (const { score, value } of client.zScanIterator('sorted-set')) {} +``` + +You can override the default options by providing a configuration object: + +```typescript +client.scanIterator({ + TYPE: 'string', // `SCAN` only + MATCH: 'patter*', + COUNT: 100 +}); +``` + +### [Programmability](https://redis.io/docs/manual/programmability/) + +Redis provides a programming interface allowing code execution on the redis server. + +#### [Functions](https://redis.io/docs/manual/programmability/functions-intro/) + +The following example retrieves a key in redis, returning the value of the key, incremented by an integer. For example, if your key _foo_ has the value _17_ and we run `add('foo', 25)`, it returns the answer to Life, the Universe and Everything. 
+ +```lua +#!lua name=library + +redis.register_function { + function_name = 'add', + callback = function(keys, args) return redis.call('GET', keys[1]) + args[1] end, + flags = { 'no-writes' } +} +``` + +Here is the same example, but in a format that can be pasted into the `redis-cli`. + +``` +FUNCTION LOAD "#!lua name=library\nredis.register_function{function_name=\"add\", callback=function(keys, args) return redis.call('GET', keys[1])+args[1] end, flags={\"no-writes\"}}" +``` + +Load the prior redis function on the _redis server_ before running the example below. + +```typescript +import { createClient } from 'redis'; + +const client = createClient({ + functions: { + library: { + add: { + NUMBER_OF_KEYS: 1, + transformArguments(key: string, toAdd: number): Array { + return [key, toAdd.toString()]; + }, + transformReply(reply: number): number { + return reply; + } + } + } + } +}); + +await client.connect(); + +await client.set('key', '1'); +await client.library.add('key', 2); // 3 +``` + +#### [Lua Scripts](https://redis.io/docs/manual/programmability/eval-intro/) + +The following is an end-to-end example of the prior concept. + +```typescript +import { createClient, defineScript } from 'redis'; + +const client = createClient({ + scripts: { + add: defineScript({ + NUMBER_OF_KEYS: 1, + SCRIPT: + 'return redis.call("GET", KEYS[1]) + ARGV[1];', + transformArguments(key: string, toAdd: number): Array { + return [key, toAdd.toString()]; + }, + transformReply(reply: number): number { + return reply; + } + }) + } +}); + +await client.connect(); + +await client.set('key', '1'); +await client.add('key', 2); // 3 +``` + +### Disconnecting + +There are two functions that disconnect a client from the Redis server. In most scenarios you should use `.quit()` to ensure that pending commands are sent to Redis before closing a connection. 
+ +#### `.QUIT()`/`.quit()` + +Gracefully close a client's connection to Redis, by sending the [`QUIT`](https://redis.io/commands/quit) command to the server. Before quitting, the client executes any remaining commands in its queue, and will receive replies from Redis for each of them. + +```typescript +const [ping, get, quit] = await Promise.all([ + client.ping(), + client.get('key'), + client.quit() +]); // ['PONG', null, 'OK'] + +try { + await client.get('key'); +} catch (err) { + // ClosedClient Error +} +``` + +#### `.disconnect()` + +Forcibly close a client's connection to Redis immediately. Calling `disconnect` will not send further pending commands to the Redis server, or wait for or parse outstanding responses. + +```typescript +await client.disconnect(); +``` + +### Auto-Pipelining + +Node Redis will automatically pipeline requests that are made during the same "tick". + +```typescript +client.set('Tm9kZSBSZWRpcw==', 'users:1'); +client.sAdd('users:1:tokens', 'Tm9kZSBSZWRpcw=='); +``` + +Of course, if you don't do something with your Promises you're certain to get [unhandled Promise exceptions](https://nodejs.org/api/process.html#process_event_unhandledrejection). To take advantage of auto-pipelining and handle your Promises, use `Promise.all()`. + +```typescript +await Promise.all([ + client.set('Tm9kZSBSZWRpcw==', 'users:1'), + client.sAdd('users:1:tokens', 'Tm9kZSBSZWRpcw==') +]); +``` + +### Clustering + +Check out the [Clustering Guide](./docs/clustering.md) when using Node Redis to connect to a Redis Cluster. 
+ +### Events + +The Node Redis client class is an Nodejs EventEmitter and it emits an event each time the network status changes: + +| Name | When | Listener arguments | +|-------------------------|------------------------------------------------------------------------------------|------------------------------------------------------------| +| `connect` | Initiating a connection to the server | *No arguments* | +| `ready` | Client is ready to use | *No arguments* | +| `end` | Connection has been closed (via `.quit()` or `.disconnect()`) | *No arguments* | +| `error` | An error has occurred—usually a network issue such as "Socket closed unexpectedly" | `(error: Error)` | +| `reconnecting` | Client is trying to reconnect to the server | *No arguments* | +| `sharded-channel-moved` | See [here](./docs/pub-sub.md#sharded-channel-moved-event) | See [here](./docs/pub-sub.md#sharded-channel-moved-event) | + +> :warning: You **MUST** listen to `error` events. If a client doesn't have at least one `error` listener registered and an `error` occurs, that error will be thrown and the Node.js process will exit. See the [`EventEmitter` docs](https://nodejs.org/api/events.html#events_error_events) for more details. + +> The client will not emit [any other events](./docs/v3-to-v4.md#all-the-removed-events) beyond those listed above. + +## Supported Redis versions + +Node Redis is supported with the following versions of Redis: + +| Version | Supported | +|---------|--------------------| +| 7.0.z | :heavy_check_mark: | +| 6.2.z | :heavy_check_mark: | +| 6.0.z | :heavy_check_mark: | +| 5.0.z | :heavy_check_mark: | +| < 5.0 | :x: | + +> Node Redis should work with older versions of Redis, but it is not fully tested and we cannot offer support. + +## Contributing + +If you'd like to contribute, check out the [contributing guide](CONTRIBUTING.md). + +Thank you to all the people who already contributed to Node Redis! 
+ +[![Contributors](https://contrib.rocks/image?repo=redis/node-redis)](https://github.com/redis/node-redis/graphs/contributors) + +## License + +This repository is licensed under the "MIT" license. See [LICENSE](LICENSE). diff --git a/node_modules/redis/dist/index.d.ts b/node_modules/redis/dist/index.d.ts new file mode 100644 index 0000000..ef004a3 --- /dev/null +++ b/node_modules/redis/dist/index.d.ts @@ -0,0 +1,302 @@ +import { RedisModules, RedisFunctions, RedisScripts, RedisClientOptions, RedisClientType as _RedisClientType, RedisClusterOptions, RedisClusterType as _RedisClusterType } from '@redis/client'; +export * from '@redis/client'; +export * from '@redis/bloom'; +export * from '@redis/graph'; +export * from '@redis/json'; +export * from '@redis/search'; +export * from '@redis/time-series'; +declare const modules: { + graph: { + CONFIG_GET: typeof import("@redis/graph/dist/commands/CONFIG_GET"); + configGet: typeof import("@redis/graph/dist/commands/CONFIG_GET"); + CONFIG_SET: typeof import("@redis/graph/dist/commands/CONFIG_SET"); + configSet: typeof import("@redis/graph/dist/commands/CONFIG_SET"); + DELETE: typeof import("@redis/graph/dist/commands/DELETE"); + delete: typeof import("@redis/graph/dist/commands/DELETE"); + EXPLAIN: typeof import("@redis/graph/dist/commands/EXPLAIN"); + explain: typeof import("@redis/graph/dist/commands/EXPLAIN"); + LIST: typeof import("@redis/graph/dist/commands/LIST"); + list: typeof import("@redis/graph/dist/commands/LIST"); + PROFILE: typeof import("@redis/graph/dist/commands/PROFILE"); + profile: typeof import("@redis/graph/dist/commands/PROFILE"); + QUERY: typeof import("@redis/graph/dist/commands/QUERY"); + query: typeof import("@redis/graph/dist/commands/QUERY"); + RO_QUERY: typeof import("@redis/graph/dist/commands/RO_QUERY"); + roQuery: typeof import("@redis/graph/dist/commands/RO_QUERY"); + SLOWLOG: typeof import("@redis/graph/dist/commands/SLOWLOG"); + slowLog: typeof 
import("@redis/graph/dist/commands/SLOWLOG"); + }; + json: { + ARRAPPEND: typeof import("@redis/json/dist/commands/ARRAPPEND"); + arrAppend: typeof import("@redis/json/dist/commands/ARRAPPEND"); + ARRINDEX: typeof import("@redis/json/dist/commands/ARRINDEX"); + arrIndex: typeof import("@redis/json/dist/commands/ARRINDEX"); + ARRINSERT: typeof import("@redis/json/dist/commands/ARRINSERT"); + arrInsert: typeof import("@redis/json/dist/commands/ARRINSERT"); + ARRLEN: typeof import("@redis/json/dist/commands/ARRLEN"); + arrLen: typeof import("@redis/json/dist/commands/ARRLEN"); + ARRPOP: typeof import("@redis/json/dist/commands/ARRPOP"); + arrPop: typeof import("@redis/json/dist/commands/ARRPOP"); + ARRTRIM: typeof import("@redis/json/dist/commands/ARRTRIM"); + arrTrim: typeof import("@redis/json/dist/commands/ARRTRIM"); + DEBUG_MEMORY: typeof import("@redis/json/dist/commands/DEBUG_MEMORY"); + debugMemory: typeof import("@redis/json/dist/commands/DEBUG_MEMORY"); + DEL: typeof import("@redis/json/dist/commands/DEL"); + del: typeof import("@redis/json/dist/commands/DEL"); + FORGET: typeof import("@redis/json/dist/commands/FORGET"); + forget: typeof import("@redis/json/dist/commands/FORGET"); + GET: typeof import("@redis/json/dist/commands/GET"); + get: typeof import("@redis/json/dist/commands/GET"); + MERGE: typeof import("@redis/json/dist/commands/MERGE"); + merge: typeof import("@redis/json/dist/commands/MERGE"); + MGET: typeof import("@redis/json/dist/commands/MGET"); + mGet: typeof import("@redis/json/dist/commands/MGET"); + MSET: typeof import("@redis/json/dist/commands/MSET"); + mSet: typeof import("@redis/json/dist/commands/MSET"); + NUMINCRBY: typeof import("@redis/json/dist/commands/NUMINCRBY"); + numIncrBy: typeof import("@redis/json/dist/commands/NUMINCRBY"); + NUMMULTBY: typeof import("@redis/json/dist/commands/NUMMULTBY"); + numMultBy: typeof import("@redis/json/dist/commands/NUMMULTBY"); + OBJKEYS: typeof import("@redis/json/dist/commands/OBJKEYS"); + 
objKeys: typeof import("@redis/json/dist/commands/OBJKEYS"); + OBJLEN: typeof import("@redis/json/dist/commands/OBJLEN"); + objLen: typeof import("@redis/json/dist/commands/OBJLEN"); + RESP: typeof import("@redis/json/dist/commands/RESP"); + resp: typeof import("@redis/json/dist/commands/RESP"); + SET: typeof import("@redis/json/dist/commands/SET"); + set: typeof import("@redis/json/dist/commands/SET"); + STRAPPEND: typeof import("@redis/json/dist/commands/STRAPPEND"); + strAppend: typeof import("@redis/json/dist/commands/STRAPPEND"); + STRLEN: typeof import("@redis/json/dist/commands/STRLEN"); + strLen: typeof import("@redis/json/dist/commands/STRLEN"); + TYPE: typeof import("@redis/json/dist/commands/TYPE"); + type: typeof import("@redis/json/dist/commands/TYPE"); + }; + ft: { + _LIST: typeof import("@redis/search/dist/commands/_LIST"); + _list: typeof import("@redis/search/dist/commands/_LIST"); + ALTER: typeof import("@redis/search/dist/commands/ALTER"); + alter: typeof import("@redis/search/dist/commands/ALTER"); + AGGREGATE_WITHCURSOR: typeof import("@redis/search/dist/commands/AGGREGATE_WITHCURSOR"); + aggregateWithCursor: typeof import("@redis/search/dist/commands/AGGREGATE_WITHCURSOR"); + AGGREGATE: typeof import("@redis/search/dist/commands/AGGREGATE"); + aggregate: typeof import("@redis/search/dist/commands/AGGREGATE"); + ALIASADD: typeof import("@redis/search/dist/commands/ALIASADD"); + aliasAdd: typeof import("@redis/search/dist/commands/ALIASADD"); + ALIASDEL: typeof import("@redis/search/dist/commands/ALIASDEL"); + aliasDel: typeof import("@redis/search/dist/commands/ALIASDEL"); + ALIASUPDATE: typeof import("@redis/search/dist/commands/ALIASUPDATE"); + aliasUpdate: typeof import("@redis/search/dist/commands/ALIASUPDATE"); + CONFIG_GET: typeof import("@redis/search/dist/commands/CONFIG_GET"); + configGet: typeof import("@redis/search/dist/commands/CONFIG_GET"); + CONFIG_SET: typeof import("@redis/search/dist/commands/CONFIG_SET"); + configSet: typeof 
import("@redis/search/dist/commands/CONFIG_SET"); + CREATE: typeof import("@redis/search/dist/commands/CREATE"); + create: typeof import("@redis/search/dist/commands/CREATE"); + CURSOR_DEL: typeof import("@redis/search/dist/commands/CURSOR_DEL"); + cursorDel: typeof import("@redis/search/dist/commands/CURSOR_DEL"); + CURSOR_READ: typeof import("@redis/search/dist/commands/CURSOR_READ"); + cursorRead: typeof import("@redis/search/dist/commands/CURSOR_READ"); + DICTADD: typeof import("@redis/search/dist/commands/DICTADD"); + dictAdd: typeof import("@redis/search/dist/commands/DICTADD"); + DICTDEL: typeof import("@redis/search/dist/commands/DICTDEL"); + dictDel: typeof import("@redis/search/dist/commands/DICTDEL"); + DICTDUMP: typeof import("@redis/search/dist/commands/DICTDUMP"); + dictDump: typeof import("@redis/search/dist/commands/DICTDUMP"); + DROPINDEX: typeof import("@redis/search/dist/commands/DROPINDEX"); + dropIndex: typeof import("@redis/search/dist/commands/DROPINDEX"); + EXPLAIN: typeof import("@redis/search/dist/commands/EXPLAIN"); + explain: typeof import("@redis/search/dist/commands/EXPLAIN"); + EXPLAINCLI: typeof import("@redis/search/dist/commands/EXPLAINCLI"); + explainCli: typeof import("@redis/search/dist/commands/EXPLAINCLI"); + INFO: typeof import("@redis/search/dist/commands/INFO"); + info: typeof import("@redis/search/dist/commands/INFO"); + PROFILESEARCH: typeof import("@redis/search/dist/commands/PROFILE_SEARCH"); + profileSearch: typeof import("@redis/search/dist/commands/PROFILE_SEARCH"); + PROFILEAGGREGATE: typeof import("@redis/search/dist/commands/PROFILE_AGGREGATE"); + profileAggregate: typeof import("@redis/search/dist/commands/PROFILE_AGGREGATE"); + SEARCH: typeof import("@redis/search/dist/commands/SEARCH"); + search: typeof import("@redis/search/dist/commands/SEARCH"); + SEARCH_NOCONTENT: typeof import("@redis/search/dist/commands/SEARCH_NOCONTENT"); + searchNoContent: typeof import("@redis/search/dist/commands/SEARCH_NOCONTENT"); 
+ SPELLCHECK: typeof import("@redis/search/dist/commands/SPELLCHECK"); + spellCheck: typeof import("@redis/search/dist/commands/SPELLCHECK"); + SUGADD: typeof import("@redis/search/dist/commands/SUGADD"); + sugAdd: typeof import("@redis/search/dist/commands/SUGADD"); + SUGDEL: typeof import("@redis/search/dist/commands/SUGDEL"); + sugDel: typeof import("@redis/search/dist/commands/SUGDEL"); + SUGGET_WITHPAYLOADS: typeof import("@redis/search/dist/commands/SUGGET_WITHPAYLOADS"); + sugGetWithPayloads: typeof import("@redis/search/dist/commands/SUGGET_WITHPAYLOADS"); + SUGGET_WITHSCORES_WITHPAYLOADS: typeof import("@redis/search/dist/commands/SUGGET_WITHSCORES_WITHPAYLOADS"); + sugGetWithScoresWithPayloads: typeof import("@redis/search/dist/commands/SUGGET_WITHSCORES_WITHPAYLOADS"); + SUGGET_WITHSCORES: typeof import("@redis/search/dist/commands/SUGGET_WITHSCORES"); + sugGetWithScores: typeof import("@redis/search/dist/commands/SUGGET_WITHSCORES"); + SUGGET: typeof import("@redis/search/dist/commands/SUGGET"); + sugGet: typeof import("@redis/search/dist/commands/SUGGET"); + SUGLEN: typeof import("@redis/search/dist/commands/SUGLEN"); + sugLen: typeof import("@redis/search/dist/commands/SUGLEN"); + SYNDUMP: typeof import("@redis/search/dist/commands/SYNDUMP"); + synDump: typeof import("@redis/search/dist/commands/SYNDUMP"); + SYNUPDATE: typeof import("@redis/search/dist/commands/SYNUPDATE"); + synUpdate: typeof import("@redis/search/dist/commands/SYNUPDATE"); + TAGVALS: typeof import("@redis/search/dist/commands/TAGVALS"); + tagVals: typeof import("@redis/search/dist/commands/TAGVALS"); + }; + ts: { + ADD: typeof import("@redis/time-series/dist/commands/ADD"); + add: typeof import("@redis/time-series/dist/commands/ADD"); + ALTER: typeof import("@redis/time-series/dist/commands/ALTER"); + alter: typeof import("@redis/time-series/dist/commands/ALTER"); + CREATE: typeof import("@redis/time-series/dist/commands/CREATE"); + create: typeof 
import("@redis/time-series/dist/commands/CREATE"); + CREATERULE: typeof import("@redis/time-series/dist/commands/CREATERULE"); + createRule: typeof import("@redis/time-series/dist/commands/CREATERULE"); + DECRBY: typeof import("@redis/time-series/dist/commands/DECRBY"); + decrBy: typeof import("@redis/time-series/dist/commands/DECRBY"); + DEL: typeof import("@redis/time-series/dist/commands/DEL"); + del: typeof import("@redis/time-series/dist/commands/DEL"); + DELETERULE: typeof import("@redis/time-series/dist/commands/DELETERULE"); + deleteRule: typeof import("@redis/time-series/dist/commands/DELETERULE"); + GET: typeof import("@redis/time-series/dist/commands/GET"); + get: typeof import("@redis/time-series/dist/commands/GET"); + INCRBY: typeof import("@redis/time-series/dist/commands/INCRBY"); + incrBy: typeof import("@redis/time-series/dist/commands/INCRBY"); + INFO_DEBUG: typeof import("@redis/time-series/dist/commands/INFO_DEBUG"); + infoDebug: typeof import("@redis/time-series/dist/commands/INFO_DEBUG"); + INFO: typeof import("@redis/time-series/dist/commands/INFO"); + info: typeof import("@redis/time-series/dist/commands/INFO"); + MADD: typeof import("@redis/time-series/dist/commands/MADD"); + mAdd: typeof import("@redis/time-series/dist/commands/MADD"); + MGET: typeof import("@redis/time-series/dist/commands/MGET"); + mGet: typeof import("@redis/time-series/dist/commands/MGET"); + MGET_WITHLABELS: typeof import("@redis/time-series/dist/commands/MGET_WITHLABELS"); + mGetWithLabels: typeof import("@redis/time-series/dist/commands/MGET_WITHLABELS"); + QUERYINDEX: typeof import("@redis/time-series/dist/commands/QUERYINDEX"); + queryIndex: typeof import("@redis/time-series/dist/commands/QUERYINDEX"); + RANGE: typeof import("@redis/time-series/dist/commands/RANGE"); + range: typeof import("@redis/time-series/dist/commands/RANGE"); + REVRANGE: typeof import("@redis/time-series/dist/commands/REVRANGE"); + revRange: typeof 
import("@redis/time-series/dist/commands/REVRANGE"); + MRANGE: typeof import("@redis/time-series/dist/commands/MRANGE"); + mRange: typeof import("@redis/time-series/dist/commands/MRANGE"); + MRANGE_WITHLABELS: typeof import("@redis/time-series/dist/commands/MRANGE_WITHLABELS"); + mRangeWithLabels: typeof import("@redis/time-series/dist/commands/MRANGE_WITHLABELS"); + MREVRANGE: typeof import("@redis/time-series/dist/commands/MREVRANGE"); + mRevRange: typeof import("@redis/time-series/dist/commands/MREVRANGE"); + MREVRANGE_WITHLABELS: typeof import("@redis/time-series/dist/commands/MREVRANGE_WITHLABELS"); + mRevRangeWithLabels: typeof import("@redis/time-series/dist/commands/MREVRANGE_WITHLABELS"); + }; + bf: { + ADD: typeof import("@redis/bloom/dist/commands/bloom/ADD"); + add: typeof import("@redis/bloom/dist/commands/bloom/ADD"); + CARD: typeof import("@redis/bloom/dist/commands/bloom/CARD"); + card: typeof import("@redis/bloom/dist/commands/bloom/CARD"); + EXISTS: typeof import("@redis/bloom/dist/commands/bloom/EXISTS"); + exists: typeof import("@redis/bloom/dist/commands/bloom/EXISTS"); + INFO: typeof import("@redis/bloom/dist/commands/bloom/INFO"); + info: typeof import("@redis/bloom/dist/commands/bloom/INFO"); + INSERT: typeof import("@redis/bloom/dist/commands/bloom/INSERT"); + insert: typeof import("@redis/bloom/dist/commands/bloom/INSERT"); + LOADCHUNK: typeof import("@redis/bloom/dist/commands/bloom/LOADCHUNK"); + loadChunk: typeof import("@redis/bloom/dist/commands/bloom/LOADCHUNK"); + MADD: typeof import("@redis/bloom/dist/commands/bloom/MADD"); + mAdd: typeof import("@redis/bloom/dist/commands/bloom/MADD"); + MEXISTS: typeof import("@redis/bloom/dist/commands/bloom/MEXISTS"); + mExists: typeof import("@redis/bloom/dist/commands/bloom/MEXISTS"); + RESERVE: typeof import("@redis/bloom/dist/commands/bloom/RESERVE"); + reserve: typeof import("@redis/bloom/dist/commands/bloom/RESERVE"); + SCANDUMP: typeof import("@redis/bloom/dist/commands/bloom/SCANDUMP"); 
+ scanDump: typeof import("@redis/bloom/dist/commands/bloom/SCANDUMP"); + }; + cms: { + INCRBY: typeof import("@redis/bloom/dist/commands/count-min-sketch/INCRBY"); + incrBy: typeof import("@redis/bloom/dist/commands/count-min-sketch/INCRBY"); + INFO: typeof import("@redis/bloom/dist/commands/count-min-sketch/INFO"); + info: typeof import("@redis/bloom/dist/commands/count-min-sketch/INFO"); + INITBYDIM: typeof import("@redis/bloom/dist/commands/count-min-sketch/INITBYDIM"); + initByDim: typeof import("@redis/bloom/dist/commands/count-min-sketch/INITBYDIM"); + INITBYPROB: typeof import("@redis/bloom/dist/commands/count-min-sketch/INITBYPROB"); + initByProb: typeof import("@redis/bloom/dist/commands/count-min-sketch/INITBYPROB"); + MERGE: typeof import("@redis/bloom/dist/commands/count-min-sketch/MERGE"); + merge: typeof import("@redis/bloom/dist/commands/count-min-sketch/MERGE"); + QUERY: typeof import("@redis/bloom/dist/commands/count-min-sketch/QUERY"); + query: typeof import("@redis/bloom/dist/commands/count-min-sketch/QUERY"); + }; + cf: { + ADD: typeof import("@redis/bloom/dist/commands/cuckoo/ADD"); + add: typeof import("@redis/bloom/dist/commands/cuckoo/ADD"); + ADDNX: typeof import("@redis/bloom/dist/commands/cuckoo/ADDNX"); + addNX: typeof import("@redis/bloom/dist/commands/cuckoo/ADDNX"); + COUNT: typeof import("@redis/bloom/dist/commands/cuckoo/COUNT"); + count: typeof import("@redis/bloom/dist/commands/cuckoo/COUNT"); + DEL: typeof import("@redis/bloom/dist/commands/cuckoo/DEL"); + del: typeof import("@redis/bloom/dist/commands/cuckoo/DEL"); + EXISTS: typeof import("@redis/bloom/dist/commands/cuckoo/EXISTS"); + exists: typeof import("@redis/bloom/dist/commands/cuckoo/EXISTS"); + INFO: typeof import("@redis/bloom/dist/commands/cuckoo/INFO"); + info: typeof import("@redis/bloom/dist/commands/cuckoo/INFO"); + INSERT: typeof import("@redis/bloom/dist/commands/cuckoo/INSERT"); + insert: typeof import("@redis/bloom/dist/commands/cuckoo/INSERT"); + INSERTNX: 
typeof import("@redis/bloom/dist/commands/cuckoo/INSERTNX"); + insertNX: typeof import("@redis/bloom/dist/commands/cuckoo/INSERTNX"); + LOADCHUNK: typeof import("@redis/bloom/dist/commands/cuckoo/LOADCHUNK"); + loadChunk: typeof import("@redis/bloom/dist/commands/cuckoo/LOADCHUNK"); + RESERVE: typeof import("@redis/bloom/dist/commands/cuckoo/RESERVE"); + reserve: typeof import("@redis/bloom/dist/commands/cuckoo/RESERVE"); + SCANDUMP: typeof import("@redis/bloom/dist/commands/cuckoo/SCANDUMP"); + scanDump: typeof import("@redis/bloom/dist/commands/cuckoo/SCANDUMP"); + }; + tDigest: { + ADD: typeof import("@redis/bloom/dist/commands/t-digest/ADD"); + add: typeof import("@redis/bloom/dist/commands/t-digest/ADD"); + BYRANK: typeof import("@redis/bloom/dist/commands/t-digest/BYRANK"); + byRank: typeof import("@redis/bloom/dist/commands/t-digest/BYRANK"); + BYREVRANK: typeof import("@redis/bloom/dist/commands/t-digest/BYREVRANK"); + byRevRank: typeof import("@redis/bloom/dist/commands/t-digest/BYREVRANK"); + CDF: typeof import("@redis/bloom/dist/commands/t-digest/CDF"); + cdf: typeof import("@redis/bloom/dist/commands/t-digest/CDF"); + CREATE: typeof import("@redis/bloom/dist/commands/t-digest/CREATE"); + create: typeof import("@redis/bloom/dist/commands/t-digest/CREATE"); + INFO: typeof import("@redis/bloom/dist/commands/t-digest/INFO"); + info: typeof import("@redis/bloom/dist/commands/t-digest/INFO"); + MAX: typeof import("@redis/bloom/dist/commands/t-digest/MAX"); + max: typeof import("@redis/bloom/dist/commands/t-digest/MAX"); + MERGE: typeof import("@redis/bloom/dist/commands/t-digest/MERGE"); + merge: typeof import("@redis/bloom/dist/commands/t-digest/MERGE"); + MIN: typeof import("@redis/bloom/dist/commands/t-digest/MIN"); + min: typeof import("@redis/bloom/dist/commands/t-digest/MIN"); + QUANTILE: typeof import("@redis/bloom/dist/commands/t-digest/QUANTILE"); + quantile: typeof import("@redis/bloom/dist/commands/t-digest/QUANTILE"); + RANK: typeof 
import("@redis/bloom/dist/commands/t-digest/RANK"); + rank: typeof import("@redis/bloom/dist/commands/t-digest/RANK"); + RESET: typeof import("@redis/bloom/dist/commands/t-digest/RESET"); + reset: typeof import("@redis/bloom/dist/commands/t-digest/RESET"); + REVRANK: typeof import("@redis/bloom/dist/commands/t-digest/REVRANK"); + revRank: typeof import("@redis/bloom/dist/commands/t-digest/REVRANK"); + TRIMMED_MEAN: typeof import("@redis/bloom/dist/commands/t-digest/TRIMMED_MEAN"); + trimmedMean: typeof import("@redis/bloom/dist/commands/t-digest/TRIMMED_MEAN"); + }; + topK: { + ADD: typeof import("@redis/bloom/dist/commands/top-k/ADD"); + add: typeof import("@redis/bloom/dist/commands/top-k/ADD"); + COUNT: typeof import("@redis/bloom/dist/commands/top-k/COUNT"); + count: typeof import("@redis/bloom/dist/commands/top-k/COUNT"); + INCRBY: typeof import("@redis/bloom/dist/commands/top-k/INCRBY"); + incrBy: typeof import("@redis/bloom/dist/commands/top-k/INCRBY"); + INFO: typeof import("@redis/bloom/dist/commands/top-k/INFO"); + info: typeof import("@redis/bloom/dist/commands/top-k/INFO"); + LIST_WITHCOUNT: typeof import("@redis/bloom/dist/commands/top-k/LIST_WITHCOUNT"); + listWithCount: typeof import("@redis/bloom/dist/commands/top-k/LIST_WITHCOUNT"); + LIST: typeof import("@redis/bloom/dist/commands/top-k/LIST"); + list: typeof import("@redis/bloom/dist/commands/top-k/LIST"); + QUERY: typeof import("@redis/bloom/dist/commands/top-k/QUERY"); + query: typeof import("@redis/bloom/dist/commands/top-k/QUERY"); + RESERVE: typeof import("@redis/bloom/dist/commands/top-k/RESERVE"); + reserve: typeof import("@redis/bloom/dist/commands/top-k/RESERVE"); + }; +}; +export type RedisDefaultModules = typeof modules; +export type RedisClientType, S extends RedisScripts = Record> = _RedisClientType; +export declare function createClient(options?: RedisClientOptions): _RedisClientType; +export type RedisClusterType, S extends RedisScripts = Record> = _RedisClusterType; +export 
declare function createCluster(options: RedisClusterOptions): RedisClusterType; diff --git a/node_modules/redis/dist/index.js b/node_modules/redis/dist/index.js new file mode 100644 index 0000000..fb6b04b --- /dev/null +++ b/node_modules/redis/dist/index.js @@ -0,0 +1,56 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.createCluster = exports.createClient = void 0; +const client_1 = require("@redis/client"); +const bloom_1 = require("@redis/bloom"); +const graph_1 = require("@redis/graph"); +const json_1 = require("@redis/json"); +const search_1 = require("@redis/search"); +const time_series_1 = require("@redis/time-series"); +__exportStar(require("@redis/client"), exports); +__exportStar(require("@redis/bloom"), exports); +__exportStar(require("@redis/graph"), exports); +__exportStar(require("@redis/json"), exports); +__exportStar(require("@redis/search"), exports); +__exportStar(require("@redis/time-series"), exports); +const modules = { + ...bloom_1.default, + graph: graph_1.default, + json: json_1.default, + ft: search_1.default, + ts: time_series_1.default +}; +function createClient(options) { + return (0, client_1.createClient)({ + ...options, + modules: { + ...modules, + ...options?.modules + } + }); +} +exports.createClient = createClient; +function 
createCluster(options) { + return (0, client_1.createCluster)({ + ...options, + modules: { + ...modules, + ...options?.modules + } + }); +} +exports.createCluster = createCluster; diff --git a/node_modules/redis/package.json b/node_modules/redis/package.json new file mode 100644 index 0000000..cba8252 --- /dev/null +++ b/node_modules/redis/package.json @@ -0,0 +1,50 @@ +{ + "name": "redis", + "description": "A modern, high performance Redis client", + "version": "4.7.1", + "license": "MIT", + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "files": [ + "dist/" + ], + "workspaces": [ + "./packages/*" + ], + "scripts": { + "test": "npm run test -ws --if-present", + "build:client": "npm run build -w ./packages/client", + "build:test-utils": "npm run build -w ./packages/test-utils", + "build:tests-tools": "npm run build:client && npm run build:test-utils", + "build:modules": "find ./packages -mindepth 1 -maxdepth 1 -type d ! -name 'client' ! -name 'test-utils' -exec npm run build -w {} \\;", + "build": "tsc", + "build-all": "npm run build:client && npm run build:test-utils && npm run build:modules && npm run build", + "documentation": "npm run documentation -ws --if-present", + "gh-pages": "gh-pages -d ./documentation -e ./documentation -u 'documentation-bot '" + }, + "dependencies": { + "@redis/bloom": "1.2.0", + "@redis/client": "1.6.1", + "@redis/graph": "1.1.1", + "@redis/json": "1.0.7", + "@redis/search": "1.2.0", + "@redis/time-series": "1.1.0" + }, + "devDependencies": { + "@tsconfig/node14": "^14.1.0", + "gh-pages": "^6.0.0", + "release-it": "^16.1.5", + "typescript": "^5.2.2" + }, + "repository": { + "type": "git", + "url": "git://github.com/redis/node-redis.git" + }, + "bugs": { + "url": "https://github.com/redis/node-redis/issues" + }, + "homepage": "https://github.com/redis/node-redis", + "keywords": [ + "redis" + ] +} diff --git a/node_modules/rimraf/CHANGELOG.md b/node_modules/rimraf/CHANGELOG.md new file mode 100644 index 0000000..f116f14 
--- /dev/null +++ b/node_modules/rimraf/CHANGELOG.md @@ -0,0 +1,65 @@ +# v3.0 + +- Add `--preserve-root` option to executable (default true) +- Drop support for Node.js below version 6 + +# v2.7 + +- Make `glob` an optional dependency + +# 2.6 + +- Retry on EBUSY on non-windows platforms as well +- Make `rimraf.sync` 10000% more reliable on Windows + +# 2.5 + +- Handle Windows EPERM when lstat-ing read-only dirs +- Add glob option to pass options to glob + +# 2.4 + +- Add EPERM to delay/retry loop +- Add `disableGlob` option + +# 2.3 + +- Make maxBusyTries and emfileWait configurable +- Handle weird SunOS unlink-dir issue +- Glob the CLI arg for better Windows support + +# 2.2 + +- Handle ENOENT properly on Windows +- Allow overriding fs methods +- Treat EPERM as indicative of non-empty dir +- Remove optional graceful-fs dep +- Consistently return null error instead of undefined on success +- win32: Treat ENOTEMPTY the same as EBUSY +- Add `rimraf` binary + +# 2.1 + +- Fix SunOS error code for a non-empty directory +- Try rmdir before readdir +- Treat EISDIR like EPERM +- Remove chmod +- Remove lstat polyfill, node 0.7 is not supported + +# 2.0 + +- Fix myGid call to check process.getgid +- Simplify the EBUSY backoff logic. +- Use fs.lstat in node >= 0.7.9 +- Remove gently option +- remove fiber implementation +- Delete files that are marked read-only + +# 1.0 + +- Allow ENOENT in sync method +- Throw when no callback is provided +- Make opts.gently an absolute path +- use 'stat' if 'lstat' is not available +- Consistent error naming, and rethrow non-ENOENT stat errors +- add fiber implementation diff --git a/node_modules/rimraf/LICENSE b/node_modules/rimraf/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/rimraf/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/rimraf/README.md b/node_modules/rimraf/README.md new file mode 100644 index 0000000..423b8cf --- /dev/null +++ b/node_modules/rimraf/README.md @@ -0,0 +1,101 @@ +[![Build Status](https://travis-ci.org/isaacs/rimraf.svg?branch=master)](https://travis-ci.org/isaacs/rimraf) [![Dependency Status](https://david-dm.org/isaacs/rimraf.svg)](https://david-dm.org/isaacs/rimraf) [![devDependency Status](https://david-dm.org/isaacs/rimraf/dev-status.svg)](https://david-dm.org/isaacs/rimraf#info=devDependencies) + +The [UNIX command](http://en.wikipedia.org/wiki/Rm_(Unix)) `rm -rf` for node. + +Install with `npm install rimraf`, or just drop rimraf.js somewhere. + +## API + +`rimraf(f, [opts], callback)` + +The first parameter will be interpreted as a globbing pattern for files. If you +want to disable globbing you can do so with `opts.disableGlob` (defaults to +`false`). This might be handy, for instance, if you have filenames that contain +globbing wildcard characters. + +The callback will be called with an error if there is one. Certain +errors are handled for you: + +* Windows: `EBUSY` and `ENOTEMPTY` - rimraf will back off a maximum of + `opts.maxBusyTries` times before giving up, adding 100ms of wait + between each attempt. 
The default `maxBusyTries` is 3. +* `ENOENT` - If the file doesn't exist, rimraf will return + successfully, since your desired outcome is already the case. +* `EMFILE` - Since `readdir` requires opening a file descriptor, it's + possible to hit `EMFILE` if too many file descriptors are in use. + In the sync case, there's nothing to be done for this. But in the + async case, rimraf will gradually back off with timeouts up to + `opts.emfileWait` ms, which defaults to 1000. + +## options + +* unlink, chmod, stat, lstat, rmdir, readdir, + unlinkSync, chmodSync, statSync, lstatSync, rmdirSync, readdirSync + + In order to use a custom file system library, you can override + specific fs functions on the options object. + + If any of these functions are present on the options object, then + the supplied function will be used instead of the default fs + method. + + Sync methods are only relevant for `rimraf.sync()`, of course. + + For example: + + ```javascript + var myCustomFS = require('some-custom-fs') + + rimraf('some-thing', myCustomFS, callback) + ``` + +* maxBusyTries + + If an `EBUSY`, `ENOTEMPTY`, or `EPERM` error code is encountered + on Windows systems, then rimraf will retry with a linear backoff + wait of 100ms longer on each try. The default maxBusyTries is 3. + + Only relevant for async usage. + +* emfileWait + + If an `EMFILE` error is encountered, then rimraf will retry + repeatedly with a linear backoff of 1ms longer on each try, until + the timeout counter hits this max. The default limit is 1000. + + If you repeatedly encounter `EMFILE` errors, then consider using + [graceful-fs](http://npm.im/graceful-fs) in your program. + + Only relevant for async usage. + +* glob + + Set to `false` to disable [glob](http://npm.im/glob) pattern + matching. + + Set to an object to pass options to the glob module. The default + glob options are `{ nosort: true, silent: true }`. + + Glob version 6 is used in this module. + + Relevant for both sync and async usage. 
+ +* disableGlob + + Set to any non-falsey value to disable globbing entirely. + (Equivalent to setting `glob: false`.) + +## rimraf.sync + +It can remove stuff synchronously, too. But that's not so good. Use +the async API. It's better. + +## CLI + +If installed with `npm install rimraf -g` it can be used as a global +command `rimraf [ ...]` which is useful for cross platform support. + +## mkdirp + +If you need to create a directory recursively, check out +[mkdirp](https://github.com/substack/node-mkdirp). diff --git a/node_modules/rimraf/bin.js b/node_modules/rimraf/bin.js new file mode 100644 index 0000000..023814c --- /dev/null +++ b/node_modules/rimraf/bin.js @@ -0,0 +1,68 @@ +#!/usr/bin/env node + +const rimraf = require('./') + +const path = require('path') + +const isRoot = arg => /^(\/|[a-zA-Z]:\\)$/.test(path.resolve(arg)) +const filterOutRoot = arg => { + const ok = preserveRoot === false || !isRoot(arg) + if (!ok) { + console.error(`refusing to remove ${arg}`) + console.error('Set --no-preserve-root to allow this') + } + return ok +} + +let help = false +let dashdash = false +let noglob = false +let preserveRoot = true +const args = process.argv.slice(2).filter(arg => { + if (dashdash) + return !!arg + else if (arg === '--') + dashdash = true + else if (arg === '--no-glob' || arg === '-G') + noglob = true + else if (arg === '--glob' || arg === '-g') + noglob = false + else if (arg.match(/^(-+|\/)(h(elp)?|\?)$/)) + help = true + else if (arg === '--preserve-root') + preserveRoot = true + else if (arg === '--no-preserve-root') + preserveRoot = false + else + return !!arg +}).filter(arg => !preserveRoot || filterOutRoot(arg)) + +const go = n => { + if (n >= args.length) + return + const options = noglob ? { glob: false } : {} + rimraf(args[n], options, er => { + if (er) + throw er + go(n+1) + }) +} + +if (help || args.length === 0) { + // If they didn't ask for help, then this is not a "success" + const log = help ? 
console.log : console.error + log('Usage: rimraf [ ...]') + log('') + log(' Deletes all files and folders at "path" recursively.') + log('') + log('Options:') + log('') + log(' -h, --help Display this usage info') + log(' -G, --no-glob Do not expand glob patterns in arguments') + log(' -g, --glob Expand glob patterns in arguments (default)') + log(' --preserve-root Do not remove \'/\' (default)') + log(' --no-preserve-root Do not treat \'/\' specially') + log(' -- Stop parsing flags') + process.exit(help ? 0 : 1) +} else + go(0) diff --git a/node_modules/rimraf/package.json b/node_modules/rimraf/package.json new file mode 100644 index 0000000..1bf8d5e --- /dev/null +++ b/node_modules/rimraf/package.json @@ -0,0 +1,32 @@ +{ + "name": "rimraf", + "version": "3.0.2", + "main": "rimraf.js", + "description": "A deep deletion module for node (like `rm -rf`)", + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": "git://github.com/isaacs/rimraf.git", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --follow-tags", + "test": "tap test/*.js" + }, + "bin": "./bin.js", + "dependencies": { + "glob": "^7.1.3" + }, + "files": [ + "LICENSE", + "README.md", + "bin.js", + "rimraf.js" + ], + "devDependencies": { + "mkdirp": "^0.5.1", + "tap": "^12.1.1" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } +} diff --git a/node_modules/rimraf/rimraf.js b/node_modules/rimraf/rimraf.js new file mode 100644 index 0000000..34da417 --- /dev/null +++ b/node_modules/rimraf/rimraf.js @@ -0,0 +1,360 @@ +const assert = require("assert") +const path = require("path") +const fs = require("fs") +let glob = undefined +try { + glob = require("glob") +} catch (_err) { + // treat glob as optional. 
+} + +const defaultGlobOpts = { + nosort: true, + silent: true +} + +// for EMFILE handling +let timeout = 0 + +const isWindows = (process.platform === "win32") + +const defaults = options => { + const methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(m => { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) + + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true + } + if (options.disableGlob !== true && glob === undefined) { + throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') + } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts +} + +const rimraf = (p, options, cb) => { + if (typeof options === 'function') { + cb = options + options = {} + } + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + defaults(options) + + let busyTries = 0 + let errState = null + let n = 0 + + const next = (er) => { + errState = errState || er + if (--n === 0) + cb(errState) + } + + const afterGlob = (er, results) => { + if (er) + return cb(er) + + n = results.length + if (n === 0) + return cb() + + results.forEach(p => { + const CB = (er) => { + if (er) { + if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + // try again, with the same exact callback as this one. + return setTimeout(() => rimraf_(p, options, CB), busyTries * 100) + } + + // this one won't happen if graceful-fs is used. 
+ if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(() => rimraf_(p, options, CB), timeout ++) + } + + // already gone + if (er.code === "ENOENT") er = null + } + + timeout = 0 + next(er) + } + rimraf_(p, options, CB) + }) + } + + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) + + options.lstat(p, (er, stat) => { + if (!er) + return afterGlob(null, [p]) + + glob(p, options.glob, afterGlob) + }) + +} + +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +const rimraf_ = (p, options, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, (er, st) => { + if (er && er.code === "ENOENT") + return cb(null) + + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) + + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) + + options.unlink(p, er => { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? 
fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) + } + return cb(er) + }) + }) +} + +const fixWinEPERM = (p, options, er, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.chmod(p, 0o666, er2 => { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, (er3, stats) => { + if (er3) + cb(er3.code === "ENOENT" ? null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) + }) +} + +const fixWinEPERMSync = (p, options, er) => { + assert(p) + assert(options) + + try { + options.chmodSync(p, 0o666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er + } + + let stats + try { + stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } + + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) +} + +const rmdir = (p, options, originalEr, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. 
+ options.rmdir(p, er => { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) +} + +const rmkids = (p, options, cb) => { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.readdir(p, (er, files) => { + if (er) + return cb(er) + let n = files.length + if (n === 0) + return options.rmdir(p, cb) + let errState + files.forEach(f => { + rimraf(path.join(p, f), options, er => { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) +} + +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +const rimrafSync = (p, options) => { + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + let results + + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { + try { + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) + } + } + + if (!results.length) + return + + for (let i = 0; i < results.length; i++) { + const p = results[i] + + let st + try { + st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } + + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er + + rmdirSync(p, options, er) + } + } +} + +const rmdirSync = (p, options, originalEr) => { + assert(p) + assert(options) + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } +} + +const rmkidsSync = (p, options) => { + assert(p) + assert(options) + options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) + + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. + const retries = isWindows ? 100 : 1 + let i = 0 + do { + let threw = true + try { + const ret = options.rmdirSync(p, options) + threw = false + return ret + } finally { + if (++i < retries && threw) + continue + } + } while (true) +} + +module.exports = rimraf +rimraf.sync = rimrafSync diff --git a/node_modules/semver/LICENSE b/node_modules/semver/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/semver/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/semver/README.md b/node_modules/semver/README.md new file mode 100644 index 0000000..e952215 --- /dev/null +++ b/node_modules/semver/README.md @@ -0,0 +1,664 @@ +semver(1) -- The semantic versioner for npm +=========================================== + +## Install + +```bash +npm install semver +```` + +## Usage + +As a node module: + +```js +const semver = require('semver') + +semver.valid('1.2.3') // '1.2.3' +semver.valid('a.b.c') // null +semver.clean(' =v1.2.3 ') // '1.2.3' +semver.satisfies('1.2.3', '1.x || >=2.5.0 || 5.0.0 - 7.2.3') // true +semver.gt('1.2.3', '9.8.7') // false +semver.lt('1.2.3', '9.8.7') // true +semver.minVersion('>=1.0.0') // '1.0.0' +semver.valid(semver.coerce('v2')) // '2.0.0' +semver.valid(semver.coerce('42.6.7.9.3-alpha')) // '42.6.7' +``` + +You can also just load the module for the function that you care about if +you'd like to minimize your footprint. 
+ +```js +// load the whole API at once in a single object +const semver = require('semver') + +// or just load the bits you need +// all of them listed here, just pick and choose what you want + +// classes +const SemVer = require('semver/classes/semver') +const Comparator = require('semver/classes/comparator') +const Range = require('semver/classes/range') + +// functions for working with versions +const semverParse = require('semver/functions/parse') +const semverValid = require('semver/functions/valid') +const semverClean = require('semver/functions/clean') +const semverInc = require('semver/functions/inc') +const semverDiff = require('semver/functions/diff') +const semverMajor = require('semver/functions/major') +const semverMinor = require('semver/functions/minor') +const semverPatch = require('semver/functions/patch') +const semverPrerelease = require('semver/functions/prerelease') +const semverCompare = require('semver/functions/compare') +const semverRcompare = require('semver/functions/rcompare') +const semverCompareLoose = require('semver/functions/compare-loose') +const semverCompareBuild = require('semver/functions/compare-build') +const semverSort = require('semver/functions/sort') +const semverRsort = require('semver/functions/rsort') + +// low-level comparators between versions +const semverGt = require('semver/functions/gt') +const semverLt = require('semver/functions/lt') +const semverEq = require('semver/functions/eq') +const semverNeq = require('semver/functions/neq') +const semverGte = require('semver/functions/gte') +const semverLte = require('semver/functions/lte') +const semverCmp = require('semver/functions/cmp') +const semverCoerce = require('semver/functions/coerce') + +// working with ranges +const semverSatisfies = require('semver/functions/satisfies') +const semverMaxSatisfying = require('semver/ranges/max-satisfying') +const semverMinSatisfying = require('semver/ranges/min-satisfying') +const semverToComparators = 
require('semver/ranges/to-comparators') +const semverMinVersion = require('semver/ranges/min-version') +const semverValidRange = require('semver/ranges/valid') +const semverOutside = require('semver/ranges/outside') +const semverGtr = require('semver/ranges/gtr') +const semverLtr = require('semver/ranges/ltr') +const semverIntersects = require('semver/ranges/intersects') +const semverSimplifyRange = require('semver/ranges/simplify') +const semverRangeSubset = require('semver/ranges/subset') +``` + +As a command-line utility: + +``` +$ semver -h + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, prerelease, or release. Default level is 'patch'. + Only one version may be specified. + +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. + +-l --loose + Interpret versions and ranges loosely + +-n <0|1> + This is the base to be used for the prerelease identifier. + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them. +``` + +## Versions + +A "version" is described by the `v2.0.0` specification found at +. + +A leading `"="` or `"v"` character is stripped off and ignored. 
+Support for stripping a leading "v" is kept for compatibility with `v1.0.0` of the SemVer +specification but should not be used anymore. + +## Ranges + +A `version range` is a set of `comparators` that specify versions +that satisfy the range. + +A `comparator` is composed of an `operator` and a `version`. The set +of primitive `operators` is: + +* `<` Less than +* `<=` Less than or equal to +* `>` Greater than +* `>=` Greater than or equal to +* `=` Equal. If no operator is specified, then equality is assumed, + so this operator is optional but MAY be included. + +For example, the comparator `>=1.2.7` would match the versions +`1.2.7`, `1.2.8`, `2.5.3`, and `1.3.9`, but not the versions `1.2.6` +or `1.1.0`. The comparator `>1` is equivalent to `>=2.0.0` and +would match the versions `2.0.0` and `3.1.0`, but not the versions +`1.0.1` or `1.1.0`. + +Comparators can be joined by whitespace to form a `comparator set`, +which is satisfied by the **intersection** of all of the comparators +it includes. + +A range is composed of one or more comparator sets, joined by `||`. A +version matches a range if and only if every comparator in at least +one of the `||`-separated comparator sets is satisfied by the version. + +For example, the range `>=1.2.7 <1.3.0` would match the versions +`1.2.7`, `1.2.8`, and `1.2.99`, but not the versions `1.2.6`, `1.3.0`, +or `1.1.0`. + +The range `1.2.7 || >=1.2.9 <2.0.0` would match the versions `1.2.7`, +`1.2.9`, and `1.4.6`, but not the versions `1.2.8` or `2.0.0`. + +### Prerelease Tags + +If a version has a prerelease tag (for example, `1.2.3-alpha.3`) then +it will only be allowed to satisfy comparator sets if at least one +comparator with the same `[major, minor, patch]` tuple also has a +prerelease tag. 
+ +For example, the range `>1.2.3-alpha.3` would be allowed to match the +version `1.2.3-alpha.7`, but it would *not* be satisfied by +`3.4.5-alpha.9`, even though `3.4.5-alpha.9` is technically "greater +than" `1.2.3-alpha.3` according to the SemVer sort rules. The version +range only accepts prerelease tags on the `1.2.3` version. +Version `3.4.5` *would* satisfy the range because it does not have a +prerelease flag, and `3.4.5` is greater than `1.2.3-alpha.7`. + +The purpose of this behavior is twofold. First, prerelease versions +frequently are updated very quickly, and contain many breaking changes +that are (by the author's design) not yet fit for public consumption. +Therefore, by default, they are excluded from range-matching +semantics. + +Second, a user who has opted into using a prerelease version has +indicated the intent to use *that specific* set of +alpha/beta/rc versions. By including a prerelease tag in the range, +the user is indicating that they are aware of the risk. However, it +is still not appropriate to assume that they have opted into taking a +similar risk on the *next* set of prerelease versions. + +Note that this behavior can be suppressed (treating all prerelease +versions as if they were normal versions, for range-matching) +by setting the `includePrerelease` flag on the options +object to any +[functions](https://github.com/npm/node-semver#functions) that do +range matching. 
+ +#### Prerelease Identifiers + +The method `.inc` takes an additional `identifier` string argument that +will append the value of the string as a prerelease identifier: + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta') +// '1.2.4-beta.0' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta +1.2.4-beta.0 +``` + +Which then can be used to increment further: + +```bash +$ semver 1.2.4-beta.0 -i prerelease +1.2.4-beta.1 +``` + +To get out of the prerelease phase, use the `release` option: + +```bash +$ semver 1.2.4-beta.1 -i release +1.2.4 +``` + +#### Prerelease Identifier Base + +The method `.inc` takes an optional parameter 'identifierBase' string +that will let you let your prerelease number as zero-based or one-based. +Set to `false` to omit the prerelease number altogether. +If you do not specify this parameter, it will default to zero-based. + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta', '1') +// '1.2.4-beta.1' +``` + +```javascript +semver.inc('1.2.3', 'prerelease', 'beta', false) +// '1.2.4-beta' +``` + +command-line example: + +```bash +$ semver 1.2.3 -i prerelease --preid beta -n 1 +1.2.4-beta.1 +``` + +```bash +$ semver 1.2.3 -i prerelease --preid beta -n false +1.2.4-beta +``` + +### Advanced Range Syntax + +Advanced range syntax desugars to primitive comparators in +deterministic ways. + +Advanced ranges may be combined in the same way as primitive +comparators using white space or `||`. + +#### Hyphen Ranges `X.Y.Z - A.B.C` + +Specifies an inclusive set. + +* `1.2.3 - 2.3.4` := `>=1.2.3 <=2.3.4` + +If a partial version is provided as the first version in the inclusive +range, then the missing pieces are replaced with zeroes. + +* `1.2 - 2.3.4` := `>=1.2.0 <=2.3.4` + +If a partial version is provided as the second version in the +inclusive range, then all versions that start with the supplied parts +of the tuple are accepted, but nothing that would be greater than the +provided tuple parts. 
+ +* `1.2.3 - 2.3` := `>=1.2.3 <2.4.0-0` +* `1.2.3 - 2` := `>=1.2.3 <3.0.0-0` + +#### X-Ranges `1.2.x` `1.X` `1.2.*` `*` + +Any of `X`, `x`, or `*` may be used to "stand in" for one of the +numeric values in the `[major, minor, patch]` tuple. + +* `*` := `>=0.0.0` (Any non-prerelease version satisfies, unless + `includePrerelease` is specified, in which case any version at all + satisfies) +* `1.x` := `>=1.0.0 <2.0.0-0` (Matching major version) +* `1.2.x` := `>=1.2.0 <1.3.0-0` (Matching major and minor versions) + +A partial version range is treated as an X-Range, so the special +character is in fact optional. + +* `""` (empty string) := `*` := `>=0.0.0` +* `1` := `1.x.x` := `>=1.0.0 <2.0.0-0` +* `1.2` := `1.2.x` := `>=1.2.0 <1.3.0-0` + +#### Tilde Ranges `~1.2.3` `~1.2` `~1` + +Allows patch-level changes if a minor version is specified on the +comparator. Allows minor-level changes if not. + +* `~1.2.3` := `>=1.2.3 <1.(2+1).0` := `>=1.2.3 <1.3.0-0` +* `~1.2` := `>=1.2.0 <1.(2+1).0` := `>=1.2.0 <1.3.0-0` (Same as `1.2.x`) +* `~1` := `>=1.0.0 <(1+1).0.0` := `>=1.0.0 <2.0.0-0` (Same as `1.x`) +* `~0.2.3` := `>=0.2.3 <0.(2+1).0` := `>=0.2.3 <0.3.0-0` +* `~0.2` := `>=0.2.0 <0.(2+1).0` := `>=0.2.0 <0.3.0-0` (Same as `0.2.x`) +* `~0` := `>=0.0.0 <(0+1).0.0` := `>=0.0.0 <1.0.0-0` (Same as `0.x`) +* `~1.2.3-beta.2` := `>=1.2.3-beta.2 <1.3.0-0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. + +#### Caret Ranges `^1.2.3` `^0.2.5` `^0.0.4` + +Allows changes that do not modify the left-most non-zero element in the +`[major, minor, patch]` tuple. In other words, this allows patch and +minor updates for versions `1.0.0` and above, patch updates for +versions `0.X >=0.1.0`, and *no* updates for versions `0.0.X`. 
+ +Many authors treat a `0.x` version as if the `x` were the major +"breaking-change" indicator. + +Caret ranges are ideal when an author may make breaking changes +between `0.2.4` and `0.3.0` releases, which is a common practice. +However, it presumes that there will *not* be breaking changes between +`0.2.4` and `0.2.5`. It allows for changes that are presumed to be +additive (but non-breaking), according to commonly observed practices. + +* `^1.2.3` := `>=1.2.3 <2.0.0-0` +* `^0.2.3` := `>=0.2.3 <0.3.0-0` +* `^0.0.3` := `>=0.0.3 <0.0.4-0` +* `^1.2.3-beta.2` := `>=1.2.3-beta.2 <2.0.0-0` Note that prereleases in + the `1.2.3` version will be allowed, if they are greater than or + equal to `beta.2`. So, `1.2.3-beta.4` would be allowed, but + `1.2.4-beta.2` would not, because it is a prerelease of a + different `[major, minor, patch]` tuple. +* `^0.0.3-beta` := `>=0.0.3-beta <0.0.4-0` Note that prereleases in the + `0.0.3` version *only* will be allowed, if they are greater than or + equal to `beta`. So, `0.0.3-pr.2` would be allowed. + +When parsing caret ranges, a missing `patch` value desugars to the +number `0`, but will allow flexibility within that value, even if the +major and minor versions are both `0`. + +* `^1.2.x` := `>=1.2.0 <2.0.0-0` +* `^0.0.x` := `>=0.0.0 <0.1.0-0` +* `^0.0` := `>=0.0.0 <0.1.0-0` + +A missing `minor` and `patch` values will desugar to zero, but also +allow flexibility within those values, even if the major version is +zero. + +* `^1.x` := `>=1.0.0 <2.0.0-0` +* `^0.x` := `>=0.0.0 <1.0.0-0` + +### Range Grammar + +Putting all this together, here is a Backus-Naur grammar for ranges, +for the benefit of parser authors: + +```bnf +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' 
xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | ['1'-'9'] ( ['0'-'9'] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ +``` + +## Functions + +All methods and classes take a final `options` object argument. All +options in this object are `false` by default. The options supported +are: + +- `loose`: Be more forgiving about not-quite-valid semver strings. + (Any resulting output will always be 100% strict compliant, of + course.) For backwards compatibility reasons, if the `options` + argument is a boolean value instead of an object, it is interpreted + to be the `loose` param. +- `includePrerelease`: Set to suppress the [default + behavior](https://github.com/npm/node-semver#prerelease-tags) of + excluding prerelease tagged versions from ranges unless they are + explicitly opted into. + +Strict-mode Comparators and Ranges will be strict about the SemVer +strings that they parse. + +* `valid(v)`: Return the parsed version, or null if it's not valid. +* `inc(v, releaseType, options, identifier, identifierBase)`: + Return the version incremented by the release + type (`major`, `premajor`, `minor`, `preminor`, `patch`, + `prepatch`, `prerelease`, or `release`), or null if it's not valid + * `premajor` in one call will bump the version up to the next major + version and down to a prerelease of that major version. + `preminor`, and `prepatch` work the same way. + * If called from a non-prerelease version, `prerelease` will work the + same as `prepatch`. It increments the patch version and then makes a + prerelease. If the input version is already a prerelease it simply + increments it. + * `release` will remove any prerelease part of the version. + * `identifier` can be used to prefix `premajor`, `preminor`, + `prepatch`, or `prerelease` version increments. 
`identifierBase` + is the base to be used for the `prerelease` identifier. +* `prerelease(v)`: Returns an array of prerelease components, or null + if none exist. Example: `prerelease('1.2.3-alpha.1') -> ['alpha', 1]` +* `major(v)`: Return the major version number. +* `minor(v)`: Return the minor version number. +* `patch(v)`: Return the patch version number. +* `intersects(r1, r2, loose)`: Return true if the two supplied ranges + or comparators intersect. +* `parse(v)`: Attempt to parse a string as a semantic version, returning either + a `SemVer` object or `null`. + +### Comparison + +* `gt(v1, v2)`: `v1 > v2` +* `gte(v1, v2)`: `v1 >= v2` +* `lt(v1, v2)`: `v1 < v2` +* `lte(v1, v2)`: `v1 <= v2` +* `eq(v1, v2)`: `v1 == v2` This is true if they're logically equivalent, + even if they're not the same string. You already know how to + compare strings. +* `neq(v1, v2)`: `v1 != v2` The opposite of `eq`. +* `cmp(v1, comparator, v2)`: Pass in a comparison string, and it'll call + the corresponding function above. `"==="` and `"!=="` do simple + string comparison, but are included for completeness. Throws if an + invalid comparison string is provided. +* `compare(v1, v2)`: Return `0` if `v1 == v2`, or `1` if `v1` is greater, or `-1` if + `v2` is greater. Sorts in ascending order if passed to `Array.sort()`. +* `rcompare(v1, v2)`: The reverse of `compare`. Sorts an array of versions + in descending order when passed to `Array.sort()`. +* `compareBuild(v1, v2)`: The same as `compare` but considers `build` when two versions + are equal. Sorts in ascending order if passed to `Array.sort()`. +* `compareLoose(v1, v2)`: Short for `compare(v1, v2, { loose: true })`. +* `diff(v1, v2)`: Returns the difference between two versions by the release type + (`major`, `premajor`, `minor`, `preminor`, `patch`, `prepatch`, or `prerelease`), + or null if the versions are the same. + +### Sorting + +* `sort(versions)`: Returns a sorted array of versions based on the `compareBuild` + function. 
+* `rsort(versions)`: The reverse of `sort`. Returns an array of versions based on + the `compareBuild` function in descending order. + +### Comparators + +* `intersects(comparator)`: Return true if the comparators intersect + +### Ranges + +* `validRange(range)`: Return the valid range or null if it's not valid. +* `satisfies(version, range)`: Return true if the version satisfies the + range. +* `maxSatisfying(versions, range)`: Return the highest version in the list + that satisfies the range, or `null` if none of them do. +* `minSatisfying(versions, range)`: Return the lowest version in the list + that satisfies the range, or `null` if none of them do. +* `minVersion(range)`: Return the lowest version that can match + the given range. +* `gtr(version, range)`: Return `true` if the version is greater than all the + versions possible in the range. +* `ltr(version, range)`: Return `true` if the version is less than all the + versions possible in the range. +* `outside(version, range, hilo)`: Return true if the version is outside + the bounds of the range in either the high or low direction. The + `hilo` argument must be either the string `'>'` or `'<'`. (This is + the function called by `gtr` and `ltr`.) +* `intersects(range)`: Return true if any of the range comparators intersect. +* `simplifyRange(versions, range)`: Return a "simplified" range that + matches the same items in the `versions` list as the range specified. Note + that it does *not* guarantee that it would match the same versions in all + cases, only for the set of versions provided. This is useful when + generating ranges by joining together multiple versions with `||` + programmatically, to provide the user with something a bit more + ergonomic. If the provided range is shorter in string-length than the + generated range, then that is returned. +* `subset(subRange, superRange)`: Return `true` if the `subRange` range is + entirely contained by the `superRange` range. 
+ +Note that, since ranges may be non-contiguous, a version might not be +greater than a range, less than a range, *or* satisfy a range! For +example, the range `1.2 <1.2.9 || >2.0.0` would have a hole from `1.2.9` +until `2.0.0`, so version `1.2.10` would not be greater than the +range (because `2.0.1` satisfies, which is higher), nor less than the +range (since `1.2.8` satisfies, which is lower), and it also does not +satisfy the range. + +If you want to know if a version satisfies or does not satisfy a +range, use the `satisfies(version, range)` function. + +### Coercion + +* `coerce(version, options)`: Coerces a string to semver if possible + +This aims to provide a very forgiving translation of a non-semver string to +semver. It looks for the first digit in a string and consumes all +remaining characters which satisfy at least a partial semver (e.g., `1`, +`1.2`, `1.2.3`) up to the max permitted length (256 characters). Longer +versions are simply truncated (`4.6.3.9.2-alpha2` becomes `4.6.3`). All +surrounding text is simply ignored (`v3.4 replaces v3.3.1` becomes +`3.4.0`). Only text which lacks digits will fail coercion (`version one` +is not valid). The maximum length for any semver component considered for +coercion is 16 characters; longer components will be ignored +(`10000000000000000.4.7.4` becomes `4.7.4`). The maximum value for any +semver component is `Number.MAX_SAFE_INTEGER || (2**53 - 1)`; higher value +components are invalid (`9999999999999999.4.7.4` is likely invalid). + +If the `options.rtl` flag is set, then `coerce` will return the right-most +coercible tuple that does not share an ending index with a longer coercible +tuple. For example, `1.2.3.4` will return `2.3.4` in rtl mode, not +`4.0.0`. `1.2.3/4` will return `4.0.0`, because the `4` is not a part of +any other overlapping SemVer tuple. + +If the `options.includePrerelease` flag is set, then the `coerce` result will contain +prerelease and build parts of a version. 
For example, `1.2.3.4-rc.1+rev.2` +will preserve prerelease `rc.1` and build `rev.2` in the result. + +### Clean + +* `clean(version)`: Clean a string to be a valid semver if possible + +This will return a cleaned and trimmed semver version. If the provided +version is not valid a null will be returned. This does not work for +ranges. + +ex. +* `s.clean(' = v 2.1.5foo')`: `null` +* `s.clean(' = v 2.1.5foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean(' = v 2.1.5-foo')`: `null` +* `s.clean(' = v 2.1.5-foo', { loose: true })`: `'2.1.5-foo'` +* `s.clean('=v2.1.5')`: `'2.1.5'` +* `s.clean(' =v2.1.5')`: `'2.1.5'` +* `s.clean(' 2.1.5 ')`: `'2.1.5'` +* `s.clean('~1.0.0')`: `null` + +## Constants + +As a convenience, helper constants are exported to provide information about what `node-semver` supports: + +### `RELEASE_TYPES` + +- major +- premajor +- minor +- preminor +- patch +- prepatch +- prerelease + +``` +const semver = require('semver'); + +if (semver.RELEASE_TYPES.includes(arbitraryUserInput)) { + console.log('This is a valid release type!'); +} else { + console.warn('This is NOT a valid release type!'); +} +``` + +### `SEMVER_SPEC_VERSION` + +2.0.0 + +``` +const semver = require('semver'); + +console.log('We are currently using the semver specification version:', semver.SEMVER_SPEC_VERSION); +``` + +## Exported Modules + + + +You may pull in just the part of this semver utility that you need if you +are sensitive to packing and tree-shaking concerns. The main +`require('semver')` export uses getter functions to lazily load the parts +of the API that are used. 
+ +The following modules are available: + +* `require('semver')` +* `require('semver/classes')` +* `require('semver/classes/comparator')` +* `require('semver/classes/range')` +* `require('semver/classes/semver')` +* `require('semver/functions/clean')` +* `require('semver/functions/cmp')` +* `require('semver/functions/coerce')` +* `require('semver/functions/compare')` +* `require('semver/functions/compare-build')` +* `require('semver/functions/compare-loose')` +* `require('semver/functions/diff')` +* `require('semver/functions/eq')` +* `require('semver/functions/gt')` +* `require('semver/functions/gte')` +* `require('semver/functions/inc')` +* `require('semver/functions/lt')` +* `require('semver/functions/lte')` +* `require('semver/functions/major')` +* `require('semver/functions/minor')` +* `require('semver/functions/neq')` +* `require('semver/functions/parse')` +* `require('semver/functions/patch')` +* `require('semver/functions/prerelease')` +* `require('semver/functions/rcompare')` +* `require('semver/functions/rsort')` +* `require('semver/functions/satisfies')` +* `require('semver/functions/sort')` +* `require('semver/functions/valid')` +* `require('semver/ranges/gtr')` +* `require('semver/ranges/intersects')` +* `require('semver/ranges/ltr')` +* `require('semver/ranges/max-satisfying')` +* `require('semver/ranges/min-satisfying')` +* `require('semver/ranges/min-version')` +* `require('semver/ranges/outside')` +* `require('semver/ranges/simplify')` +* `require('semver/ranges/subset')` +* `require('semver/ranges/to-comparators')` +* `require('semver/ranges/valid')` + diff --git a/node_modules/semver/bin/semver.js b/node_modules/semver/bin/semver.js new file mode 100644 index 0000000..dbb1bf5 --- /dev/null +++ b/node_modules/semver/bin/semver.js @@ -0,0 +1,191 @@ +#!/usr/bin/env node +// Standalone semver comparison program. +// Exits successfully and prints matching version(s) if +// any supplied version is valid and passes all tests. 
+ +'use strict' + +const argv = process.argv.slice(2) + +let versions = [] + +const range = [] + +let inc = null + +const version = require('../package.json').version + +let loose = false + +let includePrerelease = false + +let coerce = false + +let rtl = false + +let identifier + +let identifierBase + +const semver = require('../') +const parseOptions = require('../internal/parse-options') + +let reverse = false + +let options = {} + +const main = () => { + if (!argv.length) { + return help() + } + while (argv.length) { + let a = argv.shift() + const indexOfEqualSign = a.indexOf('=') + if (indexOfEqualSign !== -1) { + const value = a.slice(indexOfEqualSign + 1) + a = a.slice(0, indexOfEqualSign) + argv.unshift(value) + } + switch (a) { + case '-rv': case '-rev': case '--rev': case '--reverse': + reverse = true + break + case '-l': case '--loose': + loose = true + break + case '-p': case '--include-prerelease': + includePrerelease = true + break + case '-v': case '--version': + versions.push(argv.shift()) + break + case '-i': case '--inc': case '--increment': + switch (argv[0]) { + case 'major': case 'minor': case 'patch': case 'prerelease': + case 'premajor': case 'preminor': case 'prepatch': + case 'release': + inc = argv.shift() + break + default: + inc = 'patch' + break + } + break + case '--preid': + identifier = argv.shift() + break + case '-r': case '--range': + range.push(argv.shift()) + break + case '-n': + identifierBase = argv.shift() + if (identifierBase === 'false') { + identifierBase = false + } + break + case '-c': case '--coerce': + coerce = true + break + case '--rtl': + rtl = true + break + case '--ltr': + rtl = false + break + case '-h': case '--help': case '-?': + return help() + default: + versions.push(a) + break + } + } + + options = parseOptions({ loose, includePrerelease, rtl }) + + versions = versions.map((v) => { + return coerce ? 
(semver.coerce(v, options) || { version: v }).version : v + }).filter((v) => { + return semver.valid(v) + }) + if (!versions.length) { + return fail() + } + if (inc && (versions.length !== 1 || range.length)) { + return failInc() + } + + for (let i = 0, l = range.length; i < l; i++) { + versions = versions.filter((v) => { + return semver.satisfies(v, range[i], options) + }) + if (!versions.length) { + return fail() + } + } + versions + .sort((a, b) => semver[reverse ? 'rcompare' : 'compare'](a, b, options)) + .map(v => semver.clean(v, options)) + .map(v => inc ? semver.inc(v, inc, options, identifier, identifierBase) : v) + .forEach(v => console.log(v)) +} + +const failInc = () => { + console.error('--inc can only be used on a single version with no range') + fail() +} + +const fail = () => process.exit(1) + +const help = () => console.log( +`SemVer ${version} + +A JavaScript implementation of the https://semver.org/ specification +Copyright Isaac Z. Schlueter + +Usage: semver [options] [ [...]] +Prints valid versions sorted by SemVer precedence + +Options: +-r --range + Print versions that match the specified range. + +-i --increment [] + Increment a version by the specified level. Level can + be one of: major, minor, patch, premajor, preminor, + prepatch, prerelease, or release. Default level is 'patch'. + Only one version may be specified. + +--preid + Identifier to be used to prefix premajor, preminor, + prepatch or prerelease version increments. + +-l --loose + Interpret versions and ranges loosely + +-p --include-prerelease + Always include prerelease versions in range matching + +-c --coerce + Coerce a string into SemVer if possible + (does not imply --loose) + +--rtl + Coerce version strings right to left + +--ltr + Coerce version strings left to right (default) + +-n + Base number to be used for the prerelease identifier. + Can be either 0 or 1, or false to omit the number altogether. + Defaults to 0. 
+ +Program exits successfully if any valid version satisfies +all supplied ranges, and prints all satisfying versions. + +If no satisfying versions are found, then exits failure. + +Versions are printed in ascending order, so supplying +multiple versions to the utility will just sort them.`) + +main() diff --git a/node_modules/semver/classes/comparator.js b/node_modules/semver/classes/comparator.js new file mode 100644 index 0000000..647c1f0 --- /dev/null +++ b/node_modules/semver/classes/comparator.js @@ -0,0 +1,143 @@ +'use strict' + +const ANY = Symbol('SemVer ANY') +// hoisted class for cyclic dependency +class Comparator { + static get ANY () { + return ANY + } + + constructor (comp, options) { + options = parseOptions(options) + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + comp = comp.trim().split(/\s+/).join(' ') + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) + } + + parse (comp) { + const r = this.options.loose ? re[t.COMPARATORLOOSE] : re[t.COMPARATOR] + const m = comp.match(r) + + if (!m) { + throw new TypeError(`Invalid comparator: ${comp}`) + } + + this.operator = m[1] !== undefined ? m[1] : '' + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. 
+ if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } + } + + toString () { + return this.value + } + + test (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY || version === ANY) { + return true + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + return cmp(version, this.operator, this.semver, this.options) + } + + intersects (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (this.operator === '') { + if (this.value === '') { + return true + } + return new Range(comp.value, options).test(this.value) + } else if (comp.operator === '') { + if (comp.value === '') { + return true + } + return new Range(this.value, options).test(comp.semver) + } + + options = parseOptions(options) + + // Special cases where nothing can possibly be lower + if (options.includePrerelease && + (this.value === '<0.0.0-0' || comp.value === '<0.0.0-0')) { + return false + } + if (!options.includePrerelease && + (this.value.startsWith('<0.0.0') || comp.value.startsWith('<0.0.0'))) { + return false + } + + // Same direction increasing (> or >=) + if (this.operator.startsWith('>') && comp.operator.startsWith('>')) { + return true + } + // Same direction decreasing (< or <=) + if (this.operator.startsWith('<') && comp.operator.startsWith('<')) { + return true + } + // same SemVer and both sides are inclusive (<= or >=) + if ( + (this.semver.version === comp.semver.version) && + this.operator.includes('=') && comp.operator.includes('=')) { + return true + } + // opposite directions less than + if (cmp(this.semver, '<', comp.semver, options) && + this.operator.startsWith('>') && comp.operator.startsWith('<')) { + return true + } + // opposite directions greater than + if (cmp(this.semver, '>', comp.semver, options) && + this.operator.startsWith('<') && 
comp.operator.startsWith('>')) { + return true + } + return false + } +} + +module.exports = Comparator + +const parseOptions = require('../internal/parse-options') +const { safeRe: re, t } = require('../internal/re') +const cmp = require('../functions/cmp') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const Range = require('./range') diff --git a/node_modules/semver/classes/index.js b/node_modules/semver/classes/index.js new file mode 100644 index 0000000..91c24ec --- /dev/null +++ b/node_modules/semver/classes/index.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = { + SemVer: require('./semver.js'), + Range: require('./range.js'), + Comparator: require('./comparator.js'), +} diff --git a/node_modules/semver/classes/range.js b/node_modules/semver/classes/range.js new file mode 100644 index 0000000..94629ce --- /dev/null +++ b/node_modules/semver/classes/range.js @@ -0,0 +1,557 @@ +'use strict' + +const SPACE_CHARACTERS = /\s+/g + +// hoisted class for cyclic dependency +class Range { + constructor (range, options) { + options = parseOptions(options) + + if (range instanceof Range) { + if ( + range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease + ) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + // just put it in the set and return + this.raw = range.value + this.set = [[range]] + this.formatted = undefined + return this + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First reduce all whitespace as much as possible so we do not have to rely + // on potentially slow regexes like \s*. This is then stored and used for + // future error messages as well. 
+ this.raw = range.trim().replace(SPACE_CHARACTERS, ' ') + + // First, split on || + this.set = this.raw + .split('||') + // map the range to a 2d array of comparators + .map(r => this.parseRange(r.trim())) + // throw out any comparator lists that are empty + // this generally means that it was not a valid range, which is allowed + // in loose mode, but will still throw if the WHOLE range is invalid. + .filter(c => c.length) + + if (!this.set.length) { + throw new TypeError(`Invalid SemVer Range: ${this.raw}`) + } + + // if we have any that are not the null set, throw out null sets. + if (this.set.length > 1) { + // keep the first one, in case they're all null sets + const first = this.set[0] + this.set = this.set.filter(c => !isNullSet(c[0])) + if (this.set.length === 0) { + this.set = [first] + } else if (this.set.length > 1) { + // if we have any that are *, then the range is just * + for (const c of this.set) { + if (c.length === 1 && isAny(c[0])) { + this.set = [c] + break + } + } + } + } + + this.formatted = undefined + } + + get range () { + if (this.formatted === undefined) { + this.formatted = '' + for (let i = 0; i < this.set.length; i++) { + if (i > 0) { + this.formatted += '||' + } + const comps = this.set[i] + for (let k = 0; k < comps.length; k++) { + if (k > 0) { + this.formatted += ' ' + } + this.formatted += comps[k].toString().trim() + } + } + } + return this.formatted + } + + format () { + return this.range + } + + toString () { + return this.range + } + + parseRange (range) { + // memoize range parsing for performance. + // this is a very hot path, and fully deterministic. + const memoOpts = + (this.options.includePrerelease && FLAG_INCLUDE_PRERELEASE) | + (this.options.loose && FLAG_LOOSE) + const memoKey = memoOpts + ':' + range + const cached = cache.get(memoKey) + if (cached) { + return cached + } + + const loose = this.options.loose + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + const hr = loose ? 
re[t.HYPHENRANGELOOSE] : re[t.HYPHENRANGE] + range = range.replace(hr, hyphenReplace(this.options.includePrerelease)) + debug('hyphen replace', range) + + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[t.COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[t.TILDETRIM], tildeTrimReplace) + debug('tilde trim', range) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[t.CARETTRIM], caretTrimReplace) + debug('caret trim', range) + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + let rangeList = range + .split(' ') + .map(comp => parseComparator(comp, this.options)) + .join(' ') + .split(/\s+/) + // >=0.0.0 is equivalent to * + .map(comp => replaceGTE0(comp, this.options)) + + if (loose) { + // in loose mode, throw out any that are not valid comparators + rangeList = rangeList.filter(comp => { + debug('loose invalid filter', comp, this.options) + return !!comp.match(re[t.COMPARATORLOOSE]) + }) + } + debug('range list', rangeList) + + // if any comparators are the null set, then replace with JUST null set + // if more than one comparator, remove any * comparators + // also, don't include the same comparator more than once + const rangeMap = new Map() + const comparators = rangeList.map(comp => new Comparator(comp, this.options)) + for (const comp of comparators) { + if (isNullSet(comp)) { + return [comp] + } + rangeMap.set(comp.value, comp) + } + if (rangeMap.size > 1 && rangeMap.has('')) { + rangeMap.delete('') + } + + const result = [...rangeMap.values()] + cache.set(memoKey, result) + return result + } + + intersects (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some((thisComparators) => { + return ( + isSatisfiable(thisComparators, options) && + range.set.some((rangeComparators) => { + return ( + isSatisfiable(rangeComparators, options) && 
+ thisComparators.every((thisComparator) => { + return rangeComparators.every((rangeComparator) => { + return thisComparator.intersects(rangeComparator, options) + }) + }) + ) + }) + ) + }) + } + + // if ANY of the sets match ALL of its comparators, then pass + test (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + try { + version = new SemVer(version, this.options) + } catch (er) { + return false + } + } + + for (let i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false + } +} + +module.exports = Range + +const LRU = require('../internal/lrucache') +const cache = new LRU() + +const parseOptions = require('../internal/parse-options') +const Comparator = require('./comparator') +const debug = require('../internal/debug') +const SemVer = require('./semver') +const { + safeRe: re, + t, + comparatorTrimReplace, + tildeTrimReplace, + caretTrimReplace, +} = require('../internal/re') +const { FLAG_INCLUDE_PRERELEASE, FLAG_LOOSE } = require('../internal/constants') + +const isNullSet = c => c.value === '<0.0.0-0' +const isAny = c => c.value === '' + +// take a set of comparators and determine whether there +// exists a version which can satisfy it +const isSatisfiable = (comparators, options) => { + let result = true + const remainingComparators = comparators.slice() + let testComparator = remainingComparators.pop() + + while (result && remainingComparators.length) { + result = remainingComparators.every((otherComparator) => { + return testComparator.intersects(otherComparator, options) + }) + + testComparator = remainingComparators.pop() + } + + return result +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. 
+const parseComparator = (comp, options) => { + comp = comp.replace(re[t.BUILD], '') + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +const isX = id => !id || id.toLowerCase() === 'x' || id === '*' + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0-0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0-0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0-0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0-0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0-0 +// ~0.0.1 --> >=0.0.1 <0.1.0-0 +const replaceTildes = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceTilde(c, options)) + .join(' ') +} + +const replaceTilde = (comp, options) => { + const r = options.loose ? re[t.TILDELOOSE] : re[t.TILDE] + return comp.replace(r, (_, M, m, p, pr) => { + debug('tilde', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0 <${+M + 1}.0.0-0` + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0-0 + ret = `>=${M}.${m}.0 <${M}.${+m + 1}.0-0` + } else if (pr) { + debug('replaceTilde pr', pr) + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } else { + // ~1.2.3 == >=1.2.3 <1.3.0-0 + ret = `>=${M}.${m}.${p + } <${M}.${+m + 1}.0-0` + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0-0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0-0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0-0 +// ^1.2.3 --> >=1.2.3 <2.0.0-0 +// ^1.2.0 --> >=1.2.0 <2.0.0-0 +// ^0.0.1 --> >=0.0.1 <0.0.2-0 +// ^0.1.0 --> >=0.1.0 <0.2.0-0 +const replaceCarets = (comp, options) => { + return comp + .trim() + .split(/\s+/) + .map((c) => replaceCaret(c, options)) + .join(' ') +} + +const replaceCaret = (comp, 
options) => { + debug('caret', comp, options) + const r = options.loose ? re[t.CARETLOOSE] : re[t.CARET] + const z = options.includePrerelease ? '-0' : '' + return comp.replace(r, (_, M, m, p, pr) => { + debug('caret', comp, _, M, m, p, pr) + let ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = `>=${M}.0.0${z} <${+M + 1}.0.0-0` + } else if (isX(p)) { + if (M === '0') { + ret = `>=${M}.${m}.0${z} <${M}.${+m + 1}.0-0` + } else { + ret = `>=${M}.${m}.0${z} <${+M + 1}.0.0-0` + } + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p}-${pr + } <${+M + 1}.0.0-0` + } + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = `>=${M}.${m}.${p + }${z} <${M}.${m}.${+p + 1}-0` + } else { + ret = `>=${M}.${m}.${p + }${z} <${M}.${+m + 1}.0-0` + } + } else { + ret = `>=${M}.${m}.${p + } <${+M + 1}.0.0-0` + } + } + + debug('caret return', ret) + return ret + }) +} + +const replaceXRanges = (comp, options) => { + debug('replaceXRanges', comp, options) + return comp + .split(/\s+/) + .map((c) => replaceXRange(c, options)) + .join(' ') +} + +const replaceXRange = (comp, options) => { + comp = comp.trim() + const r = options.loose ? re[t.XRANGELOOSE] : re[t.XRANGE] + return comp.replace(r, (ret, gtlt, M, m, p, pr) => { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + const xM = isX(M) + const xm = xM || isX(m) + const xp = xm || isX(p) + const anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + // if we're including prereleases in the match, then we need + // to fix this to -0, the lowest possible prerelease value + pr = options.includePrerelease ? 
'-0' : '' + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0-0' + } else { + // nothing is forbidden + ret = '*' + } + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 + } + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } + } + + if (gtlt === '<') { + pr = '-0' + } + + ret = `${gtlt + M}.${m}.${p}${pr}` + } else if (xm) { + ret = `>=${M}.0.0${pr} <${+M + 1}.0.0-0` + } else if (xp) { + ret = `>=${M}.${m}.0${pr + } <${M}.${+m + 1}.0-0` + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +const replaceStars = (comp, options) => { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp + .trim() + .replace(re[t.STAR], '') +} + +const replaceGTE0 = (comp, options) => { + debug('replaceGTE0', comp, options) + return comp + .trim() + .replace(re[options.includePrerelease ? t.GTE0PRE : t.GTE0], '') +} + +// This function is passed to string.replace(re[t.HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0-0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0-0 +// TODO build? +const hyphenReplace = incPr => ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr) => { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = `>=${fM}.0.0${incPr ? '-0' : ''}` + } else if (isX(fp)) { + from = `>=${fM}.${fm}.0${incPr ? 
'-0' : ''}` + } else if (fpr) { + from = `>=${from}` + } else { + from = `>=${from}${incPr ? '-0' : ''}` + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = `<${+tM + 1}.0.0-0` + } else if (isX(tp)) { + to = `<${tM}.${+tm + 1}.0-0` + } else if (tpr) { + to = `<=${tM}.${tm}.${tp}-${tpr}` + } else if (incPr) { + to = `<${tM}.${tm}.${+tp + 1}-0` + } else { + to = `<=${to}` + } + + return `${from} ${to}`.trim() +} + +const testSet = (set, version, options) => { + for (let i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (let i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === Comparator.ANY) { + continue + } + + if (set[i].semver.prerelease.length > 0) { + const allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } + } + } + + // Version has a -pre, but it's not one of the ones we like. 
+ return false + } + + return true +} diff --git a/node_modules/semver/classes/semver.js b/node_modules/semver/classes/semver.js new file mode 100644 index 0000000..92254be --- /dev/null +++ b/node_modules/semver/classes/semver.js @@ -0,0 +1,333 @@ +'use strict' + +const debug = require('../internal/debug') +const { MAX_LENGTH, MAX_SAFE_INTEGER } = require('../internal/constants') +const { safeRe: re, t } = require('../internal/re') + +const parseOptions = require('../internal/parse-options') +const { compareIdentifiers } = require('../internal/identifiers') +class SemVer { + constructor (version, options) { + options = parseOptions(options) + + if (version instanceof SemVer) { + if (version.loose === !!options.loose && + version.includePrerelease === !!options.includePrerelease) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError(`Invalid version. Must be a string. Got type "${typeof version}".`) + } + + if (version.length > MAX_LENGTH) { + throw new TypeError( + `version is longer than ${MAX_LENGTH} characters` + ) + } + + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + // this isn't actually relevant for versions, but keep it so that we + // don't run into trouble passing this.options around. + this.includePrerelease = !!options.includePrerelease + + const m = version.trim().match(options.loose ? 
re[t.LOOSE] : re[t.FULL]) + + if (!m) { + throw new TypeError(`Invalid Version: ${version}`) + } + + this.raw = version + + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] + + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } + + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } + + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map((id) => { + if (/^[0-9]+$/.test(id)) { + const num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } + + this.build = m[5] ? m[5].split('.') : [] + this.format() + } + + format () { + this.version = `${this.major}.${this.minor}.${this.patch}` + if (this.prerelease.length) { + this.version += `-${this.prerelease.join('.')}` + } + return this.version + } + + toString () { + return this.version + } + + compare (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + if (typeof other === 'string' && other === this.version) { + return 0 + } + other = new SemVer(other, this.options) + } + + if (other.version === this.version) { + return 0 + } + + return this.compareMain(other) || this.comparePre(other) + } + + compareMain (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + if (this.major < other.major) { + return -1 + } + if (this.major > other.major) { + return 1 + } + if (this.minor < other.minor) { + return -1 + } + if (this.minor > other.minor) { + return 1 + } + if (this.patch < other.patch) { + return -1 + } + if (this.patch > other.patch) { + return 1 + } + return 0 + } + + comparePre (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, 
this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + let i = 0 + do { + const a = this.prerelease[i] + const b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + compareBuild (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + let i = 0 + do { + const a = this.build[i] + const b = other.build[i] + debug('build compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) + } + + // preminor will bump the version up to the next minor release, and immediately + // down to pre-release. premajor and prepatch work the same way. + inc (release, identifier, identifierBase) { + if (release.startsWith('pre')) { + if (!identifier && identifierBase === false) { + throw new Error('invalid increment argument: identifier is empty') + } + // Avoid an invalid semver results + if (identifier) { + const match = `-${identifier}`.match(this.options.loose ? 
re[t.PRERELEASELOOSE] : re[t.PRERELEASE]) + if (!match || match[1] !== identifier) { + throw new Error(`invalid identifier: ${identifier}`) + } + } + } + + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier, identifierBase) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier, identifierBase) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier, identifierBase) + this.inc('pre', identifier, identifierBase) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier, identifierBase) + } + this.inc('pre', identifier, identifierBase) + break + case 'release': + if (this.prerelease.length === 0) { + throw new Error(`version ${this.raw} is not a prerelease`) + } + this.prerelease.length = 0 + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. + // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if ( + this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0 + ) { + this.major++ + } + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ + } + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. 
+ // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ + } + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 'pre' would become 1.0.0-0 which is the wrong direction. + case 'pre': { + const base = Number(identifierBase) ? 1 : 0 + + if (this.prerelease.length === 0) { + this.prerelease = [base] + } else { + let i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + if (identifier === this.prerelease.join('.') && identifierBase === false) { + throw new Error('invalid increment argument: identifier already exists') + } + this.prerelease.push(base) + } + } + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + let prerelease = [identifier, base] + if (identifierBase === false) { + prerelease = [identifier] + } + if (compareIdentifiers(this.prerelease[0], identifier) === 0) { + if (isNaN(this.prerelease[1])) { + this.prerelease = prerelease + } + } else { + this.prerelease = prerelease + } + } + break + } + default: + throw new Error(`invalid increment argument: ${release}`) + } + this.raw = this.format() + if (this.build.length) { + this.raw += `+${this.build.join('.')}` + } + return this + } +} + +module.exports = SemVer diff --git a/node_modules/semver/functions/clean.js b/node_modules/semver/functions/clean.js new file mode 100644 index 0000000..79703d6 --- /dev/null +++ b/node_modules/semver/functions/clean.js @@ -0,0 +1,8 @@ +'use strict' + +const parse = require('./parse') +const clean = (version, options) => { + const s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? 
s.version : null +} +module.exports = clean diff --git a/node_modules/semver/functions/cmp.js b/node_modules/semver/functions/cmp.js new file mode 100644 index 0000000..77487dc --- /dev/null +++ b/node_modules/semver/functions/cmp.js @@ -0,0 +1,54 @@ +'use strict' + +const eq = require('./eq') +const neq = require('./neq') +const gt = require('./gt') +const gte = require('./gte') +const lt = require('./lt') +const lte = require('./lte') + +const cmp = (a, op, b, loose) => { + switch (op) { + case '===': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a === b + + case '!==': + if (typeof a === 'object') { + a = a.version + } + if (typeof b === 'object') { + b = b.version + } + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError(`Invalid operator: ${op}`) + } +} +module.exports = cmp diff --git a/node_modules/semver/functions/coerce.js b/node_modules/semver/functions/coerce.js new file mode 100644 index 0000000..cfe0275 --- /dev/null +++ b/node_modules/semver/functions/coerce.js @@ -0,0 +1,62 @@ +'use strict' + +const SemVer = require('../classes/semver') +const parse = require('./parse') +const { safeRe: re, t } = require('../internal/re') + +const coerce = (version, options) => { + if (version instanceof SemVer) { + return version + } + + if (typeof version === 'number') { + version = String(version) + } + + if (typeof version !== 'string') { + return null + } + + options = options || {} + + let match = null + if (!options.rtl) { + match = version.match(options.includePrerelease ? re[t.COERCEFULL] : re[t.COERCE]) + } else { + // Find the right-most coercible string that does not share + // a terminus with a more left-ward coercible string. 
+ // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' + // With includePrerelease option set, '1.2.3.4-rc' wants to coerce '2.3.4-rc', not '2.3.4' + // + // Walk through the string checking with a /g regexp + // Manually set the index so as to pick up overlapping matches. + // Stop when we get a match that ends at the string end, since no + // coercible string can be more right-ward without the same terminus. + const coerceRtlRegex = options.includePrerelease ? re[t.COERCERTLFULL] : re[t.COERCERTL] + let next + while ((next = coerceRtlRegex.exec(version)) && + (!match || match.index + match[0].length !== version.length) + ) { + if (!match || + next.index + next[0].length !== match.index + match[0].length) { + match = next + } + coerceRtlRegex.lastIndex = next.index + next[1].length + next[2].length + } + // leave it in a clean state + coerceRtlRegex.lastIndex = -1 + } + + if (match === null) { + return null + } + + const major = match[2] + const minor = match[3] || '0' + const patch = match[4] || '0' + const prerelease = options.includePrerelease && match[5] ? `-${match[5]}` : '' + const build = options.includePrerelease && match[6] ? 
`+${match[6]}` : '' + + return parse(`${major}.${minor}.${patch}${prerelease}${build}`, options) +} +module.exports = coerce diff --git a/node_modules/semver/functions/compare-build.js b/node_modules/semver/functions/compare-build.js new file mode 100644 index 0000000..99157cf --- /dev/null +++ b/node_modules/semver/functions/compare-build.js @@ -0,0 +1,9 @@ +'use strict' + +const SemVer = require('../classes/semver') +const compareBuild = (a, b, loose) => { + const versionA = new SemVer(a, loose) + const versionB = new SemVer(b, loose) + return versionA.compare(versionB) || versionA.compareBuild(versionB) +} +module.exports = compareBuild diff --git a/node_modules/semver/functions/compare-loose.js b/node_modules/semver/functions/compare-loose.js new file mode 100644 index 0000000..7531634 --- /dev/null +++ b/node_modules/semver/functions/compare-loose.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const compareLoose = (a, b) => compare(a, b, true) +module.exports = compareLoose diff --git a/node_modules/semver/functions/compare.js b/node_modules/semver/functions/compare.js new file mode 100644 index 0000000..63d8090 --- /dev/null +++ b/node_modules/semver/functions/compare.js @@ -0,0 +1,7 @@ +'use strict' + +const SemVer = require('../classes/semver') +const compare = (a, b, loose) => + new SemVer(a, loose).compare(new SemVer(b, loose)) + +module.exports = compare diff --git a/node_modules/semver/functions/diff.js b/node_modules/semver/functions/diff.js new file mode 100644 index 0000000..04e064e --- /dev/null +++ b/node_modules/semver/functions/diff.js @@ -0,0 +1,60 @@ +'use strict' + +const parse = require('./parse.js') + +const diff = (version1, version2) => { + const v1 = parse(version1, null, true) + const v2 = parse(version2, null, true) + const comparison = v1.compare(v2) + + if (comparison === 0) { + return null + } + + const v1Higher = comparison > 0 + const highVersion = v1Higher ? v1 : v2 + const lowVersion = v1Higher ? 
v2 : v1 + const highHasPre = !!highVersion.prerelease.length + const lowHasPre = !!lowVersion.prerelease.length + + if (lowHasPre && !highHasPre) { + // Going from prerelease -> no prerelease requires some special casing + + // If the low version has only a major, then it will always be a major + // Some examples: + // 1.0.0-1 -> 1.0.0 + // 1.0.0-1 -> 1.1.1 + // 1.0.0-1 -> 2.0.0 + if (!lowVersion.patch && !lowVersion.minor) { + return 'major' + } + + // If the main part has no difference + if (lowVersion.compareMain(highVersion) === 0) { + if (lowVersion.minor && !lowVersion.patch) { + return 'minor' + } + return 'patch' + } + } + + // add the `pre` prefix if we are going to a prerelease version + const prefix = highHasPre ? 'pre' : '' + + if (v1.major !== v2.major) { + return prefix + 'major' + } + + if (v1.minor !== v2.minor) { + return prefix + 'minor' + } + + if (v1.patch !== v2.patch) { + return prefix + 'patch' + } + + // high and low are preleases + return 'prerelease' +} + +module.exports = diff diff --git a/node_modules/semver/functions/eq.js b/node_modules/semver/functions/eq.js new file mode 100644 index 0000000..5f0eead --- /dev/null +++ b/node_modules/semver/functions/eq.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const eq = (a, b, loose) => compare(a, b, loose) === 0 +module.exports = eq diff --git a/node_modules/semver/functions/gt.js b/node_modules/semver/functions/gt.js new file mode 100644 index 0000000..84a57dd --- /dev/null +++ b/node_modules/semver/functions/gt.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const gt = (a, b, loose) => compare(a, b, loose) > 0 +module.exports = gt diff --git a/node_modules/semver/functions/gte.js b/node_modules/semver/functions/gte.js new file mode 100644 index 0000000..7c52bdf --- /dev/null +++ b/node_modules/semver/functions/gte.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const gte = (a, b, loose) => compare(a, b, loose) >= 
0 +module.exports = gte diff --git a/node_modules/semver/functions/inc.js b/node_modules/semver/functions/inc.js new file mode 100644 index 0000000..ff999e9 --- /dev/null +++ b/node_modules/semver/functions/inc.js @@ -0,0 +1,21 @@ +'use strict' + +const SemVer = require('../classes/semver') + +const inc = (version, release, options, identifier, identifierBase) => { + if (typeof (options) === 'string') { + identifierBase = identifier + identifier = options + options = undefined + } + + try { + return new SemVer( + version instanceof SemVer ? version.version : version, + options + ).inc(release, identifier, identifierBase).version + } catch (er) { + return null + } +} +module.exports = inc diff --git a/node_modules/semver/functions/lt.js b/node_modules/semver/functions/lt.js new file mode 100644 index 0000000..2fb32a0 --- /dev/null +++ b/node_modules/semver/functions/lt.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const lt = (a, b, loose) => compare(a, b, loose) < 0 +module.exports = lt diff --git a/node_modules/semver/functions/lte.js b/node_modules/semver/functions/lte.js new file mode 100644 index 0000000..da9ee8f --- /dev/null +++ b/node_modules/semver/functions/lte.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const lte = (a, b, loose) => compare(a, b, loose) <= 0 +module.exports = lte diff --git a/node_modules/semver/functions/major.js b/node_modules/semver/functions/major.js new file mode 100644 index 0000000..e6d08dc --- /dev/null +++ b/node_modules/semver/functions/major.js @@ -0,0 +1,5 @@ +'use strict' + +const SemVer = require('../classes/semver') +const major = (a, loose) => new SemVer(a, loose).major +module.exports = major diff --git a/node_modules/semver/functions/minor.js b/node_modules/semver/functions/minor.js new file mode 100644 index 0000000..9e70ffd --- /dev/null +++ b/node_modules/semver/functions/minor.js @@ -0,0 +1,5 @@ +'use strict' + +const SemVer = require('../classes/semver') +const 
minor = (a, loose) => new SemVer(a, loose).minor +module.exports = minor diff --git a/node_modules/semver/functions/neq.js b/node_modules/semver/functions/neq.js new file mode 100644 index 0000000..84326b7 --- /dev/null +++ b/node_modules/semver/functions/neq.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const neq = (a, b, loose) => compare(a, b, loose) !== 0 +module.exports = neq diff --git a/node_modules/semver/functions/parse.js b/node_modules/semver/functions/parse.js new file mode 100644 index 0000000..d544d33 --- /dev/null +++ b/node_modules/semver/functions/parse.js @@ -0,0 +1,18 @@ +'use strict' + +const SemVer = require('../classes/semver') +const parse = (version, options, throwErrors = false) => { + if (version instanceof SemVer) { + return version + } + try { + return new SemVer(version, options) + } catch (er) { + if (!throwErrors) { + return null + } + throw er + } +} + +module.exports = parse diff --git a/node_modules/semver/functions/patch.js b/node_modules/semver/functions/patch.js new file mode 100644 index 0000000..7675162 --- /dev/null +++ b/node_modules/semver/functions/patch.js @@ -0,0 +1,5 @@ +'use strict' + +const SemVer = require('../classes/semver') +const patch = (a, loose) => new SemVer(a, loose).patch +module.exports = patch diff --git a/node_modules/semver/functions/prerelease.js b/node_modules/semver/functions/prerelease.js new file mode 100644 index 0000000..b8fe1db --- /dev/null +++ b/node_modules/semver/functions/prerelease.js @@ -0,0 +1,8 @@ +'use strict' + +const parse = require('./parse') +const prerelease = (version, options) => { + const parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null +} +module.exports = prerelease diff --git a/node_modules/semver/functions/rcompare.js b/node_modules/semver/functions/rcompare.js new file mode 100644 index 0000000..8e1c222 --- /dev/null +++ b/node_modules/semver/functions/rcompare.js @@ -0,0 +1,5 @@ +'use strict' + +const compare = require('./compare') +const rcompare = (a, b, loose) => compare(b, a, loose) +module.exports = rcompare diff --git a/node_modules/semver/functions/rsort.js b/node_modules/semver/functions/rsort.js new file mode 100644 index 0000000..5d3d200 --- /dev/null +++ b/node_modules/semver/functions/rsort.js @@ -0,0 +1,5 @@ +'use strict' + +const compareBuild = require('./compare-build') +const rsort = (list, loose) => list.sort((a, b) => compareBuild(b, a, loose)) +module.exports = rsort diff --git a/node_modules/semver/functions/satisfies.js b/node_modules/semver/functions/satisfies.js new file mode 100644 index 0000000..a0264a2 --- /dev/null +++ b/node_modules/semver/functions/satisfies.js @@ -0,0 +1,12 @@ +'use strict' + +const Range = require('../classes/range') +const satisfies = (version, range, options) => { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} +module.exports = satisfies diff --git a/node_modules/semver/functions/sort.js b/node_modules/semver/functions/sort.js new file mode 100644 index 0000000..edb24b1 --- /dev/null +++ b/node_modules/semver/functions/sort.js @@ -0,0 +1,5 @@ +'use strict' + +const compareBuild = require('./compare-build') +const sort = (list, loose) => list.sort((a, b) => compareBuild(a, b, loose)) +module.exports = sort diff --git a/node_modules/semver/functions/valid.js b/node_modules/semver/functions/valid.js new file mode 100644 index 0000000..0db67ed --- /dev/null +++ b/node_modules/semver/functions/valid.js @@ -0,0 +1,8 @@ +'use strict' + +const parse = require('./parse') +const valid = (version, options) => { + const v = parse(version, options) + return v ? 
v.version : null +} +module.exports = valid diff --git a/node_modules/semver/index.js b/node_modules/semver/index.js new file mode 100644 index 0000000..285662a --- /dev/null +++ b/node_modules/semver/index.js @@ -0,0 +1,91 @@ +'use strict' + +// just pre-load all the stuff that index.js lazily exports +const internalRe = require('./internal/re') +const constants = require('./internal/constants') +const SemVer = require('./classes/semver') +const identifiers = require('./internal/identifiers') +const parse = require('./functions/parse') +const valid = require('./functions/valid') +const clean = require('./functions/clean') +const inc = require('./functions/inc') +const diff = require('./functions/diff') +const major = require('./functions/major') +const minor = require('./functions/minor') +const patch = require('./functions/patch') +const prerelease = require('./functions/prerelease') +const compare = require('./functions/compare') +const rcompare = require('./functions/rcompare') +const compareLoose = require('./functions/compare-loose') +const compareBuild = require('./functions/compare-build') +const sort = require('./functions/sort') +const rsort = require('./functions/rsort') +const gt = require('./functions/gt') +const lt = require('./functions/lt') +const eq = require('./functions/eq') +const neq = require('./functions/neq') +const gte = require('./functions/gte') +const lte = require('./functions/lte') +const cmp = require('./functions/cmp') +const coerce = require('./functions/coerce') +const Comparator = require('./classes/comparator') +const Range = require('./classes/range') +const satisfies = require('./functions/satisfies') +const toComparators = require('./ranges/to-comparators') +const maxSatisfying = require('./ranges/max-satisfying') +const minSatisfying = require('./ranges/min-satisfying') +const minVersion = require('./ranges/min-version') +const validRange = require('./ranges/valid') +const outside = require('./ranges/outside') +const gtr = 
require('./ranges/gtr') +const ltr = require('./ranges/ltr') +const intersects = require('./ranges/intersects') +const simplifyRange = require('./ranges/simplify') +const subset = require('./ranges/subset') +module.exports = { + parse, + valid, + clean, + inc, + diff, + major, + minor, + patch, + prerelease, + compare, + rcompare, + compareLoose, + compareBuild, + sort, + rsort, + gt, + lt, + eq, + neq, + gte, + lte, + cmp, + coerce, + Comparator, + Range, + satisfies, + toComparators, + maxSatisfying, + minSatisfying, + minVersion, + validRange, + outside, + gtr, + ltr, + intersects, + simplifyRange, + subset, + SemVer, + re: internalRe.re, + src: internalRe.src, + tokens: internalRe.t, + SEMVER_SPEC_VERSION: constants.SEMVER_SPEC_VERSION, + RELEASE_TYPES: constants.RELEASE_TYPES, + compareIdentifiers: identifiers.compareIdentifiers, + rcompareIdentifiers: identifiers.rcompareIdentifiers, +} diff --git a/node_modules/semver/internal/constants.js b/node_modules/semver/internal/constants.js new file mode 100644 index 0000000..6d1db91 --- /dev/null +++ b/node_modules/semver/internal/constants.js @@ -0,0 +1,37 @@ +'use strict' + +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +const SEMVER_SPEC_VERSION = '2.0.0' + +const MAX_LENGTH = 256 +const MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || +/* istanbul ignore next */ 9007199254740991 + +// Max safe segment length for coercion. +const MAX_SAFE_COMPONENT_LENGTH = 16 + +// Max safe length for a build identifier. The max length minus 6 characters for +// the shortest version with a build 0.0.0+BUILD. 
+const MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 + +const RELEASE_TYPES = [ + 'major', + 'premajor', + 'minor', + 'preminor', + 'patch', + 'prepatch', + 'prerelease', +] + +module.exports = { + MAX_LENGTH, + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_SAFE_INTEGER, + RELEASE_TYPES, + SEMVER_SPEC_VERSION, + FLAG_INCLUDE_PRERELEASE: 0b001, + FLAG_LOOSE: 0b010, +} diff --git a/node_modules/semver/internal/debug.js b/node_modules/semver/internal/debug.js new file mode 100644 index 0000000..20d1e9d --- /dev/null +++ b/node_modules/semver/internal/debug.js @@ -0,0 +1,11 @@ +'use strict' + +const debug = ( + typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG) +) ? (...args) => console.error('SEMVER', ...args) + : () => {} + +module.exports = debug diff --git a/node_modules/semver/internal/identifiers.js b/node_modules/semver/internal/identifiers.js new file mode 100644 index 0000000..d053472 --- /dev/null +++ b/node_modules/semver/internal/identifiers.js @@ -0,0 +1,29 @@ +'use strict' + +const numeric = /^[0-9]+$/ +const compareIdentifiers = (a, b) => { + if (typeof a === 'number' && typeof b === 'number') { + return a === b ? 0 : a < b ? -1 : 1 + } + + const anum = numeric.test(a) + const bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? 
-1 + : 1 +} + +const rcompareIdentifiers = (a, b) => compareIdentifiers(b, a) + +module.exports = { + compareIdentifiers, + rcompareIdentifiers, +} diff --git a/node_modules/semver/internal/lrucache.js b/node_modules/semver/internal/lrucache.js new file mode 100644 index 0000000..b8bf526 --- /dev/null +++ b/node_modules/semver/internal/lrucache.js @@ -0,0 +1,42 @@ +'use strict' + +class LRUCache { + constructor () { + this.max = 1000 + this.map = new Map() + } + + get (key) { + const value = this.map.get(key) + if (value === undefined) { + return undefined + } else { + // Remove the key from the map and add it to the end + this.map.delete(key) + this.map.set(key, value) + return value + } + } + + delete (key) { + return this.map.delete(key) + } + + set (key, value) { + const deleted = this.delete(key) + + if (!deleted && value !== undefined) { + // If cache is full, delete the least recently used item + if (this.map.size >= this.max) { + const firstKey = this.map.keys().next().value + this.delete(firstKey) + } + + this.map.set(key, value) + } + + return this + } +} + +module.exports = LRUCache diff --git a/node_modules/semver/internal/parse-options.js b/node_modules/semver/internal/parse-options.js new file mode 100644 index 0000000..5295454 --- /dev/null +++ b/node_modules/semver/internal/parse-options.js @@ -0,0 +1,17 @@ +'use strict' + +// parse out just the options we care about +const looseOption = Object.freeze({ loose: true }) +const emptyOpts = Object.freeze({ }) +const parseOptions = options => { + if (!options) { + return emptyOpts + } + + if (typeof options !== 'object') { + return looseOption + } + + return options +} +module.exports = parseOptions diff --git a/node_modules/semver/internal/re.js b/node_modules/semver/internal/re.js new file mode 100644 index 0000000..4758c58 --- /dev/null +++ b/node_modules/semver/internal/re.js @@ -0,0 +1,223 @@ +'use strict' + +const { + MAX_SAFE_COMPONENT_LENGTH, + MAX_SAFE_BUILD_LENGTH, + MAX_LENGTH, +} = 
require('./constants') +const debug = require('./debug') +exports = module.exports = {} + +// The actual regexps go on exports.re +const re = exports.re = [] +const safeRe = exports.safeRe = [] +const src = exports.src = [] +const safeSrc = exports.safeSrc = [] +const t = exports.t = {} +let R = 0 + +const LETTERDASHNUMBER = '[a-zA-Z0-9-]' + +// Replace some greedy regex tokens to prevent regex dos issues. These regex are +// used internally via the safeRe object since all inputs in this library get +// normalized first to trim and collapse all extra whitespace. The original +// regexes are exported for userland consumption and lower level usage. A +// future breaking change could export the safer regex only with a note that +// all input should have extra whitespace removed. +const safeRegexReplacements = [ + ['\\s', 1], + ['\\d', MAX_LENGTH], + [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], +] + +const makeSafeRegex = (value) => { + for (const [token, max] of safeRegexReplacements) { + value = value + .split(`${token}*`).join(`${token}{0,${max}}`) + .split(`${token}+`).join(`${token}{1,${max}}`) + } + return value +} + +const createToken = (name, value, isGlobal) => { + const safe = makeSafeRegex(value) + const index = R++ + debug(name, index, value) + t[name] = index + src[index] = value + safeSrc[index] = safe + re[index] = new RegExp(value, isGlobal ? 'g' : undefined) + safeRe[index] = new RegExp(safe, isGlobal ? 'g' : undefined) +} + +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. + +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +createToken('NUMERICIDENTIFIER', '0|[1-9]\\d*') +createToken('NUMERICIDENTIFIERLOOSE', '\\d+') + +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. 
+ +createToken('NONNUMERICIDENTIFIER', `\\d*[a-zA-Z-]${LETTERDASHNUMBER}*`) + +// ## Main Version +// Three dot-separated numeric identifiers. + +createToken('MAINVERSION', `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})\\.` + + `(${src[t.NUMERICIDENTIFIER]})`) + +createToken('MAINVERSIONLOOSE', `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})\\.` + + `(${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. +// Non-numberic identifiers include numberic identifiers but can be longer. +// Therefore non-numberic identifiers must go first. + +createToken('PRERELEASEIDENTIFIER', `(?:${src[t.NONNUMERICIDENTIFIER] +}|${src[t.NUMERICIDENTIFIER]})`) + +createToken('PRERELEASEIDENTIFIERLOOSE', `(?:${src[t.NONNUMERICIDENTIFIER] +}|${src[t.NUMERICIDENTIFIERLOOSE]})`) + +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. + +createToken('PRERELEASE', `(?:-(${src[t.PRERELEASEIDENTIFIER] +}(?:\\.${src[t.PRERELEASEIDENTIFIER]})*))`) + +createToken('PRERELEASELOOSE', `(?:-?(${src[t.PRERELEASEIDENTIFIERLOOSE] +}(?:\\.${src[t.PRERELEASEIDENTIFIERLOOSE]})*))`) + +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. + +createToken('BUILDIDENTIFIER', `${LETTERDASHNUMBER}+`) + +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. + +createToken('BUILD', `(?:\\+(${src[t.BUILDIDENTIFIER] +}(?:\\.${src[t.BUILDIDENTIFIER]})*))`) + +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. + +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. 
+ +createToken('FULLPLAIN', `v?${src[t.MAINVERSION] +}${src[t.PRERELEASE]}?${ + src[t.BUILD]}?`) + +createToken('FULL', `^${src[t.FULLPLAIN]}$`) + +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +createToken('LOOSEPLAIN', `[v=\\s]*${src[t.MAINVERSIONLOOSE] +}${src[t.PRERELEASELOOSE]}?${ + src[t.BUILD]}?`) + +createToken('LOOSE', `^${src[t.LOOSEPLAIN]}$`) + +createToken('GTLT', '((?:<|>)?=?)') + +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +createToken('XRANGEIDENTIFIERLOOSE', `${src[t.NUMERICIDENTIFIERLOOSE]}|x|X|\\*`) +createToken('XRANGEIDENTIFIER', `${src[t.NUMERICIDENTIFIER]}|x|X|\\*`) + +createToken('XRANGEPLAIN', `[v=\\s]*(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIER]})` + + `(?:${src[t.PRERELEASE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGEPLAINLOOSE', `[v=\\s]*(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:\\.(${src[t.XRANGEIDENTIFIERLOOSE]})` + + `(?:${src[t.PRERELEASELOOSE]})?${ + src[t.BUILD]}?` + + `)?)?`) + +createToken('XRANGE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAIN]}$`) +createToken('XRANGELOOSE', `^${src[t.GTLT]}\\s*${src[t.XRANGEPLAINLOOSE]}$`) + +// Coercion. 
+// Extract anything that could conceivably be a part of a valid semver +createToken('COERCEPLAIN', `${'(^|[^\\d])' + + '(\\d{1,'}${MAX_SAFE_COMPONENT_LENGTH}})` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?` + + `(?:\\.(\\d{1,${MAX_SAFE_COMPONENT_LENGTH}}))?`) +createToken('COERCE', `${src[t.COERCEPLAIN]}(?:$|[^\\d])`) +createToken('COERCEFULL', src[t.COERCEPLAIN] + + `(?:${src[t.PRERELEASE]})?` + + `(?:${src[t.BUILD]})?` + + `(?:$|[^\\d])`) +createToken('COERCERTL', src[t.COERCE], true) +createToken('COERCERTLFULL', src[t.COERCEFULL], true) + +// Tilde ranges. +// Meaning is "reasonably at or greater than" +createToken('LONETILDE', '(?:~>?)') + +createToken('TILDETRIM', `(\\s*)${src[t.LONETILDE]}\\s+`, true) +exports.tildeTrimReplace = '$1~' + +createToken('TILDE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAIN]}$`) +createToken('TILDELOOSE', `^${src[t.LONETILDE]}${src[t.XRANGEPLAINLOOSE]}$`) + +// Caret ranges. +// Meaning is "at least and backwards compatible with" +createToken('LONECARET', '(?:\\^)') + +createToken('CARETTRIM', `(\\s*)${src[t.LONECARET]}\\s+`, true) +exports.caretTrimReplace = '$1^' + +createToken('CARET', `^${src[t.LONECARET]}${src[t.XRANGEPLAIN]}$`) +createToken('CARETLOOSE', `^${src[t.LONECARET]}${src[t.XRANGEPLAINLOOSE]}$`) + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +createToken('COMPARATORLOOSE', `^${src[t.GTLT]}\\s*(${src[t.LOOSEPLAIN]})$|^$`) +createToken('COMPARATOR', `^${src[t.GTLT]}\\s*(${src[t.FULLPLAIN]})$|^$`) + +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +createToken('COMPARATORTRIM', `(\\s*)${src[t.GTLT] +}\\s*(${src[t.LOOSEPLAIN]}|${src[t.XRANGEPLAIN]})`, true) +exports.comparatorTrimReplace = '$1$2$3' + +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. 
+createToken('HYPHENRANGE', `^\\s*(${src[t.XRANGEPLAIN]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAIN]})` + + `\\s*$`) + +createToken('HYPHENRANGELOOSE', `^\\s*(${src[t.XRANGEPLAINLOOSE]})` + + `\\s+-\\s+` + + `(${src[t.XRANGEPLAINLOOSE]})` + + `\\s*$`) + +// Star ranges basically just allow anything at all. +createToken('STAR', '(<|>)?=?\\s*\\*') +// >=0.0.0 is like a star +createToken('GTE0', '^\\s*>=\\s*0\\.0\\.0\\s*$') +createToken('GTE0PRE', '^\\s*>=\\s*0\\.0\\.0-0\\s*$') diff --git a/node_modules/semver/package.json b/node_modules/semver/package.json new file mode 100644 index 0000000..2b8cada --- /dev/null +++ b/node_modules/semver/package.json @@ -0,0 +1,78 @@ +{ + "name": "semver", + "version": "7.7.3", + "description": "The semantic version parser used by npm.", + "main": "index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.25.1", + "benchmark": "^2.1.4", + "tap": "^16.0.0" + }, + "license": "ISC", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/node-semver.git" + }, + "bin": { + "semver": "bin/semver.js" + }, + "files": [ + "bin/", + "lib/", + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "tap": { + "timeout": 30, + "coverage-map": "map.js", + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": ">=10" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.25.1", + "engines": ">=10", + "distPaths": [ + "classes/", + "functions/", + "internal/", + "ranges/", + "index.js", + "preload.js", + "range.bnf" + ], + "allowPaths": [ + "/classes/", + "/functions/", + "/internal/", + "/ranges/", + "/index.js", + "/preload.js", + "/range.bnf", + "/benchmarks" + ], + "publish": "true" + } +} diff --git a/node_modules/semver/preload.js b/node_modules/semver/preload.js new file mode 100644 index 0000000..e6c47b9 --- /dev/null +++ b/node_modules/semver/preload.js @@ -0,0 +1,4 @@ +'use strict' + +// XXX remove in v8 or beyond +module.exports = require('./index.js') diff --git a/node_modules/semver/range.bnf b/node_modules/semver/range.bnf new file mode 100644 index 0000000..d4c6ae0 --- /dev/null +++ b/node_modules/semver/range.bnf @@ -0,0 +1,16 @@ +range-set ::= range ( logical-or range ) * +logical-or ::= ( ' ' ) * '||' ( ' ' ) * +range ::= hyphen | simple ( ' ' simple ) * | '' +hyphen ::= partial ' - ' partial +simple ::= primitive | partial | tilde | caret +primitive ::= ( '<' | '>' | '>=' | '<=' | '=' ) partial +partial ::= xr ( '.' xr ( '.' xr qualifier ? )? )? +xr ::= 'x' | 'X' | '*' | nr +nr ::= '0' | [1-9] ( [0-9] ) * +tilde ::= '~' partial +caret ::= '^' partial +qualifier ::= ( '-' pre )? ( '+' build )? +pre ::= parts +build ::= parts +parts ::= part ( '.' part ) * +part ::= nr | [-0-9A-Za-z]+ diff --git a/node_modules/semver/ranges/gtr.js b/node_modules/semver/ranges/gtr.js new file mode 100644 index 0000000..0e7601f --- /dev/null +++ b/node_modules/semver/ranges/gtr.js @@ -0,0 +1,6 @@ +'use strict' + +// Determine if version is greater than all the versions possible in the range. 
+const outside = require('./outside') +const gtr = (version, range, options) => outside(version, range, '>', options) +module.exports = gtr diff --git a/node_modules/semver/ranges/intersects.js b/node_modules/semver/ranges/intersects.js new file mode 100644 index 0000000..917be7e --- /dev/null +++ b/node_modules/semver/ranges/intersects.js @@ -0,0 +1,9 @@ +'use strict' + +const Range = require('../classes/range') +const intersects = (r1, r2, options) => { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2, options) +} +module.exports = intersects diff --git a/node_modules/semver/ranges/ltr.js b/node_modules/semver/ranges/ltr.js new file mode 100644 index 0000000..aa5e568 --- /dev/null +++ b/node_modules/semver/ranges/ltr.js @@ -0,0 +1,6 @@ +'use strict' + +const outside = require('./outside') +// Determine if version is less than all the versions possible in the range +const ltr = (version, range, options) => outside(version, range, '<', options) +module.exports = ltr diff --git a/node_modules/semver/ranges/max-satisfying.js b/node_modules/semver/ranges/max-satisfying.js new file mode 100644 index 0000000..01fe5ae --- /dev/null +++ b/node_modules/semver/ranges/max-satisfying.js @@ -0,0 +1,27 @@ +'use strict' + +const SemVer = require('../classes/semver') +const Range = require('../classes/range') + +const maxSatisfying = (versions, range, options) => { + let max = null + let maxSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) + } + } + }) + return max +} +module.exports = maxSatisfying diff --git a/node_modules/semver/ranges/min-satisfying.js b/node_modules/semver/ranges/min-satisfying.js new file mode 100644 index 0000000..af89c8e --- /dev/null +++ 
b/node_modules/semver/ranges/min-satisfying.js @@ -0,0 +1,26 @@ +'use strict' + +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const minSatisfying = (versions, range, options) => { + let min = null + let minSV = null + let rangeObj = null + try { + rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach((v) => { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) + } + } + }) + return min +} +module.exports = minSatisfying diff --git a/node_modules/semver/ranges/min-version.js b/node_modules/semver/ranges/min-version.js new file mode 100644 index 0000000..09a65aa --- /dev/null +++ b/node_modules/semver/ranges/min-version.js @@ -0,0 +1,63 @@ +'use strict' + +const SemVer = require('../classes/semver') +const Range = require('../classes/range') +const gt = require('../functions/gt') + +const minVersion = (range, loose) => { + range = new Range(range, loose) + + let minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let setMin = null + comparators.forEach((comparator) => { + // Clone to avoid manipulating the comparator's semver object. 
+ const compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!setMin || gt(compver, setMin)) { + setMin = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error(`Unexpected operation: ${comparator.operator}`) + } + }) + if (setMin && (!minver || gt(minver, setMin))) { + minver = setMin + } + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} +module.exports = minVersion diff --git a/node_modules/semver/ranges/outside.js b/node_modules/semver/ranges/outside.js new file mode 100644 index 0000000..ca74421 --- /dev/null +++ b/node_modules/semver/ranges/outside.js @@ -0,0 +1,82 @@ +'use strict' + +const SemVer = require('../classes/semver') +const Comparator = require('../classes/comparator') +const { ANY } = Comparator +const Range = require('../classes/range') +const satisfies = require('../functions/satisfies') +const gt = require('../functions/gt') +const lt = require('../functions/lt') +const lte = require('../functions/lte') +const gte = require('../functions/gte') + +const outside = (version, range, hilo, options) => { + version = new SemVer(version, options) + range = new Range(range, options) + + let gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisfies the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. 
+ // but note that everything is flipped for the "ltr" function. + + for (let i = 0; i < range.set.length; ++i) { + const comparators = range.set[i] + + let high = null + let low = null + + comparators.forEach((comparator) => { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') + } + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator + } + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +module.exports = outside diff --git a/node_modules/semver/ranges/simplify.js b/node_modules/semver/ranges/simplify.js new file mode 100644 index 0000000..262732e --- /dev/null +++ b/node_modules/semver/ranges/simplify.js @@ -0,0 +1,49 @@ +'use strict' + +// given a set of versions and a range, create a "simplified" range +// that includes the same versions that the original range does +// If the original range is shorter than the simplified one, return that. 
+const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') +module.exports = (versions, range, options) => { + const set = [] + let first = null + let prev = null + const v = versions.sort((a, b) => compare(a, b, options)) + for (const version of v) { + const included = satisfies(version, range, options) + if (included) { + prev = version + if (!first) { + first = version + } + } else { + if (prev) { + set.push([first, prev]) + } + prev = null + first = null + } + } + if (first) { + set.push([first, null]) + } + + const ranges = [] + for (const [min, max] of set) { + if (min === max) { + ranges.push(min) + } else if (!max && min === v[0]) { + ranges.push('*') + } else if (!max) { + ranges.push(`>=${min}`) + } else if (min === v[0]) { + ranges.push(`<=${max}`) + } else { + ranges.push(`${min} - ${max}`) + } + } + const simplified = ranges.join(' || ') + const original = typeof range.raw === 'string' ? range.raw : String(range) + return simplified.length < original.length ? 
simplified : range +} diff --git a/node_modules/semver/ranges/subset.js b/node_modules/semver/ranges/subset.js new file mode 100644 index 0000000..2c49aef --- /dev/null +++ b/node_modules/semver/ranges/subset.js @@ -0,0 +1,249 @@ +'use strict' + +const Range = require('../classes/range.js') +const Comparator = require('../classes/comparator.js') +const { ANY } = Comparator +const satisfies = require('../functions/satisfies.js') +const compare = require('../functions/compare.js') + +// Complex range `r1 || r2 || ...` is a subset of `R1 || R2 || ...` iff: +// - Every simple range `r1, r2, ...` is a null set, OR +// - Every simple range `r1, r2, ...` which is not a null set is a subset of +// some `R1, R2, ...` +// +// Simple range `c1 c2 ...` is a subset of simple range `C1 C2 ...` iff: +// - If c is only the ANY comparator +// - If C is only the ANY comparator, return true +// - Else if in prerelease mode, return false +// - else replace c with `[>=0.0.0]` +// - If C is only the ANY comparator +// - if in prerelease mode, return true +// - else replace C with `[>=0.0.0]` +// - Let EQ be the set of = comparators in c +// - If EQ is more than one, return true (null set) +// - Let GT be the highest > or >= comparator in c +// - Let LT be the lowest < or <= comparator in c +// - If GT and LT, and GT.semver > LT.semver, return true (null set) +// - If any C is a = range, and GT or LT are set, return false +// - If EQ +// - If GT, and EQ does not satisfy GT, return true (null set) +// - If LT, and EQ does not satisfy LT, return true (null set) +// - If EQ satisfies every C, return true +// - Else return false +// - If GT +// - If GT.semver is lower than any > or >= comp in C, return false +// - If GT is >=, and GT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the GT.semver tuple, return false +// - If LT +// - If LT.semver is greater than any < or <= comp in C, return false 
+// - If LT is <=, and LT.semver does not satisfy every C, return false +// - If GT.semver has a prerelease, and not in prerelease mode +// - If no C has a prerelease and the LT.semver tuple, return false +// - Else return true + +const subset = (sub, dom, options = {}) => { + if (sub === dom) { + return true + } + + sub = new Range(sub, options) + dom = new Range(dom, options) + let sawNonNull = false + + OUTER: for (const simpleSub of sub.set) { + for (const simpleDom of dom.set) { + const isSub = simpleSubset(simpleSub, simpleDom, options) + sawNonNull = sawNonNull || isSub !== null + if (isSub) { + continue OUTER + } + } + // the null set is a subset of everything, but null simple ranges in + // a complex range should be ignored. so if we saw a non-null range, + // then we know this isn't a subset, but if EVERY simple range was null, + // then it is a subset. + if (sawNonNull) { + return false + } + } + return true +} + +const minimumVersionWithPreRelease = [new Comparator('>=0.0.0-0')] +const minimumVersion = [new Comparator('>=0.0.0')] + +const simpleSubset = (sub, dom, options) => { + if (sub === dom) { + return true + } + + if (sub.length === 1 && sub[0].semver === ANY) { + if (dom.length === 1 && dom[0].semver === ANY) { + return true + } else if (options.includePrerelease) { + sub = minimumVersionWithPreRelease + } else { + sub = minimumVersion + } + } + + if (dom.length === 1 && dom[0].semver === ANY) { + if (options.includePrerelease) { + return true + } else { + dom = minimumVersion + } + } + + const eqSet = new Set() + let gt, lt + for (const c of sub) { + if (c.operator === '>' || c.operator === '>=') { + gt = higherGT(gt, c, options) + } else if (c.operator === '<' || c.operator === '<=') { + lt = lowerLT(lt, c, options) + } else { + eqSet.add(c.semver) + } + } + + if (eqSet.size > 1) { + return null + } + + let gtltComp + if (gt && lt) { + gtltComp = compare(gt.semver, lt.semver, options) + if (gtltComp > 0) { + return null + } else if (gtltComp 
=== 0 && (gt.operator !== '>=' || lt.operator !== '<=')) { + return null + } + } + + // will iterate one or zero times + for (const eq of eqSet) { + if (gt && !satisfies(eq, String(gt), options)) { + return null + } + + if (lt && !satisfies(eq, String(lt), options)) { + return null + } + + for (const c of dom) { + if (!satisfies(eq, String(c), options)) { + return false + } + } + + return true + } + + let higher, lower + let hasDomLT, hasDomGT + // if the subset has a prerelease, we need a comparator in the superset + // with the same tuple and a prerelease, or it's not a subset + let needDomLTPre = lt && + !options.includePrerelease && + lt.semver.prerelease.length ? lt.semver : false + let needDomGTPre = gt && + !options.includePrerelease && + gt.semver.prerelease.length ? gt.semver : false + // exception: <1.2.3-0 is the same as <1.2.3 + if (needDomLTPre && needDomLTPre.prerelease.length === 1 && + lt.operator === '<' && needDomLTPre.prerelease[0] === 0) { + needDomLTPre = false + } + + for (const c of dom) { + hasDomGT = hasDomGT || c.operator === '>' || c.operator === '>=' + hasDomLT = hasDomLT || c.operator === '<' || c.operator === '<=' + if (gt) { + if (needDomGTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomGTPre.major && + c.semver.minor === needDomGTPre.minor && + c.semver.patch === needDomGTPre.patch) { + needDomGTPre = false + } + } + if (c.operator === '>' || c.operator === '>=') { + higher = higherGT(gt, c, options) + if (higher === c && higher !== gt) { + return false + } + } else if (gt.operator === '>=' && !satisfies(gt.semver, String(c), options)) { + return false + } + } + if (lt) { + if (needDomLTPre) { + if (c.semver.prerelease && c.semver.prerelease.length && + c.semver.major === needDomLTPre.major && + c.semver.minor === needDomLTPre.minor && + c.semver.patch === needDomLTPre.patch) { + needDomLTPre = false + } + } + if (c.operator === '<' || c.operator === '<=') { + lower = lowerLT(lt, c, 
options) + if (lower === c && lower !== lt) { + return false + } + } else if (lt.operator === '<=' && !satisfies(lt.semver, String(c), options)) { + return false + } + } + if (!c.operator && (lt || gt) && gtltComp !== 0) { + return false + } + } + + // if there was a < or >, and nothing in the dom, then must be false + // UNLESS it was limited by another range in the other direction. + // Eg, >1.0.0 <1.0.1 is still a subset of <2.0.0 + if (gt && hasDomLT && !lt && gtltComp !== 0) { + return false + } + + if (lt && hasDomGT && !gt && gtltComp !== 0) { + return false + } + + // we needed a prerelease range in a specific tuple, but didn't get one + // then this isn't a subset. eg >=1.2.3-pre is not a subset of >=1.0.0, + // because it includes prereleases in the 1.2.3 tuple + if (needDomGTPre || needDomLTPre) { + return false + } + + return true +} + +// >=1.2.3 is lower than >1.2.3 +const higherGT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp > 0 ? a + : comp < 0 ? b + : b.operator === '>' && a.operator === '>=' ? b + : a +} + +// <=1.2.3 is higher than <1.2.3 +const lowerLT = (a, b, options) => { + if (!a) { + return b + } + const comp = compare(a.semver, b.semver, options) + return comp < 0 ? a + : comp > 0 ? b + : b.operator === '<' && a.operator === '<=' ? 
b + : a +} + +module.exports = subset diff --git a/node_modules/semver/ranges/to-comparators.js b/node_modules/semver/ranges/to-comparators.js new file mode 100644 index 0000000..5be2519 --- /dev/null +++ b/node_modules/semver/ranges/to-comparators.js @@ -0,0 +1,10 @@ +'use strict' + +const Range = require('../classes/range') + +// Mostly just for testing and legacy API reasons +const toComparators = (range, options) => + new Range(range, options).set + .map(comp => comp.map(c => c.value).join(' ').trim().split(' ')) + +module.exports = toComparators diff --git a/node_modules/semver/ranges/valid.js b/node_modules/semver/ranges/valid.js new file mode 100644 index 0000000..cc6b0e9 --- /dev/null +++ b/node_modules/semver/ranges/valid.js @@ -0,0 +1,13 @@ +'use strict' + +const Range = require('../classes/range') +const validRange = (range, options) => { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} +module.exports = validRange diff --git a/node_modules/set-blocking/CHANGELOG.md b/node_modules/set-blocking/CHANGELOG.md new file mode 100644 index 0000000..03bf591 --- /dev/null +++ b/node_modules/set-blocking/CHANGELOG.md @@ -0,0 +1,26 @@ +# Change Log + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ + +# [2.0.0](https://github.com/yargs/set-blocking/compare/v1.0.0...v2.0.0) (2016-05-17) + + +### Features + +* add an isTTY check ([#3](https://github.com/yargs/set-blocking/issues/3)) ([66ce277](https://github.com/yargs/set-blocking/commit/66ce277)) + + +### BREAKING CHANGES + +* stdio/stderr will not be set to blocking if isTTY === false + + + + +# 1.0.0 (2016-05-14) + + +### Features + +* implemented shim for stream._handle.setBlocking ([6bde0c0](https://github.com/yargs/set-blocking/commit/6bde0c0)) diff --git a/node_modules/set-blocking/LICENSE.txt b/node_modules/set-blocking/LICENSE.txt new file mode 100644 index 0000000..836440b --- /dev/null +++ b/node_modules/set-blocking/LICENSE.txt @@ -0,0 +1,14 @@ +Copyright (c) 2016, Contributors + +Permission to use, copy, modify, and/or distribute this software +for any purpose with or without fee is hereby granted, provided +that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/node_modules/set-blocking/README.md b/node_modules/set-blocking/README.md new file mode 100644 index 0000000..e93b420 --- /dev/null +++ b/node_modules/set-blocking/README.md @@ -0,0 +1,31 @@ +# set-blocking + +[![Build Status](https://travis-ci.org/yargs/set-blocking.svg)](https://travis-ci.org/yargs/set-blocking) +[![NPM version](https://img.shields.io/npm/v/set-blocking.svg)](https://www.npmjs.com/package/set-blocking) +[![Coverage Status](https://coveralls.io/repos/yargs/set-blocking/badge.svg?branch=)](https://coveralls.io/r/yargs/set-blocking?branch=master) +[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) + +set blocking `stdio` and `stderr` ensuring that terminal output does not truncate. + +```js +const setBlocking = require('set-blocking') +setBlocking(true) +console.log(someLargeStringToOutput) +``` + +## Historical Context/Word of Warning + +This was created as a shim to address the bug discussed in [node #6456](https://github.com/nodejs/node/issues/6456). This bug crops up on +newer versions of Node.js (`0.12+`), truncating terminal output. + +You should be mindful of the side-effects caused by using `set-blocking`: + +* if your module sets blocking to `true`, it will effect other modules + consuming your library. In [yargs](https://github.com/yargs/yargs/blob/master/yargs.js#L653) we only call + `setBlocking(true)` once we already know we are about to call `process.exit(code)`. +* this patch will not apply to subprocesses spawned with `isTTY = true`, this is + the [default `spawn()` behavior](https://nodejs.org/api/child_process.html#child_process_child_process_spawn_command_args_options). 
+ +## License + +ISC diff --git a/node_modules/set-blocking/index.js b/node_modules/set-blocking/index.js new file mode 100644 index 0000000..6f78774 --- /dev/null +++ b/node_modules/set-blocking/index.js @@ -0,0 +1,7 @@ +module.exports = function (blocking) { + [process.stdout, process.stderr].forEach(function (stream) { + if (stream._handle && stream.isTTY && typeof stream._handle.setBlocking === 'function') { + stream._handle.setBlocking(blocking) + } + }) +} diff --git a/node_modules/set-blocking/package.json b/node_modules/set-blocking/package.json new file mode 100644 index 0000000..c082db7 --- /dev/null +++ b/node_modules/set-blocking/package.json @@ -0,0 +1,42 @@ +{ + "name": "set-blocking", + "version": "2.0.0", + "description": "set blocking stdio and stderr ensuring that terminal output does not truncate", + "main": "index.js", + "scripts": { + "pretest": "standard", + "test": "nyc mocha ./test/*.js", + "coverage": "nyc report --reporter=text-lcov | coveralls", + "version": "standard-version" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/yargs/set-blocking.git" + }, + "keywords": [ + "flush", + "terminal", + "blocking", + "shim", + "stdio", + "stderr" + ], + "author": "Ben Coe ", + "license": "ISC", + "bugs": { + "url": "https://github.com/yargs/set-blocking/issues" + }, + "homepage": "https://github.com/yargs/set-blocking#readme", + "devDependencies": { + "chai": "^3.5.0", + "coveralls": "^2.11.9", + "mocha": "^2.4.5", + "nyc": "^6.4.4", + "standard": "^7.0.1", + "standard-version": "^2.2.1" + }, + "files": [ + "index.js", + "LICENSE.txt" + ] +} \ No newline at end of file diff --git a/node_modules/signal-exit/LICENSE.txt b/node_modules/signal-exit/LICENSE.txt new file mode 100644 index 0000000..eead04a --- /dev/null +++ b/node_modules/signal-exit/LICENSE.txt @@ -0,0 +1,16 @@ +The ISC License + +Copyright (c) 2015, Contributors + +Permission to use, copy, modify, and/or distribute this software +for any purpose with or without 
fee is hereby granted, provided +that the above copyright notice and this permission notice +appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE +LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES +OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, +ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/signal-exit/README.md b/node_modules/signal-exit/README.md new file mode 100644 index 0000000..f9c7c00 --- /dev/null +++ b/node_modules/signal-exit/README.md @@ -0,0 +1,39 @@ +# signal-exit + +[![Build Status](https://travis-ci.org/tapjs/signal-exit.png)](https://travis-ci.org/tapjs/signal-exit) +[![Coverage](https://coveralls.io/repos/tapjs/signal-exit/badge.svg?branch=master)](https://coveralls.io/r/tapjs/signal-exit?branch=master) +[![NPM version](https://img.shields.io/npm/v/signal-exit.svg)](https://www.npmjs.com/package/signal-exit) +[![Standard Version](https://img.shields.io/badge/release-standard%20version-brightgreen.svg)](https://github.com/conventional-changelog/standard-version) + +When you want to fire an event no matter how a process exits: + +* reaching the end of execution. +* explicitly having `process.exit(code)` called. +* having `process.kill(pid, sig)` called. +* receiving a fatal signal from outside the process + +Use `signal-exit`. + +```js +var onExit = require('signal-exit') + +onExit(function (code, signal) { + console.log('process exited!') +}) +``` + +## API + +`var remove = onExit(function (code, signal) {}, options)` + +The return value of the function is a function that will remove the +handler. + +Note that the function *only* fires for signals if the signal would +cause the process to exit. 
That is, there are no other listeners, and +it is a fatal signal. + +## Options + +* `alwaysLast`: Run this handler after any other signal or exit + handlers. This causes `process.emit` to be monkeypatched. diff --git a/node_modules/signal-exit/index.js b/node_modules/signal-exit/index.js new file mode 100644 index 0000000..93703f3 --- /dev/null +++ b/node_modules/signal-exit/index.js @@ -0,0 +1,202 @@ +// Note: since nyc uses this module to output coverage, any lines +// that are in the direct sync flow of nyc's outputCoverage are +// ignored, since we can never get coverage for them. +// grab a reference to node's real process object right away +var process = global.process + +const processOk = function (process) { + return process && + typeof process === 'object' && + typeof process.removeListener === 'function' && + typeof process.emit === 'function' && + typeof process.reallyExit === 'function' && + typeof process.listeners === 'function' && + typeof process.kill === 'function' && + typeof process.pid === 'number' && + typeof process.on === 'function' +} + +// some kind of non-node environment, just no-op +/* istanbul ignore if */ +if (!processOk(process)) { + module.exports = function () { + return function () {} + } +} else { + var assert = require('assert') + var signals = require('./signals.js') + var isWin = /^win/i.test(process.platform) + + var EE = require('events') + /* istanbul ignore if */ + if (typeof EE !== 'function') { + EE = EE.EventEmitter + } + + var emitter + if (process.__signal_exit_emitter__) { + emitter = process.__signal_exit_emitter__ + } else { + emitter = process.__signal_exit_emitter__ = new EE() + emitter.count = 0 + emitter.emitted = {} + } + + // Because this emitter is a global, we have to check to see if a + // previous version of this library failed to enable infinite listeners. + // I know what you're about to say. But literally everything about + // signal-exit is a compromise with evil. Get used to it. 
+ if (!emitter.infinite) { + emitter.setMaxListeners(Infinity) + emitter.infinite = true + } + + module.exports = function (cb, opts) { + /* istanbul ignore if */ + if (!processOk(global.process)) { + return function () {} + } + assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') + + if (loaded === false) { + load() + } + + var ev = 'exit' + if (opts && opts.alwaysLast) { + ev = 'afterexit' + } + + var remove = function () { + emitter.removeListener(ev, cb) + if (emitter.listeners('exit').length === 0 && + emitter.listeners('afterexit').length === 0) { + unload() + } + } + emitter.on(ev, cb) + + return remove + } + + var unload = function unload () { + if (!loaded || !processOk(global.process)) { + return + } + loaded = false + + signals.forEach(function (sig) { + try { + process.removeListener(sig, sigListeners[sig]) + } catch (er) {} + }) + process.emit = originalProcessEmit + process.reallyExit = originalProcessReallyExit + emitter.count -= 1 + } + module.exports.unload = unload + + var emit = function emit (event, code, signal) { + /* istanbul ignore if */ + if (emitter.emitted[event]) { + return + } + emitter.emitted[event] = true + emitter.emit(event, code, signal) + } + + // { : , ... } + var sigListeners = {} + signals.forEach(function (sig) { + sigListeners[sig] = function listener () { + /* istanbul ignore if */ + if (!processOk(global.process)) { + return + } + // If there are no other listeners, an exit is coming! + // Simplest way: remove us and then re-send the signal. + // We know that this will kill the process, so we can + // safely emit now. 
+ var listeners = process.listeners(sig) + if (listeners.length === emitter.count) { + unload() + emit('exit', null, sig) + /* istanbul ignore next */ + emit('afterexit', null, sig) + /* istanbul ignore next */ + if (isWin && sig === 'SIGHUP') { + // "SIGHUP" throws an `ENOSYS` error on Windows, + // so use a supported signal instead + sig = 'SIGINT' + } + /* istanbul ignore next */ + process.kill(process.pid, sig) + } + } + }) + + module.exports.signals = function () { + return signals + } + + var loaded = false + + var load = function load () { + if (loaded || !processOk(global.process)) { + return + } + loaded = true + + // This is the number of onSignalExit's that are in play. + // It's important so that we can count the correct number of + // listeners on signals, and don't wait for the other one to + // handle it instead of us. + emitter.count += 1 + + signals = signals.filter(function (sig) { + try { + process.on(sig, sigListeners[sig]) + return true + } catch (er) { + return false + } + }) + + process.emit = processEmit + process.reallyExit = processReallyExit + } + module.exports.load = load + + var originalProcessReallyExit = process.reallyExit + var processReallyExit = function processReallyExit (code) { + /* istanbul ignore if */ + if (!processOk(global.process)) { + return + } + process.exitCode = code || /* istanbul ignore next */ 0 + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + /* istanbul ignore next */ + originalProcessReallyExit.call(process, process.exitCode) + } + + var originalProcessEmit = process.emit + var processEmit = function processEmit (ev, arg) { + if (ev === 'exit' && processOk(global.process)) { + /* istanbul ignore else */ + if (arg !== undefined) { + process.exitCode = arg + } + var ret = originalProcessEmit.apply(this, arguments) + /* istanbul ignore next */ + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, 
null) + /* istanbul ignore next */ + return ret + } else { + return originalProcessEmit.apply(this, arguments) + } + } +} diff --git a/node_modules/signal-exit/package.json b/node_modules/signal-exit/package.json new file mode 100644 index 0000000..e1a0031 --- /dev/null +++ b/node_modules/signal-exit/package.json @@ -0,0 +1,38 @@ +{ + "name": "signal-exit", + "version": "3.0.7", + "description": "when you want to fire an event no matter how a process exits.", + "main": "index.js", + "scripts": { + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "files": [ + "index.js", + "signals.js" + ], + "repository": { + "type": "git", + "url": "https://github.com/tapjs/signal-exit.git" + }, + "keywords": [ + "signal", + "exit" + ], + "author": "Ben Coe ", + "license": "ISC", + "bugs": { + "url": "https://github.com/tapjs/signal-exit/issues" + }, + "homepage": "https://github.com/tapjs/signal-exit", + "devDependencies": { + "chai": "^3.5.0", + "coveralls": "^3.1.1", + "nyc": "^15.1.0", + "standard-version": "^9.3.1", + "tap": "^15.1.1" + } +} diff --git a/node_modules/signal-exit/signals.js b/node_modules/signal-exit/signals.js new file mode 100644 index 0000000..3bd67a8 --- /dev/null +++ b/node_modules/signal-exit/signals.js @@ -0,0 +1,53 @@ +// This is not the set of all possible signals. +// +// It IS, however, the set of all signals that trigger +// an exit on either Linux or BSD systems. Linux is a +// superset of the signal names supported on BSD, and +// the unknown signals just fail to register, so we can +// catch that easily enough. +// +// Don't bother with SIGKILL. It's uncatchable, which +// means that we can't fire any callbacks anyway. +// +// If a user does happen to register a handler on a non- +// fatal signal like SIGWINCH or something, and then +// exit, it'll end up firing `process.emit('exit')`, so +// the handler will be fired anyway. 
+// +// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised +// artificially, inherently leave the process in a +// state from which it is not safe to try and enter JS +// listeners. +module.exports = [ + 'SIGABRT', + 'SIGALRM', + 'SIGHUP', + 'SIGINT', + 'SIGTERM' +] + +if (process.platform !== 'win32') { + module.exports.push( + 'SIGVTALRM', + 'SIGXCPU', + 'SIGXFSZ', + 'SIGUSR2', + 'SIGTRAP', + 'SIGSYS', + 'SIGQUIT', + 'SIGIOT' + // should detect profiler and enable/disable accordingly. + // see #21 + // 'SIGPROF' + ) +} + +if (process.platform === 'linux') { + module.exports.push( + 'SIGIO', + 'SIGPOLL', + 'SIGPWR', + 'SIGSTKFLT', + 'SIGUNUSED' + ) +} diff --git a/node_modules/string-width/index.d.ts b/node_modules/string-width/index.d.ts new file mode 100644 index 0000000..12b5309 --- /dev/null +++ b/node_modules/string-width/index.d.ts @@ -0,0 +1,29 @@ +declare const stringWidth: { + /** + Get the visual width of a string - the number of columns required to display it. + + Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and doesn't affect the width. 
+ + @example + ``` + import stringWidth = require('string-width'); + + stringWidth('a'); + //=> 1 + + stringWidth('古'); + //=> 2 + + stringWidth('\u001B[1m古\u001B[22m'); + //=> 2 + ``` + */ + (string: string): number; + + // TODO: remove this in the next major version, refactor the whole definition to: + // declare function stringWidth(string: string): number; + // export = stringWidth; + default: typeof stringWidth; +} + +export = stringWidth; diff --git a/node_modules/string-width/index.js b/node_modules/string-width/index.js new file mode 100644 index 0000000..f4d261a --- /dev/null +++ b/node_modules/string-width/index.js @@ -0,0 +1,47 @@ +'use strict'; +const stripAnsi = require('strip-ansi'); +const isFullwidthCodePoint = require('is-fullwidth-code-point'); +const emojiRegex = require('emoji-regex'); + +const stringWidth = string => { + if (typeof string !== 'string' || string.length === 0) { + return 0; + } + + string = stripAnsi(string); + + if (string.length === 0) { + return 0; + } + + string = string.replace(emojiRegex(), ' '); + + let width = 0; + + for (let i = 0; i < string.length; i++) { + const code = string.codePointAt(i); + + // Ignore control characters + if (code <= 0x1F || (code >= 0x7F && code <= 0x9F)) { + continue; + } + + // Ignore combining characters + if (code >= 0x300 && code <= 0x36F) { + continue; + } + + // Surrogates + if (code > 0xFFFF) { + i++; + } + + width += isFullwidthCodePoint(code) ? 
2 : 1; + } + + return width; +}; + +module.exports = stringWidth; +// TODO: remove this in the next major version +module.exports.default = stringWidth; diff --git a/node_modules/string-width/license b/node_modules/string-width/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/string-width/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/string-width/package.json b/node_modules/string-width/package.json new file mode 100644 index 0000000..28ba7b4 --- /dev/null +++ b/node_modules/string-width/package.json @@ -0,0 +1,56 @@ +{ + "name": "string-width", + "version": "4.2.3", + "description": "Get the visual width of a string - the number of columns required to display it", + "license": "MIT", + "repository": "sindresorhus/string-width", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "string", + "character", + "unicode", + "width", + "visual", + "column", + "columns", + "fullwidth", + "full-width", + "full", + "ansi", + "escape", + "codes", + "cli", + "command-line", + "terminal", + "console", + "cjk", + "chinese", + "japanese", + "korean", + "fixed-width" + ], + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "devDependencies": { + "ava": "^1.4.1", + "tsd": "^0.7.1", + "xo": "^0.24.0" + } +} diff --git a/node_modules/string-width/readme.md b/node_modules/string-width/readme.md new file mode 100644 index 0000000..bdd3141 --- /dev/null +++ b/node_modules/string-width/readme.md @@ -0,0 +1,50 @@ +# string-width + +> Get the visual width of a string - the number of columns required to display it + +Some Unicode characters are [fullwidth](https://en.wikipedia.org/wiki/Halfwidth_and_fullwidth_forms) and use double the normal width. [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) are stripped and doesn't affect the width. + +Useful to be able to measure the actual width of command-line output. 
+ + +## Install + +``` +$ npm install string-width +``` + + +## Usage + +```js +const stringWidth = require('string-width'); + +stringWidth('a'); +//=> 1 + +stringWidth('古'); +//=> 2 + +stringWidth('\u001B[1m古\u001B[22m'); +//=> 2 +``` + + +## Related + +- [string-width-cli](https://github.com/sindresorhus/string-width-cli) - CLI for this module +- [string-length](https://github.com/sindresorhus/string-length) - Get the real length of a string +- [widest-line](https://github.com/sindresorhus/widest-line) - Get the visual width of the widest line in a string + + +--- + +
+ + Get professional support for this package with a Tidelift subscription + +
+ + Tidelift helps make open source sustainable for maintainers while giving companies
assurances about security, maintenance, and licensing for their dependencies. +
+
diff --git a/node_modules/string_decoder/LICENSE b/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000..778edb2 --- /dev/null +++ b/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/node_modules/string_decoder/README.md b/node_modules/string_decoder/README.md new file mode 100644 index 0000000..5fd5831 --- /dev/null +++ b/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. 
+ +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/string_decoder/lib/string_decoder.js b/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000..2e89e63 --- /dev/null +++ b/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. 
+exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. 
The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. +function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. 
+function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? 
this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/string_decoder/package.json b/node_modules/string_decoder/package.json new file mode 100644 index 0000000..b2bb141 --- /dev/null +++ b/node_modules/string_decoder/package.json @@ -0,0 +1,34 @@ +{ + "name": "string_decoder", + "version": "1.3.0", + "description": "The string_decoder module from Node core", + "main": "lib/string_decoder.js", + "files": [ + "lib" + ], + "dependencies": { + "safe-buffer": "~5.2.0" + }, + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "scripts": { + "test": "tap test/parallel/*.js && node test/verify-dependencies", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT" +} diff --git a/node_modules/strip-ansi/index.d.ts b/node_modules/strip-ansi/index.d.ts new file mode 100644 index 0000000..907fccc --- /dev/null +++ b/node_modules/strip-ansi/index.d.ts @@ -0,0 +1,17 @@ +/** +Strip [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) from a string. + +@example +``` +import stripAnsi = require('strip-ansi'); + +stripAnsi('\u001B[4mUnicorn\u001B[0m'); +//=> 'Unicorn' + +stripAnsi('\u001B]8;;https://github.com\u0007Click\u001B]8;;\u0007'); +//=> 'Click' +``` +*/ +declare function stripAnsi(string: string): string; + +export = stripAnsi; diff --git a/node_modules/strip-ansi/index.js b/node_modules/strip-ansi/index.js new file mode 100644 index 0000000..9a593df --- /dev/null +++ b/node_modules/strip-ansi/index.js @@ -0,0 +1,4 @@ +'use strict'; +const ansiRegex = require('ansi-regex'); + +module.exports = string => typeof string === 'string' ? 
string.replace(ansiRegex(), '') : string; diff --git a/node_modules/strip-ansi/license b/node_modules/strip-ansi/license new file mode 100644 index 0000000..e7af2f7 --- /dev/null +++ b/node_modules/strip-ansi/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/node_modules/strip-ansi/package.json b/node_modules/strip-ansi/package.json new file mode 100644 index 0000000..1a41108 --- /dev/null +++ b/node_modules/strip-ansi/package.json @@ -0,0 +1,54 @@ +{ + "name": "strip-ansi", + "version": "6.0.1", + "description": "Strip ANSI escape codes from a string", + "license": "MIT", + "repository": "chalk/strip-ansi", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=8" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "strip", + "trim", + "remove", + "ansi", + "styles", + "color", + "colour", + "colors", + "terminal", + "console", + "string", + "tty", + "escape", + "formatting", + "rgb", + "256", + "shell", + "xterm", + "log", + "logging", + "command-line", + "text" + ], + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "devDependencies": { + "ava": "^2.4.0", + "tsd": "^0.10.0", + "xo": "^0.25.3" + } +} diff --git a/node_modules/strip-ansi/readme.md b/node_modules/strip-ansi/readme.md new file mode 100644 index 0000000..7c4b56d --- /dev/null +++ b/node_modules/strip-ansi/readme.md @@ -0,0 +1,46 @@ +# strip-ansi [![Build Status](https://travis-ci.org/chalk/strip-ansi.svg?branch=master)](https://travis-ci.org/chalk/strip-ansi) + +> Strip [ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) from a string + + +## Install + +``` +$ npm install strip-ansi +``` + + +## Usage + +```js +const stripAnsi = require('strip-ansi'); + +stripAnsi('\u001B[4mUnicorn\u001B[0m'); +//=> 'Unicorn' + +stripAnsi('\u001B]8;;https://github.com\u0007Click\u001B]8;;\u0007'); +//=> 'Click' +``` + + +## strip-ansi for enterprise + +Available as part of the Tidelift Subscription. 
+ +The maintainers of strip-ansi and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source dependencies you use to build your applications. Save time, reduce risk, and improve code health, while paying the maintainers of the exact dependencies you use. [Learn more.](https://tidelift.com/subscription/pkg/npm-strip-ansi?utm_source=npm-strip-ansi&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) + + +## Related + +- [strip-ansi-cli](https://github.com/chalk/strip-ansi-cli) - CLI for this module +- [strip-ansi-stream](https://github.com/chalk/strip-ansi-stream) - Streaming version of this module +- [has-ansi](https://github.com/chalk/has-ansi) - Check if a string has ANSI escape codes +- [ansi-regex](https://github.com/chalk/ansi-regex) - Regular expression for matching ANSI escape codes +- [chalk](https://github.com/chalk/chalk) - Terminal string styling done right + + +## Maintainers + +- [Sindre Sorhus](https://github.com/sindresorhus) +- [Josh Junon](https://github.com/qix-) + diff --git a/node_modules/tar/LICENSE b/node_modules/tar/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/tar/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/tar/README.md b/node_modules/tar/README.md new file mode 100644 index 0000000..f620568 --- /dev/null +++ b/node_modules/tar/README.md @@ -0,0 +1,1080 @@ +# node-tar + +Fast and full-featured Tar for Node.js + +The API is designed to mimic the behavior of `tar(1)` on unix systems. +If you are familiar with how tar works, most of this will hopefully be +straightforward for you. If not, then hopefully this module can teach +you useful unix skills that may come in handy someday :) + +## Background + +A "tar file" or "tarball" is an archive of file system entries +(directories, files, links, etc.) The name comes from "tape archive". +If you run `man tar` on almost any Unix command line, you'll learn +quite a bit about what it can do, and its history. + +Tar has 5 main top-level commands: + +* `c` Create an archive +* `r` Replace entries within an archive +* `u` Update entries within an archive (ie, replace if they're newer) +* `t` List out the contents of an archive +* `x` Extract an archive to disk + +The other flags and options modify how this top level function works. + +## High-Level API + +These 5 functions are the high-level API. All of them have a +single-character name (for unix nerds familiar with `tar(1)`) as well +as a long name (for everyone else). + +All the high-level functions take the following arguments, all three +of which are optional and may be omitted. + +1. `options` - An optional object specifying various options +2. `paths` - An array of paths to add or extract +3. `callback` - Called when the command is completed, if async. (If + sync or no file specified, providing a callback throws a + `TypeError`.) 
+ +If the command is sync (ie, if `options.sync=true`), then the +callback is not allowed, since the action will be completed immediately. + +If a `file` argument is specified, and the command is async, then a +`Promise` is returned. In this case, if async, a callback may be +provided which is called when the command is completed. + +If a `file` option is not specified, then a stream is returned. For +`create`, this is a readable stream of the generated archive. For +`list` and `extract` this is a writable stream that an archive should +be written into. If a file is not specified, then a callback is not +allowed, because you're already getting a stream to work with. + +`replace` and `update` only work on existing archives, and so require +a `file` argument. + +Sync commands without a file argument return a stream that acts on its +input immediately in the same tick. For readable streams, this means +that all of the data is immediately available by calling +`stream.read()`. For writable streams, it will be acted upon as soon +as it is provided, but this can be at any time. + +### Warnings and Errors + +Tar emits warnings and errors for recoverable and unrecoverable situations, +respectively. In many cases, a warning only affects a single entry in an +archive, or is simply informing you that it's modifying an entry to comply +with the settings provided. + +Unrecoverable warnings will always raise an error (ie, emit `'error'` on +streaming actions, throw for non-streaming sync actions, reject the +returned Promise for non-streaming async operations, or call a provided +callback with an `Error` as the first argument). Recoverable errors will +raise an error only if `strict: true` is set in the options. + +Respond to (recoverable) warnings by listening to the `warn` event. +Handlers receive 3 arguments: + +- `code` String. One of the error codes below. This may not match + `data.code`, which preserves the original error code from fs and zlib. +- `message` String. 
More details about the error. +- `data` Metadata about the error. An `Error` object for errors raised by + fs and zlib. All fields are attached to errors raisd by tar. Typically + contains the following fields, as relevant: + - `tarCode` The tar error code. + - `code` Either the tar error code, or the error code set by the + underlying system. + - `file` The archive file being read or written. + - `cwd` Working directory for creation and extraction operations. + - `entry` The entry object (if it could be created) for `TAR_ENTRY_INFO`, + `TAR_ENTRY_INVALID`, and `TAR_ENTRY_ERROR` warnings. + - `header` The header object (if it could be created, and the entry could + not be created) for `TAR_ENTRY_INFO` and `TAR_ENTRY_INVALID` warnings. + - `recoverable` Boolean. If `false`, then the warning will emit an + `error`, even in non-strict mode. + +#### Error Codes + +* `TAR_ENTRY_INFO` An informative error indicating that an entry is being + modified, but otherwise processed normally. For example, removing `/` or + `C:\` from absolute paths if `preservePaths` is not set. + +* `TAR_ENTRY_INVALID` An indication that a given entry is not a valid tar + archive entry, and will be skipped. This occurs when: + - a checksum fails, + - a `linkpath` is missing for a link type, or + - a `linkpath` is provided for a non-link type. + + If every entry in a parsed archive raises an `TAR_ENTRY_INVALID` error, + then the archive is presumed to be unrecoverably broken, and + `TAR_BAD_ARCHIVE` will be raised. + +* `TAR_ENTRY_ERROR` The entry appears to be a valid tar archive entry, but + encountered an error which prevented it from being unpacked. This occurs + when: + - an unrecoverable fs error happens during unpacking, + - an entry is trying to extract into an excessively deep + location (by default, limited to 1024 subfolders), + - an entry has `..` in the path and `preservePaths` is not set, or + - an entry is extracting through a symbolic link, when `preservePaths` is + not set. 
+ +* `TAR_ENTRY_UNSUPPORTED` An indication that a given entry is + a valid archive entry, but of a type that is unsupported, and so will be + skipped in archive creation or extracting. + +* `TAR_ABORT` When parsing gzipped-encoded archives, the parser will + abort the parse process raise a warning for any zlib errors encountered. + Aborts are considered unrecoverable for both parsing and unpacking. + +* `TAR_BAD_ARCHIVE` The archive file is totally hosed. This can happen for + a number of reasons, and always occurs at the end of a parse or extract: + + - An entry body was truncated before seeing the full number of bytes. + - The archive contained only invalid entries, indicating that it is + likely not an archive, or at least, not an archive this library can + parse. + + `TAR_BAD_ARCHIVE` is considered informative for parse operations, but + unrecoverable for extraction. Note that, if encountered at the end of an + extraction, tar WILL still have extracted as much it could from the + archive, so there may be some garbage files to clean up. + +Errors that occur deeper in the system (ie, either the filesystem or zlib) +will have their error codes left intact, and a `tarCode` matching one of +the above will be added to the warning metadata or the raised error object. + +Errors generated by tar will have one of the above codes set as the +`error.code` field as well, but since errors originating in zlib or fs will +have their original codes, it's better to read `error.tarCode` if you wish +to see how tar is handling the issue. + +### Examples + +The API mimics the `tar(1)` command line functionality, with aliases +for more human-readable option and function names. The goal is that +if you know how to use `tar(1)` in Unix, then you know how to use +`require('tar')` in JavaScript. + +To replicate `tar czf my-tarball.tgz files and folders`, you'd do: + +```js +tar.c( + { + gzip: , + file: 'my-tarball.tgz' + }, + ['some', 'files', 'and', 'folders'] +).then(_ => { .. 
tarball has been created .. }) +``` + +To replicate `tar cz files and folders > my-tarball.tgz`, you'd do: + +```js +tar.c( // or tar.create + { + gzip: + }, + ['some', 'files', 'and', 'folders'] +).pipe(fs.createWriteStream('my-tarball.tgz')) +``` + +To replicate `tar xf my-tarball.tgz` you'd do: + +```js +tar.x( // or tar.extract( + { + file: 'my-tarball.tgz' + } +).then(_=> { .. tarball has been dumped in cwd .. }) +``` + +To replicate `cat my-tarball.tgz | tar x -C some-dir --strip=1`: + +```js +fs.createReadStream('my-tarball.tgz').pipe( + tar.x({ + strip: 1, + C: 'some-dir' // alias for cwd:'some-dir', also ok + }) +) +``` + +To replicate `tar tf my-tarball.tgz`, do this: + +```js +tar.t({ + file: 'my-tarball.tgz', + onentry: entry => { .. do whatever with it .. } +}) +``` + +For example, to just get the list of filenames from an archive: + +```js +const getEntryFilenames = async tarballFilename => { + const filenames = [] + await tar.t({ + file: tarballFilename, + onentry: entry => filenames.push(entry.path), + }) + return filenames +} +``` + +To replicate `cat my-tarball.tgz | tar t` do: + +```js +fs.createReadStream('my-tarball.tgz') + .pipe(tar.t()) + .on('entry', entry => { .. do whatever with it .. }) +``` + +To do anything synchronous, add `sync: true` to the options. Note +that sync functions don't take a callback and don't return a promise. +When the function returns, it's already done. Sync methods without a +file argument return a sync stream, which flushes immediately. But, +of course, it still won't be done until you `.end()` it. + +```js +const getEntryFilenamesSync = tarballFilename => { + const filenames = [] + tar.t({ + file: tarballFilename, + onentry: entry => filenames.push(entry.path), + sync: true, + }) + return filenames +} +``` + +To filter entries, add `filter: ` to the options. +Tar-creating methods call the filter with `filter(path, stat)`. +Tar-reading methods (including extraction) call the filter with +`filter(path, entry)`. 
The filter is called in the `this`-context of +the `Pack` or `Unpack` stream object. + +The arguments list to `tar t` and `tar x` specify a list of filenames +to extract or list, so they're equivalent to a filter that tests if +the file is in the list. + +For those who _aren't_ fans of tar's single-character command names: + +``` +tar.c === tar.create +tar.r === tar.replace (appends to archive, file is required) +tar.u === tar.update (appends if newer, file is required) +tar.x === tar.extract +tar.t === tar.list +``` + +Keep reading for all the command descriptions and options, as well as +the low-level API that they are built on. + +### tar.c(options, fileList, callback) [alias: tar.create] + +Create a tarball archive. + +The `fileList` is an array of paths to add to the tarball. Adding a +directory also adds its children recursively. + +An entry in `fileList` that starts with an `@` symbol is a tar archive +whose entries will be added. To add a file that starts with `@`, +prepend it with `./`. + +The following options are supported: + +- `file` Write the tarball archive to the specified filename. If this + is specified, then the callback will be fired when the file has been + written, and a promise will be returned that resolves when the file + is written. If a filename is not specified, then a Readable Stream + will be returned which will emit the file data. [Alias: `f`] +- `sync` Act synchronously. If this is set, then any provided file + will be fully written after the call to `tar.c`. If this is set, + and a file is not provided, then the resulting stream will already + have the data ready to `read` or `emit('data')` as soon as you + request it. +- `onwarn` A function that will get called with `(code, message, data)` for + any warnings encountered. (See "Warnings and Errors") +- `strict` Treat warnings as crash-worthy errors. Default false. +- `cwd` The current working directory for creating the archive. + Defaults to `process.cwd()`. 
[Alias: `C`] +- `prefix` A path portion to prefix onto the entries in the archive. +- `gzip` Set to any truthy value to create a gzipped archive, or an + object with settings for `zlib.Gzip()` [Alias: `z`] +- `filter` A function that gets called with `(path, stat)` for each + entry being added. Return `true` to add the entry to the archive, + or `false` to omit it. +- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary for other + time-based operations. Additionally, `mode` is set to a "reasonable + default" for most unix systems, based on a `umask` value of `0o22`. +- `preservePaths` Allow absolute paths. By default, `/` is stripped + from absolute paths. [Alias: `P`] +- `mode` The mode to set on the created file archive +- `noDirRecurse` Do not recursively archive the contents of + directories. [Alias: `n`] +- `follow` Set to true to pack the targets of symbolic links. Without + this option, symbolic links are archived as such. [Alias: `L`, `h`] +- `noPax` Suppress pax extended headers. Note that this means that + long paths and linkpaths will be truncated, and large or negative + numeric values may be interpreted incorrectly. +- `noMtime` Set to true to omit writing `mtime` values for entries. + Note that this prevents using other mtime-based features like + `tar.update` or the `keepNewer` option with the resulting tar archive. + [Alias: `m`, `no-mtime`] +- `mtime` Set to a `Date` object to force a specific `mtime` for + everything added to the archive. Overridden by `noMtime`. + +The following options are mostly internal, but can be modified in some +advanced use cases, such as re-using caches between runs. + +- `linkCache` A Map object containing the device and inode value for + any file whose nlink is > 1, to identify hard links. +- `statCache` A Map object that caches calls `lstat`. 
+- `readdirCache` A Map object that caches calls to `readdir`. +- `jobs` A number specifying how many concurrent jobs to run. + Defaults to 4. +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 16 MB. + +### tar.x(options, fileList, callback) [alias: tar.extract] + +Extract a tarball archive. + +The `fileList` is an array of paths to extract from the tarball. If +no paths are provided, then all the entries are extracted. + +If the archive is gzipped, then tar will detect this and unzip it. + +Note that all directories that are created will be forced to be +writable, readable, and listable by their owner, to avoid cases where +a directory prevents extraction of child entries by virtue of its +mode. + +Most extraction errors will cause a `warn` event to be emitted. If +the `cwd` is missing, or not a directory, then the extraction will +fail completely. + +The following options are supported: + +- `cwd` Extract files relative to the specified directory. Defaults + to `process.cwd()`. If provided, this must exist and must be a + directory. [Alias: `C`] +- `file` The archive file to extract. If not specified, then a + Writable stream is returned where the archive data should be + written. [Alias: `f`] +- `sync` Create files and directories synchronously. +- `strict` Treat warnings as crash-worthy errors. Default false. +- `filter` A function that gets called with `(path, entry)` for each + entry being unpacked. Return `true` to unpack the entry from the + archive, or `false` to skip it. +- `newer` Set to true to keep the existing file on disk if it's newer + than the file in the archive. [Alias: `keep-newer`, + `keep-newer-files`] +- `keep` Do not overwrite existing files. In particular, if a file + appears more than once in an archive, later copies will not + overwrite earlier copies. [Alias: `k`, `keep-existing`] +- `preservePaths` Allow absolute paths, paths containing `..`, and + extracting through symbolic links. 
By default, `/` is stripped from + absolute paths, `..` paths are not extracted, and any file whose + location would be modified by a symbolic link is not extracted. + [Alias: `P`] +- `unlink` Unlink files before creating them. Without this option, + tar overwrites existing files, which preserves existing hardlinks. + With this option, existing hardlinks will be broken, as will any + symlink that would affect the location of an extracted file. [Alias: + `U`] +- `strip` Remove the specified number of leading path elements. + Pathnames with fewer elements will be silently skipped. Note that + the pathname is edited after applying the filter, but before + security checks. [Alias: `strip-components`, `stripComponents`] +- `onwarn` A function that will get called with `(code, message, data)` for + any warnings encountered. (See "Warnings and Errors") +- `preserveOwner` If true, tar will set the `uid` and `gid` of + extracted entries to the `uid` and `gid` fields in the archive. + This defaults to true when run as root, and false otherwise. If + false, then files and directories will be set with the owner and + group of the user running the process. This is similar to `-p` in + `tar(1)`, but ACLs and other system-specific data is never unpacked + in this implementation, and modes are set by default already. + [Alias: `p`] +- `uid` Set to a number to force ownership of all extracted files and + folders, and all implicitly created directories, to be owned by the + specified user id, regardless of the `uid` field in the archive. + Cannot be used along with `preserveOwner`. Requires also setting a + `gid` option. +- `gid` Set to a number to force ownership of all extracted files and + folders, and all implicitly created directories, to be owned by the + specified group id, regardless of the `gid` field in the archive. + Cannot be used along with `preserveOwner`. Requires also setting a + `uid` option. 
+- `noMtime` Set to true to omit writing `mtime` value for extracted + entries. [Alias: `m`, `no-mtime`] +- `transform` Provide a function that takes an `entry` object, and + returns a stream, or any falsey value. If a stream is provided, + then that stream's data will be written instead of the contents of + the archive entry. If a falsey value is provided, then the entry is + written to disk as normal. (To exclude items from extraction, use + the `filter` option described above.) +- `onentry` A function that gets called with `(entry)` for each entry + that passes the filter. +- `onwarn` A function that will get called with `(code, message, data)` for + any warnings encountered. (See "Warnings and Errors") +- `noChmod` Set to true to omit calling `fs.chmod()` to ensure that the + extracted file matches the entry mode. This also suppresses the call to + `process.umask()` to determine the default umask value, since tar will + extract with whatever mode is provided, and let the process `umask` apply + normally. +- `maxDepth` The maximum depth of subfolders to extract into. This + defaults to 1024. Anything deeper than the limit will raise a + warning and skip the entry. Set to `Infinity` to remove the + limitation. + +The following options are mostly internal, but can be modified in some +advanced use cases, such as re-using caches between runs. + +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 16 MB. +- `umask` Filter the modes of entries like `process.umask()`. +- `dmode` Default mode for directories +- `fmode` Default mode for files +- `dirCache` A Map object of which directories exist. +- `maxMetaEntrySize` The maximum size of meta entries that is + supported. Defaults to 1 MB. + +Note that using an asynchronous stream type with the `transform` +option will cause undefined behavior in sync extractions. +[MiniPass](http://npm.im/minipass)-based streams are designed for this +use case. 
+ +### tar.t(options, fileList, callback) [alias: tar.list] + +List the contents of a tarball archive. + +The `fileList` is an array of paths to list from the tarball. If +no paths are provided, then all the entries are listed. + +If the archive is gzipped, then tar will detect this and unzip it. + +If the `file` option is _not_ provided, then returns an event emitter that +emits `entry` events with `tar.ReadEntry` objects. However, they don't +emit `'data'` or `'end'` events. (If you want to get actual readable +entries, use the `tar.Parse` class instead.) + +If a `file` option _is_ provided, then the return value will be a promise +that resolves when the file has been fully traversed in async mode, or +`undefined` if `sync: true` is set. Thus, you _must_ specify an `onentry` +method in order to do anything useful with the data it parses. + +The following options are supported: + +- `file` The archive file to list. If not specified, then a + Writable stream is returned where the archive data should be + written. [Alias: `f`] +- `sync` Read the specified file synchronously. (This has no effect + when a file option isn't specified, because entries are emitted as + fast as they are parsed from the stream anyway.) +- `strict` Treat warnings as crash-worthy errors. Default false. +- `filter` A function that gets called with `(path, entry)` for each + entry being listed. Return `true` to emit the entry from the + archive, or `false` to skip it. +- `onentry` A function that gets called with `(entry)` for each entry + that passes the filter. This is important for when `file` is set, + because there is no other way to do anything useful with this method. +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 16 MB. +- `noResume` By default, `entry` streams are resumed immediately after + the call to `onentry`. Set `noResume: true` to suppress this + behavior. 
Note that by opting into this, the stream will never + complete until the entry data is consumed. +- `onwarn` A function that will get called with `(code, message, data)` for + any warnings encountered. (See "Warnings and Errors") + +### tar.u(options, fileList, callback) [alias: tar.update] + +Add files to an archive if they are newer than the entry already in +the tarball archive. + +The `fileList` is an array of paths to add to the tarball. Adding a +directory also adds its children recursively. + +An entry in `fileList` that starts with an `@` symbol is a tar archive +whose entries will be added. To add a file that starts with `@`, +prepend it with `./`. + +The following options are supported: + +- `file` Required. Write the tarball archive to the specified + filename. [Alias: `f`] +- `sync` Act synchronously. If this is set, then any provided file + will be fully written after the call to `tar.c`. +- `onwarn` A function that will get called with `(code, message, data)` for + any warnings encountered. (See "Warnings and Errors") +- `strict` Treat warnings as crash-worthy errors. Default false. +- `cwd` The current working directory for adding entries to the + archive. Defaults to `process.cwd()`. [Alias: `C`] +- `prefix` A path portion to prefix onto the entries in the archive. +- `gzip` Set to any truthy value to create a gzipped archive, or an + object with settings for `zlib.Gzip()` [Alias: `z`] +- `filter` A function that gets called with `(path, stat)` for each + entry being added. Return `true` to add the entry to the archive, + or `false` to omit it. +- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary for other + time-based operations. Additionally, `mode` is set to a "reasonable + default" for most unix systems, based on a `umask` value of `0o22`. +- `preservePaths` Allow absolute paths. 
By default, `/` is stripped + from absolute paths. [Alias: `P`] +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 16 MB. +- `noDirRecurse` Do not recursively archive the contents of + directories. [Alias: `n`] +- `follow` Set to true to pack the targets of symbolic links. Without + this option, symbolic links are archived as such. [Alias: `L`, `h`] +- `noPax` Suppress pax extended headers. Note that this means that + long paths and linkpaths will be truncated, and large or negative + numeric values may be interpreted incorrectly. +- `noMtime` Set to true to omit writing `mtime` values for entries. + Note that this prevents using other mtime-based features like + `tar.update` or the `keepNewer` option with the resulting tar archive. + [Alias: `m`, `no-mtime`] +- `mtime` Set to a `Date` object to force a specific `mtime` for + everything added to the archive. Overridden by `noMtime`. + +### tar.r(options, fileList, callback) [alias: tar.replace] + +Add files to an existing archive. Because later entries override +earlier entries, this effectively replaces any existing entries. + +The `fileList` is an array of paths to add to the tarball. Adding a +directory also adds its children recursively. + +An entry in `fileList` that starts with an `@` symbol is a tar archive +whose entries will be added. To add a file that starts with `@`, +prepend it with `./`. + +The following options are supported: + +- `file` Required. Write the tarball archive to the specified + filename. [Alias: `f`] +- `sync` Act synchronously. If this is set, then any provided file + will be fully written after the call to `tar.c`. +- `onwarn` A function that will get called with `(code, message, data)` for + any warnings encountered. (See "Warnings and Errors") +- `strict` Treat warnings as crash-worthy errors. Default false. +- `cwd` The current working directory for adding entries to the + archive. Defaults to `process.cwd()`. 
[Alias: `C`] +- `prefix` A path portion to prefix onto the entries in the archive. +- `gzip` Set to any truthy value to create a gzipped archive, or an + object with settings for `zlib.Gzip()` [Alias: `z`] +- `filter` A function that gets called with `(path, stat)` for each + entry being added. Return `true` to add the entry to the archive, + or `false` to omit it. +- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary for other + time-based operations. Additionally, `mode` is set to a "reasonable + default" for most unix systems, based on a `umask` value of `0o22`. +- `preservePaths` Allow absolute paths. By default, `/` is stripped + from absolute paths. [Alias: `P`] +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 16 MB. +- `noDirRecurse` Do not recursively archive the contents of + directories. [Alias: `n`] +- `follow` Set to true to pack the targets of symbolic links. Without + this option, symbolic links are archived as such. [Alias: `L`, `h`] +- `noPax` Suppress pax extended headers. Note that this means that + long paths and linkpaths will be truncated, and large or negative + numeric values may be interpreted incorrectly. +- `noMtime` Set to true to omit writing `mtime` values for entries. + Note that this prevents using other mtime-based features like + `tar.update` or the `keepNewer` option with the resulting tar archive. + [Alias: `m`, `no-mtime`] +- `mtime` Set to a `Date` object to force a specific `mtime` for + everything added to the archive. Overridden by `noMtime`. + + +## Low-Level API + +### class tar.Pack + +A readable tar stream. + +Has all the standard readable stream interface stuff. `'data'` and +`'end'` events, `read()` method, `pause()` and `resume()`, etc. 
+ +#### constructor(options) + +The following options are supported: + +- `onwarn` A function that will get called with `(code, message, data)` for + any warnings encountered. (See "Warnings and Errors") +- `strict` Treat warnings as crash-worthy errors. Default false. +- `cwd` The current working directory for creating the archive. + Defaults to `process.cwd()`. +- `prefix` A path portion to prefix onto the entries in the archive. +- `gzip` Set to any truthy value to create a gzipped archive, or an + object with settings for `zlib.Gzip()` +- `filter` A function that gets called with `(path, stat)` for each + entry being added. Return `true` to add the entry to the archive, + or `false` to omit it. +- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary for other + time-based operations. Additionally, `mode` is set to a "reasonable + default" for most unix systems, based on a `umask` value of `0o22`. +- `preservePaths` Allow absolute paths. By default, `/` is stripped + from absolute paths. +- `linkCache` A Map object containing the device and inode value for + any file whose nlink is > 1, to identify hard links. +- `statCache` A Map object that caches calls `lstat`. +- `readdirCache` A Map object that caches calls to `readdir`. +- `jobs` A number specifying how many concurrent jobs to run. + Defaults to 4. +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 16 MB. +- `noDirRecurse` Do not recursively archive the contents of + directories. +- `follow` Set to true to pack the targets of symbolic links. Without + this option, symbolic links are archived as such. +- `noPax` Suppress pax extended headers. Note that this means that + long paths and linkpaths will be truncated, and large or negative + numeric values may be interpreted incorrectly. 
+- `noMtime` Set to true to omit writing `mtime` values for entries. + Note that this prevents using other mtime-based features like + `tar.update` or the `keepNewer` option with the resulting tar archive. +- `mtime` Set to a `Date` object to force a specific `mtime` for + everything added to the archive. Overridden by `noMtime`. + +#### add(path) + +Adds an entry to the archive. Returns the Pack stream. + +#### write(path) + +Adds an entry to the archive. Returns true if flushed. + +#### end() + +Finishes the archive. + +### class tar.Pack.Sync + +Synchronous version of `tar.Pack`. + +### class tar.Unpack + +A writable stream that unpacks a tar archive onto the file system. + +All the normal writable stream stuff is supported. `write()` and +`end()` methods, `'drain'` events, etc. + +Note that all directories that are created will be forced to be +writable, readable, and listable by their owner, to avoid cases where +a directory prevents extraction of child entries by virtue of its +mode. + +`'close'` is emitted when it's done writing stuff to the file system. + +Most unpack errors will cause a `warn` event to be emitted. If the +`cwd` is missing, or not a directory, then an error will be emitted. + +#### constructor(options) + +- `cwd` Extract files relative to the specified directory. Defaults + to `process.cwd()`. If provided, this must exist and must be a + directory. +- `filter` A function that gets called with `(path, entry)` for each + entry being unpacked. Return `true` to unpack the entry from the + archive, or `false` to skip it. +- `newer` Set to true to keep the existing file on disk if it's newer + than the file in the archive. +- `keep` Do not overwrite existing files. In particular, if a file + appears more than once in an archive, later copies will not + overwrite earlier copies. +- `preservePaths` Allow absolute paths, paths containing `..`, and + extracting through symbolic links. 
By default, `/` is stripped from + absolute paths, `..` paths are not extracted, and any file whose + location would be modified by a symbolic link is not extracted. +- `unlink` Unlink files before creating them. Without this option, + tar overwrites existing files, which preserves existing hardlinks. + With this option, existing hardlinks will be broken, as will any + symlink that would affect the location of an extracted file. +- `strip` Remove the specified number of leading path elements. + Pathnames with fewer elements will be silently skipped. Note that + the pathname is edited after applying the filter, but before + security checks. +- `onwarn` A function that will get called with `(code, message, data)` for + any warnings encountered. (See "Warnings and Errors") +- `umask` Filter the modes of entries like `process.umask()`. +- `dmode` Default mode for directories +- `fmode` Default mode for files +- `dirCache` A Map object of which directories exist. +- `maxMetaEntrySize` The maximum size of meta entries that is + supported. Defaults to 1 MB. +- `preserveOwner` If true, tar will set the `uid` and `gid` of + extracted entries to the `uid` and `gid` fields in the archive. + This defaults to true when run as root, and false otherwise. If + false, then files and directories will be set with the owner and + group of the user running the process. This is similar to `-p` in + `tar(1)`, but ACLs and other system-specific data is never unpacked + in this implementation, and modes are set by default already. +- `win32` True if on a windows platform. Causes behavior where + filenames containing `<|>?` chars are converted to + windows-compatible values while being unpacked. +- `uid` Set to a number to force ownership of all extracted files and + folders, and all implicitly created directories, to be owned by the + specified user id, regardless of the `uid` field in the archive. + Cannot be used along with `preserveOwner`. Requires also setting a + `gid` option. 
+- `gid` Set to a number to force ownership of all extracted files and + folders, and all implicitly created directories, to be owned by the + specified group id, regardless of the `gid` field in the archive. + Cannot be used along with `preserveOwner`. Requires also setting a + `uid` option. +- `noMtime` Set to true to omit writing `mtime` value for extracted + entries. +- `transform` Provide a function that takes an `entry` object, and + returns a stream, or any falsey value. If a stream is provided, + then that stream's data will be written instead of the contents of + the archive entry. If a falsey value is provided, then the entry is + written to disk as normal. (To exclude items from extraction, use + the `filter` option described above.) +- `strict` Treat warnings as crash-worthy errors. Default false. +- `onentry` A function that gets called with `(entry)` for each entry + that passes the filter. +- `onwarn` A function that will get called with `(code, message, data)` for + any warnings encountered. (See "Warnings and Errors") +- `noChmod` Set to true to omit calling `fs.chmod()` to ensure that the + extracted file matches the entry mode. This also suppresses the call to + `process.umask()` to determine the default umask value, since tar will + extract with whatever mode is provided, and let the process `umask` apply + normally. +- `maxDepth` The maximum depth of subfolders to extract into. This + defaults to 1024. Anything deeper than the limit will raise a + warning and skip the entry. Set to `Infinity` to remove the + limitation. + +### class tar.Unpack.Sync + +Synchronous version of `tar.Unpack`. + +Note that using an asynchronous stream type with the `transform` +option will cause undefined behavior in sync unpack streams. +[MiniPass](http://npm.im/minipass)-based streams are designed for this +use case. + +### class tar.Parse + +A writable stream that parses a tar archive stream. All the standard +writable stream stuff is supported. 
+ +If the archive is gzipped, then tar will detect this and unzip it. + +Emits `'entry'` events with `tar.ReadEntry` objects, which are +themselves readable streams that you can pipe wherever. + +Each `entry` will not emit until the one before it is flushed through, +so make sure to either consume the data (with `on('data', ...)` or +`.pipe(...)`) or throw it away with `.resume()` to keep the stream +flowing. + +#### constructor(options) + +Returns an event emitter that emits `entry` events with +`tar.ReadEntry` objects. + +The following options are supported: + +- `strict` Treat warnings as crash-worthy errors. Default false. +- `filter` A function that gets called with `(path, entry)` for each + entry being listed. Return `true` to emit the entry from the + archive, or `false` to skip it. +- `onentry` A function that gets called with `(entry)` for each entry + that passes the filter. +- `onwarn` A function that will get called with `(code, message, data)` for + any warnings encountered. (See "Warnings and Errors") + +#### abort(error) + +Stop all parsing activities. This is called when there are zlib +errors. It also emits an unrecoverable warning with the error provided. + +### class tar.ReadEntry extends [MiniPass](http://npm.im/minipass) + +A representation of an entry that is being read out of a tar archive. + +It has the following fields: + +- `extended` The extended metadata object provided to the constructor. +- `globalExtended` The global extended metadata object provided to the + constructor. +- `remain` The number of bytes remaining to be written into the + stream. +- `blockRemain` The number of 512-byte blocks remaining to be written + into the stream. +- `ignore` Whether this entry should be ignored. +- `meta` True if this represents metadata about the next entry, false + if it represents a filesystem object. +- All the fields from the header, extended header, and global extended + header are added to the ReadEntry object. 
So it has `path`, `type`, + `size`, `mode`, and so on. + +#### constructor(header, extended, globalExtended) + +Create a new ReadEntry object with the specified header, extended +header, and global extended header values. + +### class tar.WriteEntry extends [MiniPass](http://npm.im/minipass) + +A representation of an entry that is being written from the file +system into a tar archive. + +Emits data for the Header, and for the Pax Extended Header if one is +required, as well as any body data. + +Creating a WriteEntry for a directory does not also create +WriteEntry objects for all of the directory contents. + +It has the following fields: + +- `path` The path field that will be written to the archive. By + default, this is also the path from the cwd to the file system + object. +- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary for other + time-based operations. Additionally, `mode` is set to a "reasonable + default" for most unix systems, based on a `umask` value of `0o22`. +- `myuid` If supported, the uid of the user running the current + process. +- `myuser` The `env.USER` string if set, or `''`. Set as the entry + `uname` field if the file's `uid` matches `this.myuid`. +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 1 MB. +- `linkCache` A Map object containing the device and inode value for + any file whose nlink is > 1, to identify hard links. +- `statCache` A Map object that caches calls `lstat`. +- `preservePaths` Allow absolute paths. By default, `/` is stripped + from absolute paths. +- `cwd` The current working directory for creating the archive. + Defaults to `process.cwd()`. +- `absolute` The absolute path to the entry on the filesystem. By + default, this is `path.resolve(this.cwd, this.path)`, but it can be + overridden explicitly. 
+- `strict` Treat warnings as crash-worthy errors. Default false. +- `win32` True if on a windows platform. Causes behavior where paths + replace `\` with `/` and filenames containing the windows-compatible + forms of `<|>?:` characters are converted to actual `<|>?:` characters + in the archive. +- `noPax` Suppress pax extended headers. Note that this means that + long paths and linkpaths will be truncated, and large or negative + numeric values may be interpreted incorrectly. +- `noMtime` Set to true to omit writing `mtime` values for entries. + Note that this prevents using other mtime-based features like + `tar.update` or the `keepNewer` option with the resulting tar archive. + + +#### constructor(path, options) + +`path` is the path of the entry as it is written in the archive. + +The following options are supported: + +- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary for other + time-based operations. Additionally, `mode` is set to a "reasonable + default" for most unix systems, based on a `umask` value of `0o22`. +- `maxReadSize` The maximum buffer size for `fs.read()` operations. + Defaults to 1 MB. +- `linkCache` A Map object containing the device and inode value for + any file whose nlink is > 1, to identify hard links. +- `statCache` A Map object that caches calls `lstat`. +- `preservePaths` Allow absolute paths. By default, `/` is stripped + from absolute paths. +- `cwd` The current working directory for creating the archive. + Defaults to `process.cwd()`. +- `absolute` The absolute path to the entry on the filesystem. By + default, this is `path.resolve(this.cwd, this.path)`, but it can be + overridden explicitly. +- `strict` Treat warnings as crash-worthy errors. Default false. +- `win32` True if on a windows platform. Causes behavior where paths + replace `\` with `/`. 
+- `onwarn` A function that will get called with `(code, message, data)` for + any warnings encountered. (See "Warnings and Errors") +- `noMtime` Set to true to omit writing `mtime` values for entries. + Note that this prevents using other mtime-based features like + `tar.update` or the `keepNewer` option with the resulting tar archive. +- `umask` Set to restrict the modes on the entries in the archive, + somewhat like how umask works on file creation. Defaults to + `process.umask()` on unix systems, or `0o22` on Windows. + +#### warn(message, data) + +If strict, emit an error with the provided message. + +Othewise, emit a `'warn'` event with the provided message and data. + +### class tar.WriteEntry.Sync + +Synchronous version of tar.WriteEntry + +### class tar.WriteEntry.Tar + +A version of tar.WriteEntry that gets its data from a tar.ReadEntry +instead of from the filesystem. + +#### constructor(readEntry, options) + +`readEntry` is the entry being read out of another archive. + +The following options are supported: + +- `portable` Omit metadata that is system-specific: `ctime`, `atime`, + `uid`, `gid`, `uname`, `gname`, `dev`, `ino`, and `nlink`. Note + that `mtime` is still included, because this is necessary for other + time-based operations. Additionally, `mode` is set to a "reasonable + default" for most unix systems, based on a `umask` value of `0o22`. +- `preservePaths` Allow absolute paths. By default, `/` is stripped + from absolute paths. +- `strict` Treat warnings as crash-worthy errors. Default false. +- `onwarn` A function that will get called with `(code, message, data)` for + any warnings encountered. (See "Warnings and Errors") +- `noMtime` Set to true to omit writing `mtime` values for entries. + Note that this prevents using other mtime-based features like + `tar.update` or the `keepNewer` option with the resulting tar archive. + +### class tar.Header + +A class for reading and writing header blocks. 
+ +It has the following fields: + +- `nullBlock` True if decoding a block which is entirely composed of + `0x00` null bytes. (Useful because tar files are terminated by + at least 2 null blocks.) +- `cksumValid` True if the checksum in the header is valid, false + otherwise. +- `needPax` True if the values, as encoded, will require a Pax + extended header. +- `path` The path of the entry. +- `mode` The 4 lowest-order octal digits of the file mode. That is, + read/write/execute permissions for world, group, and owner, and the + setuid, setgid, and sticky bits. +- `uid` Numeric user id of the file owner +- `gid` Numeric group id of the file owner +- `size` Size of the file in bytes +- `mtime` Modified time of the file +- `cksum` The checksum of the header. This is generated by adding all + the bytes of the header block, treating the checksum field itself as + all ascii space characters (that is, `0x20`). +- `type` The human-readable name of the type of entry this represents, + or the alphanumeric key if unknown. +- `typeKey` The alphanumeric key for the type of entry this header + represents. +- `linkpath` The target of Link and SymbolicLink entries. +- `uname` Human-readable user name of the file owner +- `gname` Human-readable group name of the file owner +- `devmaj` The major portion of the device number. Always `0` for + files, directories, and links. +- `devmin` The minor portion of the device number. Always `0` for + files, directories, and links. +- `atime` File access time. +- `ctime` File change time. + +#### constructor(data, [offset=0]) + +`data` is optional. It is either a Buffer that should be interpreted +as a tar Header starting at the specified offset and continuing for +512 bytes, or a data object of keys and values to set on the header +object, and eventually encode as a tar Header. + +#### decode(block, offset) + +Decode the provided buffer starting at the specified offset. + +Buffer length must be greater than 512 bytes. 
+ +#### set(data) + +Set the fields in the data object. + +#### encode(buffer, offset) + +Encode the header fields into the buffer at the specified offset. + +Returns `this.needPax` to indicate whether a Pax Extended Header is +required to properly encode the specified data. + +### class tar.Pax + +An object representing a set of key-value pairs in an Pax extended +header entry. + +It has the following fields. Where the same name is used, they have +the same semantics as the tar.Header field of the same name. + +- `global` True if this represents a global extended header, or false + if it is for a single entry. +- `atime` +- `charset` +- `comment` +- `ctime` +- `gid` +- `gname` +- `linkpath` +- `mtime` +- `path` +- `size` +- `uid` +- `uname` +- `dev` +- `ino` +- `nlink` + +#### constructor(object, global) + +Set the fields set in the object. `global` is a boolean that defaults +to false. + +#### encode() + +Return a Buffer containing the header and body for the Pax extended +header entry, or `null` if there is nothing to encode. + +#### encodeBody() + +Return a string representing the body of the pax extended header +entry. + +#### encodeField(fieldName) + +Return a string representing the key/value encoding for the specified +fieldName, or `''` if the field is unset. + +### tar.Pax.parse(string, extended, global) + +Return a new Pax object created by parsing the contents of the string +provided. + +If the `extended` object is set, then also add the fields from that +object. (This is necessary because multiple metadata entries can +occur in sequence.) + +### tar.types + +A translation table for the `type` field in tar headers. + +#### tar.types.name.get(code) + +Get the human-readable name for a given alphanumeric code. + +#### tar.types.code.get(name) + +Get the alphanumeric code for a given human-readable name. 
diff --git a/node_modules/tar/index.js b/node_modules/tar/index.js new file mode 100644 index 0000000..c9ae06e --- /dev/null +++ b/node_modules/tar/index.js @@ -0,0 +1,18 @@ +'use strict' + +// high-level commands +exports.c = exports.create = require('./lib/create.js') +exports.r = exports.replace = require('./lib/replace.js') +exports.t = exports.list = require('./lib/list.js') +exports.u = exports.update = require('./lib/update.js') +exports.x = exports.extract = require('./lib/extract.js') + +// classes +exports.Pack = require('./lib/pack.js') +exports.Unpack = require('./lib/unpack.js') +exports.Parse = require('./lib/parse.js') +exports.ReadEntry = require('./lib/read-entry.js') +exports.WriteEntry = require('./lib/write-entry.js') +exports.Header = require('./lib/header.js') +exports.Pax = require('./lib/pax.js') +exports.types = require('./lib/types.js') diff --git a/node_modules/tar/lib/create.js b/node_modules/tar/lib/create.js new file mode 100644 index 0000000..9c860d4 --- /dev/null +++ b/node_modules/tar/lib/create.js @@ -0,0 +1,111 @@ +'use strict' + +// tar -c +const hlo = require('./high-level-opt.js') + +const Pack = require('./pack.js') +const fsm = require('fs-minipass') +const t = require('./list.js') +const path = require('path') + +module.exports = (opt_, files, cb) => { + if (typeof files === 'function') { + cb = files + } + + if (Array.isArray(opt_)) { + files = opt_, opt_ = {} + } + + if (!files || !Array.isArray(files) || !files.length) { + throw new TypeError('no files or directories specified') + } + + files = Array.from(files) + + const opt = hlo(opt_) + + if (opt.sync && typeof cb === 'function') { + throw new TypeError('callback not supported for sync tar functions') + } + + if (!opt.file && typeof cb === 'function') { + throw new TypeError('callback only supported with file option') + } + + return opt.file && opt.sync ? createFileSync(opt, files) + : opt.file ? createFile(opt, files, cb) + : opt.sync ? 
createSync(opt, files) + : create(opt, files) +} + +const createFileSync = (opt, files) => { + const p = new Pack.Sync(opt) + const stream = new fsm.WriteStreamSync(opt.file, { + mode: opt.mode || 0o666, + }) + p.pipe(stream) + addFilesSync(p, files) +} + +const createFile = (opt, files, cb) => { + const p = new Pack(opt) + const stream = new fsm.WriteStream(opt.file, { + mode: opt.mode || 0o666, + }) + p.pipe(stream) + + const promise = new Promise((res, rej) => { + stream.on('error', rej) + stream.on('close', res) + p.on('error', rej) + }) + + addFilesAsync(p, files) + + return cb ? promise.then(cb, cb) : promise +} + +const addFilesSync = (p, files) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + t({ + file: path.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onentry: entry => p.add(entry), + }) + } else { + p.add(file) + } + }) + p.end() +} + +const addFilesAsync = (p, files) => { + while (files.length) { + const file = files.shift() + if (file.charAt(0) === '@') { + return t({ + file: path.resolve(p.cwd, file.slice(1)), + noResume: true, + onentry: entry => p.add(entry), + }).then(_ => addFilesAsync(p, files)) + } else { + p.add(file) + } + } + p.end() +} + +const createSync = (opt, files) => { + const p = new Pack.Sync(opt) + addFilesSync(p, files) + return p +} + +const create = (opt, files) => { + const p = new Pack(opt) + addFilesAsync(p, files) + return p +} diff --git a/node_modules/tar/lib/extract.js b/node_modules/tar/lib/extract.js new file mode 100644 index 0000000..5476798 --- /dev/null +++ b/node_modules/tar/lib/extract.js @@ -0,0 +1,113 @@ +'use strict' + +// tar -x +const hlo = require('./high-level-opt.js') +const Unpack = require('./unpack.js') +const fs = require('fs') +const fsm = require('fs-minipass') +const path = require('path') +const stripSlash = require('./strip-trailing-slashes.js') + +module.exports = (opt_, files, cb) => { + if (typeof opt_ === 'function') { + cb = opt_, files = null, opt_ = {} + } 
else if (Array.isArray(opt_)) { + files = opt_, opt_ = {} + } + + if (typeof files === 'function') { + cb = files, files = null + } + + if (!files) { + files = [] + } else { + files = Array.from(files) + } + + const opt = hlo(opt_) + + if (opt.sync && typeof cb === 'function') { + throw new TypeError('callback not supported for sync tar functions') + } + + if (!opt.file && typeof cb === 'function') { + throw new TypeError('callback only supported with file option') + } + + if (files.length) { + filesFilter(opt, files) + } + + return opt.file && opt.sync ? extractFileSync(opt) + : opt.file ? extractFile(opt, cb) + : opt.sync ? extractSync(opt) + : extract(opt) +} + +// construct a filter that limits the file entries listed +// include child entries if a dir is included +const filesFilter = (opt, files) => { + const map = new Map(files.map(f => [stripSlash(f), true])) + const filter = opt.filter + + const mapHas = (file, r) => { + const root = r || path.parse(file).root || '.' + const ret = file === root ? false + : map.has(file) ? map.get(file) + : mapHas(path.dirname(file), root) + + map.set(file, ret) + return ret + } + + opt.filter = filter + ? 
(file, entry) => filter(file, entry) && mapHas(stripSlash(file)) + : file => mapHas(stripSlash(file)) +} + +const extractFileSync = opt => { + const u = new Unpack.Sync(opt) + + const file = opt.file + const stat = fs.statSync(file) + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + const readSize = opt.maxReadSize || 16 * 1024 * 1024 + const stream = new fsm.ReadStreamSync(file, { + readSize: readSize, + size: stat.size, + }) + stream.pipe(u) +} + +const extractFile = (opt, cb) => { + const u = new Unpack(opt) + const readSize = opt.maxReadSize || 16 * 1024 * 1024 + + const file = opt.file + const p = new Promise((resolve, reject) => { + u.on('error', reject) + u.on('close', resolve) + + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + fs.stat(file, (er, stat) => { + if (er) { + reject(er) + } else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size, + }) + stream.on('error', reject) + stream.pipe(u) + } + }) + }) + return cb ? p.then(cb, cb) : p +} + +const extractSync = opt => new Unpack.Sync(opt) + +const extract = opt => new Unpack(opt) diff --git a/node_modules/tar/lib/get-write-flag.js b/node_modules/tar/lib/get-write-flag.js new file mode 100644 index 0000000..e869599 --- /dev/null +++ b/node_modules/tar/lib/get-write-flag.js @@ -0,0 +1,20 @@ +// Get the appropriate flag to use for creating files +// We use fmap on Windows platforms for files less than +// 512kb. This is a fairly low limit, but avoids making +// things slower in some cases. Since most of what this +// library is used for is extracting tarballs of many +// relatively small files in npm packages and the like, +// it can be a big boost on Windows platforms. +// Only supported in Node v12.9.0 and above. 
+const platform = process.env.__FAKE_PLATFORM__ || process.platform +const isWindows = platform === 'win32' +const fs = global.__FAKE_TESTING_FS__ || require('fs') + +/* istanbul ignore next */ +const { O_CREAT, O_TRUNC, O_WRONLY, UV_FS_O_FILEMAP = 0 } = fs.constants + +const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP +const fMapLimit = 512 * 1024 +const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY +module.exports = !fMapEnabled ? () => 'w' + : size => size < fMapLimit ? fMapFlag : 'w' diff --git a/node_modules/tar/lib/header.js b/node_modules/tar/lib/header.js new file mode 100644 index 0000000..411d5e4 --- /dev/null +++ b/node_modules/tar/lib/header.js @@ -0,0 +1,304 @@ +'use strict' +// parse a 512-byte header block to a data object, or vice-versa +// encode returns `true` if a pax extended header is needed, because +// the data could not be faithfully encoded in a simple header. +// (Also, check header.needPax to see if it needs a pax header.) + +const types = require('./types.js') +const pathModule = require('path').posix +const large = require('./large-numbers.js') + +const SLURP = Symbol('slurp') +const TYPE = Symbol('type') + +class Header { + constructor (data, off, ex, gex) { + this.cksumValid = false + this.needPax = false + this.nullBlock = false + + this.block = null + this.path = null + this.mode = null + this.uid = null + this.gid = null + this.size = null + this.mtime = null + this.cksum = null + this[TYPE] = '0' + this.linkpath = null + this.uname = null + this.gname = null + this.devmaj = 0 + this.devmin = 0 + this.atime = null + this.ctime = null + + if (Buffer.isBuffer(data)) { + this.decode(data, off || 0, ex, gex) + } else if (data) { + this.set(data) + } + } + + decode (buf, off, ex, gex) { + if (!off) { + off = 0 + } + + if (!buf || !(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header') + } + + this.path = decString(buf, off, 100) + this.mode = decNumber(buf, off + 100, 8) + this.uid = decNumber(buf, off 
+ 108, 8) + this.gid = decNumber(buf, off + 116, 8) + this.size = decNumber(buf, off + 124, 12) + this.mtime = decDate(buf, off + 136, 12) + this.cksum = decNumber(buf, off + 148, 12) + + // if we have extended or global extended headers, apply them now + // See https://github.com/npm/node-tar/pull/187 + this[SLURP](ex) + this[SLURP](gex, true) + + // old tar versions marked dirs as a file with a trailing / + this[TYPE] = decString(buf, off + 156, 1) + if (this[TYPE] === '') { + this[TYPE] = '0' + } + if (this[TYPE] === '0' && this.path.slice(-1) === '/') { + this[TYPE] = '5' + } + + // tar implementations sometimes incorrectly put the stat(dir).size + // as the size in the tarball, even though Directory entries are + // not able to have any body at all. In the very rare chance that + // it actually DOES have a body, we weren't going to do anything with + // it anyway, and it'll just be a warning about an invalid header. + if (this[TYPE] === '5') { + this.size = 0 + } + + this.linkpath = decString(buf, off + 157, 100) + if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') { + this.uname = decString(buf, off + 265, 32) + this.gname = decString(buf, off + 297, 32) + this.devmaj = decNumber(buf, off + 329, 8) + this.devmin = decNumber(buf, off + 337, 8) + if (buf[off + 475] !== 0) { + // definitely a prefix, definitely >130 chars. 
+ const prefix = decString(buf, off + 345, 155) + this.path = prefix + '/' + this.path + } else { + const prefix = decString(buf, off + 345, 130) + if (prefix) { + this.path = prefix + '/' + this.path + } + this.atime = decDate(buf, off + 476, 12) + this.ctime = decDate(buf, off + 488, 12) + } + } + + let sum = 8 * 0x20 + for (let i = off; i < off + 148; i++) { + sum += buf[i] + } + + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i] + } + + this.cksumValid = sum === this.cksum + if (this.cksum === null && sum === 8 * 0x20) { + this.nullBlock = true + } + } + + [SLURP] (ex, global) { + for (const k in ex) { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. + if (ex[k] !== null && ex[k] !== undefined && + !(global && k === 'path')) { + this[k] = ex[k] + } + } + } + + encode (buf, off) { + if (!buf) { + buf = this.block = Buffer.alloc(512) + off = 0 + } + + if (!off) { + off = 0 + } + + if (!(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header') + } + + const prefixSize = this.ctime || this.atime ? 
130 : 155 + const split = splitPrefix(this.path || '', prefixSize) + const path = split[0] + const prefix = split[1] + this.needPax = split[2] + + this.needPax = encString(buf, off, 100, path) || this.needPax + this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax + this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax + this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax + this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax + this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax + buf[off + 156] = this[TYPE].charCodeAt(0) + this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax + buf.write('ustar\u000000', off + 257, 8) + this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax + this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax + this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax + this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax + this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax + if (buf[off + 475] !== 0) { + this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax + } else { + this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax + this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax + this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax + } + + let sum = 8 * 0x20 + for (let i = off; i < off + 148; i++) { + sum += buf[i] + } + + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i] + } + + this.cksum = sum + encNumber(buf, off + 148, 8, this.cksum) + this.cksumValid = true + + return this.needPax + } + + set (data) { + for (const i in data) { + if (data[i] !== null && data[i] !== undefined) { + this[i] = data[i] + } + } + } + + get type () { + return types.name.get(this[TYPE]) || this[TYPE] + } + + get typeKey () { + return this[TYPE] + } + + set type (type) { + if 
(types.code.has(type)) { + this[TYPE] = types.code.get(type) + } else { + this[TYPE] = type + } + } +} + +const splitPrefix = (p, prefixSize) => { + const pathSize = 100 + let pp = p + let prefix = '' + let ret + const root = pathModule.parse(p).root || '.' + + if (Buffer.byteLength(pp) < pathSize) { + ret = [pp, prefix, false] + } else { + // first set prefix to the dir, and path to the base + prefix = pathModule.dirname(pp) + pp = pathModule.basename(pp) + + do { + if (Buffer.byteLength(pp) <= pathSize && + Buffer.byteLength(prefix) <= prefixSize) { + // both fit! + ret = [pp, prefix, false] + } else if (Buffer.byteLength(pp) > pathSize && + Buffer.byteLength(prefix) <= prefixSize) { + // prefix fits in prefix, but path doesn't fit in path + ret = [pp.slice(0, pathSize - 1), prefix, true] + } else { + // make path take a bit from prefix + pp = pathModule.join(pathModule.basename(prefix), pp) + prefix = pathModule.dirname(prefix) + } + } while (prefix !== root && !ret) + + // at this point, found no resolution, just truncate + if (!ret) { + ret = [p.slice(0, pathSize - 1), '', true] + } + } + return ret +} + +const decString = (buf, off, size) => + buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '') + +const decDate = (buf, off, size) => + numToDate(decNumber(buf, off, size)) + +const numToDate = num => num === null ? null : new Date(num * 1000) + +const decNumber = (buf, off, size) => + buf[off] & 0x80 ? large.parse(buf.slice(off, off + size)) + : decSmallNumber(buf, off, size) + +const nanNull = value => isNaN(value) ? null : value + +const decSmallNumber = (buf, off, size) => + nanNull(parseInt( + buf.slice(off, off + size) + .toString('utf8').replace(/\0.*$/, '').trim(), 8)) + +// the maximum encodable as a null-terminated octal, by field size +const MAXNUM = { + 12: 0o77777777777, + 8: 0o7777777, +} + +const encNumber = (buf, off, size, number) => + number === null ? false : + number > MAXNUM[size] || number < 0 + ? 
(large.encode(number, buf.slice(off, off + size)), true) + : (encSmallNumber(buf, off, size, number), false) + +const encSmallNumber = (buf, off, size, number) => + buf.write(octalString(number, size), off, size, 'ascii') + +const octalString = (number, size) => + padOctal(Math.floor(number).toString(8), size) + +const padOctal = (string, size) => + (string.length === size - 1 ? string + : new Array(size - string.length - 1).join('0') + string + ' ') + '\0' + +const encDate = (buf, off, size, date) => + date === null ? false : + encNumber(buf, off, size, date.getTime() / 1000) + +// enough to fill the longest string we've got +const NULLS = new Array(156).join('\0') +// pad with nulls, return true if it's longer or non-ascii +const encString = (buf, off, size, string) => + string === null ? false : + (buf.write(string + NULLS, off, size, 'utf8'), + string.length !== Buffer.byteLength(string) || string.length > size) + +module.exports = Header diff --git a/node_modules/tar/lib/high-level-opt.js b/node_modules/tar/lib/high-level-opt.js new file mode 100644 index 0000000..40e4418 --- /dev/null +++ b/node_modules/tar/lib/high-level-opt.js @@ -0,0 +1,29 @@ +'use strict' + +// turn tar(1) style args like `C` into the more verbose things like `cwd` + +const argmap = new Map([ + ['C', 'cwd'], + ['f', 'file'], + ['z', 'gzip'], + ['P', 'preservePaths'], + ['U', 'unlink'], + ['strip-components', 'strip'], + ['stripComponents', 'strip'], + ['keep-newer', 'newer'], + ['keepNewer', 'newer'], + ['keep-newer-files', 'newer'], + ['keepNewerFiles', 'newer'], + ['k', 'keep'], + ['keep-existing', 'keep'], + ['keepExisting', 'keep'], + ['m', 'noMtime'], + ['no-mtime', 'noMtime'], + ['p', 'preserveOwner'], + ['L', 'follow'], + ['h', 'follow'], +]) + +module.exports = opt => opt ? Object.keys(opt).map(k => [ + argmap.has(k) ? 
argmap.get(k) : k, opt[k], +]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {} diff --git a/node_modules/tar/lib/large-numbers.js b/node_modules/tar/lib/large-numbers.js new file mode 100644 index 0000000..b11e72d --- /dev/null +++ b/node_modules/tar/lib/large-numbers.js @@ -0,0 +1,104 @@ +'use strict' +// Tar can encode large and negative numbers using a leading byte of +// 0xff for negative, and 0x80 for positive. + +const encode = (num, buf) => { + if (!Number.isSafeInteger(num)) { + // The number is so large that javascript cannot represent it with integer + // precision. + throw Error('cannot encode number outside of javascript safe integer range') + } else if (num < 0) { + encodeNegative(num, buf) + } else { + encodePositive(num, buf) + } + return buf +} + +const encodePositive = (num, buf) => { + buf[0] = 0x80 + + for (var i = buf.length; i > 1; i--) { + buf[i - 1] = num & 0xff + num = Math.floor(num / 0x100) + } +} + +const encodeNegative = (num, buf) => { + buf[0] = 0xff + var flipped = false + num = num * -1 + for (var i = buf.length; i > 1; i--) { + var byte = num & 0xff + num = Math.floor(num / 0x100) + if (flipped) { + buf[i - 1] = onesComp(byte) + } else if (byte === 0) { + buf[i - 1] = 0 + } else { + flipped = true + buf[i - 1] = twosComp(byte) + } + } +} + +const parse = (buf) => { + const pre = buf[0] + const value = pre === 0x80 ? pos(buf.slice(1, buf.length)) + : pre === 0xff ? twos(buf) + : null + if (value === null) { + throw Error('invalid base256 encoding') + } + + if (!Number.isSafeInteger(value)) { + // The number is so large that javascript cannot represent it with integer + // precision. 
+ throw Error('parsed number outside of javascript safe integer range') + } + + return value +} + +const twos = (buf) => { + var len = buf.length + var sum = 0 + var flipped = false + for (var i = len - 1; i > -1; i--) { + var byte = buf[i] + var f + if (flipped) { + f = onesComp(byte) + } else if (byte === 0) { + f = byte + } else { + flipped = true + f = twosComp(byte) + } + if (f !== 0) { + sum -= f * Math.pow(256, len - i - 1) + } + } + return sum +} + +const pos = (buf) => { + var len = buf.length + var sum = 0 + for (var i = len - 1; i > -1; i--) { + var byte = buf[i] + if (byte !== 0) { + sum += byte * Math.pow(256, len - i - 1) + } + } + return sum +} + +const onesComp = byte => (0xff ^ byte) & 0xff + +const twosComp = byte => ((0xff ^ byte) + 1) & 0xff + +module.exports = { + encode, + parse, +} diff --git a/node_modules/tar/lib/list.js b/node_modules/tar/lib/list.js new file mode 100644 index 0000000..f2358c2 --- /dev/null +++ b/node_modules/tar/lib/list.js @@ -0,0 +1,139 @@ +'use strict' + +// XXX: This shares a lot in common with extract.js +// maybe some DRY opportunity here? 
// tar -t
// Implements the listing entry point: parse an archive (from a file or
// a stream) and emit an 'entry' event for each record, without writing
// anything to disk.
const hlo = require('./high-level-opt.js')
const Parser = require('./parse.js')
const fs = require('fs')
const fsm = require('fs-minipass')
const path = require('path')
const stripSlash = require('./strip-trailing-slashes.js')

// (opt, files, cb) with flexible arg shuffling: opt may be omitted or be
// the files array; cb is only legal for async, file-backed listings.
module.exports = (opt_, files, cb) => {
  if (typeof opt_ === 'function') {
    cb = opt_, files = null, opt_ = {}
  } else if (Array.isArray(opt_)) {
    files = opt_, opt_ = {}
  }

  if (typeof files === 'function') {
    cb = files, files = null
  }

  if (!files) {
    files = []
  } else {
    files = Array.from(files)
  }

  const opt = hlo(opt_)

  if (opt.sync && typeof cb === 'function') {
    throw new TypeError('callback not supported for sync tar functions')
  }

  if (!opt.file && typeof cb === 'function') {
    throw new TypeError('callback only supported with file option')
  }

  if (files.length) {
    filesFilter(opt, files)
  }

  // by default, auto-resume each entry so the parser keeps flowing even
  // if the caller's onentry never consumes the body
  if (!opt.noResume) {
    onentryFunction(opt)
  }

  return opt.file && opt.sync ? listFileSync(opt)
    : opt.file ? listFile(opt, cb)
    : list(opt)
}

// Wrap the user's onentry (if any) so every entry is resume()d after the
// callback runs; otherwise an unread entry body would stall the stream.
const onentryFunction = opt => {
  const onentry = opt.onentry
  opt.onentry = onentry ? e => {
    onentry(e)
    e.resume()
  } : e => e.resume()
}

// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
  const map = new Map(files.map(f => [stripSlash(f), true]))
  const filter = opt.filter

  // mapHas memoizes: a path matches if it, or any ancestor directory,
  // was in the requested file list (recursion walks up to the root)
  const mapHas = (file, r) => {
    const root = r || path.parse(file).root || '.'
    const ret = file === root ? false
      : map.has(file) ? map.get(file)
      : mapHas(path.dirname(file), root)

    map.set(file, ret)
    return ret
  }

  // compose with any user-supplied filter (both must accept)
  opt.filter = filter
    ? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
    : file => mapHas(stripSlash(file))
}

// Synchronous file-backed listing: read the archive in chunks and feed
// the parser inline. Small files are slurped in one readFileSync.
const listFileSync = opt => {
  const p = list(opt)
  const file = opt.file
  let threw = true
  let fd
  try {
    const stat = fs.statSync(file)
    const readSize = opt.maxReadSize || 16 * 1024 * 1024
    if (stat.size < readSize) {
      p.end(fs.readFileSync(file))
    } else {
      let pos = 0
      const buf = Buffer.allocUnsafe(readSize)
      fd = fs.openSync(file, 'r')
      while (pos < stat.size) {
        const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
        pos += bytesRead
        p.write(buf.slice(0, bytesRead))
      }
      p.end()
    }
    threw = false
  } finally {
    // only close on the error path; on success the loop drained the fd
    if (threw && fd) {
      try {
        fs.closeSync(fd)
      } catch (er) {}
    }
  }
}

// Async file-backed listing: pipe a fs-minipass ReadStream into the
// parser; resolves when parsing ends. cb (if given) gets the settled
// promise either way (called for both fulfillment and rejection).
const listFile = (opt, cb) => {
  const parse = new Parser(opt)
  const readSize = opt.maxReadSize || 16 * 1024 * 1024

  const file = opt.file
  const p = new Promise((resolve, reject) => {
    parse.on('error', reject)
    parse.on('end', resolve)

    fs.stat(file, (er, stat) => {
      if (er) {
        reject(er)
      } else {
        const stream = new fsm.ReadStream(file, {
          readSize: readSize,
          size: stat.size,
        })
        stream.on('error', reject)
        stream.pipe(parse)
      }
    })
  })
  return cb ? p.then(cb, cb) : p
}

// Stream mode: caller writes archive bytes into the returned Parser.
const list = opt => new Parser(opt)

// --- node_modules/tar/lib/mkdir.js ---
'use strict'
// wrapper around mkdirp for tar's needs.

// TODO: This should probably be a class, not functionally
// passing around state in a gazillion args.
+ +const mkdirp = require('mkdirp') +const fs = require('fs') +const path = require('path') +const chownr = require('chownr') +const normPath = require('./normalize-windows-path.js') + +class SymlinkError extends Error { + constructor (symlink, path) { + super('Cannot extract through symbolic link') + this.path = path + this.symlink = symlink + } + + get name () { + return 'SylinkError' + } +} + +class CwdError extends Error { + constructor (path, code) { + super(code + ': Cannot cd into \'' + path + '\'') + this.path = path + this.code = code + } + + get name () { + return 'CwdError' + } +} + +const cGet = (cache, key) => cache.get(normPath(key)) +const cSet = (cache, key, val) => cache.set(normPath(key), val) + +const checkCwd = (dir, cb) => { + fs.stat(dir, (er, st) => { + if (er || !st.isDirectory()) { + er = new CwdError(dir, er && er.code || 'ENOTDIR') + } + cb(er) + }) +} + +module.exports = (dir, opt, cb) => { + dir = normPath(dir) + + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + const umask = opt.umask + const mode = opt.mode | 0o0700 + const needChmod = (mode & umask) !== 0 + + const uid = opt.uid + const gid = opt.gid + const doChown = typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid) + + const preserve = opt.preserve + const unlink = opt.unlink + const cache = opt.cache + const cwd = normPath(opt.cwd) + + const done = (er, created) => { + if (er) { + cb(er) + } else { + cSet(cache, dir, true) + if (created && doChown) { + chownr(created, uid, gid, er => done(er)) + } else if (needChmod) { + fs.chmod(dir, mode, cb) + } else { + cb() + } + } + } + + if (cache && cGet(cache, dir) === true) { + return done() + } + + if (dir === cwd) { + return checkCwd(dir, done) + } + + if (preserve) { + return mkdirp(dir, { mode }).then(made => done(null, made), done) + } + + const sub = normPath(path.relative(cwd, dir)) + const parts = sub.split('/') + mkdir_(cwd, 
parts, mode, cache, unlink, cwd, null, done) +} + +const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { + if (!parts.length) { + return cb(null, created) + } + const p = parts.shift() + const part = normPath(path.resolve(base + '/' + p)) + if (cGet(cache, part)) { + return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) + } + fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) +} + +const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => { + if (er) { + fs.lstat(part, (statEr, st) => { + if (statEr) { + statEr.path = statEr.path && normPath(statEr.path) + cb(statEr) + } else if (st.isDirectory()) { + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) + } else if (unlink) { + fs.unlink(part, er => { + if (er) { + return cb(er) + } + fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) + }) + } else if (st.isSymbolicLink()) { + return cb(new SymlinkError(part, part + '/' + parts.join('/'))) + } else { + cb(er) + } + }) + } else { + created = created || part + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) + } +} + +const checkCwdSync = dir => { + let ok = false + let code = 'ENOTDIR' + try { + ok = fs.statSync(dir).isDirectory() + } catch (er) { + code = er.code + } finally { + if (!ok) { + throw new CwdError(dir, code) + } + } +} + +module.exports.sync = (dir, opt) => { + dir = normPath(dir) + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + const umask = opt.umask + const mode = opt.mode | 0o0700 + const needChmod = (mode & umask) !== 0 + + const uid = opt.uid + const gid = opt.gid + const doChown = typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid) + + const preserve = opt.preserve + const unlink = opt.unlink + const cache = opt.cache + const cwd = normPath(opt.cwd) + + const done = (created) => { + cSet(cache, dir, true) + if (created 
&& doChown) { + chownr.sync(created, uid, gid) + } + if (needChmod) { + fs.chmodSync(dir, mode) + } + } + + if (cache && cGet(cache, dir) === true) { + return done() + } + + if (dir === cwd) { + checkCwdSync(cwd) + return done() + } + + if (preserve) { + return done(mkdirp.sync(dir, mode)) + } + + const sub = normPath(path.relative(cwd, dir)) + const parts = sub.split('/') + let created = null + for (let p = parts.shift(), part = cwd; + p && (part += '/' + p); + p = parts.shift()) { + part = normPath(path.resolve(part)) + if (cGet(cache, part)) { + continue + } + + try { + fs.mkdirSync(part, mode) + created = created || part + cSet(cache, part, true) + } catch (er) { + const st = fs.lstatSync(part) + if (st.isDirectory()) { + cSet(cache, part, true) + continue + } else if (unlink) { + fs.unlinkSync(part) + fs.mkdirSync(part, mode) + created = created || part + cSet(cache, part, true) + continue + } else if (st.isSymbolicLink()) { + return new SymlinkError(part, part + '/' + parts.join('/')) + } + } + } + + return done(created) +} diff --git a/node_modules/tar/lib/mode-fix.js b/node_modules/tar/lib/mode-fix.js new file mode 100644 index 0000000..42f1d6e --- /dev/null +++ b/node_modules/tar/lib/mode-fix.js @@ -0,0 +1,27 @@ +'use strict' +module.exports = (mode, isDir, portable) => { + mode &= 0o7777 + + // in portable mode, use the minimum reasonable umask + // if this system creates files with 0o664 by default + // (as some linux distros do), then we'll write the + // archive with 0o644 instead. Also, don't ever create + // a file that is not readable/writable by the owner. 
  // (tail of mode-fix.js) force owner rw, strip group/other write bits
  if (portable) {
    mode = (mode | 0o600) & ~0o22
  }

  // if dirs are readable, then they should be listable
  if (isDir) {
    if (mode & 0o400) {
      mode |= 0o100
    }
    if (mode & 0o40) {
      mode |= 0o10
    }
    if (mode & 0o4) {
      mode |= 0o1
    }
  }
  return mode
}

// --- node_modules/tar/lib/normalize-unicode.js ---
// warning: extremely hot code path.
// This has been meticulously optimized for use
// within npm install on large package trees.
// Do not edit without careful benchmarking.
// Memoizes NFD normalization; null-prototype object avoids prototype
// keys (e.g. 'constructor') colliding with path strings.
const normalizeCache = Object.create(null)
const { hasOwnProperty } = Object.prototype
module.exports = s => {
  if (!hasOwnProperty.call(normalizeCache, s)) {
    normalizeCache[s] = s.normalize('NFD')
  }
  return normalizeCache[s]
}

// --- node_modules/tar/lib/normalize-windows-path.js ---
// on windows, either \ or / are valid directory separators.
// on unix, \ is a valid character in filenames.
// so, on windows, and only on windows, we replace all \ chars with /,
// so that we can use / as our one and only directory separator char.

// TESTING_TAR_FAKE_PLATFORM lets the test suite exercise the win32 branch
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
module.exports = platform !== 'win32' ? p => p
  : p => p && p.replace(/\\/g, '/')

// --- node_modules/tar/lib/pack.js (header; continues below) ---
'use strict'

// A readable tar stream creator
// Technically, this is a transform stream that you write paths into,
// and tar format comes out of.
// The `add()` method is like `write()` but returns this,
// and end() return `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))

// Per-path unit of work tracked in the Pack queue.
class PackJob {
  constructor (path, absolute) {
    this.path = path || './'
    this.absolute = absolute
    this.entry = null
    this.stat = null
    this.readdir = null
    this.pending = false
    this.ignore = false
    this.piped = false
  }
}

const { Minipass } = require('minipass')
const zlib = require('minizlib')
const ReadEntry = require('./read-entry.js')
const WriteEntry = require('./write-entry.js')
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const Yallist = require('yallist')
// archive terminator: two 512-byte null blocks
const EOF = Buffer.alloc(1024)
const ONSTAT = Symbol('onStat')
const ENDED = Symbol('ended')
const QUEUE = Symbol('queue')
const CURRENT = Symbol('current')
const PROCESS = Symbol('process')
const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
const ADDFSENTRY = Symbol('addFSEntry')
const ADDTARENTRY = Symbol('addTarEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const ENTRYOPT = Symbol('entryOpt')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')

const fs = require('fs')
const path = require('path')
const warner = require('./warn-mixin.js')
const normPath = require('./normalize-windows-path.js')

const Pack = warner(class Pack extends Minipass {
  constructor (opt) {
    super(opt)
    opt = opt || Object.create(null)
    this.opt = opt
    this.file = opt.file || ''
    this.cwd = opt.cwd || process.cwd()
    this.maxReadSize = opt.maxReadSize
    this.preservePaths = !!opt.preservePaths
    this.strict = !!opt.strict
    this.noPax = !!opt.noPax
    this.prefix = normPath(opt.prefix || '')
    this.linkCache = opt.linkCache || new Map()
    this.statCache = opt.statCache || new Map()
    this.readdirCache = opt.readdirCache || new Map()

    this[WRITEENTRYCLASS] = WriteEntry
    if (typeof opt.onwarn === 'function') {
      this.on('warn', opt.onwarn)
    }

    this.portable = !!opt.portable
    this.zip = null

    // optional on-the-fly compression; gzip and brotli are exclusive
    if (opt.gzip || opt.brotli) {
      if (opt.gzip && opt.brotli) {
        throw new TypeError('gzip and brotli are mutually exclusive')
      }
      if (opt.gzip) {
        if (typeof opt.gzip !== 'object') {
          opt.gzip = {}
        }
        if (this.portable) {
          opt.gzip.portable = true
        }
        this.zip = new zlib.Gzip(opt.gzip)
      }
      if (opt.brotli) {
        if (typeof opt.brotli !== 'object') {
          opt.brotli = {}
        }
        this.zip = new zlib.BrotliCompress(opt.brotli)
      }
      // compressed bytes bypass our write() and go straight downstream
      this.zip.on('data', chunk => super.write(chunk))
      this.zip.on('end', _ => super.end())
      this.zip.on('drain', _ => this[ONDRAIN]())
      this.on('resume', _ => this.zip.resume())
    } else {
      this.on('drain', this[ONDRAIN])
    }

    this.noDirRecurse = !!opt.noDirRecurse
    this.follow = !!opt.follow
    this.noMtime = !!opt.noMtime
    this.mtime = opt.mtime || null

    this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true

    this[QUEUE] = new Yallist()
    this[JOBS] = 0
    // max concurrently-pending stat/readdir/entry jobs
    this.jobs = +opt.jobs || 4
    this[PROCESSING] = false
    this[ENDED] = false
  }

  // raw passthrough to Minipass.write (our write() takes paths instead)
  [WRITE] (chunk) {
    return super.write(chunk)
  }

  add (path) {
    this.write(path)
    return this
  }

  end (path) {
    if (path) {
      this.write(path)
    }
    this[ENDED] = true
    this[PROCESS]()
    return this
  }

  // write() accepts a path string or a ReadEntry, not raw bytes
  write (path) {
    if (this[ENDED]) {
      throw new Error('write after end')
    }

    if (path instanceof ReadEntry) {
      this[ADDTARENTRY](path)
    } else {
      this[ADDFSENTRY](path)
    }
    return this.flowing
  }

  [ADDTARENTRY] (p) {
    const absolute = normPath(path.resolve(this.cwd, p.path))
    // in this case, we don't have to wait for the stat
    if (!this.filter(p.path, p)) {
      p.resume()
    } else {
      const job = new PackJob(p.path, absolute, false)
      job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
      job.entry.on('end', _ => this[JOBDONE](job))
      this[JOBS] += 1
      this[QUEUE].push(job)
    }

    this[PROCESS]()
  }

  [ADDFSENTRY] (p) {
    const absolute = normPath(path.resolve(this.cwd, p))
    this[QUEUE].push(new PackJob(p, absolute))
    this[PROCESS]()
  }

  [STAT] (job) {
    job.pending = true
    this[JOBS] += 1
    // follow=true resolves symlinks (stat); otherwise archive the link
    const stat = this.follow ? 'stat' : 'lstat'
    fs[stat](job.absolute, (er, stat) => {
      job.pending = false
      this[JOBS] -= 1
      if (er) {
        this.emit('error', er)
      } else {
        this[ONSTAT](job, stat)
      }
    })
  }

  [ONSTAT] (job, stat) {
    this.statCache.set(job.absolute, stat)
    job.stat = stat

    // now we have the stat, we can filter it.
    if (!this.filter(job.path, stat)) {
      job.ignore = true
    }

    this[PROCESS]()
  }

  [READDIR] (job) {
    job.pending = true
    this[JOBS] += 1
    fs.readdir(job.absolute, (er, entries) => {
      job.pending = false
      this[JOBS] -= 1
      if (er) {
        return this.emit('error', er)
      }
      this[ONREADDIR](job, entries)
    })
  }

  [ONREADDIR] (job, entries) {
    this.readdirCache.set(job.absolute, entries)
    job.readdir = entries
    this[PROCESS]()
  }

  // drive the queue: start jobs up to the concurrency limit, prune
  // ignored ones, and finalize the archive once everything drains
  [PROCESS] () {
    if (this[PROCESSING]) {
      return
    }

    this[PROCESSING] = true
    for (let w = this[QUEUE].head;
      w !== null && this[JOBS] < this.jobs;
      w = w.next) {
      this[PROCESSJOB](w.value)
      if (w.value.ignore) {
        const p = w.next
        this[QUEUE].removeNode(w)
        w.next = p
      }
    }

    this[PROCESSING] = false

    if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
      if (this.zip) {
        this.zip.end(EOF)
      } else {
        super.write(EOF)
        super.end()
      }
    }
  }

  get [CURRENT] () {
    return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
  }

  [JOBDONE] (job) {
    this[QUEUE].shift()
    this[JOBS] -= 1
    this[PROCESS]()
  }

  [PROCESSJOB] (job) {
    if (job.pending) {
      return
    }

    if (job.entry) {
      if (job === this[CURRENT] && !job.piped) {
        this[PIPE](job)
      }
      return
    }

    if (!job.stat) {
      if (this.statCache.has(job.absolute)) {
        this[ONSTAT](job, this.statCache.get(job.absolute))
      } else {
        this[STAT](job)
      }
    }
    if (!job.stat) {
      return
    }

    // filtered out!
    if (job.ignore) {
      return
    }

    if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
      if (this.readdirCache.has(job.absolute)) {
        this[ONREADDIR](job, this.readdirCache.get(job.absolute))
      } else {
        this[READDIR](job)
      }
      if (!job.readdir) {
        return
      }
    }

    // we know it doesn't have an entry, because that got checked above
    job.entry = this[ENTRY](job)
    if (!job.entry) {
      job.ignore = true
      return
    }

    if (job === this[CURRENT] && !job.piped) {
      this[PIPE](job)
    }
  }

  // options bag passed to every WriteEntry this Pack creates
  [ENTRYOPT] (job) {
    return {
      onwarn: (code, msg, data) => this.warn(code, msg, data),
      noPax: this.noPax,
      cwd: this.cwd,
      absolute: job.absolute,
      preservePaths: this.preservePaths,
      maxReadSize: this.maxReadSize,
      strict: this.strict,
      portable: this.portable,
      linkCache: this.linkCache,
      statCache: this.statCache,
      noMtime: this.noMtime,
      mtime: this.mtime,
      prefix: this.prefix,
    }
  }

  [ENTRY] (job) {
    this[JOBS] += 1
    try {
      return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
        .on('end', () => this[JOBDONE](job))
        .on('error', er => this.emit('error', er))
    } catch (er) {
      this.emit('error', er)
    }
  }

  [ONDRAIN] () {
    if (this[CURRENT] && this[CURRENT].entry) {
      this[CURRENT].entry.resume()
    }
  }

  // like .pipe() but using super, because our write() is special
  [PIPE] (job) {
    job.piped = true

    // directories enqueue their children before their own body flows
    if (job.readdir) {
      job.readdir.forEach(entry => {
        const p = job.path
        const base = p === './' ? '' : p.replace(/\/*$/, '/')
        this[ADDFSENTRY](base + entry)
      })
    }

    const source = job.entry
    const zip = this.zip

    // manual pipe with backpressure: pause the source when the sink
    // (compressor or downstream) reports a full buffer
    if (zip) {
      source.on('data', chunk => {
        if (!zip.write(chunk)) {
          source.pause()
        }
      })
    } else {
      source.on('data', chunk => {
        if (!super.write(chunk)) {
          source.pause()
        }
      })
    }
  }

  pause () {
    if (this.zip) {
      this.zip.pause()
    }
    return super.pause()
  }
})

// Synchronous Pack: same queue machinery, but stat/readdir/entry work
// all happens inline in the calling tick.
class PackSync extends Pack {
  constructor (opt) {
    super(opt)
    this[WRITEENTRYCLASS] = WriteEntrySync
  }

  // pause/resume are no-ops in sync streams.
  pause () {}
  resume () {}

  [STAT] (job) {
    const stat = this.follow ? 'statSync' : 'lstatSync'
    this[ONSTAT](job, fs[stat](job.absolute))
  }

  [READDIR] (job, stat) {
    this[ONREADDIR](job, fs.readdirSync(job.absolute))
  }

  // gotta get it all in this tick
  [PIPE] (job) {
    const source = job.entry
    const zip = this.zip

    if (job.readdir) {
      job.readdir.forEach(entry => {
        const p = job.path
        const base = p === './' ? '' : p.replace(/\/*$/, '/')
        this[ADDFSENTRY](base + entry)
      })
    }

    if (zip) {
      source.on('data', chunk => {
        zip.write(chunk)
      })
    } else {
      source.on('data', chunk => {
        super[WRITE](chunk)
      })
    }
  }
}

Pack.Sync = PackSync

module.exports = Pack

// --- node_modules/tar/lib/parse.js (header; continues below) ---
'use strict'

// this[BUFFER] is the remainder of a chunk if we're waiting for
// the full 512 bytes of a header to come in. We will Buffer.concat()
// it to the next write(), which is a mem copy, but a small one.
//
// this[QUEUE] is a Yallist of entries that haven't been emitted
// yet this can only get filled up if the user keeps write()ing after
// a write() returns false, or does a write() with more than one entry
//
// We don't buffer chunks, we always parse them and either create an
// entry, or push it into the active entry. The ReadEntry class knows
// to throw data away if .ignore=true
//
// Shift entry off the buffer when it emits 'end', and emit 'entry' for
// the next one in the list.
//
// At any time, we're pushing body chunks into the entry at WRITEENTRY,
// and waiting for 'end' on the entry at READENTRY
//
// ignored entries get .resume() called on them straight away

const warner = require('./warn-mixin.js')
const Header = require('./header.js')
const EE = require('events')
const Yallist = require('yallist')
// meta (pax/longname) entries larger than this are ignored, not buffered
const maxMetaEntrySize = 1024 * 1024
const Entry = require('./read-entry.js')
const Pax = require('./pax.js')
const zlib = require('minizlib')
const { nextTick } = require('process')

// magic bytes used to sniff gzip on the first write
const gzipHeader = Buffer.from([0x1f, 0x8b])
const STATE = Symbol('state')
const WRITEENTRY = Symbol('writeEntry')
const READENTRY = Symbol('readEntry')
const NEXTENTRY = Symbol('nextEntry')
const PROCESSENTRY = Symbol('processEntry')
const EX = Symbol('extendedHeader')
const GEX = Symbol('globalExtendedHeader')
const META = Symbol('meta')
const EMITMETA = Symbol('emitMeta')
const BUFFER = Symbol('buffer')
const QUEUE = Symbol('queue')
const ENDED = Symbol('ended')
const EMITTEDEND = Symbol('emittedEnd')
const EMIT = Symbol('emit')
const UNZIP = Symbol('unzip')
const CONSUMECHUNK = Symbol('consumeChunk')
const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
const CONSUMEBODY = Symbol('consumeBody')
const CONSUMEMETA = Symbol('consumeMeta')
const CONSUMEHEADER = Symbol('consumeHeader')
const CONSUMING = Symbol('consuming')
const BUFFERCONCAT = Symbol('bufferConcat')
const MAYBEEND = Symbol('maybeEnd')
const WRITING = Symbol('writing')
const ABORTED = Symbol('aborted')
const DONE = Symbol('onDone')
const SAW_VALID_ENTRY = Symbol('sawValidEntry')
const SAW_NULL_BLOCK = Symbol('sawNullBlock')
const SAW_EOF = Symbol('sawEOF')
const CLOSESTREAM = Symbol('closeStream')

const noop = _ => true

module.exports = warner(class Parser extends EE {
  constructor (opt) {
    opt = opt || {}
    super(opt)

    this.file = opt.file || ''

    // set to boolean false when an entry starts. 1024 bytes of \0
    // is technically a valid tarball, albeit a boring one.
    this[SAW_VALID_ENTRY] = null

    // these BADARCHIVE errors can't be detected early. listen on DONE.
    this.on(DONE, _ => {
      if (this[STATE] === 'begin' || this[SAW_VALID_ENTRY] === false) {
        // either less than 1 block of data, or all entries were invalid.
        // Either way, probably not even a tarball.
        this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format')
      }
    })

    if (opt.ondone) {
      this.on(DONE, opt.ondone)
    } else {
      this.on(DONE, _ => {
        this.emit('prefinish')
        this.emit('finish')
        this.emit('end')
      })
    }

    this.strict = !!opt.strict
    this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
    this.filter = typeof opt.filter === 'function' ? opt.filter : noop
    // Unlike gzip, brotli doesn't have any magic bytes to identify it
    // Users need to explicitly tell us they're extracting a brotli file
    // Or we infer from the file extension
    const isTBR = (opt.file && (
      opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')))
    // if it's a tbr file it MIGHT be brotli, but we don't know until
    // we look at it and verify it's not a valid tar file.
    this.brotli = !opt.gzip && opt.brotli !== undefined ? opt.brotli
      : isTBR ? undefined
      : false

    // have to set this so that streams are ok piping into it
    this.writable = true
    this.readable = false

    this[QUEUE] = new Yallist()
    this[BUFFER] = null
    this[READENTRY] = null
    this[WRITEENTRY] = null
    this[STATE] = 'begin'
    this[META] = ''
    this[EX] = null
    this[GEX] = null
    this[ENDED] = false
    this[UNZIP] = null
    this[ABORTED] = false
    this[SAW_NULL_BLOCK] = false
    this[SAW_EOF] = false

    this.on('end', () => this[CLOSESTREAM]())

    if (typeof opt.onwarn === 'function') {
      this.on('warn', opt.onwarn)
    }
    if (typeof opt.onentry === 'function') {
      this.on('entry', opt.onentry)
    }
  }

  // Parse one 512-byte header block and transition state accordingly.
  [CONSUMEHEADER] (chunk, position) {
    if (this[SAW_VALID_ENTRY] === null) {
      this[SAW_VALID_ENTRY] = false
    }
    let header
    try {
      header = new Header(chunk, position, this[EX], this[GEX])
    } catch (er) {
      return this.warn('TAR_ENTRY_INVALID', er)
    }

    if (header.nullBlock) {
      // two consecutive null blocks mark the end of the archive
      if (this[SAW_NULL_BLOCK]) {
        this[SAW_EOF] = true
        // ending an archive with no entries. pointless, but legal.
        if (this[STATE] === 'begin') {
          this[STATE] = 'header'
        }
        this[EMIT]('eof')
      } else {
        this[SAW_NULL_BLOCK] = true
        this[EMIT]('nullBlock')
      }
    } else {
      this[SAW_NULL_BLOCK] = false
      if (!header.cksumValid) {
        this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header })
      } else if (!header.path) {
        this.warn('TAR_ENTRY_INVALID', 'path is required', { header })
      } else {
        const type = header.type
        if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
          this.warn('TAR_ENTRY_INVALID', 'linkpath required', { header })
        } else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath) {
          this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { header })
        } else {
          const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])

          // we do this for meta & ignored entries as well, because they
          // are still valid tar, or else we wouldn't know to ignore them
          if (!this[SAW_VALID_ENTRY]) {
            if (entry.remain) {
              // this might be the one!
              const onend = () => {
                if (!entry.invalid) {
                  this[SAW_VALID_ENTRY] = true
                }
              }
              entry.on('end', onend)
            } else {
              this[SAW_VALID_ENTRY] = true
            }
          }

          if (entry.meta) {
            // pax / long-name entries: buffer the body into this[META]
            if (entry.size > this.maxMetaEntrySize) {
              entry.ignore = true
              this[EMIT]('ignoredEntry', entry)
              this[STATE] = 'ignore'
              entry.resume()
            } else if (entry.size > 0) {
              this[META] = ''
              entry.on('data', c => this[META] += c)
              this[STATE] = 'meta'
            }
          } else {
            // a real file entry consumes any pending extended header
            this[EX] = null
            entry.ignore = entry.ignore || !this.filter(entry.path, entry)

            if (entry.ignore) {
              // probably valid, just not something we care about
              this[EMIT]('ignoredEntry', entry)
              this[STATE] = entry.remain ? 'ignore' : 'header'
              entry.resume()
            } else {
              if (entry.remain) {
                this[STATE] = 'body'
              } else {
                this[STATE] = 'header'
                entry.end()
              }

              if (!this[READENTRY]) {
                this[QUEUE].push(entry)
                this[NEXTENTRY]()
              } else {
                this[QUEUE].push(entry)
              }
            }
          }
        }
      }
    }
  }

  [CLOSESTREAM] () {
    nextTick(() => this.emit('close'))
  }

  // Emit one queued entry (or buffered event array); returns whether the
  // NEXTENTRY drain loop should continue.
  [PROCESSENTRY] (entry) {
    let go = true

    if (!entry) {
      this[READENTRY] = null
      go = false
    } else if (Array.isArray(entry)) {
      this.emit.apply(this, entry)
    } else {
      this[READENTRY] = entry
      this.emit('entry', entry)
      if (!entry.emittedEnd) {
        entry.on('end', _ => this[NEXTENTRY]())
        go = false
      }
    }

    return go
  }

  [NEXTENTRY] () {
    do {} while (this[PROCESSENTRY](this[QUEUE].shift()))

    if (!this[QUEUE].length) {
      // At this point, there's nothing in the queue, but we may have an
      // entry which is being consumed (readEntry).
      // If we don't, then we definitely can handle more data.
      // If we do, and either it's flowing, or it has never had any data
      // written to it, then it needs more.
      // The only other possibility is that it has returned false from a
      // write() call, so we wait for the next drain to continue.
      const re = this[READENTRY]
      const drainNow = !re || re.flowing || re.size === re.remain
      if (drainNow) {
        if (!this[WRITING]) {
          this.emit('drain')
        }
      } else {
        re.once('drain', _ => this.emit('drain'))
      }
    }
  }

  [CONSUMEBODY] (chunk, position) {
    // write up to but no more than writeEntry.blockRemain
    const entry = this[WRITEENTRY]
    const br = entry.blockRemain
    const c = (br >= chunk.length && position === 0) ? chunk
      : chunk.slice(position, position + br)

    entry.write(c)

    if (!entry.blockRemain) {
      this[STATE] = 'header'
      this[WRITEENTRY] = null
      entry.end()
    }

    return c.length
  }

  [CONSUMEMETA] (chunk, position) {
    const entry = this[WRITEENTRY]
    const ret = this[CONSUMEBODY](chunk, position)

    // if we finished, then the entry is reset
    if (!this[WRITEENTRY]) {
      this[EMITMETA](entry)
    }

    return ret
  }

  // Emit immediately if nothing is pending; otherwise queue the event so
  // ordering relative to entries is preserved.
  [EMIT] (ev, data, extra) {
    if (!this[QUEUE].length && !this[READENTRY]) {
      this.emit(ev, data, extra)
    } else {
      this[QUEUE].push([ev, data, extra])
    }
  }

  // Apply a completed meta entry to parser state (pax headers, GNU
  // long path/linkpath).
  [EMITMETA] (entry) {
    this[EMIT]('meta', this[META])
    switch (entry.type) {
      case 'ExtendedHeader':
      case 'OldExtendedHeader':
        this[EX] = Pax.parse(this[META], this[EX], false)
        break

      case 'GlobalExtendedHeader':
        this[GEX] = Pax.parse(this[META], this[GEX], true)
        break

      case 'NextFileHasLongPath':
      case 'OldGnuLongPath':
        this[EX] = this[EX] || Object.create(null)
        this[EX].path = this[META].replace(/\0.*/, '')
        break

      case 'NextFileHasLongLinkpath':
        this[EX] = this[EX] || Object.create(null)
        this[EX].linkpath = this[META].replace(/\0.*/, '')
        break

      /* istanbul ignore next */
      default: throw new Error('unknown meta: ' + entry.type)
    }
  }

  abort (error) {
    this[ABORTED] = true
    this.emit('abort', error)
    // always throws, even in non-strict mode
    this.warn('TAR_ABORT', error, { recoverable: false })
  }

  write (chunk) {
    if (this[ABORTED]) {
      return
    }

    // first write, might be gzipped
    const needSniff = this[UNZIP] === null ||
      this.brotli === undefined && this[UNZIP] === false
    if (needSniff && chunk) {
      if (this[BUFFER]) {
        chunk = Buffer.concat([this[BUFFER], chunk])
        this[BUFFER] = null
      }
      if (chunk.length < gzipHeader.length) {
        this[BUFFER] = chunk
        return true
      }

      // look for gzip header
      for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
        if (chunk[i] !== gzipHeader[i]) {
          this[UNZIP] = false
        }
      }

      const maybeBrotli = this.brotli === undefined
      if (this[UNZIP] === false && maybeBrotli) {
        // read the first header to see if it's a valid tar file. If so,
        // we can safely assume that it's not actually brotli, despite the
        // .tbr or .tar.br file extension.
        // if we ended before getting a full chunk, yes, def brotli
        if (chunk.length < 512) {
          if (this[ENDED]) {
            this.brotli = true
          } else {
            this[BUFFER] = chunk
            return true
          }
        } else {
          // if it's tar, it's pretty reliably not brotli, chances of
          // that happening are astronomical.
          try {
            new Header(chunk.slice(0, 512))
            this.brotli = false
          } catch (_) {
            this.brotli = true
          }
        }
      }

      // compressed input: route all writes through the decompressor
      if (this[UNZIP] === null || (this[UNZIP] === false && this.brotli)) {
        const ended = this[ENDED]
        this[ENDED] = false
        this[UNZIP] = this[UNZIP] === null
          ? new zlib.Unzip()
          : new zlib.BrotliDecompress()
        this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
        this[UNZIP].on('error', er => this.abort(er))
        this[UNZIP].on('end', _ => {
          this[ENDED] = true
          this[CONSUMECHUNK]()
        })
        this[WRITING] = true
        const ret = this[UNZIP][ended ? 'end' : 'write'](chunk)
        this[WRITING] = false
        return ret
      }
    }

    this[WRITING] = true
    if (this[UNZIP]) {
      this[UNZIP].write(chunk)
    } else {
      this[CONSUMECHUNK](chunk)
    }
    this[WRITING] = false

    // return false if there's a queue, or if the current entry isn't flowing
    const ret =
      this[QUEUE].length ? false :
      this[READENTRY] ? this[READENTRY].flowing :
      true

    // if we have no queue, then that means a clogged READENTRY
    if (!ret && !this[QUEUE].length) {
      this[READENTRY].once('drain', _ => this.emit('drain'))
    }

    return ret
  }

  [BUFFERCONCAT] (c) {
    if (c && !this[ABORTED]) {
      this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
    }
  }

  [MAYBEEND] () {
    if (this[ENDED] &&
        !this[EMITTEDEND] &&
        !this[ABORTED] &&
        !this[CONSUMING]) {
      this[EMITTEDEND] = true
      const entry = this[WRITEENTRY]
      if (entry && entry.blockRemain) {
        // truncated, likely a damaged file
        const have = this[BUFFER] ? this[BUFFER].length : 0
        this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${
          entry.blockRemain} more bytes, only ${have} available)`, { entry })
        if (this[BUFFER]) {
          entry.write(this[BUFFER])
        }
        entry.end()
      }
      this[EMIT](DONE)
    }
  }

  // Re-entrancy guard around the actual chunk consumption: writes that
  // arrive while consuming are buffered and drained afterwards.
  [CONSUMECHUNK] (chunk) {
    if (this[CONSUMING]) {
      this[BUFFERCONCAT](chunk)
    } else if (!chunk && !this[BUFFER]) {
      this[MAYBEEND]()
    } else {
      this[CONSUMING] = true
      if (this[BUFFER]) {
        this[BUFFERCONCAT](chunk)
        const c = this[BUFFER]
        this[BUFFER] = null
        this[CONSUMECHUNKSUB](c)
      } else {
        this[CONSUMECHUNKSUB](chunk)
      }

      while (this[BUFFER] &&
          this[BUFFER].length >= 512 &&
          !this[ABORTED] &&
          !this[SAW_EOF]) {
        const c = this[BUFFER]
        this[BUFFER] = null
        this[CONSUMECHUNKSUB](c)
      }
      this[CONSUMING] = false
    }

    if (!this[BUFFER] || this[ENDED]) {
      this[MAYBEEND]()
    }
  }

  [CONSUMECHUNKSUB] (chunk) {
    // we know that we are in CONSUMING mode, so anything written goes into
    // the buffer. Advance the position and put any remainder in the buffer.
    let position = 0
    const length = chunk.length
    while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) {
      switch (this[STATE]) {
        case 'begin':
        case 'header':
          this[CONSUMEHEADER](chunk, position)
          position += 512
          break

        case 'ignore':
        case 'body':
          position += this[CONSUMEBODY](chunk, position)
          break

        case 'meta':
          position += this[CONSUMEMETA](chunk, position)
          break

        /* istanbul ignore next */
        default:
          throw new Error('invalid state: ' + this[STATE])
      }
    }

    // stash the sub-512-byte remainder for the next write
    if (position < length) {
      if (this[BUFFER]) {
        this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
      } else {
        this[BUFFER] = chunk.slice(position)
      }
    }
  }

  end (chunk) {
    if (!this[ABORTED]) {
      if (this[UNZIP]) {
        this[UNZIP].end(chunk)
      } else {
        this[ENDED] = true
        // still have to sniff an empty .tbr/.tar.br input
        if (this.brotli === undefined) chunk = chunk || Buffer.alloc(0)
        this.write(chunk)
      }
    }
  }
})

// --- node_modules/tar/lib/path-reservations.js (header; continues below) ---
// A path exclusive reservation system
// reserve([list, of, paths], fn)
// When the fn is first in line for all its paths, it
// is called with a cb that clears the reservation.
//
// Used by async unpack to avoid clobbering paths in use,
// while still allowing maximal safe parallelization.
+ +const assert = require('assert') +const normalize = require('./normalize-unicode.js') +const stripSlashes = require('./strip-trailing-slashes.js') +const { join } = require('path') + +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform +const isWindows = platform === 'win32' + +module.exports = () => { + // path => [function or Set] + // A Set object means a directory reservation + // A fn is a direct reservation on that path + const queues = new Map() + + // fn => {paths:[path,...], dirs:[path, ...]} + const reservations = new Map() + + // return a set of parent dirs for a given path + // '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d'] + const getDirs = path => { + const dirs = path.split('/').slice(0, -1).reduce((set, path) => { + if (set.length) { + path = join(set[set.length - 1], path) + } + set.push(path || '/') + return set + }, []) + return dirs + } + + // functions currently running + const running = new Set() + + // return the queues for each path the function cares about + // fn => {paths, dirs} + const getQueues = fn => { + const res = reservations.get(fn) + /* istanbul ignore if - unpossible */ + if (!res) { + throw new Error('function does not have any path reservations') + } + return { + paths: res.paths.map(path => queues.get(path)), + dirs: [...res.dirs].map(path => queues.get(path)), + } + } + + // check if fn is first in line for all its paths, and is + // included in the first set for all its dir queues + const check = fn => { + const { paths, dirs } = getQueues(fn) + return paths.every(q => q[0] === fn) && + dirs.every(q => q[0] instanceof Set && q[0].has(fn)) + } + + // run the function if it's first in line and not already running + const run = fn => { + if (running.has(fn) || !check(fn)) { + return false + } + running.add(fn) + fn(() => clear(fn)) + return true + } + + const clear = fn => { + if (!running.has(fn)) { + return false + } + + const { paths, dirs } = reservations.get(fn) + const next = new Set() 
+ + paths.forEach(path => { + const q = queues.get(path) + assert.equal(q[0], fn) + if (q.length === 1) { + queues.delete(path) + } else { + q.shift() + if (typeof q[0] === 'function') { + next.add(q[0]) + } else { + q[0].forEach(fn => next.add(fn)) + } + } + }) + + dirs.forEach(dir => { + const q = queues.get(dir) + assert(q[0] instanceof Set) + if (q[0].size === 1 && q.length === 1) { + queues.delete(dir) + } else if (q[0].size === 1) { + q.shift() + + // must be a function or else the Set would've been reused + next.add(q[0]) + } else { + q[0].delete(fn) + } + }) + running.delete(fn) + + next.forEach(fn => run(fn)) + return true + } + + const reserve = (paths, fn) => { + // collide on matches across case and unicode normalization + // On windows, thanks to the magic of 8.3 shortnames, it is fundamentally + // impossible to determine whether two paths refer to the same thing on + // disk, without asking the kernel for a shortname. + // So, we just pretend that every path matches every other path here, + // effectively removing all parallelization on windows. + paths = isWindows ? 
['win32 parallelization disabled'] : paths.map(p => { + // don't need normPath, because we skip this entirely for windows + return stripSlashes(join(normalize(p))).toLowerCase() + }) + + const dirs = new Set( + paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)) + ) + reservations.set(fn, { dirs, paths }) + paths.forEach(path => { + const q = queues.get(path) + if (!q) { + queues.set(path, [fn]) + } else { + q.push(fn) + } + }) + dirs.forEach(dir => { + const q = queues.get(dir) + if (!q) { + queues.set(dir, [new Set([fn])]) + } else if (q[q.length - 1] instanceof Set) { + q[q.length - 1].add(fn) + } else { + q.push(new Set([fn])) + } + }) + + return run(fn) + } + + return { check, reserve } +} diff --git a/node_modules/tar/lib/pax.js b/node_modules/tar/lib/pax.js new file mode 100644 index 0000000..4a7ca85 --- /dev/null +++ b/node_modules/tar/lib/pax.js @@ -0,0 +1,150 @@ +'use strict' +const Header = require('./header.js') +const path = require('path') + +class Pax { + constructor (obj, global) { + this.atime = obj.atime || null + this.charset = obj.charset || null + this.comment = obj.comment || null + this.ctime = obj.ctime || null + this.gid = obj.gid || null + this.gname = obj.gname || null + this.linkpath = obj.linkpath || null + this.mtime = obj.mtime || null + this.path = obj.path || null + this.size = obj.size || null + this.uid = obj.uid || null + this.uname = obj.uname || null + this.dev = obj.dev || null + this.ino = obj.ino || null + this.nlink = obj.nlink || null + this.global = global || false + } + + encode () { + const body = this.encodeBody() + if (body === '') { + return null + } + + const bodyLen = Buffer.byteLength(body) + // round up to 512 bytes + // add 512 for header + const bufLen = 512 * Math.ceil(1 + bodyLen / 512) + const buf = Buffer.allocUnsafe(bufLen) + + // 0-fill the header section, it might not hit every field + for (let i = 0; i < 512; i++) { + buf[i] = 0 + } + + new Header({ + // XXX split the path + // then the path 
should be PaxHeader + basename, but less than 99, + // prepend with the dirname + path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99), + mode: this.mode || 0o644, + uid: this.uid || null, + gid: this.gid || null, + size: bodyLen, + mtime: this.mtime || null, + type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader', + linkpath: '', + uname: this.uname || '', + gname: this.gname || '', + devmaj: 0, + devmin: 0, + atime: this.atime || null, + ctime: this.ctime || null, + }).encode(buf) + + buf.write(body, 512, bodyLen, 'utf8') + + // null pad after the body + for (let i = bodyLen + 512; i < buf.length; i++) { + buf[i] = 0 + } + + return buf + } + + encodeBody () { + return ( + this.encodeField('path') + + this.encodeField('ctime') + + this.encodeField('atime') + + this.encodeField('dev') + + this.encodeField('ino') + + this.encodeField('nlink') + + this.encodeField('charset') + + this.encodeField('comment') + + this.encodeField('gid') + + this.encodeField('gname') + + this.encodeField('linkpath') + + this.encodeField('mtime') + + this.encodeField('size') + + this.encodeField('uid') + + this.encodeField('uname') + ) + } + + encodeField (field) { + if (this[field] === null || this[field] === undefined) { + return '' + } + const v = this[field] instanceof Date ? this[field].getTime() / 1000 + : this[field] + const s = ' ' + + (field === 'dev' || field === 'ino' || field === 'nlink' + ? 'SCHILY.' : '') + + field + '=' + v + '\n' + const byteLen = Buffer.byteLength(s) + // the digits includes the length of the digits in ascii base-10 + // so if it's 9 characters, then adding 1 for the 9 makes it 10 + // which makes it 11 chars. + let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1 + if (byteLen + digits >= Math.pow(10, digits)) { + digits += 1 + } + const len = digits + byteLen + return len + s + } +} + +Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g) + +const merge = (a, b) => + b ? 
Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a + +const parseKV = string => + string + .replace(/\n$/, '') + .split('\n') + .reduce(parseKVLine, Object.create(null)) + +const parseKVLine = (set, line) => { + const n = parseInt(line, 10) + + // XXX Values with \n in them will fail this. + // Refactor to not be a naive line-by-line parse. + if (n !== Buffer.byteLength(line) + 1) { + return set + } + + line = line.slice((n + ' ').length) + const kv = line.split('=') + const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1') + if (!k) { + return set + } + + const v = kv.join('=') + set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) + ? new Date(v * 1000) + : /^[0-9]+$/.test(v) ? +v + : v + return set +} + +module.exports = Pax diff --git a/node_modules/tar/lib/read-entry.js b/node_modules/tar/lib/read-entry.js new file mode 100644 index 0000000..6186266 --- /dev/null +++ b/node_modules/tar/lib/read-entry.js @@ -0,0 +1,107 @@ +'use strict' +const { Minipass } = require('minipass') +const normPath = require('./normalize-windows-path.js') + +const SLURP = Symbol('slurp') +module.exports = class ReadEntry extends Minipass { + constructor (header, ex, gex) { + super() + // read entries always start life paused. this is to avoid the + // situation where Minipass's auto-ending empty streams results + // in an entry ending before we're ready for it. 
+ this.pause() + this.extended = ex + this.globalExtended = gex + this.header = header + this.startBlockSize = 512 * Math.ceil(header.size / 512) + this.blockRemain = this.startBlockSize + this.remain = header.size + this.type = header.type + this.meta = false + this.ignore = false + switch (this.type) { + case 'File': + case 'OldFile': + case 'Link': + case 'SymbolicLink': + case 'CharacterDevice': + case 'BlockDevice': + case 'Directory': + case 'FIFO': + case 'ContiguousFile': + case 'GNUDumpDir': + break + + case 'NextFileHasLongLinkpath': + case 'NextFileHasLongPath': + case 'OldGnuLongPath': + case 'GlobalExtendedHeader': + case 'ExtendedHeader': + case 'OldExtendedHeader': + this.meta = true + break + + // NOTE: gnutar and bsdtar treat unrecognized types as 'File' + // it may be worth doing the same, but with a warning. + default: + this.ignore = true + } + + this.path = normPath(header.path) + this.mode = header.mode + if (this.mode) { + this.mode = this.mode & 0o7777 + } + this.uid = header.uid + this.gid = header.gid + this.uname = header.uname + this.gname = header.gname + this.size = header.size + this.mtime = header.mtime + this.atime = header.atime + this.ctime = header.ctime + this.linkpath = normPath(header.linkpath) + this.uname = header.uname + this.gname = header.gname + + if (ex) { + this[SLURP](ex) + } + if (gex) { + this[SLURP](gex, true) + } + } + + write (data) { + const writeLen = data.length + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate') + } + + const r = this.remain + const br = this.blockRemain + this.remain = Math.max(0, r - writeLen) + this.blockRemain = Math.max(0, br - writeLen) + if (this.ignore) { + return true + } + + if (r >= writeLen) { + return super.write(data) + } + + // r < writeLen + return super.write(data.slice(0, r)) + } + + [SLURP] (ex, global) { + for (const k in ex) { + // we slurp in everything except for the path attribute in + // a global extended header, because 
that's weird. + if (ex[k] !== null && ex[k] !== undefined && + !(global && k === 'path')) { + this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k] + } + } + } +} diff --git a/node_modules/tar/lib/replace.js b/node_modules/tar/lib/replace.js new file mode 100644 index 0000000..8db6800 --- /dev/null +++ b/node_modules/tar/lib/replace.js @@ -0,0 +1,246 @@ +'use strict' + +// tar -r +const hlo = require('./high-level-opt.js') +const Pack = require('./pack.js') +const fs = require('fs') +const fsm = require('fs-minipass') +const t = require('./list.js') +const path = require('path') + +// starting at the head of the file, read a Header +// If the checksum is invalid, that's our position to start writing +// If it is, jump forward by the specified size (round up to 512) +// and try again. +// Write the new Pack stream starting there. + +const Header = require('./header.js') + +module.exports = (opt_, files, cb) => { + const opt = hlo(opt_) + + if (!opt.file) { + throw new TypeError('file is required') + } + + if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) { + throw new TypeError('cannot append to compressed archives') + } + + if (!files || !Array.isArray(files) || !files.length) { + throw new TypeError('no files or directories specified') + } + + files = Array.from(files) + + return opt.sync ? 
replaceSync(opt, files) + : replace(opt, files, cb) +} + +const replaceSync = (opt, files) => { + const p = new Pack.Sync(opt) + + let threw = true + let fd + let position + + try { + try { + fd = fs.openSync(opt.file, 'r+') + } catch (er) { + if (er.code === 'ENOENT') { + fd = fs.openSync(opt.file, 'w+') + } else { + throw er + } + } + + const st = fs.fstatSync(fd) + const headBuf = Buffer.alloc(512) + + POSITION: for (position = 0; position < st.size; position += 512) { + for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) { + bytes = fs.readSync( + fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos + ) + + if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) { + throw new Error('cannot append to compressed archives') + } + + if (!bytes) { + break POSITION + } + } + + const h = new Header(headBuf) + if (!h.cksumValid) { + break + } + const entryBlockSize = 512 * Math.ceil(h.size / 512) + if (position + entryBlockSize + 512 > st.size) { + break + } + // the 512 for the header we just parsed will be added as well + // also jump ahead all the blocks for the body + position += entryBlockSize + if (opt.mtimeCache) { + opt.mtimeCache.set(h.path, h.mtime) + } + } + threw = false + + streamSync(opt, p, position, fd, files) + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } + } +} + +const streamSync = (opt, p, position, fd, files) => { + const stream = new fsm.WriteStreamSync(opt.file, { + fd: fd, + start: position, + }) + p.pipe(stream) + addFilesSync(p, files) +} + +const replace = (opt, files, cb) => { + files = Array.from(files) + const p = new Pack(opt) + + const getPos = (fd, size, cb_) => { + const cb = (er, pos) => { + if (er) { + fs.close(fd, _ => cb_(er)) + } else { + cb_(null, pos) + } + } + + let position = 0 + if (size === 0) { + return cb(null, 0) + } + + let bufPos = 0 + const headBuf = Buffer.alloc(512) + const onread = (er, bytes) => { + if (er) { + return cb(er) + } + bufPos += bytes + if 
(bufPos < 512 && bytes) { + return fs.read( + fd, headBuf, bufPos, headBuf.length - bufPos, + position + bufPos, onread + ) + } + + if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) { + return cb(new Error('cannot append to compressed archives')) + } + + // truncated header + if (bufPos < 512) { + return cb(null, position) + } + + const h = new Header(headBuf) + if (!h.cksumValid) { + return cb(null, position) + } + + const entryBlockSize = 512 * Math.ceil(h.size / 512) + if (position + entryBlockSize + 512 > size) { + return cb(null, position) + } + + position += entryBlockSize + 512 + if (position >= size) { + return cb(null, position) + } + + if (opt.mtimeCache) { + opt.mtimeCache.set(h.path, h.mtime) + } + bufPos = 0 + fs.read(fd, headBuf, 0, 512, position, onread) + } + fs.read(fd, headBuf, 0, 512, position, onread) + } + + const promise = new Promise((resolve, reject) => { + p.on('error', reject) + let flag = 'r+' + const onopen = (er, fd) => { + if (er && er.code === 'ENOENT' && flag === 'r+') { + flag = 'w+' + return fs.open(opt.file, flag, onopen) + } + + if (er) { + return reject(er) + } + + fs.fstat(fd, (er, st) => { + if (er) { + return fs.close(fd, () => reject(er)) + } + + getPos(fd, st.size, (er, position) => { + if (er) { + return reject(er) + } + const stream = new fsm.WriteStream(opt.file, { + fd: fd, + start: position, + }) + p.pipe(stream) + stream.on('error', reject) + stream.on('close', resolve) + addFilesAsync(p, files) + }) + }) + } + fs.open(opt.file, flag, onopen) + }) + + return cb ? 
promise.then(cb, cb) : promise +} + +const addFilesSync = (p, files) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + t({ + file: path.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onentry: entry => p.add(entry), + }) + } else { + p.add(file) + } + }) + p.end() +} + +const addFilesAsync = (p, files) => { + while (files.length) { + const file = files.shift() + if (file.charAt(0) === '@') { + return t({ + file: path.resolve(p.cwd, file.slice(1)), + noResume: true, + onentry: entry => p.add(entry), + }).then(_ => addFilesAsync(p, files)) + } else { + p.add(file) + } + } + p.end() +} diff --git a/node_modules/tar/lib/strip-absolute-path.js b/node_modules/tar/lib/strip-absolute-path.js new file mode 100644 index 0000000..185e2de --- /dev/null +++ b/node_modules/tar/lib/strip-absolute-path.js @@ -0,0 +1,24 @@ +// unix absolute paths are also absolute on win32, so we use this for both +const { isAbsolute, parse } = require('path').win32 + +// returns [root, stripped] +// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in +// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip / +// explicitly if it's the first character. +// drive-specific relative paths on Windows get their root stripped off even +// though they are not absolute, so `c:../foo` becomes ['c:', '../foo'] +module.exports = path => { + let r = '' + + let parsed = parse(path) + while (isAbsolute(path) || parsed.root) { + // windows will think that //x/y/z has a "root" of //x/y/ + // but strip the //?/C:/ off of //?/C:/path + const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? 
'/' + : parsed.root + path = path.slice(root.length) + r += root + parsed = parse(path) + } + return [r, path] +} diff --git a/node_modules/tar/lib/strip-trailing-slashes.js b/node_modules/tar/lib/strip-trailing-slashes.js new file mode 100644 index 0000000..3e3ecec --- /dev/null +++ b/node_modules/tar/lib/strip-trailing-slashes.js @@ -0,0 +1,13 @@ +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. +module.exports = str => { + let i = str.length - 1 + let slashesStart = -1 + while (i > -1 && str.charAt(i) === '/') { + slashesStart = i + i-- + } + return slashesStart === -1 ? str : str.slice(0, slashesStart) +} diff --git a/node_modules/tar/lib/types.js b/node_modules/tar/lib/types.js new file mode 100644 index 0000000..7bfc254 --- /dev/null +++ b/node_modules/tar/lib/types.js @@ -0,0 +1,44 @@ +'use strict' +// map types from key to human-friendly name +exports.name = new Map([ + ['0', 'File'], + // same as File + ['', 'OldFile'], + ['1', 'Link'], + ['2', 'SymbolicLink'], + // Devices and FIFOs aren't fully supported + // they are parsed, but skipped when unpacking + ['3', 'CharacterDevice'], + ['4', 'BlockDevice'], + ['5', 'Directory'], + ['6', 'FIFO'], + // same as File + ['7', 'ContiguousFile'], + // pax headers + ['g', 'GlobalExtendedHeader'], + ['x', 'ExtendedHeader'], + // vendor-specific stuff + // skip + ['A', 'SolarisACL'], + // like 5, but with data, which should be skipped + ['D', 'GNUDumpDir'], + // metadata only, skip + ['I', 'Inode'], + // data = link path of next file + ['K', 'NextFileHasLongLinkpath'], + // data = path of next file + ['L', 'NextFileHasLongPath'], + // skip + ['M', 'ContinuationFile'], + // like L + ['N', 'OldGnuLongPath'], + // skip + ['S', 'SparseFile'], + // skip + ['V', 'TapeVolumeHeader'], + // like x + ['X', 'OldExtendedHeader'], +]) + +// map the other direction +exports.code = new 
Map(Array.from(exports.name).map(kv => [kv[1], kv[0]])) diff --git a/node_modules/tar/lib/unpack.js b/node_modules/tar/lib/unpack.js new file mode 100644 index 0000000..03172e2 --- /dev/null +++ b/node_modules/tar/lib/unpack.js @@ -0,0 +1,923 @@ +'use strict' + +// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet. +// but the path reservations are required to avoid race conditions where +// parallelized unpack ops may mess with one another, due to dependencies +// (like a Link depending on its target) or destructive operations (like +// clobbering an fs object to create one of a different type.) + +const assert = require('assert') +const Parser = require('./parse.js') +const fs = require('fs') +const fsm = require('fs-minipass') +const path = require('path') +const mkdir = require('./mkdir.js') +const wc = require('./winchars.js') +const pathReservations = require('./path-reservations.js') +const stripAbsolutePath = require('./strip-absolute-path.js') +const normPath = require('./normalize-windows-path.js') +const stripSlash = require('./strip-trailing-slashes.js') +const normalize = require('./normalize-unicode.js') + +const ONENTRY = Symbol('onEntry') +const CHECKFS = Symbol('checkFs') +const CHECKFS2 = Symbol('checkFs2') +const PRUNECACHE = Symbol('pruneCache') +const ISREUSABLE = Symbol('isReusable') +const MAKEFS = Symbol('makeFs') +const FILE = Symbol('file') +const DIRECTORY = Symbol('directory') +const LINK = Symbol('link') +const SYMLINK = Symbol('symlink') +const HARDLINK = Symbol('hardlink') +const UNSUPPORTED = Symbol('unsupported') +const CHECKPATH = Symbol('checkPath') +const MKDIR = Symbol('mkdir') +const ONERROR = Symbol('onError') +const PENDING = Symbol('pending') +const PEND = Symbol('pend') +const UNPEND = Symbol('unpend') +const ENDED = Symbol('ended') +const MAYBECLOSE = Symbol('maybeClose') +const SKIP = Symbol('skip') +const DOCHOWN = Symbol('doChown') +const UID = Symbol('uid') +const GID = Symbol('gid') +const 
CHECKED_CWD = Symbol('checkedCwd') +const crypto = require('crypto') +const getFlag = require('./get-write-flag.js') +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform +const isWindows = platform === 'win32' +const DEFAULT_MAX_DEPTH = 1024 + +// Unlinks on Windows are not atomic. +// +// This means that if you have a file entry, followed by another +// file entry with an identical name, and you cannot re-use the file +// (because it's a hardlink, or because unlink:true is set, or it's +// Windows, which does not have useful nlink values), then the unlink +// will be committed to the disk AFTER the new file has been written +// over the old one, deleting the new file. +// +// To work around this, on Windows systems, we rename the file and then +// delete the renamed file. It's a sloppy kludge, but frankly, I do not +// know of a better way to do this, given windows' non-atomic unlink +// semantics. +// +// See: https://github.com/npm/node-tar/issues/183 +/* istanbul ignore next */ +const unlinkFile = (path, cb) => { + if (!isWindows) { + return fs.unlink(path, cb) + } + + const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') + fs.rename(path, name, er => { + if (er) { + return cb(er) + } + fs.unlink(name, cb) + }) +} + +/* istanbul ignore next */ +const unlinkFileSync = path => { + if (!isWindows) { + return fs.unlinkSync(path) + } + + const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') + fs.renameSync(path, name) + fs.unlinkSync(name) +} + +// this.gid, entry.gid, this.processUid +const uint32 = (a, b, c) => + a === a >>> 0 ? a + : b === b >>> 0 ? b + : c + +// clear the cache if it's a case-insensitive unicode-squashing match. +// we can't know if the current file system is case-sensitive or supports +// unicode fully, so we check for similarity on the maximally compatible +// representation. 
Err on the side of pruning, since all it's doing is +// preventing lstats, and it's not the end of the world if we get a false +// positive. +// Note that on windows, we always drop the entire cache whenever a +// symbolic link is encountered, because 8.3 filenames are impossible +// to reason about, and collisions are hazards rather than just failures. +const cacheKeyNormalize = path => stripSlash(normPath(normalize(path))) + .toLowerCase() + +const pruneCache = (cache, abs) => { + abs = cacheKeyNormalize(abs) + for (const path of cache.keys()) { + const pnorm = cacheKeyNormalize(path) + if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) { + cache.delete(path) + } + } +} + +const dropCache = cache => { + for (const key of cache.keys()) { + cache.delete(key) + } +} + +class Unpack extends Parser { + constructor (opt) { + if (!opt) { + opt = {} + } + + opt.ondone = _ => { + this[ENDED] = true + this[MAYBECLOSE]() + } + + super(opt) + + this[CHECKED_CWD] = false + + this.reservations = pathReservations() + + this.transform = typeof opt.transform === 'function' ? 
opt.transform : null + + this.writable = true + this.readable = false + + this[PENDING] = 0 + this[ENDED] = false + + this.dirCache = opt.dirCache || new Map() + + if (typeof opt.uid === 'number' || typeof opt.gid === 'number') { + // need both or neither + if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number') { + throw new TypeError('cannot set owner without number uid and gid') + } + if (opt.preserveOwner) { + throw new TypeError( + 'cannot preserve owner in archive and also set owner explicitly') + } + this.uid = opt.uid + this.gid = opt.gid + this.setOwner = true + } else { + this.uid = null + this.gid = null + this.setOwner = false + } + + // default true for root + if (opt.preserveOwner === undefined && typeof opt.uid !== 'number') { + this.preserveOwner = process.getuid && process.getuid() === 0 + } else { + this.preserveOwner = !!opt.preserveOwner + } + + this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ? + process.getuid() : null + this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ? + process.getgid() : null + + // prevent excessively deep nesting of subfolders + // set to `Infinity` to remove this restriction + this.maxDepth = typeof opt.maxDepth === 'number' + ? opt.maxDepth + : DEFAULT_MAX_DEPTH + + // mostly just for testing, but useful in some cases. + // Forcibly trigger a chown on every entry, no matter what + this.forceChown = opt.forceChown === true + + // turn > this[ONENTRY](entry)) + } + + // a bad or damaged archive is a warning for Parser, but an error + // when extracting. Mark those errors as unrecoverable, because + // the Unpack contract cannot be met. 
+ warn (code, msg, data = {}) { + if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') { + data.recoverable = false + } + return super.warn(code, msg, data) + } + + [MAYBECLOSE] () { + if (this[ENDED] && this[PENDING] === 0) { + this.emit('prefinish') + this.emit('finish') + this.emit('end') + } + } + + [CHECKPATH] (entry) { + const p = normPath(entry.path) + const parts = p.split('/') + + if (this.strip) { + if (parts.length < this.strip) { + return false + } + if (entry.type === 'Link') { + const linkparts = normPath(entry.linkpath).split('/') + if (linkparts.length >= this.strip) { + entry.linkpath = linkparts.slice(this.strip).join('/') + } else { + return false + } + } + parts.splice(0, this.strip) + entry.path = parts.join('/') + } + + if (isFinite(this.maxDepth) && parts.length > this.maxDepth) { + this.warn('TAR_ENTRY_ERROR', 'path excessively deep', { + entry, + path: p, + depth: parts.length, + maxDepth: this.maxDepth, + }) + return false + } + + if (!this.preservePaths) { + if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) { + this.warn('TAR_ENTRY_ERROR', `path contains '..'`, { + entry, + path: p, + }) + return false + } + + // strip off the root + const [root, stripped] = stripAbsolutePath(p) + if (root) { + entry.path = stripped + this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, { + entry, + path: p, + }) + } + } + + if (path.isAbsolute(entry.path)) { + entry.absolute = normPath(path.resolve(entry.path)) + } else { + entry.absolute = normPath(path.resolve(this.cwd, entry.path)) + } + + // if we somehow ended up with a path that escapes the cwd, and we are + // not in preservePaths mode, then something is fishy! This should have + // been prevented above, so ignore this for coverage. 
+ /* istanbul ignore if - defense in depth */ + if (!this.preservePaths && + entry.absolute.indexOf(this.cwd + '/') !== 0 && + entry.absolute !== this.cwd) { + this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', { + entry, + path: normPath(entry.path), + resolvedPath: entry.absolute, + cwd: this.cwd, + }) + return false + } + + // an archive can set properties on the extraction directory, but it + // may not replace the cwd with a different kind of thing entirely. + if (entry.absolute === this.cwd && + entry.type !== 'Directory' && + entry.type !== 'GNUDumpDir') { + return false + } + + // only encode : chars that aren't drive letter indicators + if (this.win32) { + const { root: aRoot } = path.win32.parse(entry.absolute) + entry.absolute = aRoot + wc.encode(entry.absolute.slice(aRoot.length)) + const { root: pRoot } = path.win32.parse(entry.path) + entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length)) + } + + return true + } + + [ONENTRY] (entry) { + if (!this[CHECKPATH](entry)) { + return entry.resume() + } + + assert.equal(typeof entry.absolute, 'string') + + switch (entry.type) { + case 'Directory': + case 'GNUDumpDir': + if (entry.mode) { + entry.mode = entry.mode | 0o700 + } + + // eslint-disable-next-line no-fallthrough + case 'File': + case 'OldFile': + case 'ContiguousFile': + case 'Link': + case 'SymbolicLink': + return this[CHECKFS](entry) + + case 'CharacterDevice': + case 'BlockDevice': + case 'FIFO': + default: + return this[UNSUPPORTED](entry) + } + } + + [ONERROR] (er, entry) { + // Cwd has to exist, or else nothing works. That's serious. + // Other errors are warnings, which raise the error in strict + // mode, but otherwise continue on. 
+ if (er.name === 'CwdError') { + this.emit('error', er) + } else { + this.warn('TAR_ENTRY_ERROR', er, { entry }) + this[UNPEND]() + entry.resume() + } + } + + [MKDIR] (dir, mode, cb) { + mkdir(normPath(dir), { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + noChmod: this.noChmod, + }, cb) + } + + [DOCHOWN] (entry) { + // in preserve owner mode, chown if the entry doesn't match process + // in set owner mode, chown if setting doesn't match process + return this.forceChown || + this.preserveOwner && + (typeof entry.uid === 'number' && entry.uid !== this.processUid || + typeof entry.gid === 'number' && entry.gid !== this.processGid) + || + (typeof this.uid === 'number' && this.uid !== this.processUid || + typeof this.gid === 'number' && this.gid !== this.processGid) + } + + [UID] (entry) { + return uint32(this.uid, entry.uid, this.processUid) + } + + [GID] (entry) { + return uint32(this.gid, entry.gid, this.processGid) + } + + [FILE] (entry, fullyDone) { + const mode = entry.mode & 0o7777 || this.fmode + const stream = new fsm.WriteStream(entry.absolute, { + flags: getFlag(entry.size), + mode: mode, + autoClose: false, + }) + stream.on('error', er => { + if (stream.fd) { + fs.close(stream.fd, () => {}) + } + + // flush all the data out so that we aren't left hanging + // if the error wasn't actually fatal. otherwise the parse + // is blocked, and we never proceed. 
+ stream.write = () => true + this[ONERROR](er, entry) + fullyDone() + }) + + let actions = 1 + const done = er => { + if (er) { + /* istanbul ignore else - we should always have a fd by now */ + if (stream.fd) { + fs.close(stream.fd, () => {}) + } + + this[ONERROR](er, entry) + fullyDone() + return + } + + if (--actions === 0) { + fs.close(stream.fd, er => { + if (er) { + this[ONERROR](er, entry) + } else { + this[UNPEND]() + } + fullyDone() + }) + } + } + + stream.on('finish', _ => { + // if futimes fails, try utimes + // if utimes fails, fail with the original error + // same for fchown/chown + const abs = entry.absolute + const fd = stream.fd + + if (entry.mtime && !this.noMtime) { + actions++ + const atime = entry.atime || new Date() + const mtime = entry.mtime + fs.futimes(fd, atime, mtime, er => + er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er)) + : done()) + } + + if (this[DOCHOWN](entry)) { + actions++ + const uid = this[UID](entry) + const gid = this[GID](entry) + fs.fchown(fd, uid, gid, er => + er ? fs.chown(abs, uid, gid, er2 => done(er2 && er)) + : done()) + } + + done() + }) + + const tx = this.transform ? 
this.transform(entry) || entry : entry + if (tx !== entry) { + tx.on('error', er => { + this[ONERROR](er, entry) + fullyDone() + }) + entry.pipe(tx) + } + tx.pipe(stream) + } + + [DIRECTORY] (entry, fullyDone) { + const mode = entry.mode & 0o7777 || this.dmode + this[MKDIR](entry.absolute, mode, er => { + if (er) { + this[ONERROR](er, entry) + fullyDone() + return + } + + let actions = 1 + const done = _ => { + if (--actions === 0) { + fullyDone() + this[UNPEND]() + entry.resume() + } + } + + if (entry.mtime && !this.noMtime) { + actions++ + fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done) + } + + if (this[DOCHOWN](entry)) { + actions++ + fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done) + } + + done() + }) + } + + [UNSUPPORTED] (entry) { + entry.unsupported = true + this.warn('TAR_ENTRY_UNSUPPORTED', + `unsupported entry type: ${entry.type}`, { entry }) + entry.resume() + } + + [SYMLINK] (entry, done) { + this[LINK](entry, entry.linkpath, 'symlink', done) + } + + [HARDLINK] (entry, done) { + const linkpath = normPath(path.resolve(this.cwd, entry.linkpath)) + this[LINK](entry, linkpath, 'link', done) + } + + [PEND] () { + this[PENDING]++ + } + + [UNPEND] () { + this[PENDING]-- + this[MAYBECLOSE]() + } + + [SKIP] (entry) { + this[UNPEND]() + entry.resume() + } + + // Check if we can reuse an existing filesystem entry safely and + // overwrite it, rather than unlinking and recreating + // Windows doesn't report a useful nlink, so we just never reuse entries + [ISREUSABLE] (entry, st) { + return entry.type === 'File' && + !this.unlink && + st.isFile() && + st.nlink <= 1 && + !isWindows + } + + // check if a thing is there, and if so, try to clobber it + [CHECKFS] (entry) { + this[PEND]() + const paths = [entry.path] + if (entry.linkpath) { + paths.push(entry.linkpath) + } + this.reservations.reserve(paths, done => this[CHECKFS2](entry, done)) + } + + [PRUNECACHE] (entry) { + // if we are not creating a directory, and the path is 
in the dirCache, + // then that means we are about to delete the directory we created + // previously, and it is no longer going to be a directory, and neither + // is any of its children. + // If a symbolic link is encountered, all bets are off. There is no + // reasonable way to sanitize the cache in such a way we will be able to + // avoid having filesystem collisions. If this happens with a non-symlink + // entry, it'll just fail to unpack, but a symlink to a directory, using an + // 8.3 shortname or certain unicode attacks, can evade detection and lead + // to arbitrary writes to anywhere on the system. + if (entry.type === 'SymbolicLink') { + dropCache(this.dirCache) + } else if (entry.type !== 'Directory') { + pruneCache(this.dirCache, entry.absolute) + } + } + + [CHECKFS2] (entry, fullyDone) { + this[PRUNECACHE](entry) + + const done = er => { + this[PRUNECACHE](entry) + fullyDone(er) + } + + const checkCwd = () => { + this[MKDIR](this.cwd, this.dmode, er => { + if (er) { + this[ONERROR](er, entry) + done() + return + } + this[CHECKED_CWD] = true + start() + }) + } + + const start = () => { + if (entry.absolute !== this.cwd) { + const parent = normPath(path.dirname(entry.absolute)) + if (parent !== this.cwd) { + return this[MKDIR](parent, this.dmode, er => { + if (er) { + this[ONERROR](er, entry) + done() + return + } + afterMakeParent() + }) + } + } + afterMakeParent() + } + + const afterMakeParent = () => { + fs.lstat(entry.absolute, (lstatEr, st) => { + if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { + this[SKIP](entry) + done() + return + } + if (lstatEr || this[ISREUSABLE](entry, st)) { + return this[MAKEFS](null, entry, done) + } + + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = !this.noChmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode + const afterChmod = er => this[MAKEFS](er, entry, done) + if (!needChmod) { + return afterChmod() + } + return fs.chmod(entry.absolute, entry.mode, 
afterChmod) + } + // Not a dir entry, have to remove it. + // NB: the only way to end up with an entry that is the cwd + // itself, in such a way that == does not detect, is a + // tricky windows absolute path with UNC or 8.3 parts (and + // preservePaths:true, or else it will have been stripped). + // In that case, the user has opted out of path protections + // explicitly, so if they blow away the cwd, c'est la vie. + if (entry.absolute !== this.cwd) { + return fs.rmdir(entry.absolute, er => + this[MAKEFS](er, entry, done)) + } + } + + // not a dir, and not reusable + // don't remove if the cwd, we want that error + if (entry.absolute === this.cwd) { + return this[MAKEFS](null, entry, done) + } + + unlinkFile(entry.absolute, er => + this[MAKEFS](er, entry, done)) + }) + } + + if (this[CHECKED_CWD]) { + start() + } else { + checkCwd() + } + } + + [MAKEFS] (er, entry, done) { + if (er) { + this[ONERROR](er, entry) + done() + return + } + + switch (entry.type) { + case 'File': + case 'OldFile': + case 'ContiguousFile': + return this[FILE](entry, done) + + case 'Link': + return this[HARDLINK](entry, done) + + case 'SymbolicLink': + return this[SYMLINK](entry, done) + + case 'Directory': + case 'GNUDumpDir': + return this[DIRECTORY](entry, done) + } + } + + [LINK] (entry, linkpath, link, done) { + // XXX: get the type ('symlink' or 'junction') for windows + fs[link](linkpath, entry.absolute, er => { + if (er) { + this[ONERROR](er, entry) + } else { + this[UNPEND]() + entry.resume() + } + done() + }) + } +} + +const callSync = fn => { + try { + return [null, fn()] + } catch (er) { + return [er, null] + } +} +class UnpackSync extends Unpack { + [MAKEFS] (er, entry) { + return super[MAKEFS](er, entry, () => {}) + } + + [CHECKFS] (entry) { + this[PRUNECACHE](entry) + + if (!this[CHECKED_CWD]) { + const er = this[MKDIR](this.cwd, this.dmode) + if (er) { + return this[ONERROR](er, entry) + } + this[CHECKED_CWD] = true + } + + // don't bother to make the parent if the 
current entry is the cwd, + // we've already checked it. + if (entry.absolute !== this.cwd) { + const parent = normPath(path.dirname(entry.absolute)) + if (parent !== this.cwd) { + const mkParent = this[MKDIR](parent, this.dmode) + if (mkParent) { + return this[ONERROR](mkParent, entry) + } + } + } + + const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute)) + if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { + return this[SKIP](entry) + } + + if (lstatEr || this[ISREUSABLE](entry, st)) { + return this[MAKEFS](null, entry) + } + + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = !this.noChmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode + const [er] = needChmod ? callSync(() => { + fs.chmodSync(entry.absolute, entry.mode) + }) : [] + return this[MAKEFS](er, entry) + } + // not a dir entry, have to remove it + const [er] = callSync(() => fs.rmdirSync(entry.absolute)) + this[MAKEFS](er, entry) + } + + // not a dir, and not reusable. + // don't remove if it's the cwd, since we want that error. + const [er] = entry.absolute === this.cwd ? [] + : callSync(() => unlinkFileSync(entry.absolute)) + this[MAKEFS](er, entry) + } + + [FILE] (entry, done) { + const mode = entry.mode & 0o7777 || this.fmode + + const oner = er => { + let closeError + try { + fs.closeSync(fd) + } catch (e) { + closeError = e + } + if (er || closeError) { + this[ONERROR](er || closeError, entry) + } + done() + } + + let fd + try { + fd = fs.openSync(entry.absolute, getFlag(entry.size), mode) + } catch (er) { + return oner(er) + } + const tx = this.transform ? 
this.transform(entry) || entry : entry + if (tx !== entry) { + tx.on('error', er => this[ONERROR](er, entry)) + entry.pipe(tx) + } + + tx.on('data', chunk => { + try { + fs.writeSync(fd, chunk, 0, chunk.length) + } catch (er) { + oner(er) + } + }) + + tx.on('end', _ => { + let er = null + // try both, falling futimes back to utimes + // if either fails, handle the first error + if (entry.mtime && !this.noMtime) { + const atime = entry.atime || new Date() + const mtime = entry.mtime + try { + fs.futimesSync(fd, atime, mtime) + } catch (futimeser) { + try { + fs.utimesSync(entry.absolute, atime, mtime) + } catch (utimeser) { + er = futimeser + } + } + } + + if (this[DOCHOWN](entry)) { + const uid = this[UID](entry) + const gid = this[GID](entry) + + try { + fs.fchownSync(fd, uid, gid) + } catch (fchowner) { + try { + fs.chownSync(entry.absolute, uid, gid) + } catch (chowner) { + er = er || fchowner + } + } + } + + oner(er) + }) + } + + [DIRECTORY] (entry, done) { + const mode = entry.mode & 0o7777 || this.dmode + const er = this[MKDIR](entry.absolute, mode) + if (er) { + this[ONERROR](er, entry) + done() + return + } + if (entry.mtime && !this.noMtime) { + try { + fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime) + } catch (er) {} + } + if (this[DOCHOWN](entry)) { + try { + fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry)) + } catch (er) {} + } + done() + entry.resume() + } + + [MKDIR] (dir, mode) { + try { + return mkdir.sync(normPath(dir), { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + }) + } catch (er) { + return er + } + } + + [LINK] (entry, linkpath, link, done) { + try { + fs[link + 'Sync'](linkpath, entry.absolute) + done() + entry.resume() + } catch (er) { + return this[ONERROR](er, entry) + } + } +} + +Unpack.Sync = UnpackSync 
+module.exports = Unpack diff --git a/node_modules/tar/lib/update.js b/node_modules/tar/lib/update.js new file mode 100644 index 0000000..4d32854 --- /dev/null +++ b/node_modules/tar/lib/update.js @@ -0,0 +1,40 @@ +'use strict' + +// tar -u + +const hlo = require('./high-level-opt.js') +const r = require('./replace.js') +// just call tar.r with the filter and mtimeCache + +module.exports = (opt_, files, cb) => { + const opt = hlo(opt_) + + if (!opt.file) { + throw new TypeError('file is required') + } + + if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) { + throw new TypeError('cannot append to compressed archives') + } + + if (!files || !Array.isArray(files) || !files.length) { + throw new TypeError('no files or directories specified') + } + + files = Array.from(files) + + mtimeFilter(opt) + return r(opt, files, cb) +} + +const mtimeFilter = opt => { + const filter = opt.filter + + if (!opt.mtimeCache) { + opt.mtimeCache = new Map() + } + + opt.filter = filter ? 
(path, stat) => + filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime) + : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime) +} diff --git a/node_modules/tar/lib/warn-mixin.js b/node_modules/tar/lib/warn-mixin.js new file mode 100644 index 0000000..a940639 --- /dev/null +++ b/node_modules/tar/lib/warn-mixin.js @@ -0,0 +1,24 @@ +'use strict' +module.exports = Base => class extends Base { + warn (code, message, data = {}) { + if (this.file) { + data.file = this.file + } + if (this.cwd) { + data.cwd = this.cwd + } + data.code = message instanceof Error && message.code || code + data.tarCode = code + if (!this.strict && data.recoverable !== false) { + if (message instanceof Error) { + data = Object.assign(message, data) + message = message.message + } + this.emit('warn', data.tarCode, message, data) + } else if (message instanceof Error) { + this.emit('error', Object.assign(message, data)) + } else { + this.emit('error', Object.assign(new Error(`${code}: ${message}`), data)) + } + } +} diff --git a/node_modules/tar/lib/winchars.js b/node_modules/tar/lib/winchars.js new file mode 100644 index 0000000..ebcab4a --- /dev/null +++ b/node_modules/tar/lib/winchars.js @@ -0,0 +1,23 @@ +'use strict' + +// When writing files on Windows, translate the characters to their +// 0xf000 higher-encoded versions. 
+ +const raw = [ + '|', + '<', + '>', + '?', + ':', +] + +const win = raw.map(char => + String.fromCharCode(0xf000 + char.charCodeAt(0))) + +const toWin = new Map(raw.map((char, i) => [char, win[i]])) +const toRaw = new Map(win.map((char, i) => [char, raw[i]])) + +module.exports = { + encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s), + decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s), +} diff --git a/node_modules/tar/lib/write-entry.js b/node_modules/tar/lib/write-entry.js new file mode 100644 index 0000000..7d2f3eb --- /dev/null +++ b/node_modules/tar/lib/write-entry.js @@ -0,0 +1,546 @@ +'use strict' +const { Minipass } = require('minipass') +const Pax = require('./pax.js') +const Header = require('./header.js') +const fs = require('fs') +const path = require('path') +const normPath = require('./normalize-windows-path.js') +const stripSlash = require('./strip-trailing-slashes.js') + +const prefixPath = (path, prefix) => { + if (!prefix) { + return normPath(path) + } + path = normPath(path).replace(/^\.(\/|$)/, '') + return stripSlash(prefix) + '/' + path +} + +const maxReadSize = 16 * 1024 * 1024 +const PROCESS = Symbol('process') +const FILE = Symbol('file') +const DIRECTORY = Symbol('directory') +const SYMLINK = Symbol('symlink') +const HARDLINK = Symbol('hardlink') +const HEADER = Symbol('header') +const READ = Symbol('read') +const LSTAT = Symbol('lstat') +const ONLSTAT = Symbol('onlstat') +const ONREAD = Symbol('onread') +const ONREADLINK = Symbol('onreadlink') +const OPENFILE = Symbol('openfile') +const ONOPENFILE = Symbol('onopenfile') +const CLOSE = Symbol('close') +const MODE = Symbol('mode') +const AWAITDRAIN = Symbol('awaitDrain') +const ONDRAIN = Symbol('ondrain') +const PREFIX = Symbol('prefix') +const HAD_ERROR = Symbol('hadError') +const warner = require('./warn-mixin.js') +const winchars = require('./winchars.js') +const stripAbsolutePath = require('./strip-absolute-path.js') + +const modeFix = 
require('./mode-fix.js') + +const WriteEntry = warner(class WriteEntry extends Minipass { + constructor (p, opt) { + opt = opt || {} + super(opt) + if (typeof p !== 'string') { + throw new TypeError('path is required') + } + this.path = normPath(p) + // suppress atime, ctime, uid, gid, uname, gname + this.portable = !!opt.portable + // until node has builtin pwnam functions, this'll have to do + this.myuid = process.getuid && process.getuid() || 0 + this.myuser = process.env.USER || '' + this.maxReadSize = opt.maxReadSize || maxReadSize + this.linkCache = opt.linkCache || new Map() + this.statCache = opt.statCache || new Map() + this.preservePaths = !!opt.preservePaths + this.cwd = normPath(opt.cwd || process.cwd()) + this.strict = !!opt.strict + this.noPax = !!opt.noPax + this.noMtime = !!opt.noMtime + this.mtime = opt.mtime || null + this.prefix = opt.prefix ? normPath(opt.prefix) : null + + this.fd = null + this.blockLen = null + this.blockRemain = null + this.buf = null + this.offset = null + this.length = null + this.pos = null + this.remain = null + + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn) + } + + let pathWarn = false + if (!this.preservePaths) { + const [root, stripped] = stripAbsolutePath(this.path) + if (root) { + this.path = stripped + pathWarn = root + } + } + + this.win32 = !!opt.win32 || process.platform === 'win32' + if (this.win32) { + // force the \ to / normalization, since we might not *actually* + // be on windows, but want \ to be considered a path separator. 
+ this.path = winchars.decode(this.path.replace(/\\/g, '/')) + p = p.replace(/\\/g, '/') + } + + this.absolute = normPath(opt.absolute || path.resolve(this.cwd, p)) + + if (this.path === '') { + this.path = './' + } + + if (pathWarn) { + this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { + entry: this, + path: pathWarn + this.path, + }) + } + + if (this.statCache.has(this.absolute)) { + this[ONLSTAT](this.statCache.get(this.absolute)) + } else { + this[LSTAT]() + } + } + + emit (ev, ...data) { + if (ev === 'error') { + this[HAD_ERROR] = true + } + return super.emit(ev, ...data) + } + + [LSTAT] () { + fs.lstat(this.absolute, (er, stat) => { + if (er) { + return this.emit('error', er) + } + this[ONLSTAT](stat) + }) + } + + [ONLSTAT] (stat) { + this.statCache.set(this.absolute, stat) + this.stat = stat + if (!stat.isFile()) { + stat.size = 0 + } + this.type = getType(stat) + this.emit('stat', stat) + this[PROCESS]() + } + + [PROCESS] () { + switch (this.type) { + case 'File': return this[FILE]() + case 'Directory': return this[DIRECTORY]() + case 'SymbolicLink': return this[SYMLINK]() + // unsupported types are ignored. + default: return this.end() + } + } + + [MODE] (mode) { + return modeFix(mode, this.type === 'Directory', this.portable) + } + + [PREFIX] (path) { + return prefixPath(path, this.prefix) + } + + [HEADER] () { + if (this.type === 'Directory' && this.portable) { + this.noMtime = true + } + + this.header = new Header({ + path: this[PREFIX](this.path), + // only apply the prefix to hard links. + linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this[MODE](this.stat.mode), + uid: this.portable ? null : this.stat.uid, + gid: this.portable ? null : this.stat.gid, + size: this.stat.size, + mtime: this.noMtime ? 
null : this.mtime || this.stat.mtime, + type: this.type, + uname: this.portable ? null : + this.stat.uid === this.myuid ? this.myuser : '', + atime: this.portable ? null : this.stat.atime, + ctime: this.portable ? null : this.stat.ctime, + }) + + if (this.header.encode() && !this.noPax) { + super.write(new Pax({ + atime: this.portable ? null : this.header.atime, + ctime: this.portable ? null : this.header.ctime, + gid: this.portable ? null : this.header.gid, + mtime: this.noMtime ? null : this.mtime || this.header.mtime, + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) + : this.linkpath, + size: this.header.size, + uid: this.portable ? null : this.header.uid, + uname: this.portable ? null : this.header.uname, + dev: this.portable ? null : this.stat.dev, + ino: this.portable ? null : this.stat.ino, + nlink: this.portable ? null : this.stat.nlink, + }).encode()) + } + super.write(this.header.block) + } + + [DIRECTORY] () { + if (this.path.slice(-1) !== '/') { + this.path += '/' + } + this.stat.size = 0 + this[HEADER]() + this.end() + } + + [SYMLINK] () { + fs.readlink(this.absolute, (er, linkpath) => { + if (er) { + return this.emit('error', er) + } + this[ONREADLINK](linkpath) + }) + } + + [ONREADLINK] (linkpath) { + this.linkpath = normPath(linkpath) + this[HEADER]() + this.end() + } + + [HARDLINK] (linkpath) { + this.type = 'Link' + this.linkpath = normPath(path.relative(this.cwd, linkpath)) + this.stat.size = 0 + this[HEADER]() + this.end() + } + + [FILE] () { + if (this.stat.nlink > 1) { + const linkKey = this.stat.dev + ':' + this.stat.ino + if (this.linkCache.has(linkKey)) { + const linkpath = this.linkCache.get(linkKey) + if (linkpath.indexOf(this.cwd) === 0) { + return this[HARDLINK](linkpath) + } + } + this.linkCache.set(linkKey, this.absolute) + } + + this[HEADER]() + if (this.stat.size === 0) { + return this.end() + } + + this[OPENFILE]() + } + + [OPENFILE] () { + fs.open(this.absolute, 'r', (er, fd) => { + 
if (er) { + return this.emit('error', er) + } + this[ONOPENFILE](fd) + }) + } + + [ONOPENFILE] (fd) { + this.fd = fd + if (this[HAD_ERROR]) { + return this[CLOSE]() + } + + this.blockLen = 512 * Math.ceil(this.stat.size / 512) + this.blockRemain = this.blockLen + const bufLen = Math.min(this.blockLen, this.maxReadSize) + this.buf = Buffer.allocUnsafe(bufLen) + this.offset = 0 + this.pos = 0 + this.remain = this.stat.size + this.length = this.buf.length + this[READ]() + } + + [READ] () { + const { fd, buf, offset, length, pos } = this + fs.read(fd, buf, offset, length, pos, (er, bytesRead) => { + if (er) { + // ignoring the error from close(2) is a bad practice, but at + // this point we already have an error, don't need another one + return this[CLOSE](() => this.emit('error', er)) + } + this[ONREAD](bytesRead) + }) + } + + [CLOSE] (cb) { + fs.close(this.fd, cb) + } + + [ONREAD] (bytesRead) { + if (bytesRead <= 0 && this.remain > 0) { + const er = new Error('encountered unexpected EOF') + er.path = this.absolute + er.syscall = 'read' + er.code = 'EOF' + return this[CLOSE](() => this.emit('error', er)) + } + + if (bytesRead > this.remain) { + const er = new Error('did not encounter expected EOF') + er.path = this.absolute + er.syscall = 'read' + er.code = 'EOF' + return this[CLOSE](() => this.emit('error', er)) + } + + // null out the rest of the buffer, if we could fit the block padding + // at the end of this loop, we've incremented bytesRead and this.remain + // to be incremented up to the blockRemain level, as if we had expected + // to get a null-padded file, and read it until the end. then we will + // decrement both remain and blockRemain by bytesRead, and know that we + // reached the expected EOF, without any null buffer to append. 
+ if (bytesRead === this.remain) { + for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) { + this.buf[i + this.offset] = 0 + bytesRead++ + this.remain++ + } + } + + const writeBuf = this.offset === 0 && bytesRead === this.buf.length ? + this.buf : this.buf.slice(this.offset, this.offset + bytesRead) + + const flushed = this.write(writeBuf) + if (!flushed) { + this[AWAITDRAIN](() => this[ONDRAIN]()) + } else { + this[ONDRAIN]() + } + } + + [AWAITDRAIN] (cb) { + this.once('drain', cb) + } + + write (writeBuf) { + if (this.blockRemain < writeBuf.length) { + const er = new Error('writing more data than expected') + er.path = this.absolute + return this.emit('error', er) + } + this.remain -= writeBuf.length + this.blockRemain -= writeBuf.length + this.pos += writeBuf.length + this.offset += writeBuf.length + return super.write(writeBuf) + } + + [ONDRAIN] () { + if (!this.remain) { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)) + } + return this[CLOSE](er => er ? this.emit('error', er) : this.end()) + } + + if (this.offset >= this.length) { + // if we only have a smaller bit left to read, alloc a smaller buffer + // otherwise, keep it the same length it was before. 
+ this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length)) + this.offset = 0 + } + this.length = this.buf.length - this.offset + this[READ]() + } +}) + +class WriteEntrySync extends WriteEntry { + [LSTAT] () { + this[ONLSTAT](fs.lstatSync(this.absolute)) + } + + [SYMLINK] () { + this[ONREADLINK](fs.readlinkSync(this.absolute)) + } + + [OPENFILE] () { + this[ONOPENFILE](fs.openSync(this.absolute, 'r')) + } + + [READ] () { + let threw = true + try { + const { fd, buf, offset, length, pos } = this + const bytesRead = fs.readSync(fd, buf, offset, length, pos) + this[ONREAD](bytesRead) + threw = false + } finally { + // ignoring the error from close(2) is a bad practice, but at + // this point we already have an error, don't need another one + if (threw) { + try { + this[CLOSE](() => {}) + } catch (er) {} + } + } + } + + [AWAITDRAIN] (cb) { + cb() + } + + [CLOSE] (cb) { + fs.closeSync(this.fd) + cb() + } +} + +const WriteEntryTar = warner(class WriteEntryTar extends Minipass { + constructor (readEntry, opt) { + opt = opt || {} + super(opt) + this.preservePaths = !!opt.preservePaths + this.portable = !!opt.portable + this.strict = !!opt.strict + this.noPax = !!opt.noPax + this.noMtime = !!opt.noMtime + + this.readEntry = readEntry + this.type = readEntry.type + if (this.type === 'Directory' && this.portable) { + this.noMtime = true + } + + this.prefix = opt.prefix || null + + this.path = normPath(readEntry.path) + this.mode = this[MODE](readEntry.mode) + this.uid = this.portable ? null : readEntry.uid + this.gid = this.portable ? null : readEntry.gid + this.uname = this.portable ? null : readEntry.uname + this.gname = this.portable ? null : readEntry.gname + this.size = readEntry.size + this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime + this.atime = this.portable ? null : readEntry.atime + this.ctime = this.portable ? 
null : readEntry.ctime + this.linkpath = normPath(readEntry.linkpath) + + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn) + } + + let pathWarn = false + if (!this.preservePaths) { + const [root, stripped] = stripAbsolutePath(this.path) + if (root) { + this.path = stripped + pathWarn = root + } + } + + this.remain = readEntry.size + this.blockRemain = readEntry.startBlockSize + + this.header = new Header({ + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this.mode, + uid: this.portable ? null : this.uid, + gid: this.portable ? null : this.gid, + size: this.size, + mtime: this.noMtime ? null : this.mtime, + type: this.type, + uname: this.portable ? null : this.uname, + atime: this.portable ? null : this.atime, + ctime: this.portable ? null : this.ctime, + }) + + if (pathWarn) { + this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { + entry: this, + path: pathWarn + this.path, + }) + } + + if (this.header.encode() && !this.noPax) { + super.write(new Pax({ + atime: this.portable ? null : this.atime, + ctime: this.portable ? null : this.ctime, + gid: this.portable ? null : this.gid, + mtime: this.noMtime ? null : this.mtime, + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath) + : this.linkpath, + size: this.size, + uid: this.portable ? null : this.uid, + uname: this.portable ? null : this.uname, + dev: this.portable ? null : this.readEntry.dev, + ino: this.portable ? null : this.readEntry.ino, + nlink: this.portable ? 
null : this.readEntry.nlink, + }).encode()) + } + + super.write(this.header.block) + readEntry.pipe(this) + } + + [PREFIX] (path) { + return prefixPath(path, this.prefix) + } + + [MODE] (mode) { + return modeFix(mode, this.type === 'Directory', this.portable) + } + + write (data) { + const writeLen = data.length + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate') + } + this.blockRemain -= writeLen + return super.write(data) + } + + end () { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)) + } + return super.end() + } +}) + +WriteEntry.Sync = WriteEntrySync +WriteEntry.Tar = WriteEntryTar + +const getType = stat => + stat.isFile() ? 'File' + : stat.isDirectory() ? 'Directory' + : stat.isSymbolicLink() ? 'SymbolicLink' + : 'Unsupported' + +module.exports = WriteEntry diff --git a/node_modules/tar/package.json b/node_modules/tar/package.json new file mode 100644 index 0000000..f84a41c --- /dev/null +++ b/node_modules/tar/package.json @@ -0,0 +1,70 @@ +{ + "author": "GitHub Inc.", + "name": "tar", + "description": "tar for node", + "version": "6.2.1", + "repository": { + "type": "git", + "url": "https://github.com/isaacs/node-tar.git" + }, + "scripts": { + "genparse": "node scripts/generate-parse-fixtures.js", + "snap": "tap", + "test": "tap" + }, + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.11.0", + "chmodr": "^1.2.0", + "end-of-stream": "^1.4.3", + "events-to-array": "^2.0.3", + "mutate-fs": "^2.1.1", + "nock": "^13.2.9", + "rimraf": "^3.0.2", + "tap": "^16.0.1" + }, + "license": "ISC", + "engines": { + "node": ">=10" + }, + "files": [ + "bin/", + "lib/", + "index.js" + ], + "tap": { + "coverage-map": "map.js", + "timeout": 0, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + 
"templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.11.0", + "content": "scripts/template-oss", + "engines": ">=10", + "distPaths": [ + "index.js" + ], + "allowPaths": [ + "/index.js" + ], + "ciVersions": [ + "10.x", + "12.x", + "14.x", + "16.x", + "18.x" + ] + } +} diff --git a/node_modules/tr46/.npmignore b/node_modules/tr46/.npmignore new file mode 100644 index 0000000..96e9161 --- /dev/null +++ b/node_modules/tr46/.npmignore @@ -0,0 +1,4 @@ +scripts/ +test/ + +!lib/mapping_table.json diff --git a/node_modules/tr46/index.js b/node_modules/tr46/index.js new file mode 100644 index 0000000..9ce12ca --- /dev/null +++ b/node_modules/tr46/index.js @@ -0,0 +1,193 @@ +"use strict"; + +var punycode = require("punycode"); +var mappingTable = require("./lib/mappingTable.json"); + +var PROCESSING_OPTIONS = { + TRANSITIONAL: 0, + NONTRANSITIONAL: 1 +}; + +function normalize(str) { // fix bug in v8 + return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); +} + +function findStatus(val) { + var start = 0; + var end = mappingTable.length - 1; + + while (start <= end) { + var mid = Math.floor((start + end) / 2); + + var target = mappingTable[mid]; + if (target[0][0] <= val && target[0][1] >= val) { + return target; + } else if (target[0][0] > val) { + end = mid - 1; + } else { + start = mid + 1; + } + } + + return null; +} + +var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; + +function countSymbols(string) { + return string + // replace every surrogate pair with a BMP symbol + .replace(regexAstralSymbols, '_') + // then get the length + .length; +} + +function mapChars(domain_name, useSTD3, processing_option) { + var hasError = false; + var processed = ""; + + var len = countSymbols(domain_name); + for (var i = 0; i < len; ++i) { + var codePoint = domain_name.codePointAt(i); + var status = findStatus(codePoint); + + switch (status[1]) { + 
case "disallowed": + hasError = true; + processed += String.fromCodePoint(codePoint); + break; + case "ignored": + break; + case "mapped": + processed += String.fromCodePoint.apply(String, status[2]); + break; + case "deviation": + if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { + processed += String.fromCodePoint.apply(String, status[2]); + } else { + processed += String.fromCodePoint(codePoint); + } + break; + case "valid": + processed += String.fromCodePoint(codePoint); + break; + case "disallowed_STD3_mapped": + if (useSTD3) { + hasError = true; + processed += String.fromCodePoint(codePoint); + } else { + processed += String.fromCodePoint.apply(String, status[2]); + } + break; + case "disallowed_STD3_valid": + if (useSTD3) { + hasError = true; + } + + processed += String.fromCodePoint(codePoint); + break; + } + } + + return { + string: processed, + error: hasError + }; +} + +var combiningMarksRegex = /[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F1
9\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; + +function validateLabel(label, processing_option) { + if (label.substr(0, 4) === "xn--") { + label = punycode.toUnicode(label); + processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; + } + + var error = false; + + if (normalize(label) !== label || + (label[3] === "-" && label[4] === "-") || + label[0] === "-" || label[label.length - 1] 
=== "-" || + label.indexOf(".") !== -1 || + label.search(combiningMarksRegex) === 0) { + error = true; + } + + var len = countSymbols(label); + for (var i = 0; i < len; ++i) { + var status = findStatus(label.codePointAt(i)); + if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || + (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && + status[1] !== "valid" && status[1] !== "deviation")) { + error = true; + break; + } + } + + return { + label: label, + error: error + }; +} + +function processing(domain_name, useSTD3, processing_option) { + var result = mapChars(domain_name, useSTD3, processing_option); + result.string = normalize(result.string); + + var labels = result.string.split("."); + for (var i = 0; i < labels.length; ++i) { + try { + var validation = validateLabel(labels[i]); + labels[i] = validation.label; + result.error = result.error || validation.error; + } catch(e) { + result.error = true; + } + } + + return { + string: labels.join("."), + error: result.error + }; +} + +module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { + var result = processing(domain_name, useSTD3, processing_option); + var labels = result.string.split("."); + labels = labels.map(function(l) { + try { + return punycode.toASCII(l); + } catch(e) { + result.error = true; + return l; + } + }); + + if (verifyDnsLength) { + var total = labels.slice(0, labels.length - 1).join(".").length; + if (total.length > 253 || total.length === 0) { + result.error = true; + } + + for (var i=0; i < labels.length; ++i) { + if (labels.length > 63 || labels.length === 0) { + result.error = true; + break; + } + } + } + + if (result.error) return null; + return labels.join("."); +}; + +module.exports.toUnicode = function(domain_name, useSTD3) { + var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); + + return { + domain: result.string, + error: result.error + }; +}; + +module.exports.PROCESSING_OPTIONS = 
PROCESSING_OPTIONS; diff --git a/node_modules/tr46/lib/.gitkeep b/node_modules/tr46/lib/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/node_modules/tr46/lib/mappingTable.json b/node_modules/tr46/lib/mappingTable.json new file mode 100644 index 0000000..89cf19a --- /dev/null +++ b/node_modules/tr46/lib/mappingTable.json @@ -0,0 +1 @@ +[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"val
id",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[267]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"valid"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[295,295],"valid"],[[296,296],"mapped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300]
,"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[316,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",[108,183]],[[321,321],"mapped",[322]],[[322,322],"valid"],[[323,323],"mapped",[324]],[[324,324],"valid"],[[325,325],"mapped",[326]],[[326,326],"valid"],[[327,327],"mapped",[328]],[[328,328],"valid"],[[329,329],"mapped",[700,110]],[[330,330],"mapped",[331]],[[331,331],"valid"],[[332,332],"mapped",[333]],[[333,333],"valid"],[[334,334],"mapped",[335]],[[335,335],"valid"],[[336,336],"mapped",[337]],[[337,337],"valid"],[[338,338],"mapped",[339]],[[339,339],"valid"],[[340,340],"mapped",[341]],[[341,341],"valid"],[[342,342],"mapped",[343]],[[343,343],"valid"],[[344,344],"mapped",[345]],[[345,345],"valid"],[[346,346],"mapped",[347]],[[347,347],"valid"],[[348,348],"mapped",[349]],[[349,349],"valid"],[[350,350],"mapped",[351]],[[351,351],"valid"],[[352,352],"mapped",[353]],[[353,353],"valid"],[[354,354],"mapped",[355]],[[355,355],"valid"],[[356,356],"mapped",[357]],[[357,357],"valid"],[[358,358],"mapped",[359]],[[359,359],"valid"],[[360,360],"mapped",[361]],[[361,361],"valid"],[[362,362],"mapped",[363]],[[363,363],"valid"],[[364,364],"mapped",[365]],[[365,365],"valid"],[[366,366],"mapped",[367]],[[367,367],"valid"],[[368,368],"mapped",[369]],[[369,369],"valid"],[[370,370],"mapped",[371]],[[371,371],"valid"],[[372,372],"mapped",[373]],[[373,373],"valid"],[[374,374],"mapped",[375]],[[375,375],"valid"],[[376,376],"mapped",[255]],[[377,377],"mapped",[378]],[[378,378],"valid"],[[379,379],"mapped",[380]],[[380,380],"valid"],[[381,381],"mapped",[382]],[[382,382],"valid"],[[383,383],"mapped",[115]],[[384,384],"valid"],[[385,385],"mapped",[595]],[[386,386],"mapped",[387]]
,[[387,387],"valid"],[[388,388],"mapped",[389]],[[389,389],"valid"],[[390,390],"mapped",[596]],[[391,391],"mapped",[392]],[[392,392],"valid"],[[393,393],"mapped",[598]],[[394,394],"mapped",[599]],[[395,395],"mapped",[396]],[[396,397],"valid"],[[398,398],"mapped",[477]],[[399,399],"mapped",[601]],[[400,400],"mapped",[603]],[[401,401],"mapped",[402]],[[402,402],"valid"],[[403,403],"mapped",[608]],[[404,404],"mapped",[611]],[[405,405],"valid"],[[406,406],"mapped",[617]],[[407,407],"mapped",[616]],[[408,408],"mapped",[409]],[[409,411],"valid"],[[412,412],"mapped",[623]],[[413,413],"mapped",[626]],[[414,414],"valid"],[[415,415],"mapped",[629]],[[416,416],"mapped",[417]],[[417,417],"valid"],[[418,418],"mapped",[419]],[[419,419],"valid"],[[420,420],"mapped",[421]],[[421,421],"valid"],[[422,422],"mapped",[640]],[[423,423],"mapped",[424]],[[424,424],"valid"],[[425,425],"mapped",[643]],[[426,427],"valid"],[[428,428],"mapped",[429]],[[429,429],"valid"],[[430,430],"mapped",[648]],[[431,431],"mapped",[432]],[[432,432],"valid"],[[433,433],"mapped",[650]],[[434,434],"mapped",[651]],[[435,435],"mapped",[436]],[[436,436],"valid"],[[437,437],"mapped",[438]],[[438,438],"valid"],[[439,439],"mapped",[658]],[[440,440],"mapped",[441]],[[441,443],"valid"],[[444,444],"mapped",[445]],[[445,451],"valid"],[[452,454],"mapped",[100,382]],[[455,457],"mapped",[108,106]],[[458,460],"mapped",[110,106]],[[461,461],"mapped",[462]],[[462,462],"valid"],[[463,463],"mapped",[464]],[[464,464],"valid"],[[465,465],"mapped",[466]],[[466,466],"valid"],[[467,467],"mapped",[468]],[[468,468],"valid"],[[469,469],"mapped",[470]],[[470,470],"valid"],[[471,471],"mapped",[472]],[[472,472],"valid"],[[473,473],"mapped",[474]],[[474,474],"valid"],[[475,475],"mapped",[476]],[[476,477],"valid"],[[478,478],"mapped",[479]],[[479,479],"valid"],[[480,480],"mapped",[481]],[[481,481],"valid"],[[482,482],"mapped",[483]],[[483,483],"valid"],[[484,484],"mapped",[485]],[[485,485],"valid"],[[486,486],"mapped",[487]],[[487,487],"valid
"],[[488,488],"mapped",[489]],[[489,489],"valid"],[[490,490],"mapped",[491]],[[491,491],"valid"],[[492,492],"mapped",[493]],[[493,493],"valid"],[[494,494],"mapped",[495]],[[495,496],"valid"],[[497,499],"mapped",[100,122]],[[500,500],"mapped",[501]],[[501,501],"valid"],[[502,502],"mapped",[405]],[[503,503],"mapped",[447]],[[504,504],"mapped",[505]],[[505,505],"valid"],[[506,506],"mapped",[507]],[[507,507],"valid"],[[508,508],"mapped",[509]],[[509,509],"valid"],[[510,510],"mapped",[511]],[[511,511],"valid"],[[512,512],"mapped",[513]],[[513,513],"valid"],[[514,514],"mapped",[515]],[[515,515],"valid"],[[516,516],"mapped",[517]],[[517,517],"valid"],[[518,518],"mapped",[519]],[[519,519],"valid"],[[520,520],"mapped",[521]],[[521,521],"valid"],[[522,522],"mapped",[523]],[[523,523],"valid"],[[524,524],"mapped",[525]],[[525,525],"valid"],[[526,526],"mapped",[527]],[[527,527],"valid"],[[528,528],"mapped",[529]],[[529,529],"valid"],[[530,530],"mapped",[531]],[[531,531],"valid"],[[532,532],"mapped",[533]],[[533,533],"valid"],[[534,534],"mapped",[535]],[[535,535],"valid"],[[536,536],"mapped",[537]],[[537,537],"valid"],[[538,538],"mapped",[539]],[[539,539],"valid"],[[540,540],"mapped",[541]],[[541,541],"valid"],[[542,542],"mapped",[543]],[[543,543],"valid"],[[544,544],"mapped",[414]],[[545,545],"valid"],[[546,546],"mapped",[547]],[[547,547],"valid"],[[548,548],"mapped",[549]],[[549,549],"valid"],[[550,550],"mapped",[551]],[[551,551],"valid"],[[552,552],"mapped",[553]],[[553,553],"valid"],[[554,554],"mapped",[555]],[[555,555],"valid"],[[556,556],"mapped",[557]],[[557,557],"valid"],[[558,558],"mapped",[559]],[[559,559],"valid"],[[560,560],"mapped",[561]],[[561,561],"valid"],[[562,562],"mapped",[563]],[[563,563],"valid"],[[564,566],"valid"],[[567,569],"valid"],[[570,570],"mapped",[11365]],[[571,571],"mapped",[572]],[[572,572],"valid"],[[573,573],"mapped",[410]],[[574,574],"mapped",[11366]],[[575,576],"valid"],[[577,577],"mapped",[578]],[[578,578],"valid"],[[579,579],"mapped",[384]],[
[580,580],"mapped",[649]],[[581,581],"mapped",[652]],[[582,582],"mapped",[583]],[[583,583],"valid"],[[584,584],"mapped",[585]],[[585,585],"valid"],[[586,586],"mapped",[587]],[[587,587],"valid"],[[588,588],"mapped",[589]],[[589,589],"valid"],[[590,590],"mapped",[591]],[[591,591],"valid"],[[592,680],"valid"],[[681,685],"valid"],[[686,687],"valid"],[[688,688],"mapped",[104]],[[689,689],"mapped",[614]],[[690,690],"mapped",[106]],[[691,691],"mapped",[114]],[[692,692],"mapped",[633]],[[693,693],"mapped",[635]],[[694,694],"mapped",[641]],[[695,695],"mapped",[119]],[[696,696],"mapped",[121]],[[697,705],"valid"],[[706,709],"valid",[],"NV8"],[[710,721],"valid"],[[722,727],"valid",[],"NV8"],[[728,728],"disallowed_STD3_mapped",[32,774]],[[729,729],"disallowed_STD3_mapped",[32,775]],[[730,730],"disallowed_STD3_mapped",[32,778]],[[731,731],"disallowed_STD3_mapped",[32,808]],[[732,732],"disallowed_STD3_mapped",[32,771]],[[733,733],"disallowed_STD3_mapped",[32,779]],[[734,734],"valid",[],"NV8"],[[735,735],"valid",[],"NV8"],[[736,736],"mapped",[611]],[[737,737],"mapped",[108]],[[738,738],"mapped",[115]],[[739,739],"mapped",[120]],[[740,740],"mapped",[661]],[[741,745],"valid",[],"NV8"],[[746,747],"valid",[],"NV8"],[[748,748],"valid"],[[749,749],"valid",[],"NV8"],[[750,750],"valid"],[[751,767],"valid",[],"NV8"],[[768,831],"valid"],[[832,832],"mapped",[768]],[[833,833],"mapped",[769]],[[834,834],"valid"],[[835,835],"mapped",[787]],[[836,836],"mapped",[776,769]],[[837,837],"mapped",[953]],[[838,846],"valid"],[[847,847],"ignored"],[[848,855],"valid"],[[856,860],"valid"],[[861,863],"valid"],[[864,865],"valid"],[[866,866],"valid"],[[867,879],"valid"],[[880,880],"mapped",[881]],[[881,881],"valid"],[[882,882],"mapped",[883]],[[883,883],"valid"],[[884,884],"mapped",[697]],[[885,885],"valid"],[[886,886],"mapped",[887]],[[887,887],"valid"],[[888,889],"disallowed"],[[890,890],"disallowed_STD3_mapped",[32,953]],[[891,893],"valid"],[[894,894],"disallowed_STD3_mapped",[59]],[[895,895],"mapped",[101
1]],[[896,899],"disallowed"],[[900,900],"disallowed_STD3_mapped",[32,769]],[[901,901],"disallowed_STD3_mapped",[32,776,769]],[[902,902],"mapped",[940]],[[903,903],"mapped",[183]],[[904,904],"mapped",[941]],[[905,905],"mapped",[942]],[[906,906],"mapped",[943]],[[907,907],"disallowed"],[[908,908],"mapped",[972]],[[909,909],"disallowed"],[[910,910],"mapped",[973]],[[911,911],"mapped",[974]],[[912,912],"valid"],[[913,913],"mapped",[945]],[[914,914],"mapped",[946]],[[915,915],"mapped",[947]],[[916,916],"mapped",[948]],[[917,917],"mapped",[949]],[[918,918],"mapped",[950]],[[919,919],"mapped",[951]],[[920,920],"mapped",[952]],[[921,921],"mapped",[953]],[[922,922],"mapped",[954]],[[923,923],"mapped",[955]],[[924,924],"mapped",[956]],[[925,925],"mapped",[957]],[[926,926],"mapped",[958]],[[927,927],"mapped",[959]],[[928,928],"mapped",[960]],[[929,929],"mapped",[961]],[[930,930],"disallowed"],[[931,931],"mapped",[963]],[[932,932],"mapped",[964]],[[933,933],"mapped",[965]],[[934,934],"mapped",[966]],[[935,935],"mapped",[967]],[[936,936],"mapped",[968]],[[937,937],"mapped",[969]],[[938,938],"mapped",[970]],[[939,939],"mapped",[971]],[[940,961],"valid"],[[962,962],"deviation",[963]],[[963,974],"valid"],[[975,975],"mapped",[983]],[[976,976],"mapped",[946]],[[977,977],"mapped",[952]],[[978,978],"mapped",[965]],[[979,979],"mapped",[973]],[[980,980],"mapped",[971]],[[981,981],"mapped",[966]],[[982,982],"mapped",[960]],[[983,983],"valid"],[[984,984],"mapped",[985]],[[985,985],"valid"],[[986,986],"mapped",[987]],[[987,987],"valid"],[[988,988],"mapped",[989]],[[989,989],"valid"],[[990,990],"mapped",[991]],[[991,991],"valid"],[[992,992],"mapped",[993]],[[993,993],"valid"],[[994,994],"mapped",[995]],[[995,995],"valid"],[[996,996],"mapped",[997]],[[997,997],"valid"],[[998,998],"mapped",[999]],[[999,999],"valid"],[[1000,1000],"mapped",[1001]],[[1001,1001],"valid"],[[1002,1002],"mapped",[1003]],[[1003,1003],"valid"],[[1004,1004],"mapped",[1005]],[[1005,1005],"valid"],[[1006,1006],"mapped",[1
007]],[[1007,1007],"valid"],[[1008,1008],"mapped",[954]],[[1009,1009],"mapped",[961]],[[1010,1010],"mapped",[963]],[[1011,1011],"valid"],[[1012,1012],"mapped",[952]],[[1013,1013],"mapped",[949]],[[1014,1014],"valid",[],"NV8"],[[1015,1015],"mapped",[1016]],[[1016,1016],"valid"],[[1017,1017],"mapped",[963]],[[1018,1018],"mapped",[1019]],[[1019,1019],"valid"],[[1020,1020],"valid"],[[1021,1021],"mapped",[891]],[[1022,1022],"mapped",[892]],[[1023,1023],"mapped",[893]],[[1024,1024],"mapped",[1104]],[[1025,1025],"mapped",[1105]],[[1026,1026],"mapped",[1106]],[[1027,1027],"mapped",[1107]],[[1028,1028],"mapped",[1108]],[[1029,1029],"mapped",[1109]],[[1030,1030],"mapped",[1110]],[[1031,1031],"mapped",[1111]],[[1032,1032],"mapped",[1112]],[[1033,1033],"mapped",[1113]],[[1034,1034],"mapped",[1114]],[[1035,1035],"mapped",[1115]],[[1036,1036],"mapped",[1116]],[[1037,1037],"mapped",[1117]],[[1038,1038],"mapped",[1118]],[[1039,1039],"mapped",[1119]],[[1040,1040],"mapped",[1072]],[[1041,1041],"mapped",[1073]],[[1042,1042],"mapped",[1074]],[[1043,1043],"mapped",[1075]],[[1044,1044],"mapped",[1076]],[[1045,1045],"mapped",[1077]],[[1046,1046],"mapped",[1078]],[[1047,1047],"mapped",[1079]],[[1048,1048],"mapped",[1080]],[[1049,1049],"mapped",[1081]],[[1050,1050],"mapped",[1082]],[[1051,1051],"mapped",[1083]],[[1052,1052],"mapped",[1084]],[[1053,1053],"mapped",[1085]],[[1054,1054],"mapped",[1086]],[[1055,1055],"mapped",[1087]],[[1056,1056],"mapped",[1088]],[[1057,1057],"mapped",[1089]],[[1058,1058],"mapped",[1090]],[[1059,1059],"mapped",[1091]],[[1060,1060],"mapped",[1092]],[[1061,1061],"mapped",[1093]],[[1062,1062],"mapped",[1094]],[[1063,1063],"mapped",[1095]],[[1064,1064],"mapped",[1096]],[[1065,1065],"mapped",[1097]],[[1066,1066],"mapped",[1098]],[[1067,1067],"mapped",[1099]],[[1068,1068],"mapped",[1100]],[[1069,1069],"mapped",[1101]],[[1070,1070],"mapped",[1102]],[[1071,1071],"mapped",[1103]],[[1072,1103],"valid"],[[1104,1104],"valid"],[[1105,1116],"valid"],[[1117,1117],"valid"],[[11
18,1119],"valid"],[[1120,1120],"mapped",[1121]],[[1121,1121],"valid"],[[1122,1122],"mapped",[1123]],[[1123,1123],"valid"],[[1124,1124],"mapped",[1125]],[[1125,1125],"valid"],[[1126,1126],"mapped",[1127]],[[1127,1127],"valid"],[[1128,1128],"mapped",[1129]],[[1129,1129],"valid"],[[1130,1130],"mapped",[1131]],[[1131,1131],"valid"],[[1132,1132],"mapped",[1133]],[[1133,1133],"valid"],[[1134,1134],"mapped",[1135]],[[1135,1135],"valid"],[[1136,1136],"mapped",[1137]],[[1137,1137],"valid"],[[1138,1138],"mapped",[1139]],[[1139,1139],"valid"],[[1140,1140],"mapped",[1141]],[[1141,1141],"valid"],[[1142,1142],"mapped",[1143]],[[1143,1143],"valid"],[[1144,1144],"mapped",[1145]],[[1145,1145],"valid"],[[1146,1146],"mapped",[1147]],[[1147,1147],"valid"],[[1148,1148],"mapped",[1149]],[[1149,1149],"valid"],[[1150,1150],"mapped",[1151]],[[1151,1151],"valid"],[[1152,1152],"mapped",[1153]],[[1153,1153],"valid"],[[1154,1154],"valid",[],"NV8"],[[1155,1158],"valid"],[[1159,1159],"valid"],[[1160,1161],"valid",[],"NV8"],[[1162,1162],"mapped",[1163]],[[1163,1163],"valid"],[[1164,1164],"mapped",[1165]],[[1165,1165],"valid"],[[1166,1166],"mapped",[1167]],[[1167,1167],"valid"],[[1168,1168],"mapped",[1169]],[[1169,1169],"valid"],[[1170,1170],"mapped",[1171]],[[1171,1171],"valid"],[[1172,1172],"mapped",[1173]],[[1173,1173],"valid"],[[1174,1174],"mapped",[1175]],[[1175,1175],"valid"],[[1176,1176],"mapped",[1177]],[[1177,1177],"valid"],[[1178,1178],"mapped",[1179]],[[1179,1179],"valid"],[[1180,1180],"mapped",[1181]],[[1181,1181],"valid"],[[1182,1182],"mapped",[1183]],[[1183,1183],"valid"],[[1184,1184],"mapped",[1185]],[[1185,1185],"valid"],[[1186,1186],"mapped",[1187]],[[1187,1187],"valid"],[[1188,1188],"mapped",[1189]],[[1189,1189],"valid"],[[1190,1190],"mapped",[1191]],[[1191,1191],"valid"],[[1192,1192],"mapped",[1193]],[[1193,1193],"valid"],[[1194,1194],"mapped",[1195]],[[1195,1195],"valid"],[[1196,1196],"mapped",[1197]],[[1197,1197],"valid"],[[1198,1198],"mapped",[1199]],[[1199,1199],"valid"],[[12
00,1200],"mapped",[1201]],[[1201,1201],"valid"],[[1202,1202],"mapped",[1203]],[[1203,1203],"valid"],[[1204,1204],"mapped",[1205]],[[1205,1205],"valid"],[[1206,1206],"mapped",[1207]],[[1207,1207],"valid"],[[1208,1208],"mapped",[1209]],[[1209,1209],"valid"],[[1210,1210],"mapped",[1211]],[[1211,1211],"valid"],[[1212,1212],"mapped",[1213]],[[1213,1213],"valid"],[[1214,1214],"mapped",[1215]],[[1215,1215],"valid"],[[1216,1216],"disallowed"],[[1217,1217],"mapped",[1218]],[[1218,1218],"valid"],[[1219,1219],"mapped",[1220]],[[1220,1220],"valid"],[[1221,1221],"mapped",[1222]],[[1222,1222],"valid"],[[1223,1223],"mapped",[1224]],[[1224,1224],"valid"],[[1225,1225],"mapped",[1226]],[[1226,1226],"valid"],[[1227,1227],"mapped",[1228]],[[1228,1228],"valid"],[[1229,1229],"mapped",[1230]],[[1230,1230],"valid"],[[1231,1231],"valid"],[[1232,1232],"mapped",[1233]],[[1233,1233],"valid"],[[1234,1234],"mapped",[1235]],[[1235,1235],"valid"],[[1236,1236],"mapped",[1237]],[[1237,1237],"valid"],[[1238,1238],"mapped",[1239]],[[1239,1239],"valid"],[[1240,1240],"mapped",[1241]],[[1241,1241],"valid"],[[1242,1242],"mapped",[1243]],[[1243,1243],"valid"],[[1244,1244],"mapped",[1245]],[[1245,1245],"valid"],[[1246,1246],"mapped",[1247]],[[1247,1247],"valid"],[[1248,1248],"mapped",[1249]],[[1249,1249],"valid"],[[1250,1250],"mapped",[1251]],[[1251,1251],"valid"],[[1252,1252],"mapped",[1253]],[[1253,1253],"valid"],[[1254,1254],"mapped",[1255]],[[1255,1255],"valid"],[[1256,1256],"mapped",[1257]],[[1257,1257],"valid"],[[1258,1258],"mapped",[1259]],[[1259,1259],"valid"],[[1260,1260],"mapped",[1261]],[[1261,1261],"valid"],[[1262,1262],"mapped",[1263]],[[1263,1263],"valid"],[[1264,1264],"mapped",[1265]],[[1265,1265],"valid"],[[1266,1266],"mapped",[1267]],[[1267,1267],"valid"],[[1268,1268],"mapped",[1269]],[[1269,1269],"valid"],[[1270,1270],"mapped",[1271]],[[1271,1271],"valid"],[[1272,1272],"mapped",[1273]],[[1273,1273],"valid"],[[1274,1274],"mapped",[1275]],[[1275,1275],"valid"],[[1276,1276],"mapped",[1277]],[
[1277,1277],"valid"],[[1278,1278],"mapped",[1279]],[[1279,1279],"valid"],[[1280,1280],"mapped",[1281]],[[1281,1281],"valid"],[[1282,1282],"mapped",[1283]],[[1283,1283],"valid"],[[1284,1284],"mapped",[1285]],[[1285,1285],"valid"],[[1286,1286],"mapped",[1287]],[[1287,1287],"valid"],[[1288,1288],"mapped",[1289]],[[1289,1289],"valid"],[[1290,1290],"mapped",[1291]],[[1291,1291],"valid"],[[1292,1292],"mapped",[1293]],[[1293,1293],"valid"],[[1294,1294],"mapped",[1295]],[[1295,1295],"valid"],[[1296,1296],"mapped",[1297]],[[1297,1297],"valid"],[[1298,1298],"mapped",[1299]],[[1299,1299],"valid"],[[1300,1300],"mapped",[1301]],[[1301,1301],"valid"],[[1302,1302],"mapped",[1303]],[[1303,1303],"valid"],[[1304,1304],"mapped",[1305]],[[1305,1305],"valid"],[[1306,1306],"mapped",[1307]],[[1307,1307],"valid"],[[1308,1308],"mapped",[1309]],[[1309,1309],"valid"],[[1310,1310],"mapped",[1311]],[[1311,1311],"valid"],[[1312,1312],"mapped",[1313]],[[1313,1313],"valid"],[[1314,1314],"mapped",[1315]],[[1315,1315],"valid"],[[1316,1316],"mapped",[1317]],[[1317,1317],"valid"],[[1318,1318],"mapped",[1319]],[[1319,1319],"valid"],[[1320,1320],"mapped",[1321]],[[1321,1321],"valid"],[[1322,1322],"mapped",[1323]],[[1323,1323],"valid"],[[1324,1324],"mapped",[1325]],[[1325,1325],"valid"],[[1326,1326],"mapped",[1327]],[[1327,1327],"valid"],[[1328,1328],"disallowed"],[[1329,1329],"mapped",[1377]],[[1330,1330],"mapped",[1378]],[[1331,1331],"mapped",[1379]],[[1332,1332],"mapped",[1380]],[[1333,1333],"mapped",[1381]],[[1334,1334],"mapped",[1382]],[[1335,1335],"mapped",[1383]],[[1336,1336],"mapped",[1384]],[[1337,1337],"mapped",[1385]],[[1338,1338],"mapped",[1386]],[[1339,1339],"mapped",[1387]],[[1340,1340],"mapped",[1388]],[[1341,1341],"mapped",[1389]],[[1342,1342],"mapped",[1390]],[[1343,1343],"mapped",[1391]],[[1344,1344],"mapped",[1392]],[[1345,1345],"mapped",[1393]],[[1346,1346],"mapped",[1394]],[[1347,1347],"mapped",[1395]],[[1348,1348],"mapped",[1396]],[[1349,1349],"mapped",[1397]],[[1350,1350],"mapped",
[1398]],[[1351,1351],"mapped",[1399]],[[1352,1352],"mapped",[1400]],[[1353,1353],"mapped",[1401]],[[1354,1354],"mapped",[1402]],[[1355,1355],"mapped",[1403]],[[1356,1356],"mapped",[1404]],[[1357,1357],"mapped",[1405]],[[1358,1358],"mapped",[1406]],[[1359,1359],"mapped",[1407]],[[1360,1360],"mapped",[1408]],[[1361,1361],"mapped",[1409]],[[1362,1362],"mapped",[1410]],[[1363,1363],"mapped",[1411]],[[1364,1364],"mapped",[1412]],[[1365,1365],"mapped",[1413]],[[1366,1366],"mapped",[1414]],[[1367,1368],"disallowed"],[[1369,1369],"valid"],[[1370,1375],"valid",[],"NV8"],[[1376,1376],"disallowed"],[[1377,1414],"valid"],[[1415,1415],"mapped",[1381,1410]],[[1416,1416],"disallowed"],[[1417,1417],"valid",[],"NV8"],[[1418,1418],"valid",[],"NV8"],[[1419,1420],"disallowed"],[[1421,1422],"valid",[],"NV8"],[[1423,1423],"valid",[],"NV8"],[[1424,1424],"disallowed"],[[1425,1441],"valid"],[[1442,1442],"valid"],[[1443,1455],"valid"],[[1456,1465],"valid"],[[1466,1466],"valid"],[[1467,1469],"valid"],[[1470,1470],"valid",[],"NV8"],[[1471,1471],"valid"],[[1472,1472],"valid",[],"NV8"],[[1473,1474],"valid"],[[1475,1475],"valid",[],"NV8"],[[1476,1476],"valid"],[[1477,1477],"valid"],[[1478,1478],"valid",[],"NV8"],[[1479,1479],"valid"],[[1480,1487],"disallowed"],[[1488,1514],"valid"],[[1515,1519],"disallowed"],[[1520,1524],"valid"],[[1525,1535],"disallowed"],[[1536,1539],"disallowed"],[[1540,1540],"disallowed"],[[1541,1541],"disallowed"],[[1542,1546],"valid",[],"NV8"],[[1547,1547],"valid",[],"NV8"],[[1548,1548],"valid",[],"NV8"],[[1549,1551],"valid",[],"NV8"],[[1552,1557],"valid"],[[1558,1562],"valid"],[[1563,1563],"valid",[],"NV8"],[[1564,1564],"disallowed"],[[1565,1565],"disallowed"],[[1566,1566],"valid",[],"NV8"],[[1567,1567],"valid",[],"NV8"],[[1568,1568],"valid"],[[1569,1594],"valid"],[[1595,1599],"valid"],[[1600,1600],"valid",[],"NV8"],[[1601,1618],"valid"],[[1619,1621],"valid"],[[1622,1624],"valid"],[[1625,1630],"valid"],[[1631,1631],"valid"],[[1632,1641],"valid"],[[1642,1645],"valid",[],"NV
8"],[[1646,1647],"valid"],[[1648,1652],"valid"],[[1653,1653],"mapped",[1575,1652]],[[1654,1654],"mapped",[1608,1652]],[[1655,1655],"mapped",[1735,1652]],[[1656,1656],"mapped",[1610,1652]],[[1657,1719],"valid"],[[1720,1721],"valid"],[[1722,1726],"valid"],[[1727,1727],"valid"],[[1728,1742],"valid"],[[1743,1743],"valid"],[[1744,1747],"valid"],[[1748,1748],"valid",[],"NV8"],[[1749,1756],"valid"],[[1757,1757],"disallowed"],[[1758,1758],"valid",[],"NV8"],[[1759,1768],"valid"],[[1769,1769],"valid",[],"NV8"],[[1770,1773],"valid"],[[1774,1775],"valid"],[[1776,1785],"valid"],[[1786,1790],"valid"],[[1791,1791],"valid"],[[1792,1805],"valid",[],"NV8"],[[1806,1806],"disallowed"],[[1807,1807],"disallowed"],[[1808,1836],"valid"],[[1837,1839],"valid"],[[1840,1866],"valid"],[[1867,1868],"disallowed"],[[1869,1871],"valid"],[[1872,1901],"valid"],[[1902,1919],"valid"],[[1920,1968],"valid"],[[1969,1969],"valid"],[[1970,1983],"disallowed"],[[1984,2037],"valid"],[[2038,2042],"valid",[],"NV8"],[[2043,2047],"disallowed"],[[2048,2093],"valid"],[[2094,2095],"disallowed"],[[2096,2110],"valid",[],"NV8"],[[2111,2111],"disallowed"],[[2112,2139],"valid"],[[2140,2141],"disallowed"],[[2142,2142],"valid",[],"NV8"],[[2143,2207],"disallowed"],[[2208,2208],"valid"],[[2209,2209],"valid"],[[2210,2220],"valid"],[[2221,2226],"valid"],[[2227,2228],"valid"],[[2229,2274],"disallowed"],[[2275,2275],"valid"],[[2276,2302],"valid"],[[2303,2303],"valid"],[[2304,2304],"valid"],[[2305,2307],"valid"],[[2308,2308],"valid"],[[2309,2361],"valid"],[[2362,2363],"valid"],[[2364,2381],"valid"],[[2382,2382],"valid"],[[2383,2383],"valid"],[[2384,2388],"valid"],[[2389,2389],"valid"],[[2390,2391],"valid"],[[2392,2392],"mapped",[2325,2364]],[[2393,2393],"mapped",[2326,2364]],[[2394,2394],"mapped",[2327,2364]],[[2395,2395],"mapped",[2332,2364]],[[2396,2396],"mapped",[2337,2364]],[[2397,2397],"mapped",[2338,2364]],[[2398,2398],"mapped",[2347,2364]],[[2399,2399],"mapped",[2351,2364]],[[2400,2403],"valid"],[[2404,2405],"valid",[],"NV8
"],[[2406,2415],"valid"],[[2416,2416],"valid",[],"NV8"],[[2417,2418],"valid"],[[2419,2423],"valid"],[[2424,2424],"valid"],[[2425,2426],"valid"],[[2427,2428],"valid"],[[2429,2429],"valid"],[[2430,2431],"valid"],[[2432,2432],"valid"],[[2433,2435],"valid"],[[2436,2436],"disallowed"],[[2437,2444],"valid"],[[2445,2446],"disallowed"],[[2447,2448],"valid"],[[2449,2450],"disallowed"],[[2451,2472],"valid"],[[2473,2473],"disallowed"],[[2474,2480],"valid"],[[2481,2481],"disallowed"],[[2482,2482],"valid"],[[2483,2485],"disallowed"],[[2486,2489],"valid"],[[2490,2491],"disallowed"],[[2492,2492],"valid"],[[2493,2493],"valid"],[[2494,2500],"valid"],[[2501,2502],"disallowed"],[[2503,2504],"valid"],[[2505,2506],"disallowed"],[[2507,2509],"valid"],[[2510,2510],"valid"],[[2511,2518],"disallowed"],[[2519,2519],"valid"],[[2520,2523],"disallowed"],[[2524,2524],"mapped",[2465,2492]],[[2525,2525],"mapped",[2466,2492]],[[2526,2526],"disallowed"],[[2527,2527],"mapped",[2479,2492]],[[2528,2531],"valid"],[[2532,2533],"disallowed"],[[2534,2545],"valid"],[[2546,2554],"valid",[],"NV8"],[[2555,2555],"valid",[],"NV8"],[[2556,2560],"disallowed"],[[2561,2561],"valid"],[[2562,2562],"valid"],[[2563,2563],"valid"],[[2564,2564],"disallowed"],[[2565,2570],"valid"],[[2571,2574],"disallowed"],[[2575,2576],"valid"],[[2577,2578],"disallowed"],[[2579,2600],"valid"],[[2601,2601],"disallowed"],[[2602,2608],"valid"],[[2609,2609],"disallowed"],[[2610,2610],"valid"],[[2611,2611],"mapped",[2610,2620]],[[2612,2612],"disallowed"],[[2613,2613],"valid"],[[2614,2614],"mapped",[2616,2620]],[[2615,2615],"disallowed"],[[2616,2617],"valid"],[[2618,2619],"disallowed"],[[2620,2620],"valid"],[[2621,2621],"disallowed"],[[2622,2626],"valid"],[[2627,2630],"disallowed"],[[2631,2632],"valid"],[[2633,2634],"disallowed"],[[2635,2637],"valid"],[[2638,2640],"disallowed"],[[2641,2641],"valid"],[[2642,2648],"disallowed"],[[2649,2649],"mapped",[2582,2620]],[[2650,2650],"mapped",[2583,2620]],[[2651,2651],"mapped",[2588,2620]],[[2652,2652],"v
alid"],[[2653,2653],"disallowed"],[[2654,2654],"mapped",[2603,2620]],[[2655,2661],"disallowed"],[[2662,2676],"valid"],[[2677,2677],"valid"],[[2678,2688],"disallowed"],[[2689,2691],"valid"],[[2692,2692],"disallowed"],[[2693,2699],"valid"],[[2700,2700],"valid"],[[2701,2701],"valid"],[[2702,2702],"disallowed"],[[2703,2705],"valid"],[[2706,2706],"disallowed"],[[2707,2728],"valid"],[[2729,2729],"disallowed"],[[2730,2736],"valid"],[[2737,2737],"disallowed"],[[2738,2739],"valid"],[[2740,2740],"disallowed"],[[2741,2745],"valid"],[[2746,2747],"disallowed"],[[2748,2757],"valid"],[[2758,2758],"disallowed"],[[2759,2761],"valid"],[[2762,2762],"disallowed"],[[2763,2765],"valid"],[[2766,2767],"disallowed"],[[2768,2768],"valid"],[[2769,2783],"disallowed"],[[2784,2784],"valid"],[[2785,2787],"valid"],[[2788,2789],"disallowed"],[[2790,2799],"valid"],[[2800,2800],"valid",[],"NV8"],[[2801,2801],"valid",[],"NV8"],[[2802,2808],"disallowed"],[[2809,2809],"valid"],[[2810,2816],"disallowed"],[[2817,2819],"valid"],[[2820,2820],"disallowed"],[[2821,2828],"valid"],[[2829,2830],"disallowed"],[[2831,2832],"valid"],[[2833,2834],"disallowed"],[[2835,2856],"valid"],[[2857,2857],"disallowed"],[[2858,2864],"valid"],[[2865,2865],"disallowed"],[[2866,2867],"valid"],[[2868,2868],"disallowed"],[[2869,2869],"valid"],[[2870,2873],"valid"],[[2874,2875],"disallowed"],[[2876,2883],"valid"],[[2884,2884],"valid"],[[2885,2886],"disallowed"],[[2887,2888],"valid"],[[2889,2890],"disallowed"],[[2891,2893],"valid"],[[2894,2901],"disallowed"],[[2902,2903],"valid"],[[2904,2907],"disallowed"],[[2908,2908],"mapped",[2849,2876]],[[2909,2909],"mapped",[2850,2876]],[[2910,2910],"disallowed"],[[2911,2913],"valid"],[[2914,2915],"valid"],[[2916,2917],"disallowed"],[[2918,2927],"valid"],[[2928,2928],"valid",[],"NV8"],[[2929,2929],"valid"],[[2930,2935],"valid",[],"NV8"],[[2936,2945],"disallowed"],[[2946,2947],"valid"],[[2948,2948],"disallowed"],[[2949,2954],"valid"],[[2955,2957],"disallowed"],[[2958,2960],"valid"],[[2961,2961],"d
isallowed"],[[2962,2965],"valid"],[[2966,2968],"disallowed"],[[2969,2970],"valid"],[[2971,2971],"disallowed"],[[2972,2972],"valid"],[[2973,2973],"disallowed"],[[2974,2975],"valid"],[[2976,2978],"disallowed"],[[2979,2980],"valid"],[[2981,2983],"disallowed"],[[2984,2986],"valid"],[[2987,2989],"disallowed"],[[2990,2997],"valid"],[[2998,2998],"valid"],[[2999,3001],"valid"],[[3002,3005],"disallowed"],[[3006,3010],"valid"],[[3011,3013],"disallowed"],[[3014,3016],"valid"],[[3017,3017],"disallowed"],[[3018,3021],"valid"],[[3022,3023],"disallowed"],[[3024,3024],"valid"],[[3025,3030],"disallowed"],[[3031,3031],"valid"],[[3032,3045],"disallowed"],[[3046,3046],"valid"],[[3047,3055],"valid"],[[3056,3058],"valid",[],"NV8"],[[3059,3066],"valid",[],"NV8"],[[3067,3071],"disallowed"],[[3072,3072],"valid"],[[3073,3075],"valid"],[[3076,3076],"disallowed"],[[3077,3084],"valid"],[[3085,3085],"disallowed"],[[3086,3088],"valid"],[[3089,3089],"disallowed"],[[3090,3112],"valid"],[[3113,3113],"disallowed"],[[3114,3123],"valid"],[[3124,3124],"valid"],[[3125,3129],"valid"],[[3130,3132],"disallowed"],[[3133,3133],"valid"],[[3134,3140],"valid"],[[3141,3141],"disallowed"],[[3142,3144],"valid"],[[3145,3145],"disallowed"],[[3146,3149],"valid"],[[3150,3156],"disallowed"],[[3157,3158],"valid"],[[3159,3159],"disallowed"],[[3160,3161],"valid"],[[3162,3162],"valid"],[[3163,3167],"disallowed"],[[3168,3169],"valid"],[[3170,3171],"valid"],[[3172,3173],"disallowed"],[[3174,3183],"valid"],[[3184,3191],"disallowed"],[[3192,3199],"valid",[],"NV8"],[[3200,3200],"disallowed"],[[3201,3201],"valid"],[[3202,3203],"valid"],[[3204,3204],"disallowed"],[[3205,3212],"valid"],[[3213,3213],"disallowed"],[[3214,3216],"valid"],[[3217,3217],"disallowed"],[[3218,3240],"valid"],[[3241,3241],"disallowed"],[[3242,3251],"valid"],[[3252,3252],"disallowed"],[[3253,3257],"valid"],[[3258,3259],"disallowed"],[[3260,3261],"valid"],[[3262,3268],"valid"],[[3269,3269],"disallowed"],[[3270,3272],"valid"],[[3273,3273],"disallowed"],[[3274,32
77],"valid"],[[3278,3284],"disallowed"],[[3285,3286],"valid"],[[3287,3293],"disallowed"],[[3294,3294],"valid"],[[3295,3295],"disallowed"],[[3296,3297],"valid"],[[3298,3299],"valid"],[[3300,3301],"disallowed"],[[3302,3311],"valid"],[[3312,3312],"disallowed"],[[3313,3314],"valid"],[[3315,3328],"disallowed"],[[3329,3329],"valid"],[[3330,3331],"valid"],[[3332,3332],"disallowed"],[[3333,3340],"valid"],[[3341,3341],"disallowed"],[[3342,3344],"valid"],[[3345,3345],"disallowed"],[[3346,3368],"valid"],[[3369,3369],"valid"],[[3370,3385],"valid"],[[3386,3386],"valid"],[[3387,3388],"disallowed"],[[3389,3389],"valid"],[[3390,3395],"valid"],[[3396,3396],"valid"],[[3397,3397],"disallowed"],[[3398,3400],"valid"],[[3401,3401],"disallowed"],[[3402,3405],"valid"],[[3406,3406],"valid"],[[3407,3414],"disallowed"],[[3415,3415],"valid"],[[3416,3422],"disallowed"],[[3423,3423],"valid"],[[3424,3425],"valid"],[[3426,3427],"valid"],[[3428,3429],"disallowed"],[[3430,3439],"valid"],[[3440,3445],"valid",[],"NV8"],[[3446,3448],"disallowed"],[[3449,3449],"valid",[],"NV8"],[[3450,3455],"valid"],[[3456,3457],"disallowed"],[[3458,3459],"valid"],[[3460,3460],"disallowed"],[[3461,3478],"valid"],[[3479,3481],"disallowed"],[[3482,3505],"valid"],[[3506,3506],"disallowed"],[[3507,3515],"valid"],[[3516,3516],"disallowed"],[[3517,3517],"valid"],[[3518,3519],"disallowed"],[[3520,3526],"valid"],[[3527,3529],"disallowed"],[[3530,3530],"valid"],[[3531,3534],"disallowed"],[[3535,3540],"valid"],[[3541,3541],"disallowed"],[[3542,3542],"valid"],[[3543,3543],"disallowed"],[[3544,3551],"valid"],[[3552,3557],"disallowed"],[[3558,3567],"valid"],[[3568,3569],"disallowed"],[[3570,3571],"valid"],[[3572,3572],"valid",[],"NV8"],[[3573,3584],"disallowed"],[[3585,3634],"valid"],[[3635,3635],"mapped",[3661,3634]],[[3636,3642],"valid"],[[3643,3646],"disallowed"],[[3647,3647],"valid",[],"NV8"],[[3648,3662],"valid"],[[3663,3663],"valid",[],"NV8"],[[3664,3673],"valid"],[[3674,3675],"valid",[],"NV8"],[[3676,3712],"disallowed"],[[371
3,3714],"valid"],[[3715,3715],"disallowed"],[[3716,3716],"valid"],[[3717,3718],"disallowed"],[[3719,3720],"valid"],[[3721,3721],"disallowed"],[[3722,3722],"valid"],[[3723,3724],"disallowed"],[[3725,3725],"valid"],[[3726,3731],"disallowed"],[[3732,3735],"valid"],[[3736,3736],"disallowed"],[[3737,3743],"valid"],[[3744,3744],"disallowed"],[[3745,3747],"valid"],[[3748,3748],"disallowed"],[[3749,3749],"valid"],[[3750,3750],"disallowed"],[[3751,3751],"valid"],[[3752,3753],"disallowed"],[[3754,3755],"valid"],[[3756,3756],"disallowed"],[[3757,3762],"valid"],[[3763,3763],"mapped",[3789,3762]],[[3764,3769],"valid"],[[3770,3770],"disallowed"],[[3771,3773],"valid"],[[3774,3775],"disallowed"],[[3776,3780],"valid"],[[3781,3781],"disallowed"],[[3782,3782],"valid"],[[3783,3783],"disallowed"],[[3784,3789],"valid"],[[3790,3791],"disallowed"],[[3792,3801],"valid"],[[3802,3803],"disallowed"],[[3804,3804],"mapped",[3755,3737]],[[3805,3805],"mapped",[3755,3745]],[[3806,3807],"valid"],[[3808,3839],"disallowed"],[[3840,3840],"valid"],[[3841,3850],"valid",[],"NV8"],[[3851,3851],"valid"],[[3852,3852],"mapped",[3851]],[[3853,3863],"valid",[],"NV8"],[[3864,3865],"valid"],[[3866,3871],"valid",[],"NV8"],[[3872,3881],"valid"],[[3882,3892],"valid",[],"NV8"],[[3893,3893],"valid"],[[3894,3894],"valid",[],"NV8"],[[3895,3895],"valid"],[[3896,3896],"valid",[],"NV8"],[[3897,3897],"valid"],[[3898,3901],"valid",[],"NV8"],[[3902,3906],"valid"],[[3907,3907],"mapped",[3906,4023]],[[3908,3911],"valid"],[[3912,3912],"disallowed"],[[3913,3916],"valid"],[[3917,3917],"mapped",[3916,4023]],[[3918,3921],"valid"],[[3922,3922],"mapped",[3921,4023]],[[3923,3926],"valid"],[[3927,3927],"mapped",[3926,4023]],[[3928,3931],"valid"],[[3932,3932],"mapped",[3931,4023]],[[3933,3944],"valid"],[[3945,3945],"mapped",[3904,4021]],[[3946,3946],"valid"],[[3947,3948],"valid"],[[3949,3952],"disallowed"],[[3953,3954],"valid"],[[3955,3955],"mapped",[3953,3954]],[[3956,3956],"valid"],[[3957,3957],"mapped",[3953,3956]],[[3958,3958],"mappe
d",[4018,3968]],[[3959,3959],"mapped",[4018,3953,3968]],[[3960,3960],"mapped",[4019,3968]],[[3961,3961],"mapped",[4019,3953,3968]],[[3962,3968],"valid"],[[3969,3969],"mapped",[3953,3968]],[[3970,3972],"valid"],[[3973,3973],"valid",[],"NV8"],[[3974,3979],"valid"],[[3980,3983],"valid"],[[3984,3986],"valid"],[[3987,3987],"mapped",[3986,4023]],[[3988,3989],"valid"],[[3990,3990],"valid"],[[3991,3991],"valid"],[[3992,3992],"disallowed"],[[3993,3996],"valid"],[[3997,3997],"mapped",[3996,4023]],[[3998,4001],"valid"],[[4002,4002],"mapped",[4001,4023]],[[4003,4006],"valid"],[[4007,4007],"mapped",[4006,4023]],[[4008,4011],"valid"],[[4012,4012],"mapped",[4011,4023]],[[4013,4013],"valid"],[[4014,4016],"valid"],[[4017,4023],"valid"],[[4024,4024],"valid"],[[4025,4025],"mapped",[3984,4021]],[[4026,4028],"valid"],[[4029,4029],"disallowed"],[[4030,4037],"valid",[],"NV8"],[[4038,4038],"valid"],[[4039,4044],"valid",[],"NV8"],[[4045,4045],"disallowed"],[[4046,4046],"valid",[],"NV8"],[[4047,4047],"valid",[],"NV8"],[[4048,4049],"valid",[],"NV8"],[[4050,4052],"valid",[],"NV8"],[[4053,4056],"valid",[],"NV8"],[[4057,4058],"valid",[],"NV8"],[[4059,4095],"disallowed"],[[4096,4129],"valid"],[[4130,4130],"valid"],[[4131,4135],"valid"],[[4136,4136],"valid"],[[4137,4138],"valid"],[[4139,4139],"valid"],[[4140,4146],"valid"],[[4147,4149],"valid"],[[4150,4153],"valid"],[[4154,4159],"valid"],[[4160,4169],"valid"],[[4170,4175],"valid",[],"NV8"],[[4176,4185],"valid"],[[4186,4249],"valid"],[[4250,4253],"valid"],[[4254,4255],"valid",[],"NV8"],[[4256,4293],"disallowed"],[[4294,4294],"disallowed"],[[4295,4295],"mapped",[11559]],[[4296,4300],"disallowed"],[[4301,4301],"mapped",[11565]],[[4302,4303],"disallowed"],[[4304,4342],"valid"],[[4343,4344],"valid"],[[4345,4346],"valid"],[[4347,4347],"valid",[],"NV8"],[[4348,4348],"mapped",[4316]],[[4349,4351],"valid"],[[4352,4441],"valid",[],"NV8"],[[4442,4446],"valid",[],"NV8"],[[4447,4448],"disallowed"],[[4449,4514],"valid",[],"NV8"],[[4515,4519],"valid",[],"NV8"],[
[4520,4601],"valid",[],"NV8"],[[4602,4607],"valid",[],"NV8"],[[4608,4614],"valid"],[[4615,4615],"valid"],[[4616,4678],"valid"],[[4679,4679],"valid"],[[4680,4680],"valid"],[[4681,4681],"disallowed"],[[4682,4685],"valid"],[[4686,4687],"disallowed"],[[4688,4694],"valid"],[[4695,4695],"disallowed"],[[4696,4696],"valid"],[[4697,4697],"disallowed"],[[4698,4701],"valid"],[[4702,4703],"disallowed"],[[4704,4742],"valid"],[[4743,4743],"valid"],[[4744,4744],"valid"],[[4745,4745],"disallowed"],[[4746,4749],"valid"],[[4750,4751],"disallowed"],[[4752,4782],"valid"],[[4783,4783],"valid"],[[4784,4784],"valid"],[[4785,4785],"disallowed"],[[4786,4789],"valid"],[[4790,4791],"disallowed"],[[4792,4798],"valid"],[[4799,4799],"disallowed"],[[4800,4800],"valid"],[[4801,4801],"disallowed"],[[4802,4805],"valid"],[[4806,4807],"disallowed"],[[4808,4814],"valid"],[[4815,4815],"valid"],[[4816,4822],"valid"],[[4823,4823],"disallowed"],[[4824,4846],"valid"],[[4847,4847],"valid"],[[4848,4878],"valid"],[[4879,4879],"valid"],[[4880,4880],"valid"],[[4881,4881],"disallowed"],[[4882,4885],"valid"],[[4886,4887],"disallowed"],[[4888,4894],"valid"],[[4895,4895],"valid"],[[4896,4934],"valid"],[[4935,4935],"valid"],[[4936,4954],"valid"],[[4955,4956],"disallowed"],[[4957,4958],"valid"],[[4959,4959],"valid"],[[4960,4960],"valid",[],"NV8"],[[4961,4988],"valid",[],"NV8"],[[4989,4991],"disallowed"],[[4992,5007],"valid"],[[5008,5017],"valid",[],"NV8"],[[5018,5023],"disallowed"],[[5024,5108],"valid"],[[5109,5109],"valid"],[[5110,5111],"disallowed"],[[5112,5112],"mapped",[5104]],[[5113,5113],"mapped",[5105]],[[5114,5114],"mapped",[5106]],[[5115,5115],"mapped",[5107]],[[5116,5116],"mapped",[5108]],[[5117,5117],"mapped",[5109]],[[5118,5119],"disallowed"],[[5120,5120],"valid",[],"NV8"],[[5121,5740],"valid"],[[5741,5742],"valid",[],"NV8"],[[5743,5750],"valid"],[[5751,5759],"valid"],[[5760,5760],"disallowed"],[[5761,5786],"valid"],[[5787,5788],"valid",[],"NV8"],[[5789,5791],"disallowed"],[[5792,5866],"valid"],[[5867,5872
],"valid",[],"NV8"],[[5873,5880],"valid"],[[5881,5887],"disallowed"],[[5888,5900],"valid"],[[5901,5901],"disallowed"],[[5902,5908],"valid"],[[5909,5919],"disallowed"],[[5920,5940],"valid"],[[5941,5942],"valid",[],"NV8"],[[5943,5951],"disallowed"],[[5952,5971],"valid"],[[5972,5983],"disallowed"],[[5984,5996],"valid"],[[5997,5997],"disallowed"],[[5998,6000],"valid"],[[6001,6001],"disallowed"],[[6002,6003],"valid"],[[6004,6015],"disallowed"],[[6016,6067],"valid"],[[6068,6069],"disallowed"],[[6070,6099],"valid"],[[6100,6102],"valid",[],"NV8"],[[6103,6103],"valid"],[[6104,6107],"valid",[],"NV8"],[[6108,6108],"valid"],[[6109,6109],"valid"],[[6110,6111],"disallowed"],[[6112,6121],"valid"],[[6122,6127],"disallowed"],[[6128,6137],"valid",[],"NV8"],[[6138,6143],"disallowed"],[[6144,6149],"valid",[],"NV8"],[[6150,6150],"disallowed"],[[6151,6154],"valid",[],"NV8"],[[6155,6157],"ignored"],[[6158,6158],"disallowed"],[[6159,6159],"disallowed"],[[6160,6169],"valid"],[[6170,6175],"disallowed"],[[6176,6263],"valid"],[[6264,6271],"disallowed"],[[6272,6313],"valid"],[[6314,6314],"valid"],[[6315,6319],"disallowed"],[[6320,6389],"valid"],[[6390,6399],"disallowed"],[[6400,6428],"valid"],[[6429,6430],"valid"],[[6431,6431],"disallowed"],[[6432,6443],"valid"],[[6444,6447],"disallowed"],[[6448,6459],"valid"],[[6460,6463],"disallowed"],[[6464,6464],"valid",[],"NV8"],[[6465,6467],"disallowed"],[[6468,6469],"valid",[],"NV8"],[[6470,6509],"valid"],[[6510,6511],"disallowed"],[[6512,6516],"valid"],[[6517,6527],"disallowed"],[[6528,6569],"valid"],[[6570,6571],"valid"],[[6572,6575],"disallowed"],[[6576,6601],"valid"],[[6602,6607],"disallowed"],[[6608,6617],"valid"],[[6618,6618],"valid",[],"XV8"],[[6619,6621],"disallowed"],[[6622,6623],"valid",[],"NV8"],[[6624,6655],"valid",[],"NV8"],[[6656,6683],"valid"],[[6684,6685],"disallowed"],[[6686,6687],"valid",[],"NV8"],[[6688,6750],"valid"],[[6751,6751],"disallowed"],[[6752,6780],"valid"],[[6781,6782],"disallowed"],[[6783,6793],"valid"],[[6794,6799],"disallo
wed"],[[6800,6809],"valid"],[[6810,6815],"disallowed"],[[6816,6822],"valid",[],"NV8"],[[6823,6823],"valid"],[[6824,6829],"valid",[],"NV8"],[[6830,6831],"disallowed"],[[6832,6845],"valid"],[[6846,6846],"valid",[],"NV8"],[[6847,6911],"disallowed"],[[6912,6987],"valid"],[[6988,6991],"disallowed"],[[6992,7001],"valid"],[[7002,7018],"valid",[],"NV8"],[[7019,7027],"valid"],[[7028,7036],"valid",[],"NV8"],[[7037,7039],"disallowed"],[[7040,7082],"valid"],[[7083,7085],"valid"],[[7086,7097],"valid"],[[7098,7103],"valid"],[[7104,7155],"valid"],[[7156,7163],"disallowed"],[[7164,7167],"valid",[],"NV8"],[[7168,7223],"valid"],[[7224,7226],"disallowed"],[[7227,7231],"valid",[],"NV8"],[[7232,7241],"valid"],[[7242,7244],"disallowed"],[[7245,7293],"valid"],[[7294,7295],"valid",[],"NV8"],[[7296,7359],"disallowed"],[[7360,7367],"valid",[],"NV8"],[[7368,7375],"disallowed"],[[7376,7378],"valid"],[[7379,7379],"valid",[],"NV8"],[[7380,7410],"valid"],[[7411,7414],"valid"],[[7415,7415],"disallowed"],[[7416,7417],"valid"],[[7418,7423],"disallowed"],[[7424,7467],"valid"],[[7468,7468],"mapped",[97]],[[7469,7469],"mapped",[230]],[[7470,7470],"mapped",[98]],[[7471,7471],"valid"],[[7472,7472],"mapped",[100]],[[7473,7473],"mapped",[101]],[[7474,7474],"mapped",[477]],[[7475,7475],"mapped",[103]],[[7476,7476],"mapped",[104]],[[7477,7477],"mapped",[105]],[[7478,7478],"mapped",[106]],[[7479,7479],"mapped",[107]],[[7480,7480],"mapped",[108]],[[7481,7481],"mapped",[109]],[[7482,7482],"mapped",[110]],[[7483,7483],"valid"],[[7484,7484],"mapped",[111]],[[7485,7485],"mapped",[547]],[[7486,7486],"mapped",[112]],[[7487,7487],"mapped",[114]],[[7488,7488],"mapped",[116]],[[7489,7489],"mapped",[117]],[[7490,7490],"mapped",[119]],[[7491,7491],"mapped",[97]],[[7492,7492],"mapped",[592]],[[7493,7493],"mapped",[593]],[[7494,7494],"mapped",[7426]],[[7495,7495],"mapped",[98]],[[7496,7496],"mapped",[100]],[[7497,7497],"mapped",[101]],[[7498,7498],"mapped",[601]],[[7499,7499],"mapped",[603]],[[7500,7500],"mapped",[604]],[[
7501,7501],"mapped",[103]],[[7502,7502],"valid"],[[7503,7503],"mapped",[107]],[[7504,7504],"mapped",[109]],[[7505,7505],"mapped",[331]],[[7506,7506],"mapped",[111]],[[7507,7507],"mapped",[596]],[[7508,7508],"mapped",[7446]],[[7509,7509],"mapped",[7447]],[[7510,7510],"mapped",[112]],[[7511,7511],"mapped",[116]],[[7512,7512],"mapped",[117]],[[7513,7513],"mapped",[7453]],[[7514,7514],"mapped",[623]],[[7515,7515],"mapped",[118]],[[7516,7516],"mapped",[7461]],[[7517,7517],"mapped",[946]],[[7518,7518],"mapped",[947]],[[7519,7519],"mapped",[948]],[[7520,7520],"mapped",[966]],[[7521,7521],"mapped",[967]],[[7522,7522],"mapped",[105]],[[7523,7523],"mapped",[114]],[[7524,7524],"mapped",[117]],[[7525,7525],"mapped",[118]],[[7526,7526],"mapped",[946]],[[7527,7527],"mapped",[947]],[[7528,7528],"mapped",[961]],[[7529,7529],"mapped",[966]],[[7530,7530],"mapped",[967]],[[7531,7531],"valid"],[[7532,7543],"valid"],[[7544,7544],"mapped",[1085]],[[7545,7578],"valid"],[[7579,7579],"mapped",[594]],[[7580,7580],"mapped",[99]],[[7581,7581],"mapped",[597]],[[7582,7582],"mapped",[240]],[[7583,7583],"mapped",[604]],[[7584,7584],"mapped",[102]],[[7585,7585],"mapped",[607]],[[7586,7586],"mapped",[609]],[[7587,7587],"mapped",[613]],[[7588,7588],"mapped",[616]],[[7589,7589],"mapped",[617]],[[7590,7590],"mapped",[618]],[[7591,7591],"mapped",[7547]],[[7592,7592],"mapped",[669]],[[7593,7593],"mapped",[621]],[[7594,7594],"mapped",[7557]],[[7595,7595],"mapped",[671]],[[7596,7596],"mapped",[625]],[[7597,7597],"mapped",[624]],[[7598,7598],"mapped",[626]],[[7599,7599],"mapped",[627]],[[7600,7600],"mapped",[628]],[[7601,7601],"mapped",[629]],[[7602,7602],"mapped",[632]],[[7603,7603],"mapped",[642]],[[7604,7604],"mapped",[643]],[[7605,7605],"mapped",[427]],[[7606,7606],"mapped",[649]],[[7607,7607],"mapped",[650]],[[7608,7608],"mapped",[7452]],[[7609,7609],"mapped",[651]],[[7610,7610],"mapped",[652]],[[7611,7611],"mapped",[122]],[[7612,7612],"mapped",[656]],[[7613,7613],"mapped",[657]],[[7614,7614],"mapped",
[658]],[[7615,7615],"mapped",[952]],[[7616,7619],"valid"],[[7620,7626],"valid"],[[7627,7654],"valid"],[[7655,7669],"valid"],[[7670,7675],"disallowed"],[[7676,7676],"valid"],[[7677,7677],"valid"],[[7678,7679],"valid"],[[7680,7680],"mapped",[7681]],[[7681,7681],"valid"],[[7682,7682],"mapped",[7683]],[[7683,7683],"valid"],[[7684,7684],"mapped",[7685]],[[7685,7685],"valid"],[[7686,7686],"mapped",[7687]],[[7687,7687],"valid"],[[7688,7688],"mapped",[7689]],[[7689,7689],"valid"],[[7690,7690],"mapped",[7691]],[[7691,7691],"valid"],[[7692,7692],"mapped",[7693]],[[7693,7693],"valid"],[[7694,7694],"mapped",[7695]],[[7695,7695],"valid"],[[7696,7696],"mapped",[7697]],[[7697,7697],"valid"],[[7698,7698],"mapped",[7699]],[[7699,7699],"valid"],[[7700,7700],"mapped",[7701]],[[7701,7701],"valid"],[[7702,7702],"mapped",[7703]],[[7703,7703],"valid"],[[7704,7704],"mapped",[7705]],[[7705,7705],"valid"],[[7706,7706],"mapped",[7707]],[[7707,7707],"valid"],[[7708,7708],"mapped",[7709]],[[7709,7709],"valid"],[[7710,7710],"mapped",[7711]],[[7711,7711],"valid"],[[7712,7712],"mapped",[7713]],[[7713,7713],"valid"],[[7714,7714],"mapped",[7715]],[[7715,7715],"valid"],[[7716,7716],"mapped",[7717]],[[7717,7717],"valid"],[[7718,7718],"mapped",[7719]],[[7719,7719],"valid"],[[7720,7720],"mapped",[7721]],[[7721,7721],"valid"],[[7722,7722],"mapped",[7723]],[[7723,7723],"valid"],[[7724,7724],"mapped",[7725]],[[7725,7725],"valid"],[[7726,7726],"mapped",[7727]],[[7727,7727],"valid"],[[7728,7728],"mapped",[7729]],[[7729,7729],"valid"],[[7730,7730],"mapped",[7731]],[[7731,7731],"valid"],[[7732,7732],"mapped",[7733]],[[7733,7733],"valid"],[[7734,7734],"mapped",[7735]],[[7735,7735],"valid"],[[7736,7736],"mapped",[7737]],[[7737,7737],"valid"],[[7738,7738],"mapped",[7739]],[[7739,7739],"valid"],[[7740,7740],"mapped",[7741]],[[7741,7741],"valid"],[[7742,7742],"mapped",[7743]],[[7743,7743],"valid"],[[7744,7744],"mapped",[7745]],[[7745,7745],"valid"],[[7746,7746],"mapped",[7747]],[[7747,7747],"valid"],[[7748,7748],"m
apped",[7749]],[[7749,7749],"valid"],[[7750,7750],"mapped",[7751]],[[7751,7751],"valid"],[[7752,7752],"mapped",[7753]],[[7753,7753],"valid"],[[7754,7754],"mapped",[7755]],[[7755,7755],"valid"],[[7756,7756],"mapped",[7757]],[[7757,7757],"valid"],[[7758,7758],"mapped",[7759]],[[7759,7759],"valid"],[[7760,7760],"mapped",[7761]],[[7761,7761],"valid"],[[7762,7762],"mapped",[7763]],[[7763,7763],"valid"],[[7764,7764],"mapped",[7765]],[[7765,7765],"valid"],[[7766,7766],"mapped",[7767]],[[7767,7767],"valid"],[[7768,7768],"mapped",[7769]],[[7769,7769],"valid"],[[7770,7770],"mapped",[7771]],[[7771,7771],"valid"],[[7772,7772],"mapped",[7773]],[[7773,7773],"valid"],[[7774,7774],"mapped",[7775]],[[7775,7775],"valid"],[[7776,7776],"mapped",[7777]],[[7777,7777],"valid"],[[7778,7778],"mapped",[7779]],[[7779,7779],"valid"],[[7780,7780],"mapped",[7781]],[[7781,7781],"valid"],[[7782,7782],"mapped",[7783]],[[7783,7783],"valid"],[[7784,7784],"mapped",[7785]],[[7785,7785],"valid"],[[7786,7786],"mapped",[7787]],[[7787,7787],"valid"],[[7788,7788],"mapped",[7789]],[[7789,7789],"valid"],[[7790,7790],"mapped",[7791]],[[7791,7791],"valid"],[[7792,7792],"mapped",[7793]],[[7793,7793],"valid"],[[7794,7794],"mapped",[7795]],[[7795,7795],"valid"],[[7796,7796],"mapped",[7797]],[[7797,7797],"valid"],[[7798,7798],"mapped",[7799]],[[7799,7799],"valid"],[[7800,7800],"mapped",[7801]],[[7801,7801],"valid"],[[7802,7802],"mapped",[7803]],[[7803,7803],"valid"],[[7804,7804],"mapped",[7805]],[[7805,7805],"valid"],[[7806,7806],"mapped",[7807]],[[7807,7807],"valid"],[[7808,7808],"mapped",[7809]],[[7809,7809],"valid"],[[7810,7810],"mapped",[7811]],[[7811,7811],"valid"],[[7812,7812],"mapped",[7813]],[[7813,7813],"valid"],[[7814,7814],"mapped",[7815]],[[7815,7815],"valid"],[[7816,7816],"mapped",[7817]],[[7817,7817],"valid"],[[7818,7818],"mapped",[7819]],[[7819,7819],"valid"],[[7820,7820],"mapped",[7821]],[[7821,7821],"valid"],[[7822,7822],"mapped",[7823]],[[7823,7823],"valid"],[[7824,7824],"mapped",[7825]],[[7825,78
25],"valid"],[[7826,7826],"mapped",[7827]],[[7827,7827],"valid"],[[7828,7828],"mapped",[7829]],[[7829,7833],"valid"],[[7834,7834],"mapped",[97,702]],[[7835,7835],"mapped",[7777]],[[7836,7837],"valid"],[[7838,7838],"mapped",[115,115]],[[7839,7839],"valid"],[[7840,7840],"mapped",[7841]],[[7841,7841],"valid"],[[7842,7842],"mapped",[7843]],[[7843,7843],"valid"],[[7844,7844],"mapped",[7845]],[[7845,7845],"valid"],[[7846,7846],"mapped",[7847]],[[7847,7847],"valid"],[[7848,7848],"mapped",[7849]],[[7849,7849],"valid"],[[7850,7850],"mapped",[7851]],[[7851,7851],"valid"],[[7852,7852],"mapped",[7853]],[[7853,7853],"valid"],[[7854,7854],"mapped",[7855]],[[7855,7855],"valid"],[[7856,7856],"mapped",[7857]],[[7857,7857],"valid"],[[7858,7858],"mapped",[7859]],[[7859,7859],"valid"],[[7860,7860],"mapped",[7861]],[[7861,7861],"valid"],[[7862,7862],"mapped",[7863]],[[7863,7863],"valid"],[[7864,7864],"mapped",[7865]],[[7865,7865],"valid"],[[7866,7866],"mapped",[7867]],[[7867,7867],"valid"],[[7868,7868],"mapped",[7869]],[[7869,7869],"valid"],[[7870,7870],"mapped",[7871]],[[7871,7871],"valid"],[[7872,7872],"mapped",[7873]],[[7873,7873],"valid"],[[7874,7874],"mapped",[7875]],[[7875,7875],"valid"],[[7876,7876],"mapped",[7877]],[[7877,7877],"valid"],[[7878,7878],"mapped",[7879]],[[7879,7879],"valid"],[[7880,7880],"mapped",[7881]],[[7881,7881],"valid"],[[7882,7882],"mapped",[7883]],[[7883,7883],"valid"],[[7884,7884],"mapped",[7885]],[[7885,7885],"valid"],[[7886,7886],"mapped",[7887]],[[7887,7887],"valid"],[[7888,7888],"mapped",[7889]],[[7889,7889],"valid"],[[7890,7890],"mapped",[7891]],[[7891,7891],"valid"],[[7892,7892],"mapped",[7893]],[[7893,7893],"valid"],[[7894,7894],"mapped",[7895]],[[7895,7895],"valid"],[[7896,7896],"mapped",[7897]],[[7897,7897],"valid"],[[7898,7898],"mapped",[7899]],[[7899,7899],"valid"],[[7900,7900],"mapped",[7901]],[[7901,7901],"valid"],[[7902,7902],"mapped",[7903]],[[7903,7903],"valid"],[[7904,7904],"mapped",[7905]],[[7905,7905],"valid"],[[7906,7906],"mapped",[7907]
],[[7907,7907],"valid"],[[7908,7908],"mapped",[7909]],[[7909,7909],"valid"],[[7910,7910],"mapped",[7911]],[[7911,7911],"valid"],[[7912,7912],"mapped",[7913]],[[7913,7913],"valid"],[[7914,7914],"mapped",[7915]],[[7915,7915],"valid"],[[7916,7916],"mapped",[7917]],[[7917,7917],"valid"],[[7918,7918],"mapped",[7919]],[[7919,7919],"valid"],[[7920,7920],"mapped",[7921]],[[7921,7921],"valid"],[[7922,7922],"mapped",[7923]],[[7923,7923],"valid"],[[7924,7924],"mapped",[7925]],[[7925,7925],"valid"],[[7926,7926],"mapped",[7927]],[[7927,7927],"valid"],[[7928,7928],"mapped",[7929]],[[7929,7929],"valid"],[[7930,7930],"mapped",[7931]],[[7931,7931],"valid"],[[7932,7932],"mapped",[7933]],[[7933,7933],"valid"],[[7934,7934],"mapped",[7935]],[[7935,7935],"valid"],[[7936,7943],"valid"],[[7944,7944],"mapped",[7936]],[[7945,7945],"mapped",[7937]],[[7946,7946],"mapped",[7938]],[[7947,7947],"mapped",[7939]],[[7948,7948],"mapped",[7940]],[[7949,7949],"mapped",[7941]],[[7950,7950],"mapped",[7942]],[[7951,7951],"mapped",[7943]],[[7952,7957],"valid"],[[7958,7959],"disallowed"],[[7960,7960],"mapped",[7952]],[[7961,7961],"mapped",[7953]],[[7962,7962],"mapped",[7954]],[[7963,7963],"mapped",[7955]],[[7964,7964],"mapped",[7956]],[[7965,7965],"mapped",[7957]],[[7966,7967],"disallowed"],[[7968,7975],"valid"],[[7976,7976],"mapped",[7968]],[[7977,7977],"mapped",[7969]],[[7978,7978],"mapped",[7970]],[[7979,7979],"mapped",[7971]],[[7980,7980],"mapped",[7972]],[[7981,7981],"mapped",[7973]],[[7982,7982],"mapped",[7974]],[[7983,7983],"mapped",[7975]],[[7984,7991],"valid"],[[7992,7992],"mapped",[7984]],[[7993,7993],"mapped",[7985]],[[7994,7994],"mapped",[7986]],[[7995,7995],"mapped",[7987]],[[7996,7996],"mapped",[7988]],[[7997,7997],"mapped",[7989]],[[7998,7998],"mapped",[7990]],[[7999,7999],"mapped",[7991]],[[8000,8005],"valid"],[[8006,8007],"disallowed"],[[8008,8008],"mapped",[8000]],[[8009,8009],"mapped",[8001]],[[8010,8010],"mapped",[8002]],[[8011,8011],"mapped",[8003]],[[8012,8012],"mapped",[8004]],[[8013,
8013],"mapped",[8005]],[[8014,8015],"disallowed"],[[8016,8023],"valid"],[[8024,8024],"disallowed"],[[8025,8025],"mapped",[8017]],[[8026,8026],"disallowed"],[[8027,8027],"mapped",[8019]],[[8028,8028],"disallowed"],[[8029,8029],"mapped",[8021]],[[8030,8030],"disallowed"],[[8031,8031],"mapped",[8023]],[[8032,8039],"valid"],[[8040,8040],"mapped",[8032]],[[8041,8041],"mapped",[8033]],[[8042,8042],"mapped",[8034]],[[8043,8043],"mapped",[8035]],[[8044,8044],"mapped",[8036]],[[8045,8045],"mapped",[8037]],[[8046,8046],"mapped",[8038]],[[8047,8047],"mapped",[8039]],[[8048,8048],"valid"],[[8049,8049],"mapped",[940]],[[8050,8050],"valid"],[[8051,8051],"mapped",[941]],[[8052,8052],"valid"],[[8053,8053],"mapped",[942]],[[8054,8054],"valid"],[[8055,8055],"mapped",[943]],[[8056,8056],"valid"],[[8057,8057],"mapped",[972]],[[8058,8058],"valid"],[[8059,8059],"mapped",[973]],[[8060,8060],"valid"],[[8061,8061],"mapped",[974]],[[8062,8063],"disallowed"],[[8064,8064],"mapped",[7936,953]],[[8065,8065],"mapped",[7937,953]],[[8066,8066],"mapped",[7938,953]],[[8067,8067],"mapped",[7939,953]],[[8068,8068],"mapped",[7940,953]],[[8069,8069],"mapped",[7941,953]],[[8070,8070],"mapped",[7942,953]],[[8071,8071],"mapped",[7943,953]],[[8072,8072],"mapped",[7936,953]],[[8073,8073],"mapped",[7937,953]],[[8074,8074],"mapped",[7938,953]],[[8075,8075],"mapped",[7939,953]],[[8076,8076],"mapped",[7940,953]],[[8077,8077],"mapped",[7941,953]],[[8078,8078],"mapped",[7942,953]],[[8079,8079],"mapped",[7943,953]],[[8080,8080],"mapped",[7968,953]],[[8081,8081],"mapped",[7969,953]],[[8082,8082],"mapped",[7970,953]],[[8083,8083],"mapped",[7971,953]],[[8084,8084],"mapped",[7972,953]],[[8085,8085],"mapped",[7973,953]],[[8086,8086],"mapped",[7974,953]],[[8087,8087],"mapped",[7975,953]],[[8088,8088],"mapped",[7968,953]],[[8089,8089],"mapped",[7969,953]],[[8090,8090],"mapped",[7970,953]],[[8091,8091],"mapped",[7971,953]],[[8092,8092],"mapped",[7972,953]],[[8093,8093],"mapped",[7973,953]],[[8094,8094],"mapped",[7974,953]],
[[8095,8095],"mapped",[7975,953]],[[8096,8096],"mapped",[8032,953]],[[8097,8097],"mapped",[8033,953]],[[8098,8098],"mapped",[8034,953]],[[8099,8099],"mapped",[8035,953]],[[8100,8100],"mapped",[8036,953]],[[8101,8101],"mapped",[8037,953]],[[8102,8102],"mapped",[8038,953]],[[8103,8103],"mapped",[8039,953]],[[8104,8104],"mapped",[8032,953]],[[8105,8105],"mapped",[8033,953]],[[8106,8106],"mapped",[8034,953]],[[8107,8107],"mapped",[8035,953]],[[8108,8108],"mapped",[8036,953]],[[8109,8109],"mapped",[8037,953]],[[8110,8110],"mapped",[8038,953]],[[8111,8111],"mapped",[8039,953]],[[8112,8113],"valid"],[[8114,8114],"mapped",[8048,953]],[[8115,8115],"mapped",[945,953]],[[8116,8116],"mapped",[940,953]],[[8117,8117],"disallowed"],[[8118,8118],"valid"],[[8119,8119],"mapped",[8118,953]],[[8120,8120],"mapped",[8112]],[[8121,8121],"mapped",[8113]],[[8122,8122],"mapped",[8048]],[[8123,8123],"mapped",[940]],[[8124,8124],"mapped",[945,953]],[[8125,8125],"disallowed_STD3_mapped",[32,787]],[[8126,8126],"mapped",[953]],[[8127,8127],"disallowed_STD3_mapped",[32,787]],[[8128,8128],"disallowed_STD3_mapped",[32,834]],[[8129,8129],"disallowed_STD3_mapped",[32,776,834]],[[8130,8130],"mapped",[8052,953]],[[8131,8131],"mapped",[951,953]],[[8132,8132],"mapped",[942,953]],[[8133,8133],"disallowed"],[[8134,8134],"valid"],[[8135,8135],"mapped",[8134,953]],[[8136,8136],"mapped",[8050]],[[8137,8137],"mapped",[941]],[[8138,8138],"mapped",[8052]],[[8139,8139],"mapped",[942]],[[8140,8140],"mapped",[951,953]],[[8141,8141],"disallowed_STD3_mapped",[32,787,768]],[[8142,8142],"disallowed_STD3_mapped",[32,787,769]],[[8143,8143],"disallowed_STD3_mapped",[32,787,834]],[[8144,8146],"valid"],[[8147,8147],"mapped",[912]],[[8148,8149],"disallowed"],[[8150,8151],"valid"],[[8152,8152],"mapped",[8144]],[[8153,8153],"mapped",[8145]],[[8154,8154],"mapped",[8054]],[[8155,8155],"mapped",[943]],[[8156,8156],"disallowed"],[[8157,8157],"disallowed_STD3_mapped",[32,788,768]],[[8158,8158],"disallowed_STD3_mapped",[32,788,769]],
[[8159,8159],"disallowed_STD3_mapped",[32,788,834]],[[8160,8162],"valid"],[[8163,8163],"mapped",[944]],[[8164,8167],"valid"],[[8168,8168],"mapped",[8160]],[[8169,8169],"mapped",[8161]],[[8170,8170],"mapped",[8058]],[[8171,8171],"mapped",[973]],[[8172,8172],"mapped",[8165]],[[8173,8173],"disallowed_STD3_mapped",[32,776,768]],[[8174,8174],"disallowed_STD3_mapped",[32,776,769]],[[8175,8175],"disallowed_STD3_mapped",[96]],[[8176,8177],"disallowed"],[[8178,8178],"mapped",[8060,953]],[[8179,8179],"mapped",[969,953]],[[8180,8180],"mapped",[974,953]],[[8181,8181],"disallowed"],[[8182,8182],"valid"],[[8183,8183],"mapped",[8182,953]],[[8184,8184],"mapped",[8056]],[[8185,8185],"mapped",[972]],[[8186,8186],"mapped",[8060]],[[8187,8187],"mapped",[974]],[[8188,8188],"mapped",[969,953]],[[8189,8189],"disallowed_STD3_mapped",[32,769]],[[8190,8190],"disallowed_STD3_mapped",[32,788]],[[8191,8191],"disallowed"],[[8192,8202],"disallowed_STD3_mapped",[32]],[[8203,8203],"ignored"],[[8204,8205],"deviation",[]],[[8206,8207],"disallowed"],[[8208,8208],"valid",[],"NV8"],[[8209,8209],"mapped",[8208]],[[8210,8214],"valid",[],"NV8"],[[8215,8215],"disallowed_STD3_mapped",[32,819]],[[8216,8227],"valid",[],"NV8"],[[8228,8230],"disallowed"],[[8231,8231],"valid",[],"NV8"],[[8232,8238],"disallowed"],[[8239,8239],"disallowed_STD3_mapped",[32]],[[8240,8242],"valid",[],"NV8"],[[8243,8243],"mapped",[8242,8242]],[[8244,8244],"mapped",[8242,8242,8242]],[[8245,8245],"valid",[],"NV8"],[[8246,8246],"mapped",[8245,8245]],[[8247,8247],"mapped",[8245,8245,8245]],[[8248,8251],"valid",[],"NV8"],[[8252,8252],"disallowed_STD3_mapped",[33,33]],[[8253,8253],"valid",[],"NV8"],[[8254,8254],"disallowed_STD3_mapped",[32,773]],[[8255,8262],"valid",[],"NV8"],[[8263,8263],"disallowed_STD3_mapped",[63,63]],[[8264,8264],"disallowed_STD3_mapped",[63,33]],[[8265,8265],"disallowed_STD3_mapped",[33,63]],[[8266,8269],"valid",[],"NV8"],[[8270,8274],"valid",[],"NV8"],[[8275,8276],"valid",[],"NV8"],[[8277,8278],"valid",[],"NV8"],[[827
9,8279],"mapped",[8242,8242,8242,8242]],[[8280,8286],"valid",[],"NV8"],[[8287,8287],"disallowed_STD3_mapped",[32]],[[8288,8288],"ignored"],[[8289,8291],"disallowed"],[[8292,8292],"ignored"],[[8293,8293],"disallowed"],[[8294,8297],"disallowed"],[[8298,8303],"disallowed"],[[8304,8304],"mapped",[48]],[[8305,8305],"mapped",[105]],[[8306,8307],"disallowed"],[[8308,8308],"mapped",[52]],[[8309,8309],"mapped",[53]],[[8310,8310],"mapped",[54]],[[8311,8311],"mapped",[55]],[[8312,8312],"mapped",[56]],[[8313,8313],"mapped",[57]],[[8314,8314],"disallowed_STD3_mapped",[43]],[[8315,8315],"mapped",[8722]],[[8316,8316],"disallowed_STD3_mapped",[61]],[[8317,8317],"disallowed_STD3_mapped",[40]],[[8318,8318],"disallowed_STD3_mapped",[41]],[[8319,8319],"mapped",[110]],[[8320,8320],"mapped",[48]],[[8321,8321],"mapped",[49]],[[8322,8322],"mapped",[50]],[[8323,8323],"mapped",[51]],[[8324,8324],"mapped",[52]],[[8325,8325],"mapped",[53]],[[8326,8326],"mapped",[54]],[[8327,8327],"mapped",[55]],[[8328,8328],"mapped",[56]],[[8329,8329],"mapped",[57]],[[8330,8330],"disallowed_STD3_mapped",[43]],[[8331,8331],"mapped",[8722]],[[8332,8332],"disallowed_STD3_mapped",[61]],[[8333,8333],"disallowed_STD3_mapped",[40]],[[8334,8334],"disallowed_STD3_mapped",[41]],[[8335,8335],"disallowed"],[[8336,8336],"mapped",[97]],[[8337,8337],"mapped",[101]],[[8338,8338],"mapped",[111]],[[8339,8339],"mapped",[120]],[[8340,8340],"mapped",[601]],[[8341,8341],"mapped",[104]],[[8342,8342],"mapped",[107]],[[8343,8343],"mapped",[108]],[[8344,8344],"mapped",[109]],[[8345,8345],"mapped",[110]],[[8346,8346],"mapped",[112]],[[8347,8347],"mapped",[115]],[[8348,8348],"mapped",[116]],[[8349,8351],"disallowed"],[[8352,8359],"valid",[],"NV8"],[[8360,8360],"mapped",[114,115]],[[8361,8362],"valid",[],"NV8"],[[8363,8363],"valid",[],"NV8"],[[8364,8364],"valid",[],"NV8"],[[8365,8367],"valid",[],"NV8"],[[8368,8369],"valid",[],"NV8"],[[8370,8373],"valid",[],"NV8"],[[8374,8376],"valid",[],"NV8"],[[8377,8377],"valid",[],"NV8"],[[8378,8378],"
valid",[],"NV8"],[[8379,8381],"valid",[],"NV8"],[[8382,8382],"valid",[],"NV8"],[[8383,8399],"disallowed"],[[8400,8417],"valid",[],"NV8"],[[8418,8419],"valid",[],"NV8"],[[8420,8426],"valid",[],"NV8"],[[8427,8427],"valid",[],"NV8"],[[8428,8431],"valid",[],"NV8"],[[8432,8432],"valid",[],"NV8"],[[8433,8447],"disallowed"],[[8448,8448],"disallowed_STD3_mapped",[97,47,99]],[[8449,8449],"disallowed_STD3_mapped",[97,47,115]],[[8450,8450],"mapped",[99]],[[8451,8451],"mapped",[176,99]],[[8452,8452],"valid",[],"NV8"],[[8453,8453],"disallowed_STD3_mapped",[99,47,111]],[[8454,8454],"disallowed_STD3_mapped",[99,47,117]],[[8455,8455],"mapped",[603]],[[8456,8456],"valid",[],"NV8"],[[8457,8457],"mapped",[176,102]],[[8458,8458],"mapped",[103]],[[8459,8462],"mapped",[104]],[[8463,8463],"mapped",[295]],[[8464,8465],"mapped",[105]],[[8466,8467],"mapped",[108]],[[8468,8468],"valid",[],"NV8"],[[8469,8469],"mapped",[110]],[[8470,8470],"mapped",[110,111]],[[8471,8472],"valid",[],"NV8"],[[8473,8473],"mapped",[112]],[[8474,8474],"mapped",[113]],[[8475,8477],"mapped",[114]],[[8478,8479],"valid",[],"NV8"],[[8480,8480],"mapped",[115,109]],[[8481,8481],"mapped",[116,101,108]],[[8482,8482],"mapped",[116,109]],[[8483,8483],"valid",[],"NV8"],[[8484,8484],"mapped",[122]],[[8485,8485],"valid",[],"NV8"],[[8486,8486],"mapped",[969]],[[8487,8487],"valid",[],"NV8"],[[8488,8488],"mapped",[122]],[[8489,8489],"valid",[],"NV8"],[[8490,8490],"mapped",[107]],[[8491,8491],"mapped",[229]],[[8492,8492],"mapped",[98]],[[8493,8493],"mapped",[99]],[[8494,8494],"valid",[],"NV8"],[[8495,8496],"mapped",[101]],[[8497,8497],"mapped",[102]],[[8498,8498],"disallowed"],[[8499,8499],"mapped",[109]],[[8500,8500],"mapped",[111]],[[8501,8501],"mapped",[1488]],[[8502,8502],"mapped",[1489]],[[8503,8503],"mapped",[1490]],[[8504,8504],"mapped",[1491]],[[8505,8505],"mapped",[105]],[[8506,8506],"valid",[],"NV8"],[[8507,8507],"mapped",[102,97,120]],[[8508,8508],"mapped",[960]],[[8509,8510],"mapped",[947]],[[8511,8511],"mapped",[960]],[[
8512,8512],"mapped",[8721]],[[8513,8516],"valid",[],"NV8"],[[8517,8518],"mapped",[100]],[[8519,8519],"mapped",[101]],[[8520,8520],"mapped",[105]],[[8521,8521],"mapped",[106]],[[8522,8523],"valid",[],"NV8"],[[8524,8524],"valid",[],"NV8"],[[8525,8525],"valid",[],"NV8"],[[8526,8526],"valid"],[[8527,8527],"valid",[],"NV8"],[[8528,8528],"mapped",[49,8260,55]],[[8529,8529],"mapped",[49,8260,57]],[[8530,8530],"mapped",[49,8260,49,48]],[[8531,8531],"mapped",[49,8260,51]],[[8532,8532],"mapped",[50,8260,51]],[[8533,8533],"mapped",[49,8260,53]],[[8534,8534],"mapped",[50,8260,53]],[[8535,8535],"mapped",[51,8260,53]],[[8536,8536],"mapped",[52,8260,53]],[[8537,8537],"mapped",[49,8260,54]],[[8538,8538],"mapped",[53,8260,54]],[[8539,8539],"mapped",[49,8260,56]],[[8540,8540],"mapped",[51,8260,56]],[[8541,8541],"mapped",[53,8260,56]],[[8542,8542],"mapped",[55,8260,56]],[[8543,8543],"mapped",[49,8260]],[[8544,8544],"mapped",[105]],[[8545,8545],"mapped",[105,105]],[[8546,8546],"mapped",[105,105,105]],[[8547,8547],"mapped",[105,118]],[[8548,8548],"mapped",[118]],[[8549,8549],"mapped",[118,105]],[[8550,8550],"mapped",[118,105,105]],[[8551,8551],"mapped",[118,105,105,105]],[[8552,8552],"mapped",[105,120]],[[8553,8553],"mapped",[120]],[[8554,8554],"mapped",[120,105]],[[8555,8555],"mapped",[120,105,105]],[[8556,8556],"mapped",[108]],[[8557,8557],"mapped",[99]],[[8558,8558],"mapped",[100]],[[8559,8559],"mapped",[109]],[[8560,8560],"mapped",[105]],[[8561,8561],"mapped",[105,105]],[[8562,8562],"mapped",[105,105,105]],[[8563,8563],"mapped",[105,118]],[[8564,8564],"mapped",[118]],[[8565,8565],"mapped",[118,105]],[[8566,8566],"mapped",[118,105,105]],[[8567,8567],"mapped",[118,105,105,105]],[[8568,8568],"mapped",[105,120]],[[8569,8569],"mapped",[120]],[[8570,8570],"mapped",[120,105]],[[8571,8571],"mapped",[120,105,105]],[[8572,8572],"mapped",[108]],[[8573,8573],"mapped",[99]],[[8574,8574],"mapped",[100]],[[8575,8575],"mapped",[109]],[[8576,8578],"valid",[],"NV8"],[[8579,8579],"disallowed"],[[8580,
8580],"valid"],[[8581,8584],"valid",[],"NV8"],[[8585,8585],"mapped",[48,8260,51]],[[8586,8587],"valid",[],"NV8"],[[8588,8591],"disallowed"],[[8592,8682],"valid",[],"NV8"],[[8683,8691],"valid",[],"NV8"],[[8692,8703],"valid",[],"NV8"],[[8704,8747],"valid",[],"NV8"],[[8748,8748],"mapped",[8747,8747]],[[8749,8749],"mapped",[8747,8747,8747]],[[8750,8750],"valid",[],"NV8"],[[8751,8751],"mapped",[8750,8750]],[[8752,8752],"mapped",[8750,8750,8750]],[[8753,8799],"valid",[],"NV8"],[[8800,8800],"disallowed_STD3_valid"],[[8801,8813],"valid",[],"NV8"],[[8814,8815],"disallowed_STD3_valid"],[[8816,8945],"valid",[],"NV8"],[[8946,8959],"valid",[],"NV8"],[[8960,8960],"valid",[],"NV8"],[[8961,8961],"valid",[],"NV8"],[[8962,9000],"valid",[],"NV8"],[[9001,9001],"mapped",[12296]],[[9002,9002],"mapped",[12297]],[[9003,9082],"valid",[],"NV8"],[[9083,9083],"valid",[],"NV8"],[[9084,9084],"valid",[],"NV8"],[[9085,9114],"valid",[],"NV8"],[[9115,9166],"valid",[],"NV8"],[[9167,9168],"valid",[],"NV8"],[[9169,9179],"valid",[],"NV8"],[[9180,9191],"valid",[],"NV8"],[[9192,9192],"valid",[],"NV8"],[[9193,9203],"valid",[],"NV8"],[[9204,9210],"valid",[],"NV8"],[[9211,9215],"disallowed"],[[9216,9252],"valid",[],"NV8"],[[9253,9254],"valid",[],"NV8"],[[9255,9279],"disallowed"],[[9280,9290],"valid",[],"NV8"],[[9291,9311],"disallowed"],[[9312,9312],"mapped",[49]],[[9313,9313],"mapped",[50]],[[9314,9314],"mapped",[51]],[[9315,9315],"mapped",[52]],[[9316,9316],"mapped",[53]],[[9317,9317],"mapped",[54]],[[9318,9318],"mapped",[55]],[[9319,9319],"mapped",[56]],[[9320,9320],"mapped",[57]],[[9321,9321],"mapped",[49,48]],[[9322,9322],"mapped",[49,49]],[[9323,9323],"mapped",[49,50]],[[9324,9324],"mapped",[49,51]],[[9325,9325],"mapped",[49,52]],[[9326,9326],"mapped",[49,53]],[[9327,9327],"mapped",[49,54]],[[9328,9328],"mapped",[49,55]],[[9329,9329],"mapped",[49,56]],[[9330,9330],"mapped",[49,57]],[[9331,9331],"mapped",[50,48]],[[9332,9332],"disallowed_STD3_mapped",[40,49,41]],[[9333,9333],"disallowed_STD3_mapped",[40,
50,41]],[[9334,9334],"disallowed_STD3_mapped",[40,51,41]],[[9335,9335],"disallowed_STD3_mapped",[40,52,41]],[[9336,9336],"disallowed_STD3_mapped",[40,53,41]],[[9337,9337],"disallowed_STD3_mapped",[40,54,41]],[[9338,9338],"disallowed_STD3_mapped",[40,55,41]],[[9339,9339],"disallowed_STD3_mapped",[40,56,41]],[[9340,9340],"disallowed_STD3_mapped",[40,57,41]],[[9341,9341],"disallowed_STD3_mapped",[40,49,48,41]],[[9342,9342],"disallowed_STD3_mapped",[40,49,49,41]],[[9343,9343],"disallowed_STD3_mapped",[40,49,50,41]],[[9344,9344],"disallowed_STD3_mapped",[40,49,51,41]],[[9345,9345],"disallowed_STD3_mapped",[40,49,52,41]],[[9346,9346],"disallowed_STD3_mapped",[40,49,53,41]],[[9347,9347],"disallowed_STD3_mapped",[40,49,54,41]],[[9348,9348],"disallowed_STD3_mapped",[40,49,55,41]],[[9349,9349],"disallowed_STD3_mapped",[40,49,56,41]],[[9350,9350],"disallowed_STD3_mapped",[40,49,57,41]],[[9351,9351],"disallowed_STD3_mapped",[40,50,48,41]],[[9352,9371],"disallowed"],[[9372,9372],"disallowed_STD3_mapped",[40,97,41]],[[9373,9373],"disallowed_STD3_mapped",[40,98,41]],[[9374,9374],"disallowed_STD3_mapped",[40,99,41]],[[9375,9375],"disallowed_STD3_mapped",[40,100,41]],[[9376,9376],"disallowed_STD3_mapped",[40,101,41]],[[9377,9377],"disallowed_STD3_mapped",[40,102,41]],[[9378,9378],"disallowed_STD3_mapped",[40,103,41]],[[9379,9379],"disallowed_STD3_mapped",[40,104,41]],[[9380,9380],"disallowed_STD3_mapped",[40,105,41]],[[9381,9381],"disallowed_STD3_mapped",[40,106,41]],[[9382,9382],"disallowed_STD3_mapped",[40,107,41]],[[9383,9383],"disallowed_STD3_mapped",[40,108,41]],[[9384,9384],"disallowed_STD3_mapped",[40,109,41]],[[9385,9385],"disallowed_STD3_mapped",[40,110,41]],[[9386,9386],"disallowed_STD3_mapped",[40,111,41]],[[9387,9387],"disallowed_STD3_mapped",[40,112,41]],[[9388,9388],"disallowed_STD3_mapped",[40,113,41]],[[9389,9389],"disallowed_STD3_mapped",[40,114,41]],[[9390,9390],"disallowed_STD3_mapped",[40,115,41]],[[9391,9391],"disallowed_STD3_mapped",[40,116,41]],[[9392,9392],"d
isallowed_STD3_mapped",[40,117,41]],[[9393,9393],"disallowed_STD3_mapped",[40,118,41]],[[9394,9394],"disallowed_STD3_mapped",[40,119,41]],[[9395,9395],"disallowed_STD3_mapped",[40,120,41]],[[9396,9396],"disallowed_STD3_mapped",[40,121,41]],[[9397,9397],"disallowed_STD3_mapped",[40,122,41]],[[9398,9398],"mapped",[97]],[[9399,9399],"mapped",[98]],[[9400,9400],"mapped",[99]],[[9401,9401],"mapped",[100]],[[9402,9402],"mapped",[101]],[[9403,9403],"mapped",[102]],[[9404,9404],"mapped",[103]],[[9405,9405],"mapped",[104]],[[9406,9406],"mapped",[105]],[[9407,9407],"mapped",[106]],[[9408,9408],"mapped",[107]],[[9409,9409],"mapped",[108]],[[9410,9410],"mapped",[109]],[[9411,9411],"mapped",[110]],[[9412,9412],"mapped",[111]],[[9413,9413],"mapped",[112]],[[9414,9414],"mapped",[113]],[[9415,9415],"mapped",[114]],[[9416,9416],"mapped",[115]],[[9417,9417],"mapped",[116]],[[9418,9418],"mapped",[117]],[[9419,9419],"mapped",[118]],[[9420,9420],"mapped",[119]],[[9421,9421],"mapped",[120]],[[9422,9422],"mapped",[121]],[[9423,9423],"mapped",[122]],[[9424,9424],"mapped",[97]],[[9425,9425],"mapped",[98]],[[9426,9426],"mapped",[99]],[[9427,9427],"mapped",[100]],[[9428,9428],"mapped",[101]],[[9429,9429],"mapped",[102]],[[9430,9430],"mapped",[103]],[[9431,9431],"mapped",[104]],[[9432,9432],"mapped",[105]],[[9433,9433],"mapped",[106]],[[9434,9434],"mapped",[107]],[[9435,9435],"mapped",[108]],[[9436,9436],"mapped",[109]],[[9437,9437],"mapped",[110]],[[9438,9438],"mapped",[111]],[[9439,9439],"mapped",[112]],[[9440,9440],"mapped",[113]],[[9441,9441],"mapped",[114]],[[9442,9442],"mapped",[115]],[[9443,9443],"mapped",[116]],[[9444,9444],"mapped",[117]],[[9445,9445],"mapped",[118]],[[9446,9446],"mapped",[119]],[[9447,9447],"mapped",[120]],[[9448,9448],"mapped",[121]],[[9449,9449],"mapped",[122]],[[9450,9450],"mapped",[48]],[[9451,9470],"valid",[],"NV8"],[[9471,9471],"valid",[],"NV8"],[[9472,9621],"valid",[],"NV8"],[[9622,9631],"valid",[],"NV8"],[[9632,9711],"valid",[],"NV8"],[[9712,9719],"valid",[],
"NV8"],[[9720,9727],"valid",[],"NV8"],[[9728,9747],"valid",[],"NV8"],[[9748,9749],"valid",[],"NV8"],[[9750,9751],"valid",[],"NV8"],[[9752,9752],"valid",[],"NV8"],[[9753,9753],"valid",[],"NV8"],[[9754,9839],"valid",[],"NV8"],[[9840,9841],"valid",[],"NV8"],[[9842,9853],"valid",[],"NV8"],[[9854,9855],"valid",[],"NV8"],[[9856,9865],"valid",[],"NV8"],[[9866,9873],"valid",[],"NV8"],[[9874,9884],"valid",[],"NV8"],[[9885,9885],"valid",[],"NV8"],[[9886,9887],"valid",[],"NV8"],[[9888,9889],"valid",[],"NV8"],[[9890,9905],"valid",[],"NV8"],[[9906,9906],"valid",[],"NV8"],[[9907,9916],"valid",[],"NV8"],[[9917,9919],"valid",[],"NV8"],[[9920,9923],"valid",[],"NV8"],[[9924,9933],"valid",[],"NV8"],[[9934,9934],"valid",[],"NV8"],[[9935,9953],"valid",[],"NV8"],[[9954,9954],"valid",[],"NV8"],[[9955,9955],"valid",[],"NV8"],[[9956,9959],"valid",[],"NV8"],[[9960,9983],"valid",[],"NV8"],[[9984,9984],"valid",[],"NV8"],[[9985,9988],"valid",[],"NV8"],[[9989,9989],"valid",[],"NV8"],[[9990,9993],"valid",[],"NV8"],[[9994,9995],"valid",[],"NV8"],[[9996,10023],"valid",[],"NV8"],[[10024,10024],"valid",[],"NV8"],[[10025,10059],"valid",[],"NV8"],[[10060,10060],"valid",[],"NV8"],[[10061,10061],"valid",[],"NV8"],[[10062,10062],"valid",[],"NV8"],[[10063,10066],"valid",[],"NV8"],[[10067,10069],"valid",[],"NV8"],[[10070,10070],"valid",[],"NV8"],[[10071,10071],"valid",[],"NV8"],[[10072,10078],"valid",[],"NV8"],[[10079,10080],"valid",[],"NV8"],[[10081,10087],"valid",[],"NV8"],[[10088,10101],"valid",[],"NV8"],[[10102,10132],"valid",[],"NV8"],[[10133,10135],"valid",[],"NV8"],[[10136,10159],"valid",[],"NV8"],[[10160,10160],"valid",[],"NV8"],[[10161,10174],"valid",[],"NV8"],[[10175,10175],"valid",[],"NV8"],[[10176,10182],"valid",[],"NV8"],[[10183,10186],"valid",[],"NV8"],[[10187,10187],"valid",[],"NV8"],[[10188,10188],"valid",[],"NV8"],[[10189,10189],"valid",[],"NV8"],[[10190,10191],"valid",[],"NV8"],[[10192,10219],"valid",[],"NV8"],[[10220,10223],"valid",[],"NV8"],[[10224,10239],"valid",[],"NV8"],[[10240,10495]
,"valid",[],"NV8"],[[10496,10763],"valid",[],"NV8"],[[10764,10764],"mapped",[8747,8747,8747,8747]],[[10765,10867],"valid",[],"NV8"],[[10868,10868],"disallowed_STD3_mapped",[58,58,61]],[[10869,10869],"disallowed_STD3_mapped",[61,61]],[[10870,10870],"disallowed_STD3_mapped",[61,61,61]],[[10871,10971],"valid",[],"NV8"],[[10972,10972],"mapped",[10973,824]],[[10973,11007],"valid",[],"NV8"],[[11008,11021],"valid",[],"NV8"],[[11022,11027],"valid",[],"NV8"],[[11028,11034],"valid",[],"NV8"],[[11035,11039],"valid",[],"NV8"],[[11040,11043],"valid",[],"NV8"],[[11044,11084],"valid",[],"NV8"],[[11085,11087],"valid",[],"NV8"],[[11088,11092],"valid",[],"NV8"],[[11093,11097],"valid",[],"NV8"],[[11098,11123],"valid",[],"NV8"],[[11124,11125],"disallowed"],[[11126,11157],"valid",[],"NV8"],[[11158,11159],"disallowed"],[[11160,11193],"valid",[],"NV8"],[[11194,11196],"disallowed"],[[11197,11208],"valid",[],"NV8"],[[11209,11209],"disallowed"],[[11210,11217],"valid",[],"NV8"],[[11218,11243],"disallowed"],[[11244,11247],"valid",[],"NV8"],[[11248,11263],"disallowed"],[[11264,11264],"mapped",[11312]],[[11265,11265],"mapped",[11313]],[[11266,11266],"mapped",[11314]],[[11267,11267],"mapped",[11315]],[[11268,11268],"mapped",[11316]],[[11269,11269],"mapped",[11317]],[[11270,11270],"mapped",[11318]],[[11271,11271],"mapped",[11319]],[[11272,11272],"mapped",[11320]],[[11273,11273],"mapped",[11321]],[[11274,11274],"mapped",[11322]],[[11275,11275],"mapped",[11323]],[[11276,11276],"mapped",[11324]],[[11277,11277],"mapped",[11325]],[[11278,11278],"mapped",[11326]],[[11279,11279],"mapped",[11327]],[[11280,11280],"mapped",[11328]],[[11281,11281],"mapped",[11329]],[[11282,11282],"mapped",[11330]],[[11283,11283],"mapped",[11331]],[[11284,11284],"mapped",[11332]],[[11285,11285],"mapped",[11333]],[[11286,11286],"mapped",[11334]],[[11287,11287],"mapped",[11335]],[[11288,11288],"mapped",[11336]],[[11289,11289],"mapped",[11337]],[[11290,11290],"mapped",[11338]],[[11291,11291],"mapped",[11339]],[[11292,11292],"map
ped",[11340]],[[11293,11293],"mapped",[11341]],[[11294,11294],"mapped",[11342]],[[11295,11295],"mapped",[11343]],[[11296,11296],"mapped",[11344]],[[11297,11297],"mapped",[11345]],[[11298,11298],"mapped",[11346]],[[11299,11299],"mapped",[11347]],[[11300,11300],"mapped",[11348]],[[11301,11301],"mapped",[11349]],[[11302,11302],"mapped",[11350]],[[11303,11303],"mapped",[11351]],[[11304,11304],"mapped",[11352]],[[11305,11305],"mapped",[11353]],[[11306,11306],"mapped",[11354]],[[11307,11307],"mapped",[11355]],[[11308,11308],"mapped",[11356]],[[11309,11309],"mapped",[11357]],[[11310,11310],"mapped",[11358]],[[11311,11311],"disallowed"],[[11312,11358],"valid"],[[11359,11359],"disallowed"],[[11360,11360],"mapped",[11361]],[[11361,11361],"valid"],[[11362,11362],"mapped",[619]],[[11363,11363],"mapped",[7549]],[[11364,11364],"mapped",[637]],[[11365,11366],"valid"],[[11367,11367],"mapped",[11368]],[[11368,11368],"valid"],[[11369,11369],"mapped",[11370]],[[11370,11370],"valid"],[[11371,11371],"mapped",[11372]],[[11372,11372],"valid"],[[11373,11373],"mapped",[593]],[[11374,11374],"mapped",[625]],[[11375,11375],"mapped",[592]],[[11376,11376],"mapped",[594]],[[11377,11377],"valid"],[[11378,11378],"mapped",[11379]],[[11379,11379],"valid"],[[11380,11380],"valid"],[[11381,11381],"mapped",[11382]],[[11382,11383],"valid"],[[11384,11387],"valid"],[[11388,11388],"mapped",[106]],[[11389,11389],"mapped",[118]],[[11390,11390],"mapped",[575]],[[11391,11391],"mapped",[576]],[[11392,11392],"mapped",[11393]],[[11393,11393],"valid"],[[11394,11394],"mapped",[11395]],[[11395,11395],"valid"],[[11396,11396],"mapped",[11397]],[[11397,11397],"valid"],[[11398,11398],"mapped",[11399]],[[11399,11399],"valid"],[[11400,11400],"mapped",[11401]],[[11401,11401],"valid"],[[11402,11402],"mapped",[11403]],[[11403,11403],"valid"],[[11404,11404],"mapped",[11405]],[[11405,11405],"valid"],[[11406,11406],"mapped",[11407]],[[11407,11407],"valid"],[[11408,11408],"mapped",[11409]],[[11409,11409],"valid"],[[11410,11410],"m
apped",[11411]],[[11411,11411],"valid"],[[11412,11412],"mapped",[11413]],[[11413,11413],"valid"],[[11414,11414],"mapped",[11415]],[[11415,11415],"valid"],[[11416,11416],"mapped",[11417]],[[11417,11417],"valid"],[[11418,11418],"mapped",[11419]],[[11419,11419],"valid"],[[11420,11420],"mapped",[11421]],[[11421,11421],"valid"],[[11422,11422],"mapped",[11423]],[[11423,11423],"valid"],[[11424,11424],"mapped",[11425]],[[11425,11425],"valid"],[[11426,11426],"mapped",[11427]],[[11427,11427],"valid"],[[11428,11428],"mapped",[11429]],[[11429,11429],"valid"],[[11430,11430],"mapped",[11431]],[[11431,11431],"valid"],[[11432,11432],"mapped",[11433]],[[11433,11433],"valid"],[[11434,11434],"mapped",[11435]],[[11435,11435],"valid"],[[11436,11436],"mapped",[11437]],[[11437,11437],"valid"],[[11438,11438],"mapped",[11439]],[[11439,11439],"valid"],[[11440,11440],"mapped",[11441]],[[11441,11441],"valid"],[[11442,11442],"mapped",[11443]],[[11443,11443],"valid"],[[11444,11444],"mapped",[11445]],[[11445,11445],"valid"],[[11446,11446],"mapped",[11447]],[[11447,11447],"valid"],[[11448,11448],"mapped",[11449]],[[11449,11449],"valid"],[[11450,11450],"mapped",[11451]],[[11451,11451],"valid"],[[11452,11452],"mapped",[11453]],[[11453,11453],"valid"],[[11454,11454],"mapped",[11455]],[[11455,11455],"valid"],[[11456,11456],"mapped",[11457]],[[11457,11457],"valid"],[[11458,11458],"mapped",[11459]],[[11459,11459],"valid"],[[11460,11460],"mapped",[11461]],[[11461,11461],"valid"],[[11462,11462],"mapped",[11463]],[[11463,11463],"valid"],[[11464,11464],"mapped",[11465]],[[11465,11465],"valid"],[[11466,11466],"mapped",[11467]],[[11467,11467],"valid"],[[11468,11468],"mapped",[11469]],[[11469,11469],"valid"],[[11470,11470],"mapped",[11471]],[[11471,11471],"valid"],[[11472,11472],"mapped",[11473]],[[11473,11473],"valid"],[[11474,11474],"mapped",[11475]],[[11475,11475],"valid"],[[11476,11476],"mapped",[11477]],[[11477,11477],"valid"],[[11478,11478],"mapped",[11479]],[[11479,11479],"valid"],[[11480,11480],"mapped
",[11481]],[[11481,11481],"valid"],[[11482,11482],"mapped",[11483]],[[11483,11483],"valid"],[[11484,11484],"mapped",[11485]],[[11485,11485],"valid"],[[11486,11486],"mapped",[11487]],[[11487,11487],"valid"],[[11488,11488],"mapped",[11489]],[[11489,11489],"valid"],[[11490,11490],"mapped",[11491]],[[11491,11492],"valid"],[[11493,11498],"valid",[],"NV8"],[[11499,11499],"mapped",[11500]],[[11500,11500],"valid"],[[11501,11501],"mapped",[11502]],[[11502,11505],"valid"],[[11506,11506],"mapped",[11507]],[[11507,11507],"valid"],[[11508,11512],"disallowed"],[[11513,11519],"valid",[],"NV8"],[[11520,11557],"valid"],[[11558,11558],"disallowed"],[[11559,11559],"valid"],[[11560,11564],"disallowed"],[[11565,11565],"valid"],[[11566,11567],"disallowed"],[[11568,11621],"valid"],[[11622,11623],"valid"],[[11624,11630],"disallowed"],[[11631,11631],"mapped",[11617]],[[11632,11632],"valid",[],"NV8"],[[11633,11646],"disallowed"],[[11647,11647],"valid"],[[11648,11670],"valid"],[[11671,11679],"disallowed"],[[11680,11686],"valid"],[[11687,11687],"disallowed"],[[11688,11694],"valid"],[[11695,11695],"disallowed"],[[11696,11702],"valid"],[[11703,11703],"disallowed"],[[11704,11710],"valid"],[[11711,11711],"disallowed"],[[11712,11718],"valid"],[[11719,11719],"disallowed"],[[11720,11726],"valid"],[[11727,11727],"disallowed"],[[11728,11734],"valid"],[[11735,11735],"disallowed"],[[11736,11742],"valid"],[[11743,11743],"disallowed"],[[11744,11775],"valid"],[[11776,11799],"valid",[],"NV8"],[[11800,11803],"valid",[],"NV8"],[[11804,11805],"valid",[],"NV8"],[[11806,11822],"valid",[],"NV8"],[[11823,11823],"valid"],[[11824,11824],"valid",[],"NV8"],[[11825,11825],"valid",[],"NV8"],[[11826,11835],"valid",[],"NV8"],[[11836,11842],"valid",[],"NV8"],[[11843,11903],"disallowed"],[[11904,11929],"valid",[],"NV8"],[[11930,11930],"disallowed"],[[11931,11934],"valid",[],"NV8"],[[11935,11935],"mapped",[27597]],[[11936,12018],"valid",[],"NV8"],[[12019,12019],"mapped",[40863]],[[12020,12031],"disallowed"],[[12032,12032],"ma
pped",[19968]],[[12033,12033],"mapped",[20008]],[[12034,12034],"mapped",[20022]],[[12035,12035],"mapped",[20031]],[[12036,12036],"mapped",[20057]],[[12037,12037],"mapped",[20101]],[[12038,12038],"mapped",[20108]],[[12039,12039],"mapped",[20128]],[[12040,12040],"mapped",[20154]],[[12041,12041],"mapped",[20799]],[[12042,12042],"mapped",[20837]],[[12043,12043],"mapped",[20843]],[[12044,12044],"mapped",[20866]],[[12045,12045],"mapped",[20886]],[[12046,12046],"mapped",[20907]],[[12047,12047],"mapped",[20960]],[[12048,12048],"mapped",[20981]],[[12049,12049],"mapped",[20992]],[[12050,12050],"mapped",[21147]],[[12051,12051],"mapped",[21241]],[[12052,12052],"mapped",[21269]],[[12053,12053],"mapped",[21274]],[[12054,12054],"mapped",[21304]],[[12055,12055],"mapped",[21313]],[[12056,12056],"mapped",[21340]],[[12057,12057],"mapped",[21353]],[[12058,12058],"mapped",[21378]],[[12059,12059],"mapped",[21430]],[[12060,12060],"mapped",[21448]],[[12061,12061],"mapped",[21475]],[[12062,12062],"mapped",[22231]],[[12063,12063],"mapped",[22303]],[[12064,12064],"mapped",[22763]],[[12065,12065],"mapped",[22786]],[[12066,12066],"mapped",[22794]],[[12067,12067],"mapped",[22805]],[[12068,12068],"mapped",[22823]],[[12069,12069],"mapped",[22899]],[[12070,12070],"mapped",[23376]],[[12071,12071],"mapped",[23424]],[[12072,12072],"mapped",[23544]],[[12073,12073],"mapped",[23567]],[[12074,12074],"mapped",[23586]],[[12075,12075],"mapped",[23608]],[[12076,12076],"mapped",[23662]],[[12077,12077],"mapped",[23665]],[[12078,12078],"mapped",[24027]],[[12079,12079],"mapped",[24037]],[[12080,12080],"mapped",[24049]],[[12081,12081],"mapped",[24062]],[[12082,12082],"mapped",[24178]],[[12083,12083],"mapped",[24186]],[[12084,12084],"mapped",[24191]],[[12085,12085],"mapped",[24308]],[[12086,12086],"mapped",[24318]],[[12087,12087],"mapped",[24331]],[[12088,12088],"mapped",[24339]],[[12089,12089],"mapped",[24400]],[[12090,12090],"mapped",[24417]],[[12091,12091],"mapped",[24435]],[[12092,12092],"mapped",[24515]],[[120
93,12093],"mapped",[25096]],[[12094,12094],"mapped",[25142]],[[12095,12095],"mapped",[25163]],[[12096,12096],"mapped",[25903]],[[12097,12097],"mapped",[25908]],[[12098,12098],"mapped",[25991]],[[12099,12099],"mapped",[26007]],[[12100,12100],"mapped",[26020]],[[12101,12101],"mapped",[26041]],[[12102,12102],"mapped",[26080]],[[12103,12103],"mapped",[26085]],[[12104,12104],"mapped",[26352]],[[12105,12105],"mapped",[26376]],[[12106,12106],"mapped",[26408]],[[12107,12107],"mapped",[27424]],[[12108,12108],"mapped",[27490]],[[12109,12109],"mapped",[27513]],[[12110,12110],"mapped",[27571]],[[12111,12111],"mapped",[27595]],[[12112,12112],"mapped",[27604]],[[12113,12113],"mapped",[27611]],[[12114,12114],"mapped",[27663]],[[12115,12115],"mapped",[27668]],[[12116,12116],"mapped",[27700]],[[12117,12117],"mapped",[28779]],[[12118,12118],"mapped",[29226]],[[12119,12119],"mapped",[29238]],[[12120,12120],"mapped",[29243]],[[12121,12121],"mapped",[29247]],[[12122,12122],"mapped",[29255]],[[12123,12123],"mapped",[29273]],[[12124,12124],"mapped",[29275]],[[12125,12125],"mapped",[29356]],[[12126,12126],"mapped",[29572]],[[12127,12127],"mapped",[29577]],[[12128,12128],"mapped",[29916]],[[12129,12129],"mapped",[29926]],[[12130,12130],"mapped",[29976]],[[12131,12131],"mapped",[29983]],[[12132,12132],"mapped",[29992]],[[12133,12133],"mapped",[30000]],[[12134,12134],"mapped",[30091]],[[12135,12135],"mapped",[30098]],[[12136,12136],"mapped",[30326]],[[12137,12137],"mapped",[30333]],[[12138,12138],"mapped",[30382]],[[12139,12139],"mapped",[30399]],[[12140,12140],"mapped",[30446]],[[12141,12141],"mapped",[30683]],[[12142,12142],"mapped",[30690]],[[12143,12143],"mapped",[30707]],[[12144,12144],"mapped",[31034]],[[12145,12145],"mapped",[31160]],[[12146,12146],"mapped",[31166]],[[12147,12147],"mapped",[31348]],[[12148,12148],"mapped",[31435]],[[12149,12149],"mapped",[31481]],[[12150,12150],"mapped",[31859]],[[12151,12151],"mapped",[31992]],[[12152,12152],"mapped",[32566]],[[12153,12153],"mapped",[
32593]],[[12154,12154],"mapped",[32650]],[[12155,12155],"mapped",[32701]],[[12156,12156],"mapped",[32769]],[[12157,12157],"mapped",[32780]],[[12158,12158],"mapped",[32786]],[[12159,12159],"mapped",[32819]],[[12160,12160],"mapped",[32895]],[[12161,12161],"mapped",[32905]],[[12162,12162],"mapped",[33251]],[[12163,12163],"mapped",[33258]],[[12164,12164],"mapped",[33267]],[[12165,12165],"mapped",[33276]],[[12166,12166],"mapped",[33292]],[[12167,12167],"mapped",[33307]],[[12168,12168],"mapped",[33311]],[[12169,12169],"mapped",[33390]],[[12170,12170],"mapped",[33394]],[[12171,12171],"mapped",[33400]],[[12172,12172],"mapped",[34381]],[[12173,12173],"mapped",[34411]],[[12174,12174],"mapped",[34880]],[[12175,12175],"mapped",[34892]],[[12176,12176],"mapped",[34915]],[[12177,12177],"mapped",[35198]],[[12178,12178],"mapped",[35211]],[[12179,12179],"mapped",[35282]],[[12180,12180],"mapped",[35328]],[[12181,12181],"mapped",[35895]],[[12182,12182],"mapped",[35910]],[[12183,12183],"mapped",[35925]],[[12184,12184],"mapped",[35960]],[[12185,12185],"mapped",[35997]],[[12186,12186],"mapped",[36196]],[[12187,12187],"mapped",[36208]],[[12188,12188],"mapped",[36275]],[[12189,12189],"mapped",[36523]],[[12190,12190],"mapped",[36554]],[[12191,12191],"mapped",[36763]],[[12192,12192],"mapped",[36784]],[[12193,12193],"mapped",[36789]],[[12194,12194],"mapped",[37009]],[[12195,12195],"mapped",[37193]],[[12196,12196],"mapped",[37318]],[[12197,12197],"mapped",[37324]],[[12198,12198],"mapped",[37329]],[[12199,12199],"mapped",[38263]],[[12200,12200],"mapped",[38272]],[[12201,12201],"mapped",[38428]],[[12202,12202],"mapped",[38582]],[[12203,12203],"mapped",[38585]],[[12204,12204],"mapped",[38632]],[[12205,12205],"mapped",[38737]],[[12206,12206],"mapped",[38750]],[[12207,12207],"mapped",[38754]],[[12208,12208],"mapped",[38761]],[[12209,12209],"mapped",[38859]],[[12210,12210],"mapped",[38893]],[[12211,12211],"mapped",[38899]],[[12212,12212],"mapped",[38913]],[[12213,12213],"mapped",[39080]],[[12214,1221
4],"mapped",[39131]],[[12215,12215],"mapped",[39135]],[[12216,12216],"mapped",[39318]],[[12217,12217],"mapped",[39321]],[[12218,12218],"mapped",[39340]],[[12219,12219],"mapped",[39592]],[[12220,12220],"mapped",[39640]],[[12221,12221],"mapped",[39647]],[[12222,12222],"mapped",[39717]],[[12223,12223],"mapped",[39727]],[[12224,12224],"mapped",[39730]],[[12225,12225],"mapped",[39740]],[[12226,12226],"mapped",[39770]],[[12227,12227],"mapped",[40165]],[[12228,12228],"mapped",[40565]],[[12229,12229],"mapped",[40575]],[[12230,12230],"mapped",[40613]],[[12231,12231],"mapped",[40635]],[[12232,12232],"mapped",[40643]],[[12233,12233],"mapped",[40653]],[[12234,12234],"mapped",[40657]],[[12235,12235],"mapped",[40697]],[[12236,12236],"mapped",[40701]],[[12237,12237],"mapped",[40718]],[[12238,12238],"mapped",[40723]],[[12239,12239],"mapped",[40736]],[[12240,12240],"mapped",[40763]],[[12241,12241],"mapped",[40778]],[[12242,12242],"mapped",[40786]],[[12243,12243],"mapped",[40845]],[[12244,12244],"mapped",[40860]],[[12245,12245],"mapped",[40864]],[[12246,12271],"disallowed"],[[12272,12283],"disallowed"],[[12284,12287],"disallowed"],[[12288,12288],"disallowed_STD3_mapped",[32]],[[12289,12289],"valid",[],"NV8"],[[12290,12290],"mapped",[46]],[[12291,12292],"valid",[],"NV8"],[[12293,12295],"valid"],[[12296,12329],"valid",[],"NV8"],[[12330,12333],"valid"],[[12334,12341],"valid",[],"NV8"],[[12342,12342],"mapped",[12306]],[[12343,12343],"valid",[],"NV8"],[[12344,12344],"mapped",[21313]],[[12345,12345],"mapped",[21316]],[[12346,12346],"mapped",[21317]],[[12347,12347],"valid",[],"NV8"],[[12348,12348],"valid"],[[12349,12349],"valid",[],"NV8"],[[12350,12350],"valid",[],"NV8"],[[12351,12351],"valid",[],"NV8"],[[12352,12352],"disallowed"],[[12353,12436],"valid"],[[12437,12438],"valid"],[[12439,12440],"disallowed"],[[12441,12442],"valid"],[[12443,12443],"disallowed_STD3_mapped",[32,12441]],[[12444,12444],"disallowed_STD3_mapped",[32,12442]],[[12445,12446],"valid"],[[12447,12447],"mapped",[12424,124
26]],[[12448,12448],"valid",[],"NV8"],[[12449,12542],"valid"],[[12543,12543],"mapped",[12467,12488]],[[12544,12548],"disallowed"],[[12549,12588],"valid"],[[12589,12589],"valid"],[[12590,12592],"disallowed"],[[12593,12593],"mapped",[4352]],[[12594,12594],"mapped",[4353]],[[12595,12595],"mapped",[4522]],[[12596,12596],"mapped",[4354]],[[12597,12597],"mapped",[4524]],[[12598,12598],"mapped",[4525]],[[12599,12599],"mapped",[4355]],[[12600,12600],"mapped",[4356]],[[12601,12601],"mapped",[4357]],[[12602,12602],"mapped",[4528]],[[12603,12603],"mapped",[4529]],[[12604,12604],"mapped",[4530]],[[12605,12605],"mapped",[4531]],[[12606,12606],"mapped",[4532]],[[12607,12607],"mapped",[4533]],[[12608,12608],"mapped",[4378]],[[12609,12609],"mapped",[4358]],[[12610,12610],"mapped",[4359]],[[12611,12611],"mapped",[4360]],[[12612,12612],"mapped",[4385]],[[12613,12613],"mapped",[4361]],[[12614,12614],"mapped",[4362]],[[12615,12615],"mapped",[4363]],[[12616,12616],"mapped",[4364]],[[12617,12617],"mapped",[4365]],[[12618,12618],"mapped",[4366]],[[12619,12619],"mapped",[4367]],[[12620,12620],"mapped",[4368]],[[12621,12621],"mapped",[4369]],[[12622,12622],"mapped",[4370]],[[12623,12623],"mapped",[4449]],[[12624,12624],"mapped",[4450]],[[12625,12625],"mapped",[4451]],[[12626,12626],"mapped",[4452]],[[12627,12627],"mapped",[4453]],[[12628,12628],"mapped",[4454]],[[12629,12629],"mapped",[4455]],[[12630,12630],"mapped",[4456]],[[12631,12631],"mapped",[4457]],[[12632,12632],"mapped",[4458]],[[12633,12633],"mapped",[4459]],[[12634,12634],"mapped",[4460]],[[12635,12635],"mapped",[4461]],[[12636,12636],"mapped",[4462]],[[12637,12637],"mapped",[4463]],[[12638,12638],"mapped",[4464]],[[12639,12639],"mapped",[4465]],[[12640,12640],"mapped",[4466]],[[12641,12641],"mapped",[4467]],[[12642,12642],"mapped",[4468]],[[12643,12643],"mapped",[4469]],[[12644,12644],"disallowed"],[[12645,12645],"mapped",[4372]],[[12646,12646],"mapped",[4373]],[[12647,12647],"mapped",[4551]],[[12648,12648],"mapped",[4552]],[[12
649,12649],"mapped",[4556]],[[12650,12650],"mapped",[4558]],[[12651,12651],"mapped",[4563]],[[12652,12652],"mapped",[4567]],[[12653,12653],"mapped",[4569]],[[12654,12654],"mapped",[4380]],[[12655,12655],"mapped",[4573]],[[12656,12656],"mapped",[4575]],[[12657,12657],"mapped",[4381]],[[12658,12658],"mapped",[4382]],[[12659,12659],"mapped",[4384]],[[12660,12660],"mapped",[4386]],[[12661,12661],"mapped",[4387]],[[12662,12662],"mapped",[4391]],[[12663,12663],"mapped",[4393]],[[12664,12664],"mapped",[4395]],[[12665,12665],"mapped",[4396]],[[12666,12666],"mapped",[4397]],[[12667,12667],"mapped",[4398]],[[12668,12668],"mapped",[4399]],[[12669,12669],"mapped",[4402]],[[12670,12670],"mapped",[4406]],[[12671,12671],"mapped",[4416]],[[12672,12672],"mapped",[4423]],[[12673,12673],"mapped",[4428]],[[12674,12674],"mapped",[4593]],[[12675,12675],"mapped",[4594]],[[12676,12676],"mapped",[4439]],[[12677,12677],"mapped",[4440]],[[12678,12678],"mapped",[4441]],[[12679,12679],"mapped",[4484]],[[12680,12680],"mapped",[4485]],[[12681,12681],"mapped",[4488]],[[12682,12682],"mapped",[4497]],[[12683,12683],"mapped",[4498]],[[12684,12684],"mapped",[4500]],[[12685,12685],"mapped",[4510]],[[12686,12686],"mapped",[4513]],[[12687,12687],"disallowed"],[[12688,12689],"valid",[],"NV8"],[[12690,12690],"mapped",[19968]],[[12691,12691],"mapped",[20108]],[[12692,12692],"mapped",[19977]],[[12693,12693],"mapped",[22235]],[[12694,12694],"mapped",[19978]],[[12695,12695],"mapped",[20013]],[[12696,12696],"mapped",[19979]],[[12697,12697],"mapped",[30002]],[[12698,12698],"mapped",[20057]],[[12699,12699],"mapped",[19993]],[[12700,12700],"mapped",[19969]],[[12701,12701],"mapped",[22825]],[[12702,12702],"mapped",[22320]],[[12703,12703],"mapped",[20154]],[[12704,12727],"valid"],[[12728,12730],"valid"],[[12731,12735],"disallowed"],[[12736,12751],"valid",[],"NV8"],[[12752,12771],"valid",[],"NV8"],[[12772,12783],"disallowed"],[[12784,12799],"valid"],[[12800,12800],"disallowed_STD3_mapped",[40,4352,41]],[[12801,12801]
,"disallowed_STD3_mapped",[40,4354,41]],[[12802,12802],"disallowed_STD3_mapped",[40,4355,41]],[[12803,12803],"disallowed_STD3_mapped",[40,4357,41]],[[12804,12804],"disallowed_STD3_mapped",[40,4358,41]],[[12805,12805],"disallowed_STD3_mapped",[40,4359,41]],[[12806,12806],"disallowed_STD3_mapped",[40,4361,41]],[[12807,12807],"disallowed_STD3_mapped",[40,4363,41]],[[12808,12808],"disallowed_STD3_mapped",[40,4364,41]],[[12809,12809],"disallowed_STD3_mapped",[40,4366,41]],[[12810,12810],"disallowed_STD3_mapped",[40,4367,41]],[[12811,12811],"disallowed_STD3_mapped",[40,4368,41]],[[12812,12812],"disallowed_STD3_mapped",[40,4369,41]],[[12813,12813],"disallowed_STD3_mapped",[40,4370,41]],[[12814,12814],"disallowed_STD3_mapped",[40,44032,41]],[[12815,12815],"disallowed_STD3_mapped",[40,45208,41]],[[12816,12816],"disallowed_STD3_mapped",[40,45796,41]],[[12817,12817],"disallowed_STD3_mapped",[40,46972,41]],[[12818,12818],"disallowed_STD3_mapped",[40,47560,41]],[[12819,12819],"disallowed_STD3_mapped",[40,48148,41]],[[12820,12820],"disallowed_STD3_mapped",[40,49324,41]],[[12821,12821],"disallowed_STD3_mapped",[40,50500,41]],[[12822,12822],"disallowed_STD3_mapped",[40,51088,41]],[[12823,12823],"disallowed_STD3_mapped",[40,52264,41]],[[12824,12824],"disallowed_STD3_mapped",[40,52852,41]],[[12825,12825],"disallowed_STD3_mapped",[40,53440,41]],[[12826,12826],"disallowed_STD3_mapped",[40,54028,41]],[[12827,12827],"disallowed_STD3_mapped",[40,54616,41]],[[12828,12828],"disallowed_STD3_mapped",[40,51452,41]],[[12829,12829],"disallowed_STD3_mapped",[40,50724,51204,41]],[[12830,12830],"disallowed_STD3_mapped",[40,50724,54980,41]],[[12831,12831],"disallowed"],[[12832,12832],"disallowed_STD3_mapped",[40,19968,41]],[[12833,12833],"disallowed_STD3_mapped",[40,20108,41]],[[12834,12834],"disallowed_STD3_mapped",[40,19977,41]],[[12835,12835],"disallowed_STD3_mapped",[40,22235,41]],[[12836,12836],"disallowed_STD3_mapped",[40,20116,41]],[[12837,12837],"disallowed_STD3_mapped",[40,20845,41]],[[1283
8,12838],"disallowed_STD3_mapped",[40,19971,41]],[[12839,12839],"disallowed_STD3_mapped",[40,20843,41]],[[12840,12840],"disallowed_STD3_mapped",[40,20061,41]],[[12841,12841],"disallowed_STD3_mapped",[40,21313,41]],[[12842,12842],"disallowed_STD3_mapped",[40,26376,41]],[[12843,12843],"disallowed_STD3_mapped",[40,28779,41]],[[12844,12844],"disallowed_STD3_mapped",[40,27700,41]],[[12845,12845],"disallowed_STD3_mapped",[40,26408,41]],[[12846,12846],"disallowed_STD3_mapped",[40,37329,41]],[[12847,12847],"disallowed_STD3_mapped",[40,22303,41]],[[12848,12848],"disallowed_STD3_mapped",[40,26085,41]],[[12849,12849],"disallowed_STD3_mapped",[40,26666,41]],[[12850,12850],"disallowed_STD3_mapped",[40,26377,41]],[[12851,12851],"disallowed_STD3_mapped",[40,31038,41]],[[12852,12852],"disallowed_STD3_mapped",[40,21517,41]],[[12853,12853],"disallowed_STD3_mapped",[40,29305,41]],[[12854,12854],"disallowed_STD3_mapped",[40,36001,41]],[[12855,12855],"disallowed_STD3_mapped",[40,31069,41]],[[12856,12856],"disallowed_STD3_mapped",[40,21172,41]],[[12857,12857],"disallowed_STD3_mapped",[40,20195,41]],[[12858,12858],"disallowed_STD3_mapped",[40,21628,41]],[[12859,12859],"disallowed_STD3_mapped",[40,23398,41]],[[12860,12860],"disallowed_STD3_mapped",[40,30435,41]],[[12861,12861],"disallowed_STD3_mapped",[40,20225,41]],[[12862,12862],"disallowed_STD3_mapped",[40,36039,41]],[[12863,12863],"disallowed_STD3_mapped",[40,21332,41]],[[12864,12864],"disallowed_STD3_mapped",[40,31085,41]],[[12865,12865],"disallowed_STD3_mapped",[40,20241,41]],[[12866,12866],"disallowed_STD3_mapped",[40,33258,41]],[[12867,12867],"disallowed_STD3_mapped",[40,33267,41]],[[12868,12868],"mapped",[21839]],[[12869,12869],"mapped",[24188]],[[12870,12870],"mapped",[25991]],[[12871,12871],"mapped",[31631]],[[12872,12879],"valid",[],"NV8"],[[12880,12880],"mapped",[112,116,101]],[[12881,12881],"mapped",[50,49]],[[12882,12882],"mapped",[50,50]],[[12883,12883],"mapped",[50,51]],[[12884,12884],"mapped",[50,52]],[[12885,12885],"mapp
ed",[50,53]],[[12886,12886],"mapped",[50,54]],[[12887,12887],"mapped",[50,55]],[[12888,12888],"mapped",[50,56]],[[12889,12889],"mapped",[50,57]],[[12890,12890],"mapped",[51,48]],[[12891,12891],"mapped",[51,49]],[[12892,12892],"mapped",[51,50]],[[12893,12893],"mapped",[51,51]],[[12894,12894],"mapped",[51,52]],[[12895,12895],"mapped",[51,53]],[[12896,12896],"mapped",[4352]],[[12897,12897],"mapped",[4354]],[[12898,12898],"mapped",[4355]],[[12899,12899],"mapped",[4357]],[[12900,12900],"mapped",[4358]],[[12901,12901],"mapped",[4359]],[[12902,12902],"mapped",[4361]],[[12903,12903],"mapped",[4363]],[[12904,12904],"mapped",[4364]],[[12905,12905],"mapped",[4366]],[[12906,12906],"mapped",[4367]],[[12907,12907],"mapped",[4368]],[[12908,12908],"mapped",[4369]],[[12909,12909],"mapped",[4370]],[[12910,12910],"mapped",[44032]],[[12911,12911],"mapped",[45208]],[[12912,12912],"mapped",[45796]],[[12913,12913],"mapped",[46972]],[[12914,12914],"mapped",[47560]],[[12915,12915],"mapped",[48148]],[[12916,12916],"mapped",[49324]],[[12917,12917],"mapped",[50500]],[[12918,12918],"mapped",[51088]],[[12919,12919],"mapped",[52264]],[[12920,12920],"mapped",[52852]],[[12921,12921],"mapped",[53440]],[[12922,12922],"mapped",[54028]],[[12923,12923],"mapped",[54616]],[[12924,12924],"mapped",[52280,44256]],[[12925,12925],"mapped",[51452,51032]],[[12926,12926],"mapped",[50864]],[[12927,12927],"valid",[],"NV8"],[[12928,12928],"mapped",[19968]],[[12929,12929],"mapped",[20108]],[[12930,12930],"mapped",[19977]],[[12931,12931],"mapped",[22235]],[[12932,12932],"mapped",[20116]],[[12933,12933],"mapped",[20845]],[[12934,12934],"mapped",[19971]],[[12935,12935],"mapped",[20843]],[[12936,12936],"mapped",[20061]],[[12937,12937],"mapped",[21313]],[[12938,12938],"mapped",[26376]],[[12939,12939],"mapped",[28779]],[[12940,12940],"mapped",[27700]],[[12941,12941],"mapped",[26408]],[[12942,12942],"mapped",[37329]],[[12943,12943],"mapped",[22303]],[[12944,12944],"mapped",[26085]],[[12945,12945],"mapped",[26666]],[[12946,1
2946],"mapped",[26377]],[[12947,12947],"mapped",[31038]],[[12948,12948],"mapped",[21517]],[[12949,12949],"mapped",[29305]],[[12950,12950],"mapped",[36001]],[[12951,12951],"mapped",[31069]],[[12952,12952],"mapped",[21172]],[[12953,12953],"mapped",[31192]],[[12954,12954],"mapped",[30007]],[[12955,12955],"mapped",[22899]],[[12956,12956],"mapped",[36969]],[[12957,12957],"mapped",[20778]],[[12958,12958],"mapped",[21360]],[[12959,12959],"mapped",[27880]],[[12960,12960],"mapped",[38917]],[[12961,12961],"mapped",[20241]],[[12962,12962],"mapped",[20889]],[[12963,12963],"mapped",[27491]],[[12964,12964],"mapped",[19978]],[[12965,12965],"mapped",[20013]],[[12966,12966],"mapped",[19979]],[[12967,12967],"mapped",[24038]],[[12968,12968],"mapped",[21491]],[[12969,12969],"mapped",[21307]],[[12970,12970],"mapped",[23447]],[[12971,12971],"mapped",[23398]],[[12972,12972],"mapped",[30435]],[[12973,12973],"mapped",[20225]],[[12974,12974],"mapped",[36039]],[[12975,12975],"mapped",[21332]],[[12976,12976],"mapped",[22812]],[[12977,12977],"mapped",[51,54]],[[12978,12978],"mapped",[51,55]],[[12979,12979],"mapped",[51,56]],[[12980,12980],"mapped",[51,57]],[[12981,12981],"mapped",[52,48]],[[12982,12982],"mapped",[52,49]],[[12983,12983],"mapped",[52,50]],[[12984,12984],"mapped",[52,51]],[[12985,12985],"mapped",[52,52]],[[12986,12986],"mapped",[52,53]],[[12987,12987],"mapped",[52,54]],[[12988,12988],"mapped",[52,55]],[[12989,12989],"mapped",[52,56]],[[12990,12990],"mapped",[52,57]],[[12991,12991],"mapped",[53,48]],[[12992,12992],"mapped",[49,26376]],[[12993,12993],"mapped",[50,26376]],[[12994,12994],"mapped",[51,26376]],[[12995,12995],"mapped",[52,26376]],[[12996,12996],"mapped",[53,26376]],[[12997,12997],"mapped",[54,26376]],[[12998,12998],"mapped",[55,26376]],[[12999,12999],"mapped",[56,26376]],[[13000,13000],"mapped",[57,26376]],[[13001,13001],"mapped",[49,48,26376]],[[13002,13002],"mapped",[49,49,26376]],[[13003,13003],"mapped",[49,50,26376]],[[13004,13004],"mapped",[104,103]],[[13005,13005],
"mapped",[101,114,103]],[[13006,13006],"mapped",[101,118]],[[13007,13007],"mapped",[108,116,100]],[[13008,13008],"mapped",[12450]],[[13009,13009],"mapped",[12452]],[[13010,13010],"mapped",[12454]],[[13011,13011],"mapped",[12456]],[[13012,13012],"mapped",[12458]],[[13013,13013],"mapped",[12459]],[[13014,13014],"mapped",[12461]],[[13015,13015],"mapped",[12463]],[[13016,13016],"mapped",[12465]],[[13017,13017],"mapped",[12467]],[[13018,13018],"mapped",[12469]],[[13019,13019],"mapped",[12471]],[[13020,13020],"mapped",[12473]],[[13021,13021],"mapped",[12475]],[[13022,13022],"mapped",[12477]],[[13023,13023],"mapped",[12479]],[[13024,13024],"mapped",[12481]],[[13025,13025],"mapped",[12484]],[[13026,13026],"mapped",[12486]],[[13027,13027],"mapped",[12488]],[[13028,13028],"mapped",[12490]],[[13029,13029],"mapped",[12491]],[[13030,13030],"mapped",[12492]],[[13031,13031],"mapped",[12493]],[[13032,13032],"mapped",[12494]],[[13033,13033],"mapped",[12495]],[[13034,13034],"mapped",[12498]],[[13035,13035],"mapped",[12501]],[[13036,13036],"mapped",[12504]],[[13037,13037],"mapped",[12507]],[[13038,13038],"mapped",[12510]],[[13039,13039],"mapped",[12511]],[[13040,13040],"mapped",[12512]],[[13041,13041],"mapped",[12513]],[[13042,13042],"mapped",[12514]],[[13043,13043],"mapped",[12516]],[[13044,13044],"mapped",[12518]],[[13045,13045],"mapped",[12520]],[[13046,13046],"mapped",[12521]],[[13047,13047],"mapped",[12522]],[[13048,13048],"mapped",[12523]],[[13049,13049],"mapped",[12524]],[[13050,13050],"mapped",[12525]],[[13051,13051],"mapped",[12527]],[[13052,13052],"mapped",[12528]],[[13053,13053],"mapped",[12529]],[[13054,13054],"mapped",[12530]],[[13055,13055],"disallowed"],[[13056,13056],"mapped",[12450,12497,12540,12488]],[[13057,13057],"mapped",[12450,12523,12501,12449]],[[13058,13058],"mapped",[12450,12531,12506,12450]],[[13059,13059],"mapped",[12450,12540,12523]],[[13060,13060],"mapped",[12452,12491,12531,12464]],[[13061,13061],"mapped",[12452,12531,12481]],[[13062,13062],"mapped",[124
54,12457,12531]],[[13063,13063],"mapped",[12456,12473,12463,12540,12489]],[[13064,13064],"mapped",[12456,12540,12459,12540]],[[13065,13065],"mapped",[12458,12531,12473]],[[13066,13066],"mapped",[12458,12540,12512]],[[13067,13067],"mapped",[12459,12452,12522]],[[13068,13068],"mapped",[12459,12521,12483,12488]],[[13069,13069],"mapped",[12459,12525,12522,12540]],[[13070,13070],"mapped",[12460,12525,12531]],[[13071,13071],"mapped",[12460,12531,12510]],[[13072,13072],"mapped",[12462,12460]],[[13073,13073],"mapped",[12462,12491,12540]],[[13074,13074],"mapped",[12461,12517,12522,12540]],[[13075,13075],"mapped",[12462,12523,12480,12540]],[[13076,13076],"mapped",[12461,12525]],[[13077,13077],"mapped",[12461,12525,12464,12521,12512]],[[13078,13078],"mapped",[12461,12525,12513,12540,12488,12523]],[[13079,13079],"mapped",[12461,12525,12527,12483,12488]],[[13080,13080],"mapped",[12464,12521,12512]],[[13081,13081],"mapped",[12464,12521,12512,12488,12531]],[[13082,13082],"mapped",[12463,12523,12476,12452,12525]],[[13083,13083],"mapped",[12463,12525,12540,12493]],[[13084,13084],"mapped",[12465,12540,12473]],[[13085,13085],"mapped",[12467,12523,12490]],[[13086,13086],"mapped",[12467,12540,12509]],[[13087,13087],"mapped",[12469,12452,12463,12523]],[[13088,13088],"mapped",[12469,12531,12481,12540,12512]],[[13089,13089],"mapped",[12471,12522,12531,12464]],[[13090,13090],"mapped",[12475,12531,12481]],[[13091,13091],"mapped",[12475,12531,12488]],[[13092,13092],"mapped",[12480,12540,12473]],[[13093,13093],"mapped",[12487,12471]],[[13094,13094],"mapped",[12489,12523]],[[13095,13095],"mapped",[12488,12531]],[[13096,13096],"mapped",[12490,12494]],[[13097,13097],"mapped",[12494,12483,12488]],[[13098,13098],"mapped",[12495,12452,12484]],[[13099,13099],"mapped",[12497,12540,12475,12531,12488]],[[13100,13100],"mapped",[12497,12540,12484]],[[13101,13101],"mapped",[12496,12540,12524,12523]],[[13102,13102],"mapped",[12500,12450,12473,12488,12523]],[[13103,13103],"mapped",[12500,12463,12523]],[[1310
4,13104],"mapped",[12500,12467]],[[13105,13105],"mapped",[12499,12523]],[[13106,13106],"mapped",[12501,12449,12521,12483,12489]],[[13107,13107],"mapped",[12501,12451,12540,12488]],[[13108,13108],"mapped",[12502,12483,12471,12455,12523]],[[13109,13109],"mapped",[12501,12521,12531]],[[13110,13110],"mapped",[12504,12463,12479,12540,12523]],[[13111,13111],"mapped",[12506,12477]],[[13112,13112],"mapped",[12506,12491,12498]],[[13113,13113],"mapped",[12504,12523,12484]],[[13114,13114],"mapped",[12506,12531,12473]],[[13115,13115],"mapped",[12506,12540,12472]],[[13116,13116],"mapped",[12505,12540,12479]],[[13117,13117],"mapped",[12509,12452,12531,12488]],[[13118,13118],"mapped",[12508,12523,12488]],[[13119,13119],"mapped",[12507,12531]],[[13120,13120],"mapped",[12509,12531,12489]],[[13121,13121],"mapped",[12507,12540,12523]],[[13122,13122],"mapped",[12507,12540,12531]],[[13123,13123],"mapped",[12510,12452,12463,12525]],[[13124,13124],"mapped",[12510,12452,12523]],[[13125,13125],"mapped",[12510,12483,12495]],[[13126,13126],"mapped",[12510,12523,12463]],[[13127,13127],"mapped",[12510,12531,12471,12519,12531]],[[13128,13128],"mapped",[12511,12463,12525,12531]],[[13129,13129],"mapped",[12511,12522]],[[13130,13130],"mapped",[12511,12522,12496,12540,12523]],[[13131,13131],"mapped",[12513,12460]],[[13132,13132],"mapped",[12513,12460,12488,12531]],[[13133,13133],"mapped",[12513,12540,12488,12523]],[[13134,13134],"mapped",[12516,12540,12489]],[[13135,13135],"mapped",[12516,12540,12523]],[[13136,13136],"mapped",[12518,12450,12531]],[[13137,13137],"mapped",[12522,12483,12488,12523]],[[13138,13138],"mapped",[12522,12521]],[[13139,13139],"mapped",[12523,12500,12540]],[[13140,13140],"mapped",[12523,12540,12502,12523]],[[13141,13141],"mapped",[12524,12512]],[[13142,13142],"mapped",[12524,12531,12488,12466,12531]],[[13143,13143],"mapped",[12527,12483,12488]],[[13144,13144],"mapped",[48,28857]],[[13145,13145],"mapped",[49,28857]],[[13146,13146],"mapped",[50,28857]],[[13147,13147],"mapped",[5
1,28857]],[[13148,13148],"mapped",[52,28857]],[[13149,13149],"mapped",[53,28857]],[[13150,13150],"mapped",[54,28857]],[[13151,13151],"mapped",[55,28857]],[[13152,13152],"mapped",[56,28857]],[[13153,13153],"mapped",[57,28857]],[[13154,13154],"mapped",[49,48,28857]],[[13155,13155],"mapped",[49,49,28857]],[[13156,13156],"mapped",[49,50,28857]],[[13157,13157],"mapped",[49,51,28857]],[[13158,13158],"mapped",[49,52,28857]],[[13159,13159],"mapped",[49,53,28857]],[[13160,13160],"mapped",[49,54,28857]],[[13161,13161],"mapped",[49,55,28857]],[[13162,13162],"mapped",[49,56,28857]],[[13163,13163],"mapped",[49,57,28857]],[[13164,13164],"mapped",[50,48,28857]],[[13165,13165],"mapped",[50,49,28857]],[[13166,13166],"mapped",[50,50,28857]],[[13167,13167],"mapped",[50,51,28857]],[[13168,13168],"mapped",[50,52,28857]],[[13169,13169],"mapped",[104,112,97]],[[13170,13170],"mapped",[100,97]],[[13171,13171],"mapped",[97,117]],[[13172,13172],"mapped",[98,97,114]],[[13173,13173],"mapped",[111,118]],[[13174,13174],"mapped",[112,99]],[[13175,13175],"mapped",[100,109]],[[13176,13176],"mapped",[100,109,50]],[[13177,13177],"mapped",[100,109,51]],[[13178,13178],"mapped",[105,117]],[[13179,13179],"mapped",[24179,25104]],[[13180,13180],"mapped",[26157,21644]],[[13181,13181],"mapped",[22823,27491]],[[13182,13182],"mapped",[26126,27835]],[[13183,13183],"mapped",[26666,24335,20250,31038]],[[13184,13184],"mapped",[112,97]],[[13185,13185],"mapped",[110,97]],[[13186,13186],"mapped",[956,97]],[[13187,13187],"mapped",[109,97]],[[13188,13188],"mapped",[107,97]],[[13189,13189],"mapped",[107,98]],[[13190,13190],"mapped",[109,98]],[[13191,13191],"mapped",[103,98]],[[13192,13192],"mapped",[99,97,108]],[[13193,13193],"mapped",[107,99,97,108]],[[13194,13194],"mapped",[112,102]],[[13195,13195],"mapped",[110,102]],[[13196,13196],"mapped",[956,102]],[[13197,13197],"mapped",[956,103]],[[13198,13198],"mapped",[109,103]],[[13199,13199],"mapped",[107,103]],[[13200,13200],"mapped",[104,122]],[[13201,13201],"mapped",[107,
104,122]],[[13202,13202],"mapped",[109,104,122]],[[13203,13203],"mapped",[103,104,122]],[[13204,13204],"mapped",[116,104,122]],[[13205,13205],"mapped",[956,108]],[[13206,13206],"mapped",[109,108]],[[13207,13207],"mapped",[100,108]],[[13208,13208],"mapped",[107,108]],[[13209,13209],"mapped",[102,109]],[[13210,13210],"mapped",[110,109]],[[13211,13211],"mapped",[956,109]],[[13212,13212],"mapped",[109,109]],[[13213,13213],"mapped",[99,109]],[[13214,13214],"mapped",[107,109]],[[13215,13215],"mapped",[109,109,50]],[[13216,13216],"mapped",[99,109,50]],[[13217,13217],"mapped",[109,50]],[[13218,13218],"mapped",[107,109,50]],[[13219,13219],"mapped",[109,109,51]],[[13220,13220],"mapped",[99,109,51]],[[13221,13221],"mapped",[109,51]],[[13222,13222],"mapped",[107,109,51]],[[13223,13223],"mapped",[109,8725,115]],[[13224,13224],"mapped",[109,8725,115,50]],[[13225,13225],"mapped",[112,97]],[[13226,13226],"mapped",[107,112,97]],[[13227,13227],"mapped",[109,112,97]],[[13228,13228],"mapped",[103,112,97]],[[13229,13229],"mapped",[114,97,100]],[[13230,13230],"mapped",[114,97,100,8725,115]],[[13231,13231],"mapped",[114,97,100,8725,115,50]],[[13232,13232],"mapped",[112,115]],[[13233,13233],"mapped",[110,115]],[[13234,13234],"mapped",[956,115]],[[13235,13235],"mapped",[109,115]],[[13236,13236],"mapped",[112,118]],[[13237,13237],"mapped",[110,118]],[[13238,13238],"mapped",[956,118]],[[13239,13239],"mapped",[109,118]],[[13240,13240],"mapped",[107,118]],[[13241,13241],"mapped",[109,118]],[[13242,13242],"mapped",[112,119]],[[13243,13243],"mapped",[110,119]],[[13244,13244],"mapped",[956,119]],[[13245,13245],"mapped",[109,119]],[[13246,13246],"mapped",[107,119]],[[13247,13247],"mapped",[109,119]],[[13248,13248],"mapped",[107,969]],[[13249,13249],"mapped",[109,969]],[[13250,13250],"disallowed"],[[13251,13251],"mapped",[98,113]],[[13252,13252],"mapped",[99,99]],[[13253,13253],"mapped",[99,100]],[[13254,13254],"mapped",[99,8725,107,103]],[[13255,13255],"disallowed"],[[13256,13256],"mapped",[100,98]
],[[13257,13257],"mapped",[103,121]],[[13258,13258],"mapped",[104,97]],[[13259,13259],"mapped",[104,112]],[[13260,13260],"mapped",[105,110]],[[13261,13261],"mapped",[107,107]],[[13262,13262],"mapped",[107,109]],[[13263,13263],"mapped",[107,116]],[[13264,13264],"mapped",[108,109]],[[13265,13265],"mapped",[108,110]],[[13266,13266],"mapped",[108,111,103]],[[13267,13267],"mapped",[108,120]],[[13268,13268],"mapped",[109,98]],[[13269,13269],"mapped",[109,105,108]],[[13270,13270],"mapped",[109,111,108]],[[13271,13271],"mapped",[112,104]],[[13272,13272],"disallowed"],[[13273,13273],"mapped",[112,112,109]],[[13274,13274],"mapped",[112,114]],[[13275,13275],"mapped",[115,114]],[[13276,13276],"mapped",[115,118]],[[13277,13277],"mapped",[119,98]],[[13278,13278],"mapped",[118,8725,109]],[[13279,13279],"mapped",[97,8725,109]],[[13280,13280],"mapped",[49,26085]],[[13281,13281],"mapped",[50,26085]],[[13282,13282],"mapped",[51,26085]],[[13283,13283],"mapped",[52,26085]],[[13284,13284],"mapped",[53,26085]],[[13285,13285],"mapped",[54,26085]],[[13286,13286],"mapped",[55,26085]],[[13287,13287],"mapped",[56,26085]],[[13288,13288],"mapped",[57,26085]],[[13289,13289],"mapped",[49,48,26085]],[[13290,13290],"mapped",[49,49,26085]],[[13291,13291],"mapped",[49,50,26085]],[[13292,13292],"mapped",[49,51,26085]],[[13293,13293],"mapped",[49,52,26085]],[[13294,13294],"mapped",[49,53,26085]],[[13295,13295],"mapped",[49,54,26085]],[[13296,13296],"mapped",[49,55,26085]],[[13297,13297],"mapped",[49,56,26085]],[[13298,13298],"mapped",[49,57,26085]],[[13299,13299],"mapped",[50,48,26085]],[[13300,13300],"mapped",[50,49,26085]],[[13301,13301],"mapped",[50,50,26085]],[[13302,13302],"mapped",[50,51,26085]],[[13303,13303],"mapped",[50,52,26085]],[[13304,13304],"mapped",[50,53,26085]],[[13305,13305],"mapped",[50,54,26085]],[[13306,13306],"mapped",[50,55,26085]],[[13307,13307],"mapped",[50,56,26085]],[[13308,13308],"mapped",[50,57,26085]],[[13309,13309],"mapped",[51,48,26085]],[[13310,13310],"mapped",[51,49,260
85]],[[13311,13311],"mapped",[103,97,108]],[[13312,19893],"valid"],[[19894,19903],"disallowed"],[[19904,19967],"valid",[],"NV8"],[[19968,40869],"valid"],[[40870,40891],"valid"],[[40892,40899],"valid"],[[40900,40907],"valid"],[[40908,40908],"valid"],[[40909,40917],"valid"],[[40918,40959],"disallowed"],[[40960,42124],"valid"],[[42125,42127],"disallowed"],[[42128,42145],"valid",[],"NV8"],[[42146,42147],"valid",[],"NV8"],[[42148,42163],"valid",[],"NV8"],[[42164,42164],"valid",[],"NV8"],[[42165,42176],"valid",[],"NV8"],[[42177,42177],"valid",[],"NV8"],[[42178,42180],"valid",[],"NV8"],[[42181,42181],"valid",[],"NV8"],[[42182,42182],"valid",[],"NV8"],[[42183,42191],"disallowed"],[[42192,42237],"valid"],[[42238,42239],"valid",[],"NV8"],[[42240,42508],"valid"],[[42509,42511],"valid",[],"NV8"],[[42512,42539],"valid"],[[42540,42559],"disallowed"],[[42560,42560],"mapped",[42561]],[[42561,42561],"valid"],[[42562,42562],"mapped",[42563]],[[42563,42563],"valid"],[[42564,42564],"mapped",[42565]],[[42565,42565],"valid"],[[42566,42566],"mapped",[42567]],[[42567,42567],"valid"],[[42568,42568],"mapped",[42569]],[[42569,42569],"valid"],[[42570,42570],"mapped",[42571]],[[42571,42571],"valid"],[[42572,42572],"mapped",[42573]],[[42573,42573],"valid"],[[42574,42574],"mapped",[42575]],[[42575,42575],"valid"],[[42576,42576],"mapped",[42577]],[[42577,42577],"valid"],[[42578,42578],"mapped",[42579]],[[42579,42579],"valid"],[[42580,42580],"mapped",[42581]],[[42581,42581],"valid"],[[42582,42582],"mapped",[42583]],[[42583,42583],"valid"],[[42584,42584],"mapped",[42585]],[[42585,42585],"valid"],[[42586,42586],"mapped",[42587]],[[42587,42587],"valid"],[[42588,42588],"mapped",[42589]],[[42589,42589],"valid"],[[42590,42590],"mapped",[42591]],[[42591,42591],"valid"],[[42592,42592],"mapped",[42593]],[[42593,42593],"valid"],[[42594,42594],"mapped",[42595]],[[42595,42595],"valid"],[[42596,42596],"mapped",[42597]],[[42597,42597],"valid"],[[42598,42598],"mapped",[42599]],[[42599,42599],"valid"],[[42600,4260
0],"mapped",[42601]],[[42601,42601],"valid"],[[42602,42602],"mapped",[42603]],[[42603,42603],"valid"],[[42604,42604],"mapped",[42605]],[[42605,42607],"valid"],[[42608,42611],"valid",[],"NV8"],[[42612,42619],"valid"],[[42620,42621],"valid"],[[42622,42622],"valid",[],"NV8"],[[42623,42623],"valid"],[[42624,42624],"mapped",[42625]],[[42625,42625],"valid"],[[42626,42626],"mapped",[42627]],[[42627,42627],"valid"],[[42628,42628],"mapped",[42629]],[[42629,42629],"valid"],[[42630,42630],"mapped",[42631]],[[42631,42631],"valid"],[[42632,42632],"mapped",[42633]],[[42633,42633],"valid"],[[42634,42634],"mapped",[42635]],[[42635,42635],"valid"],[[42636,42636],"mapped",[42637]],[[42637,42637],"valid"],[[42638,42638],"mapped",[42639]],[[42639,42639],"valid"],[[42640,42640],"mapped",[42641]],[[42641,42641],"valid"],[[42642,42642],"mapped",[42643]],[[42643,42643],"valid"],[[42644,42644],"mapped",[42645]],[[42645,42645],"valid"],[[42646,42646],"mapped",[42647]],[[42647,42647],"valid"],[[42648,42648],"mapped",[42649]],[[42649,42649],"valid"],[[42650,42650],"mapped",[42651]],[[42651,42651],"valid"],[[42652,42652],"mapped",[1098]],[[42653,42653],"mapped",[1100]],[[42654,42654],"valid"],[[42655,42655],"valid"],[[42656,42725],"valid"],[[42726,42735],"valid",[],"NV8"],[[42736,42737],"valid"],[[42738,42743],"valid",[],"NV8"],[[42744,42751],"disallowed"],[[42752,42774],"valid",[],"NV8"],[[42775,42778],"valid"],[[42779,42783],"valid"],[[42784,42785],"valid",[],"NV8"],[[42786,42786],"mapped",[42787]],[[42787,42787],"valid"],[[42788,42788],"mapped",[42789]],[[42789,42789],"valid"],[[42790,42790],"mapped",[42791]],[[42791,42791],"valid"],[[42792,42792],"mapped",[42793]],[[42793,42793],"valid"],[[42794,42794],"mapped",[42795]],[[42795,42795],"valid"],[[42796,42796],"mapped",[42797]],[[42797,42797],"valid"],[[42798,42798],"mapped",[42799]],[[42799,42801],"valid"],[[42802,42802],"mapped",[42803]],[[42803,42803],"valid"],[[42804,42804],"mapped",[42805]],[[42805,42805],"valid"],[[42806,42806],"mapped"
,[42807]],[[42807,42807],"valid"],[[42808,42808],"mapped",[42809]],[[42809,42809],"valid"],[[42810,42810],"mapped",[42811]],[[42811,42811],"valid"],[[42812,42812],"mapped",[42813]],[[42813,42813],"valid"],[[42814,42814],"mapped",[42815]],[[42815,42815],"valid"],[[42816,42816],"mapped",[42817]],[[42817,42817],"valid"],[[42818,42818],"mapped",[42819]],[[42819,42819],"valid"],[[42820,42820],"mapped",[42821]],[[42821,42821],"valid"],[[42822,42822],"mapped",[42823]],[[42823,42823],"valid"],[[42824,42824],"mapped",[42825]],[[42825,42825],"valid"],[[42826,42826],"mapped",[42827]],[[42827,42827],"valid"],[[42828,42828],"mapped",[42829]],[[42829,42829],"valid"],[[42830,42830],"mapped",[42831]],[[42831,42831],"valid"],[[42832,42832],"mapped",[42833]],[[42833,42833],"valid"],[[42834,42834],"mapped",[42835]],[[42835,42835],"valid"],[[42836,42836],"mapped",[42837]],[[42837,42837],"valid"],[[42838,42838],"mapped",[42839]],[[42839,42839],"valid"],[[42840,42840],"mapped",[42841]],[[42841,42841],"valid"],[[42842,42842],"mapped",[42843]],[[42843,42843],"valid"],[[42844,42844],"mapped",[42845]],[[42845,42845],"valid"],[[42846,42846],"mapped",[42847]],[[42847,42847],"valid"],[[42848,42848],"mapped",[42849]],[[42849,42849],"valid"],[[42850,42850],"mapped",[42851]],[[42851,42851],"valid"],[[42852,42852],"mapped",[42853]],[[42853,42853],"valid"],[[42854,42854],"mapped",[42855]],[[42855,42855],"valid"],[[42856,42856],"mapped",[42857]],[[42857,42857],"valid"],[[42858,42858],"mapped",[42859]],[[42859,42859],"valid"],[[42860,42860],"mapped",[42861]],[[42861,42861],"valid"],[[42862,42862],"mapped",[42863]],[[42863,42863],"valid"],[[42864,42864],"mapped",[42863]],[[42865,42872],"valid"],[[42873,42873],"mapped",[42874]],[[42874,42874],"valid"],[[42875,42875],"mapped",[42876]],[[42876,42876],"valid"],[[42877,42877],"mapped",[7545]],[[42878,42878],"mapped",[42879]],[[42879,42879],"valid"],[[42880,42880],"mapped",[42881]],[[42881,42881],"valid"],[[42882,42882],"mapped",[42883]],[[42883,42883],"vali
d"],[[42884,42884],"mapped",[42885]],[[42885,42885],"valid"],[[42886,42886],"mapped",[42887]],[[42887,42888],"valid"],[[42889,42890],"valid",[],"NV8"],[[42891,42891],"mapped",[42892]],[[42892,42892],"valid"],[[42893,42893],"mapped",[613]],[[42894,42894],"valid"],[[42895,42895],"valid"],[[42896,42896],"mapped",[42897]],[[42897,42897],"valid"],[[42898,42898],"mapped",[42899]],[[42899,42899],"valid"],[[42900,42901],"valid"],[[42902,42902],"mapped",[42903]],[[42903,42903],"valid"],[[42904,42904],"mapped",[42905]],[[42905,42905],"valid"],[[42906,42906],"mapped",[42907]],[[42907,42907],"valid"],[[42908,42908],"mapped",[42909]],[[42909,42909],"valid"],[[42910,42910],"mapped",[42911]],[[42911,42911],"valid"],[[42912,42912],"mapped",[42913]],[[42913,42913],"valid"],[[42914,42914],"mapped",[42915]],[[42915,42915],"valid"],[[42916,42916],"mapped",[42917]],[[42917,42917],"valid"],[[42918,42918],"mapped",[42919]],[[42919,42919],"valid"],[[42920,42920],"mapped",[42921]],[[42921,42921],"valid"],[[42922,42922],"mapped",[614]],[[42923,42923],"mapped",[604]],[[42924,42924],"mapped",[609]],[[42925,42925],"mapped",[620]],[[42926,42927],"disallowed"],[[42928,42928],"mapped",[670]],[[42929,42929],"mapped",[647]],[[42930,42930],"mapped",[669]],[[42931,42931],"mapped",[43859]],[[42932,42932],"mapped",[42933]],[[42933,42933],"valid"],[[42934,42934],"mapped",[42935]],[[42935,42935],"valid"],[[42936,42998],"disallowed"],[[42999,42999],"valid"],[[43000,43000],"mapped",[295]],[[43001,43001],"mapped",[339]],[[43002,43002],"valid"],[[43003,43007],"valid"],[[43008,43047],"valid"],[[43048,43051],"valid",[],"NV8"],[[43052,43055],"disallowed"],[[43056,43065],"valid",[],"NV8"],[[43066,43071],"disallowed"],[[43072,43123],"valid"],[[43124,43127],"valid",[],"NV8"],[[43128,43135],"disallowed"],[[43136,43204],"valid"],[[43205,43213],"disallowed"],[[43214,43215],"valid",[],"NV8"],[[43216,43225],"valid"],[[43226,43231],"disallowed"],[[43232,43255],"valid"],[[43256,43258],"valid",[],"NV8"],[[43259,43259],"val
id"],[[43260,43260],"valid",[],"NV8"],[[43261,43261],"valid"],[[43262,43263],"disallowed"],[[43264,43309],"valid"],[[43310,43311],"valid",[],"NV8"],[[43312,43347],"valid"],[[43348,43358],"disallowed"],[[43359,43359],"valid",[],"NV8"],[[43360,43388],"valid",[],"NV8"],[[43389,43391],"disallowed"],[[43392,43456],"valid"],[[43457,43469],"valid",[],"NV8"],[[43470,43470],"disallowed"],[[43471,43481],"valid"],[[43482,43485],"disallowed"],[[43486,43487],"valid",[],"NV8"],[[43488,43518],"valid"],[[43519,43519],"disallowed"],[[43520,43574],"valid"],[[43575,43583],"disallowed"],[[43584,43597],"valid"],[[43598,43599],"disallowed"],[[43600,43609],"valid"],[[43610,43611],"disallowed"],[[43612,43615],"valid",[],"NV8"],[[43616,43638],"valid"],[[43639,43641],"valid",[],"NV8"],[[43642,43643],"valid"],[[43644,43647],"valid"],[[43648,43714],"valid"],[[43715,43738],"disallowed"],[[43739,43741],"valid"],[[43742,43743],"valid",[],"NV8"],[[43744,43759],"valid"],[[43760,43761],"valid",[],"NV8"],[[43762,43766],"valid"],[[43767,43776],"disallowed"],[[43777,43782],"valid"],[[43783,43784],"disallowed"],[[43785,43790],"valid"],[[43791,43792],"disallowed"],[[43793,43798],"valid"],[[43799,43807],"disallowed"],[[43808,43814],"valid"],[[43815,43815],"disallowed"],[[43816,43822],"valid"],[[43823,43823],"disallowed"],[[43824,43866],"valid"],[[43867,43867],"valid",[],"NV8"],[[43868,43868],"mapped",[42791]],[[43869,43869],"mapped",[43831]],[[43870,43870],"mapped",[619]],[[43871,43871],"mapped",[43858]],[[43872,43875],"valid"],[[43876,43877],"valid"],[[43878,43887],"disallowed"],[[43888,43888],"mapped",[5024]],[[43889,43889],"mapped",[5025]],[[43890,43890],"mapped",[5026]],[[43891,43891],"mapped",[5027]],[[43892,43892],"mapped",[5028]],[[43893,43893],"mapped",[5029]],[[43894,43894],"mapped",[5030]],[[43895,43895],"mapped",[5031]],[[43896,43896],"mapped",[5032]],[[43897,43897],"mapped",[5033]],[[43898,43898],"mapped",[5034]],[[43899,43899],"mapped",[5035]],[[43900,43900],"mapped",[5036]],[[43901,43901],"m
apped",[5037]],[[43902,43902],"mapped",[5038]],[[43903,43903],"mapped",[5039]],[[43904,43904],"mapped",[5040]],[[43905,43905],"mapped",[5041]],[[43906,43906],"mapped",[5042]],[[43907,43907],"mapped",[5043]],[[43908,43908],"mapped",[5044]],[[43909,43909],"mapped",[5045]],[[43910,43910],"mapped",[5046]],[[43911,43911],"mapped",[5047]],[[43912,43912],"mapped",[5048]],[[43913,43913],"mapped",[5049]],[[43914,43914],"mapped",[5050]],[[43915,43915],"mapped",[5051]],[[43916,43916],"mapped",[5052]],[[43917,43917],"mapped",[5053]],[[43918,43918],"mapped",[5054]],[[43919,43919],"mapped",[5055]],[[43920,43920],"mapped",[5056]],[[43921,43921],"mapped",[5057]],[[43922,43922],"mapped",[5058]],[[43923,43923],"mapped",[5059]],[[43924,43924],"mapped",[5060]],[[43925,43925],"mapped",[5061]],[[43926,43926],"mapped",[5062]],[[43927,43927],"mapped",[5063]],[[43928,43928],"mapped",[5064]],[[43929,43929],"mapped",[5065]],[[43930,43930],"mapped",[5066]],[[43931,43931],"mapped",[5067]],[[43932,43932],"mapped",[5068]],[[43933,43933],"mapped",[5069]],[[43934,43934],"mapped",[5070]],[[43935,43935],"mapped",[5071]],[[43936,43936],"mapped",[5072]],[[43937,43937],"mapped",[5073]],[[43938,43938],"mapped",[5074]],[[43939,43939],"mapped",[5075]],[[43940,43940],"mapped",[5076]],[[43941,43941],"mapped",[5077]],[[43942,43942],"mapped",[5078]],[[43943,43943],"mapped",[5079]],[[43944,43944],"mapped",[5080]],[[43945,43945],"mapped",[5081]],[[43946,43946],"mapped",[5082]],[[43947,43947],"mapped",[5083]],[[43948,43948],"mapped",[5084]],[[43949,43949],"mapped",[5085]],[[43950,43950],"mapped",[5086]],[[43951,43951],"mapped",[5087]],[[43952,43952],"mapped",[5088]],[[43953,43953],"mapped",[5089]],[[43954,43954],"mapped",[5090]],[[43955,43955],"mapped",[5091]],[[43956,43956],"mapped",[5092]],[[43957,43957],"mapped",[5093]],[[43958,43958],"mapped",[5094]],[[43959,43959],"mapped",[5095]],[[43960,43960],"mapped",[5096]],[[43961,43961],"mapped",[5097]],[[43962,43962],"mapped",[5098]],[[43963,43963],"mapped",[5099]],[
[43964,43964],"mapped",[5100]],[[43965,43965],"mapped",[5101]],[[43966,43966],"mapped",[5102]],[[43967,43967],"mapped",[5103]],[[43968,44010],"valid"],[[44011,44011],"valid",[],"NV8"],[[44012,44013],"valid"],[[44014,44015],"disallowed"],[[44016,44025],"valid"],[[44026,44031],"disallowed"],[[44032,55203],"valid"],[[55204,55215],"disallowed"],[[55216,55238],"valid",[],"NV8"],[[55239,55242],"disallowed"],[[55243,55291],"valid",[],"NV8"],[[55292,55295],"disallowed"],[[55296,57343],"disallowed"],[[57344,63743],"disallowed"],[[63744,63744],"mapped",[35912]],[[63745,63745],"mapped",[26356]],[[63746,63746],"mapped",[36554]],[[63747,63747],"mapped",[36040]],[[63748,63748],"mapped",[28369]],[[63749,63749],"mapped",[20018]],[[63750,63750],"mapped",[21477]],[[63751,63752],"mapped",[40860]],[[63753,63753],"mapped",[22865]],[[63754,63754],"mapped",[37329]],[[63755,63755],"mapped",[21895]],[[63756,63756],"mapped",[22856]],[[63757,63757],"mapped",[25078]],[[63758,63758],"mapped",[30313]],[[63759,63759],"mapped",[32645]],[[63760,63760],"mapped",[34367]],[[63761,63761],"mapped",[34746]],[[63762,63762],"mapped",[35064]],[[63763,63763],"mapped",[37007]],[[63764,63764],"mapped",[27138]],[[63765,63765],"mapped",[27931]],[[63766,63766],"mapped",[28889]],[[63767,63767],"mapped",[29662]],[[63768,63768],"mapped",[33853]],[[63769,63769],"mapped",[37226]],[[63770,63770],"mapped",[39409]],[[63771,63771],"mapped",[20098]],[[63772,63772],"mapped",[21365]],[[63773,63773],"mapped",[27396]],[[63774,63774],"mapped",[29211]],[[63775,63775],"mapped",[34349]],[[63776,63776],"mapped",[40478]],[[63777,63777],"mapped",[23888]],[[63778,63778],"mapped",[28651]],[[63779,63779],"mapped",[34253]],[[63780,63780],"mapped",[35172]],[[63781,63781],"mapped",[25289]],[[63782,63782],"mapped",[33240]],[[63783,63783],"mapped",[34847]],[[63784,63784],"mapped",[24266]],[[63785,63785],"mapped",[26391]],[[63786,63786],"mapped",[28010]],[[63787,63787],"mapped",[29436]],[[63788,63788],"mapped",[37070]],[[63789,63789],"mapped"
,[20358]],[[63790,63790],"mapped",[20919]],[[63791,63791],"mapped",[21214]],[[63792,63792],"mapped",[25796]],[[63793,63793],"mapped",[27347]],[[63794,63794],"mapped",[29200]],[[63795,63795],"mapped",[30439]],[[63796,63796],"mapped",[32769]],[[63797,63797],"mapped",[34310]],[[63798,63798],"mapped",[34396]],[[63799,63799],"mapped",[36335]],[[63800,63800],"mapped",[38706]],[[63801,63801],"mapped",[39791]],[[63802,63802],"mapped",[40442]],[[63803,63803],"mapped",[30860]],[[63804,63804],"mapped",[31103]],[[63805,63805],"mapped",[32160]],[[63806,63806],"mapped",[33737]],[[63807,63807],"mapped",[37636]],[[63808,63808],"mapped",[40575]],[[63809,63809],"mapped",[35542]],[[63810,63810],"mapped",[22751]],[[63811,63811],"mapped",[24324]],[[63812,63812],"mapped",[31840]],[[63813,63813],"mapped",[32894]],[[63814,63814],"mapped",[29282]],[[63815,63815],"mapped",[30922]],[[63816,63816],"mapped",[36034]],[[63817,63817],"mapped",[38647]],[[63818,63818],"mapped",[22744]],[[63819,63819],"mapped",[23650]],[[63820,63820],"mapped",[27155]],[[63821,63821],"mapped",[28122]],[[63822,63822],"mapped",[28431]],[[63823,63823],"mapped",[32047]],[[63824,63824],"mapped",[32311]],[[63825,63825],"mapped",[38475]],[[63826,63826],"mapped",[21202]],[[63827,63827],"mapped",[32907]],[[63828,63828],"mapped",[20956]],[[63829,63829],"mapped",[20940]],[[63830,63830],"mapped",[31260]],[[63831,63831],"mapped",[32190]],[[63832,63832],"mapped",[33777]],[[63833,63833],"mapped",[38517]],[[63834,63834],"mapped",[35712]],[[63835,63835],"mapped",[25295]],[[63836,63836],"mapped",[27138]],[[63837,63837],"mapped",[35582]],[[63838,63838],"mapped",[20025]],[[63839,63839],"mapped",[23527]],[[63840,63840],"mapped",[24594]],[[63841,63841],"mapped",[29575]],[[63842,63842],"mapped",[30064]],[[63843,63843],"mapped",[21271]],[[63844,63844],"mapped",[30971]],[[63845,63845],"mapped",[20415]],[[63846,63846],"mapped",[24489]],[[63847,63847],"mapped",[19981]],[[63848,63848],"mapped",[27852]],[[63849,63849],"mapped",[25976]],[[63850,63
850],"mapped",[32034]],[[63851,63851],"mapped",[21443]],[[63852,63852],"mapped",[22622]],[[63853,63853],"mapped",[30465]],[[63854,63854],"mapped",[33865]],[[63855,63855],"mapped",[35498]],[[63856,63856],"mapped",[27578]],[[63857,63857],"mapped",[36784]],[[63858,63858],"mapped",[27784]],[[63859,63859],"mapped",[25342]],[[63860,63860],"mapped",[33509]],[[63861,63861],"mapped",[25504]],[[63862,63862],"mapped",[30053]],[[63863,63863],"mapped",[20142]],[[63864,63864],"mapped",[20841]],[[63865,63865],"mapped",[20937]],[[63866,63866],"mapped",[26753]],[[63867,63867],"mapped",[31975]],[[63868,63868],"mapped",[33391]],[[63869,63869],"mapped",[35538]],[[63870,63870],"mapped",[37327]],[[63871,63871],"mapped",[21237]],[[63872,63872],"mapped",[21570]],[[63873,63873],"mapped",[22899]],[[63874,63874],"mapped",[24300]],[[63875,63875],"mapped",[26053]],[[63876,63876],"mapped",[28670]],[[63877,63877],"mapped",[31018]],[[63878,63878],"mapped",[38317]],[[63879,63879],"mapped",[39530]],[[63880,63880],"mapped",[40599]],[[63881,63881],"mapped",[40654]],[[63882,63882],"mapped",[21147]],[[63883,63883],"mapped",[26310]],[[63884,63884],"mapped",[27511]],[[63885,63885],"mapped",[36706]],[[63886,63886],"mapped",[24180]],[[63887,63887],"mapped",[24976]],[[63888,63888],"mapped",[25088]],[[63889,63889],"mapped",[25754]],[[63890,63890],"mapped",[28451]],[[63891,63891],"mapped",[29001]],[[63892,63892],"mapped",[29833]],[[63893,63893],"mapped",[31178]],[[63894,63894],"mapped",[32244]],[[63895,63895],"mapped",[32879]],[[63896,63896],"mapped",[36646]],[[63897,63897],"mapped",[34030]],[[63898,63898],"mapped",[36899]],[[63899,63899],"mapped",[37706]],[[63900,63900],"mapped",[21015]],[[63901,63901],"mapped",[21155]],[[63902,63902],"mapped",[21693]],[[63903,63903],"mapped",[28872]],[[63904,63904],"mapped",[35010]],[[63905,63905],"mapped",[35498]],[[63906,63906],"mapped",[24265]],[[63907,63907],"mapped",[24565]],[[63908,63908],"mapped",[25467]],[[63909,63909],"mapped",[27566]],[[63910,63910],"mapped",[31806
]],[[63911,63911],"mapped",[29557]],[[63912,63912],"mapped",[20196]],[[63913,63913],"mapped",[22265]],[[63914,63914],"mapped",[23527]],[[63915,63915],"mapped",[23994]],[[63916,63916],"mapped",[24604]],[[63917,63917],"mapped",[29618]],[[63918,63918],"mapped",[29801]],[[63919,63919],"mapped",[32666]],[[63920,63920],"mapped",[32838]],[[63921,63921],"mapped",[37428]],[[63922,63922],"mapped",[38646]],[[63923,63923],"mapped",[38728]],[[63924,63924],"mapped",[38936]],[[63925,63925],"mapped",[20363]],[[63926,63926],"mapped",[31150]],[[63927,63927],"mapped",[37300]],[[63928,63928],"mapped",[38584]],[[63929,63929],"mapped",[24801]],[[63930,63930],"mapped",[20102]],[[63931,63931],"mapped",[20698]],[[63932,63932],"mapped",[23534]],[[63933,63933],"mapped",[23615]],[[63934,63934],"mapped",[26009]],[[63935,63935],"mapped",[27138]],[[63936,63936],"mapped",[29134]],[[63937,63937],"mapped",[30274]],[[63938,63938],"mapped",[34044]],[[63939,63939],"mapped",[36988]],[[63940,63940],"mapped",[40845]],[[63941,63941],"mapped",[26248]],[[63942,63942],"mapped",[38446]],[[63943,63943],"mapped",[21129]],[[63944,63944],"mapped",[26491]],[[63945,63945],"mapped",[26611]],[[63946,63946],"mapped",[27969]],[[63947,63947],"mapped",[28316]],[[63948,63948],"mapped",[29705]],[[63949,63949],"mapped",[30041]],[[63950,63950],"mapped",[30827]],[[63951,63951],"mapped",[32016]],[[63952,63952],"mapped",[39006]],[[63953,63953],"mapped",[20845]],[[63954,63954],"mapped",[25134]],[[63955,63955],"mapped",[38520]],[[63956,63956],"mapped",[20523]],[[63957,63957],"mapped",[23833]],[[63958,63958],"mapped",[28138]],[[63959,63959],"mapped",[36650]],[[63960,63960],"mapped",[24459]],[[63961,63961],"mapped",[24900]],[[63962,63962],"mapped",[26647]],[[63963,63963],"mapped",[29575]],[[63964,63964],"mapped",[38534]],[[63965,63965],"mapped",[21033]],[[63966,63966],"mapped",[21519]],[[63967,63967],"mapped",[23653]],[[63968,63968],"mapped",[26131]],[[63969,63969],"mapped",[26446]],[[63970,63970],"mapped",[26792]],[[63971,63971],"m
apped",[27877]],[[63972,63972],"mapped",[29702]],[[63973,63973],"mapped",[30178]],[[63974,63974],"mapped",[32633]],[[63975,63975],"mapped",[35023]],[[63976,63976],"mapped",[35041]],[[63977,63977],"mapped",[37324]],[[63978,63978],"mapped",[38626]],[[63979,63979],"mapped",[21311]],[[63980,63980],"mapped",[28346]],[[63981,63981],"mapped",[21533]],[[63982,63982],"mapped",[29136]],[[63983,63983],"mapped",[29848]],[[63984,63984],"mapped",[34298]],[[63985,63985],"mapped",[38563]],[[63986,63986],"mapped",[40023]],[[63987,63987],"mapped",[40607]],[[63988,63988],"mapped",[26519]],[[63989,63989],"mapped",[28107]],[[63990,63990],"mapped",[33256]],[[63991,63991],"mapped",[31435]],[[63992,63992],"mapped",[31520]],[[63993,63993],"mapped",[31890]],[[63994,63994],"mapped",[29376]],[[63995,63995],"mapped",[28825]],[[63996,63996],"mapped",[35672]],[[63997,63997],"mapped",[20160]],[[63998,63998],"mapped",[33590]],[[63999,63999],"mapped",[21050]],[[64000,64000],"mapped",[20999]],[[64001,64001],"mapped",[24230]],[[64002,64002],"mapped",[25299]],[[64003,64003],"mapped",[31958]],[[64004,64004],"mapped",[23429]],[[64005,64005],"mapped",[27934]],[[64006,64006],"mapped",[26292]],[[64007,64007],"mapped",[36667]],[[64008,64008],"mapped",[34892]],[[64009,64009],"mapped",[38477]],[[64010,64010],"mapped",[35211]],[[64011,64011],"mapped",[24275]],[[64012,64012],"mapped",[20800]],[[64013,64013],"mapped",[21952]],[[64014,64015],"valid"],[[64016,64016],"mapped",[22618]],[[64017,64017],"valid"],[[64018,64018],"mapped",[26228]],[[64019,64020],"valid"],[[64021,64021],"mapped",[20958]],[[64022,64022],"mapped",[29482]],[[64023,64023],"mapped",[30410]],[[64024,64024],"mapped",[31036]],[[64025,64025],"mapped",[31070]],[[64026,64026],"mapped",[31077]],[[64027,64027],"mapped",[31119]],[[64028,64028],"mapped",[38742]],[[64029,64029],"mapped",[31934]],[[64030,64030],"mapped",[32701]],[[64031,64031],"valid"],[[64032,64032],"mapped",[34322]],[[64033,64033],"valid"],[[64034,64034],"mapped",[35576]],[[64035,64036],"
valid"],[[64037,64037],"mapped",[36920]],[[64038,64038],"mapped",[37117]],[[64039,64041],"valid"],[[64042,64042],"mapped",[39151]],[[64043,64043],"mapped",[39164]],[[64044,64044],"mapped",[39208]],[[64045,64045],"mapped",[40372]],[[64046,64046],"mapped",[37086]],[[64047,64047],"mapped",[38583]],[[64048,64048],"mapped",[20398]],[[64049,64049],"mapped",[20711]],[[64050,64050],"mapped",[20813]],[[64051,64051],"mapped",[21193]],[[64052,64052],"mapped",[21220]],[[64053,64053],"mapped",[21329]],[[64054,64054],"mapped",[21917]],[[64055,64055],"mapped",[22022]],[[64056,64056],"mapped",[22120]],[[64057,64057],"mapped",[22592]],[[64058,64058],"mapped",[22696]],[[64059,64059],"mapped",[23652]],[[64060,64060],"mapped",[23662]],[[64061,64061],"mapped",[24724]],[[64062,64062],"mapped",[24936]],[[64063,64063],"mapped",[24974]],[[64064,64064],"mapped",[25074]],[[64065,64065],"mapped",[25935]],[[64066,64066],"mapped",[26082]],[[64067,64067],"mapped",[26257]],[[64068,64068],"mapped",[26757]],[[64069,64069],"mapped",[28023]],[[64070,64070],"mapped",[28186]],[[64071,64071],"mapped",[28450]],[[64072,64072],"mapped",[29038]],[[64073,64073],"mapped",[29227]],[[64074,64074],"mapped",[29730]],[[64075,64075],"mapped",[30865]],[[64076,64076],"mapped",[31038]],[[64077,64077],"mapped",[31049]],[[64078,64078],"mapped",[31048]],[[64079,64079],"mapped",[31056]],[[64080,64080],"mapped",[31062]],[[64081,64081],"mapped",[31069]],[[64082,64082],"mapped",[31117]],[[64083,64083],"mapped",[31118]],[[64084,64084],"mapped",[31296]],[[64085,64085],"mapped",[31361]],[[64086,64086],"mapped",[31680]],[[64087,64087],"mapped",[32244]],[[64088,64088],"mapped",[32265]],[[64089,64089],"mapped",[32321]],[[64090,64090],"mapped",[32626]],[[64091,64091],"mapped",[32773]],[[64092,64092],"mapped",[33261]],[[64093,64094],"mapped",[33401]],[[64095,64095],"mapped",[33879]],[[64096,64096],"mapped",[35088]],[[64097,64097],"mapped",[35222]],[[64098,64098],"mapped",[35585]],[[64099,64099],"mapped",[35641]],[[64100,64100],"mappe
d",[36051]],[[64101,64101],"mapped",[36104]],[[64102,64102],"mapped",[36790]],[[64103,64103],"mapped",[36920]],[[64104,64104],"mapped",[38627]],[[64105,64105],"mapped",[38911]],[[64106,64106],"mapped",[38971]],[[64107,64107],"mapped",[24693]],[[64108,64108],"mapped",[148206]],[[64109,64109],"mapped",[33304]],[[64110,64111],"disallowed"],[[64112,64112],"mapped",[20006]],[[64113,64113],"mapped",[20917]],[[64114,64114],"mapped",[20840]],[[64115,64115],"mapped",[20352]],[[64116,64116],"mapped",[20805]],[[64117,64117],"mapped",[20864]],[[64118,64118],"mapped",[21191]],[[64119,64119],"mapped",[21242]],[[64120,64120],"mapped",[21917]],[[64121,64121],"mapped",[21845]],[[64122,64122],"mapped",[21913]],[[64123,64123],"mapped",[21986]],[[64124,64124],"mapped",[22618]],[[64125,64125],"mapped",[22707]],[[64126,64126],"mapped",[22852]],[[64127,64127],"mapped",[22868]],[[64128,64128],"mapped",[23138]],[[64129,64129],"mapped",[23336]],[[64130,64130],"mapped",[24274]],[[64131,64131],"mapped",[24281]],[[64132,64132],"mapped",[24425]],[[64133,64133],"mapped",[24493]],[[64134,64134],"mapped",[24792]],[[64135,64135],"mapped",[24910]],[[64136,64136],"mapped",[24840]],[[64137,64137],"mapped",[24974]],[[64138,64138],"mapped",[24928]],[[64139,64139],"mapped",[25074]],[[64140,64140],"mapped",[25140]],[[64141,64141],"mapped",[25540]],[[64142,64142],"mapped",[25628]],[[64143,64143],"mapped",[25682]],[[64144,64144],"mapped",[25942]],[[64145,64145],"mapped",[26228]],[[64146,64146],"mapped",[26391]],[[64147,64147],"mapped",[26395]],[[64148,64148],"mapped",[26454]],[[64149,64149],"mapped",[27513]],[[64150,64150],"mapped",[27578]],[[64151,64151],"mapped",[27969]],[[64152,64152],"mapped",[28379]],[[64153,64153],"mapped",[28363]],[[64154,64154],"mapped",[28450]],[[64155,64155],"mapped",[28702]],[[64156,64156],"mapped",[29038]],[[64157,64157],"mapped",[30631]],[[64158,64158],"mapped",[29237]],[[64159,64159],"mapped",[29359]],[[64160,64160],"mapped",[29482]],[[64161,64161],"mapped",[29809]],[[64162,641
62],"mapped",[29958]],[[64163,64163],"mapped",[30011]],[[64164,64164],"mapped",[30237]],[[64165,64165],"mapped",[30239]],[[64166,64166],"mapped",[30410]],[[64167,64167],"mapped",[30427]],[[64168,64168],"mapped",[30452]],[[64169,64169],"mapped",[30538]],[[64170,64170],"mapped",[30528]],[[64171,64171],"mapped",[30924]],[[64172,64172],"mapped",[31409]],[[64173,64173],"mapped",[31680]],[[64174,64174],"mapped",[31867]],[[64175,64175],"mapped",[32091]],[[64176,64176],"mapped",[32244]],[[64177,64177],"mapped",[32574]],[[64178,64178],"mapped",[32773]],[[64179,64179],"mapped",[33618]],[[64180,64180],"mapped",[33775]],[[64181,64181],"mapped",[34681]],[[64182,64182],"mapped",[35137]],[[64183,64183],"mapped",[35206]],[[64184,64184],"mapped",[35222]],[[64185,64185],"mapped",[35519]],[[64186,64186],"mapped",[35576]],[[64187,64187],"mapped",[35531]],[[64188,64188],"mapped",[35585]],[[64189,64189],"mapped",[35582]],[[64190,64190],"mapped",[35565]],[[64191,64191],"mapped",[35641]],[[64192,64192],"mapped",[35722]],[[64193,64193],"mapped",[36104]],[[64194,64194],"mapped",[36664]],[[64195,64195],"mapped",[36978]],[[64196,64196],"mapped",[37273]],[[64197,64197],"mapped",[37494]],[[64198,64198],"mapped",[38524]],[[64199,64199],"mapped",[38627]],[[64200,64200],"mapped",[38742]],[[64201,64201],"mapped",[38875]],[[64202,64202],"mapped",[38911]],[[64203,64203],"mapped",[38923]],[[64204,64204],"mapped",[38971]],[[64205,64205],"mapped",[39698]],[[64206,64206],"mapped",[40860]],[[64207,64207],"mapped",[141386]],[[64208,64208],"mapped",[141380]],[[64209,64209],"mapped",[144341]],[[64210,64210],"mapped",[15261]],[[64211,64211],"mapped",[16408]],[[64212,64212],"mapped",[16441]],[[64213,64213],"mapped",[152137]],[[64214,64214],"mapped",[154832]],[[64215,64215],"mapped",[163539]],[[64216,64216],"mapped",[40771]],[[64217,64217],"mapped",[40846]],[[64218,64255],"disallowed"],[[64256,64256],"mapped",[102,102]],[[64257,64257],"mapped",[102,105]],[[64258,64258],"mapped",[102,108]],[[64259,64259],"mapped"
,[102,102,105]],[[64260,64260],"mapped",[102,102,108]],[[64261,64262],"mapped",[115,116]],[[64263,64274],"disallowed"],[[64275,64275],"mapped",[1396,1398]],[[64276,64276],"mapped",[1396,1381]],[[64277,64277],"mapped",[1396,1387]],[[64278,64278],"mapped",[1406,1398]],[[64279,64279],"mapped",[1396,1389]],[[64280,64284],"disallowed"],[[64285,64285],"mapped",[1497,1460]],[[64286,64286],"valid"],[[64287,64287],"mapped",[1522,1463]],[[64288,64288],"mapped",[1506]],[[64289,64289],"mapped",[1488]],[[64290,64290],"mapped",[1491]],[[64291,64291],"mapped",[1492]],[[64292,64292],"mapped",[1499]],[[64293,64293],"mapped",[1500]],[[64294,64294],"mapped",[1501]],[[64295,64295],"mapped",[1512]],[[64296,64296],"mapped",[1514]],[[64297,64297],"disallowed_STD3_mapped",[43]],[[64298,64298],"mapped",[1513,1473]],[[64299,64299],"mapped",[1513,1474]],[[64300,64300],"mapped",[1513,1468,1473]],[[64301,64301],"mapped",[1513,1468,1474]],[[64302,64302],"mapped",[1488,1463]],[[64303,64303],"mapped",[1488,1464]],[[64304,64304],"mapped",[1488,1468]],[[64305,64305],"mapped",[1489,1468]],[[64306,64306],"mapped",[1490,1468]],[[64307,64307],"mapped",[1491,1468]],[[64308,64308],"mapped",[1492,1468]],[[64309,64309],"mapped",[1493,1468]],[[64310,64310],"mapped",[1494,1468]],[[64311,64311],"disallowed"],[[64312,64312],"mapped",[1496,1468]],[[64313,64313],"mapped",[1497,1468]],[[64314,64314],"mapped",[1498,1468]],[[64315,64315],"mapped",[1499,1468]],[[64316,64316],"mapped",[1500,1468]],[[64317,64317],"disallowed"],[[64318,64318],"mapped",[1502,1468]],[[64319,64319],"disallowed"],[[64320,64320],"mapped",[1504,1468]],[[64321,64321],"mapped",[1505,1468]],[[64322,64322],"disallowed"],[[64323,64323],"mapped",[1507,1468]],[[64324,64324],"mapped",[1508,1468]],[[64325,64325],"disallowed"],[[64326,64326],"mapped",[1510,1468]],[[64327,64327],"mapped",[1511,1468]],[[64328,64328],"mapped",[1512,1468]],[[64329,64329],"mapped",[1513,1468]],[[64330,64330],"mapped",[1514,1468]],[[64331,64331],"mapped",[1493,1465]],[[64332
,64332],"mapped",[1489,1471]],[[64333,64333],"mapped",[1499,1471]],[[64334,64334],"mapped",[1508,1471]],[[64335,64335],"mapped",[1488,1500]],[[64336,64337],"mapped",[1649]],[[64338,64341],"mapped",[1659]],[[64342,64345],"mapped",[1662]],[[64346,64349],"mapped",[1664]],[[64350,64353],"mapped",[1658]],[[64354,64357],"mapped",[1663]],[[64358,64361],"mapped",[1657]],[[64362,64365],"mapped",[1700]],[[64366,64369],"mapped",[1702]],[[64370,64373],"mapped",[1668]],[[64374,64377],"mapped",[1667]],[[64378,64381],"mapped",[1670]],[[64382,64385],"mapped",[1671]],[[64386,64387],"mapped",[1677]],[[64388,64389],"mapped",[1676]],[[64390,64391],"mapped",[1678]],[[64392,64393],"mapped",[1672]],[[64394,64395],"mapped",[1688]],[[64396,64397],"mapped",[1681]],[[64398,64401],"mapped",[1705]],[[64402,64405],"mapped",[1711]],[[64406,64409],"mapped",[1715]],[[64410,64413],"mapped",[1713]],[[64414,64415],"mapped",[1722]],[[64416,64419],"mapped",[1723]],[[64420,64421],"mapped",[1728]],[[64422,64425],"mapped",[1729]],[[64426,64429],"mapped",[1726]],[[64430,64431],"mapped",[1746]],[[64432,64433],"mapped",[1747]],[[64434,64449],"valid",[],"NV8"],[[64450,64466],"disallowed"],[[64467,64470],"mapped",[1709]],[[64471,64472],"mapped",[1735]],[[64473,64474],"mapped",[1734]],[[64475,64476],"mapped",[1736]],[[64477,64477],"mapped",[1735,1652]],[[64478,64479],"mapped",[1739]],[[64480,64481],"mapped",[1733]],[[64482,64483],"mapped",[1737]],[[64484,64487],"mapped",[1744]],[[64488,64489],"mapped",[1609]],[[64490,64491],"mapped",[1574,1575]],[[64492,64493],"mapped",[1574,1749]],[[64494,64495],"mapped",[1574,1608]],[[64496,64497],"mapped",[1574,1735]],[[64498,64499],"mapped",[1574,1734]],[[64500,64501],"mapped",[1574,1736]],[[64502,64504],"mapped",[1574,1744]],[[64505,64507],"mapped",[1574,1609]],[[64508,64511],"mapped",[1740]],[[64512,64512],"mapped",[1574,1580]],[[64513,64513],"mapped",[1574,1581]],[[64514,64514],"mapped",[1574,1605]],[[64515,64515],"mapped",[1574,1609]],[[64516,64516],"mapped",[1574,1610]]
,[[64517,64517],"mapped",[1576,1580]],[[64518,64518],"mapped",[1576,1581]],[[64519,64519],"mapped",[1576,1582]],[[64520,64520],"mapped",[1576,1605]],[[64521,64521],"mapped",[1576,1609]],[[64522,64522],"mapped",[1576,1610]],[[64523,64523],"mapped",[1578,1580]],[[64524,64524],"mapped",[1578,1581]],[[64525,64525],"mapped",[1578,1582]],[[64526,64526],"mapped",[1578,1605]],[[64527,64527],"mapped",[1578,1609]],[[64528,64528],"mapped",[1578,1610]],[[64529,64529],"mapped",[1579,1580]],[[64530,64530],"mapped",[1579,1605]],[[64531,64531],"mapped",[1579,1609]],[[64532,64532],"mapped",[1579,1610]],[[64533,64533],"mapped",[1580,1581]],[[64534,64534],"mapped",[1580,1605]],[[64535,64535],"mapped",[1581,1580]],[[64536,64536],"mapped",[1581,1605]],[[64537,64537],"mapped",[1582,1580]],[[64538,64538],"mapped",[1582,1581]],[[64539,64539],"mapped",[1582,1605]],[[64540,64540],"mapped",[1587,1580]],[[64541,64541],"mapped",[1587,1581]],[[64542,64542],"mapped",[1587,1582]],[[64543,64543],"mapped",[1587,1605]],[[64544,64544],"mapped",[1589,1581]],[[64545,64545],"mapped",[1589,1605]],[[64546,64546],"mapped",[1590,1580]],[[64547,64547],"mapped",[1590,1581]],[[64548,64548],"mapped",[1590,1582]],[[64549,64549],"mapped",[1590,1605]],[[64550,64550],"mapped",[1591,1581]],[[64551,64551],"mapped",[1591,1605]],[[64552,64552],"mapped",[1592,1605]],[[64553,64553],"mapped",[1593,1580]],[[64554,64554],"mapped",[1593,1605]],[[64555,64555],"mapped",[1594,1580]],[[64556,64556],"mapped",[1594,1605]],[[64557,64557],"mapped",[1601,1580]],[[64558,64558],"mapped",[1601,1581]],[[64559,64559],"mapped",[1601,1582]],[[64560,64560],"mapped",[1601,1605]],[[64561,64561],"mapped",[1601,1609]],[[64562,64562],"mapped",[1601,1610]],[[64563,64563],"mapped",[1602,1581]],[[64564,64564],"mapped",[1602,1605]],[[64565,64565],"mapped",[1602,1609]],[[64566,64566],"mapped",[1602,1610]],[[64567,64567],"mapped",[1603,1575]],[[64568,64568],"mapped",[1603,1580]],[[64569,64569],"mapped",[1603,1581]],[[64570,64570],"mapped",[1603,1582]],[
[64571,64571],"mapped",[1603,1604]],[[64572,64572],"mapped",[1603,1605]],[[64573,64573],"mapped",[1603,1609]],[[64574,64574],"mapped",[1603,1610]],[[64575,64575],"mapped",[1604,1580]],[[64576,64576],"mapped",[1604,1581]],[[64577,64577],"mapped",[1604,1582]],[[64578,64578],"mapped",[1604,1605]],[[64579,64579],"mapped",[1604,1609]],[[64580,64580],"mapped",[1604,1610]],[[64581,64581],"mapped",[1605,1580]],[[64582,64582],"mapped",[1605,1581]],[[64583,64583],"mapped",[1605,1582]],[[64584,64584],"mapped",[1605,1605]],[[64585,64585],"mapped",[1605,1609]],[[64586,64586],"mapped",[1605,1610]],[[64587,64587],"mapped",[1606,1580]],[[64588,64588],"mapped",[1606,1581]],[[64589,64589],"mapped",[1606,1582]],[[64590,64590],"mapped",[1606,1605]],[[64591,64591],"mapped",[1606,1609]],[[64592,64592],"mapped",[1606,1610]],[[64593,64593],"mapped",[1607,1580]],[[64594,64594],"mapped",[1607,1605]],[[64595,64595],"mapped",[1607,1609]],[[64596,64596],"mapped",[1607,1610]],[[64597,64597],"mapped",[1610,1580]],[[64598,64598],"mapped",[1610,1581]],[[64599,64599],"mapped",[1610,1582]],[[64600,64600],"mapped",[1610,1605]],[[64601,64601],"mapped",[1610,1609]],[[64602,64602],"mapped",[1610,1610]],[[64603,64603],"mapped",[1584,1648]],[[64604,64604],"mapped",[1585,1648]],[[64605,64605],"mapped",[1609,1648]],[[64606,64606],"disallowed_STD3_mapped",[32,1612,1617]],[[64607,64607],"disallowed_STD3_mapped",[32,1613,1617]],[[64608,64608],"disallowed_STD3_mapped",[32,1614,1617]],[[64609,64609],"disallowed_STD3_mapped",[32,1615,1617]],[[64610,64610],"disallowed_STD3_mapped",[32,1616,1617]],[[64611,64611],"disallowed_STD3_mapped",[32,1617,1648]],[[64612,64612],"mapped",[1574,1585]],[[64613,64613],"mapped",[1574,1586]],[[64614,64614],"mapped",[1574,1605]],[[64615,64615],"mapped",[1574,1606]],[[64616,64616],"mapped",[1574,1609]],[[64617,64617],"mapped",[1574,1610]],[[64618,64618],"mapped",[1576,1585]],[[64619,64619],"mapped",[1576,1586]],[[64620,64620],"mapped",[1576,1605]],[[64621,64621],"mapped",[1576,1606]],
[[64622,64622],"mapped",[1576,1609]],[[64623,64623],"mapped",[1576,1610]],[[64624,64624],"mapped",[1578,1585]],[[64625,64625],"mapped",[1578,1586]],[[64626,64626],"mapped",[1578,1605]],[[64627,64627],"mapped",[1578,1606]],[[64628,64628],"mapped",[1578,1609]],[[64629,64629],"mapped",[1578,1610]],[[64630,64630],"mapped",[1579,1585]],[[64631,64631],"mapped",[1579,1586]],[[64632,64632],"mapped",[1579,1605]],[[64633,64633],"mapped",[1579,1606]],[[64634,64634],"mapped",[1579,1609]],[[64635,64635],"mapped",[1579,1610]],[[64636,64636],"mapped",[1601,1609]],[[64637,64637],"mapped",[1601,1610]],[[64638,64638],"mapped",[1602,1609]],[[64639,64639],"mapped",[1602,1610]],[[64640,64640],"mapped",[1603,1575]],[[64641,64641],"mapped",[1603,1604]],[[64642,64642],"mapped",[1603,1605]],[[64643,64643],"mapped",[1603,1609]],[[64644,64644],"mapped",[1603,1610]],[[64645,64645],"mapped",[1604,1605]],[[64646,64646],"mapped",[1604,1609]],[[64647,64647],"mapped",[1604,1610]],[[64648,64648],"mapped",[1605,1575]],[[64649,64649],"mapped",[1605,1605]],[[64650,64650],"mapped",[1606,1585]],[[64651,64651],"mapped",[1606,1586]],[[64652,64652],"mapped",[1606,1605]],[[64653,64653],"mapped",[1606,1606]],[[64654,64654],"mapped",[1606,1609]],[[64655,64655],"mapped",[1606,1610]],[[64656,64656],"mapped",[1609,1648]],[[64657,64657],"mapped",[1610,1585]],[[64658,64658],"mapped",[1610,1586]],[[64659,64659],"mapped",[1610,1605]],[[64660,64660],"mapped",[1610,1606]],[[64661,64661],"mapped",[1610,1609]],[[64662,64662],"mapped",[1610,1610]],[[64663,64663],"mapped",[1574,1580]],[[64664,64664],"mapped",[1574,1581]],[[64665,64665],"mapped",[1574,1582]],[[64666,64666],"mapped",[1574,1605]],[[64667,64667],"mapped",[1574,1607]],[[64668,64668],"mapped",[1576,1580]],[[64669,64669],"mapped",[1576,1581]],[[64670,64670],"mapped",[1576,1582]],[[64671,64671],"mapped",[1576,1605]],[[64672,64672],"mapped",[1576,1607]],[[64673,64673],"mapped",[1578,1580]],[[64674,64674],"mapped",[1578,1581]],[[64675,64675],"mapped",[1578,1582]],[[
64676,64676],"mapped",[1578,1605]],[[64677,64677],"mapped",[1578,1607]],[[64678,64678],"mapped",[1579,1605]],[[64679,64679],"mapped",[1580,1581]],[[64680,64680],"mapped",[1580,1605]],[[64681,64681],"mapped",[1581,1580]],[[64682,64682],"mapped",[1581,1605]],[[64683,64683],"mapped",[1582,1580]],[[64684,64684],"mapped",[1582,1605]],[[64685,64685],"mapped",[1587,1580]],[[64686,64686],"mapped",[1587,1581]],[[64687,64687],"mapped",[1587,1582]],[[64688,64688],"mapped",[1587,1605]],[[64689,64689],"mapped",[1589,1581]],[[64690,64690],"mapped",[1589,1582]],[[64691,64691],"mapped",[1589,1605]],[[64692,64692],"mapped",[1590,1580]],[[64693,64693],"mapped",[1590,1581]],[[64694,64694],"mapped",[1590,1582]],[[64695,64695],"mapped",[1590,1605]],[[64696,64696],"mapped",[1591,1581]],[[64697,64697],"mapped",[1592,1605]],[[64698,64698],"mapped",[1593,1580]],[[64699,64699],"mapped",[1593,1605]],[[64700,64700],"mapped",[1594,1580]],[[64701,64701],"mapped",[1594,1605]],[[64702,64702],"mapped",[1601,1580]],[[64703,64703],"mapped",[1601,1581]],[[64704,64704],"mapped",[1601,1582]],[[64705,64705],"mapped",[1601,1605]],[[64706,64706],"mapped",[1602,1581]],[[64707,64707],"mapped",[1602,1605]],[[64708,64708],"mapped",[1603,1580]],[[64709,64709],"mapped",[1603,1581]],[[64710,64710],"mapped",[1603,1582]],[[64711,64711],"mapped",[1603,1604]],[[64712,64712],"mapped",[1603,1605]],[[64713,64713],"mapped",[1604,1580]],[[64714,64714],"mapped",[1604,1581]],[[64715,64715],"mapped",[1604,1582]],[[64716,64716],"mapped",[1604,1605]],[[64717,64717],"mapped",[1604,1607]],[[64718,64718],"mapped",[1605,1580]],[[64719,64719],"mapped",[1605,1581]],[[64720,64720],"mapped",[1605,1582]],[[64721,64721],"mapped",[1605,1605]],[[64722,64722],"mapped",[1606,1580]],[[64723,64723],"mapped",[1606,1581]],[[64724,64724],"mapped",[1606,1582]],[[64725,64725],"mapped",[1606,1605]],[[64726,64726],"mapped",[1606,1607]],[[64727,64727],"mapped",[1607,1580]],[[64728,64728],"mapped",[1607,1605]],[[64729,64729],"mapped",[1607,1648]],[[64
730,64730],"mapped",[1610,1580]],[[64731,64731],"mapped",[1610,1581]],[[64732,64732],"mapped",[1610,1582]],[[64733,64733],"mapped",[1610,1605]],[[64734,64734],"mapped",[1610,1607]],[[64735,64735],"mapped",[1574,1605]],[[64736,64736],"mapped",[1574,1607]],[[64737,64737],"mapped",[1576,1605]],[[64738,64738],"mapped",[1576,1607]],[[64739,64739],"mapped",[1578,1605]],[[64740,64740],"mapped",[1578,1607]],[[64741,64741],"mapped",[1579,1605]],[[64742,64742],"mapped",[1579,1607]],[[64743,64743],"mapped",[1587,1605]],[[64744,64744],"mapped",[1587,1607]],[[64745,64745],"mapped",[1588,1605]],[[64746,64746],"mapped",[1588,1607]],[[64747,64747],"mapped",[1603,1604]],[[64748,64748],"mapped",[1603,1605]],[[64749,64749],"mapped",[1604,1605]],[[64750,64750],"mapped",[1606,1605]],[[64751,64751],"mapped",[1606,1607]],[[64752,64752],"mapped",[1610,1605]],[[64753,64753],"mapped",[1610,1607]],[[64754,64754],"mapped",[1600,1614,1617]],[[64755,64755],"mapped",[1600,1615,1617]],[[64756,64756],"mapped",[1600,1616,1617]],[[64757,64757],"mapped",[1591,1609]],[[64758,64758],"mapped",[1591,1610]],[[64759,64759],"mapped",[1593,1609]],[[64760,64760],"mapped",[1593,1610]],[[64761,64761],"mapped",[1594,1609]],[[64762,64762],"mapped",[1594,1610]],[[64763,64763],"mapped",[1587,1609]],[[64764,64764],"mapped",[1587,1610]],[[64765,64765],"mapped",[1588,1609]],[[64766,64766],"mapped",[1588,1610]],[[64767,64767],"mapped",[1581,1609]],[[64768,64768],"mapped",[1581,1610]],[[64769,64769],"mapped",[1580,1609]],[[64770,64770],"mapped",[1580,1610]],[[64771,64771],"mapped",[1582,1609]],[[64772,64772],"mapped",[1582,1610]],[[64773,64773],"mapped",[1589,1609]],[[64774,64774],"mapped",[1589,1610]],[[64775,64775],"mapped",[1590,1609]],[[64776,64776],"mapped",[1590,1610]],[[64777,64777],"mapped",[1588,1580]],[[64778,64778],"mapped",[1588,1581]],[[64779,64779],"mapped",[1588,1582]],[[64780,64780],"mapped",[1588,1605]],[[64781,64781],"mapped",[1588,1585]],[[64782,64782],"mapped",[1587,1585]],[[64783,64783],"mapped",[158
9,1585]],[[64784,64784],"mapped",[1590,1585]],[[64785,64785],"mapped",[1591,1609]],[[64786,64786],"mapped",[1591,1610]],[[64787,64787],"mapped",[1593,1609]],[[64788,64788],"mapped",[1593,1610]],[[64789,64789],"mapped",[1594,1609]],[[64790,64790],"mapped",[1594,1610]],[[64791,64791],"mapped",[1587,1609]],[[64792,64792],"mapped",[1587,1610]],[[64793,64793],"mapped",[1588,1609]],[[64794,64794],"mapped",[1588,1610]],[[64795,64795],"mapped",[1581,1609]],[[64796,64796],"mapped",[1581,1610]],[[64797,64797],"mapped",[1580,1609]],[[64798,64798],"mapped",[1580,1610]],[[64799,64799],"mapped",[1582,1609]],[[64800,64800],"mapped",[1582,1610]],[[64801,64801],"mapped",[1589,1609]],[[64802,64802],"mapped",[1589,1610]],[[64803,64803],"mapped",[1590,1609]],[[64804,64804],"mapped",[1590,1610]],[[64805,64805],"mapped",[1588,1580]],[[64806,64806],"mapped",[1588,1581]],[[64807,64807],"mapped",[1588,1582]],[[64808,64808],"mapped",[1588,1605]],[[64809,64809],"mapped",[1588,1585]],[[64810,64810],"mapped",[1587,1585]],[[64811,64811],"mapped",[1589,1585]],[[64812,64812],"mapped",[1590,1585]],[[64813,64813],"mapped",[1588,1580]],[[64814,64814],"mapped",[1588,1581]],[[64815,64815],"mapped",[1588,1582]],[[64816,64816],"mapped",[1588,1605]],[[64817,64817],"mapped",[1587,1607]],[[64818,64818],"mapped",[1588,1607]],[[64819,64819],"mapped",[1591,1605]],[[64820,64820],"mapped",[1587,1580]],[[64821,64821],"mapped",[1587,1581]],[[64822,64822],"mapped",[1587,1582]],[[64823,64823],"mapped",[1588,1580]],[[64824,64824],"mapped",[1588,1581]],[[64825,64825],"mapped",[1588,1582]],[[64826,64826],"mapped",[1591,1605]],[[64827,64827],"mapped",[1592,1605]],[[64828,64829],"mapped",[1575,1611]],[[64830,64831],"valid",[],"NV8"],[[64832,64847],"disallowed"],[[64848,64848],"mapped",[1578,1580,1605]],[[64849,64850],"mapped",[1578,1581,1580]],[[64851,64851],"mapped",[1578,1581,1605]],[[64852,64852],"mapped",[1578,1582,1605]],[[64853,64853],"mapped",[1578,1605,1580]],[[64854,64854],"mapped",[1578,1605,1581]],[[64855,6485
5],"mapped",[1578,1605,1582]],[[64856,64857],"mapped",[1580,1605,1581]],[[64858,64858],"mapped",[1581,1605,1610]],[[64859,64859],"mapped",[1581,1605,1609]],[[64860,64860],"mapped",[1587,1581,1580]],[[64861,64861],"mapped",[1587,1580,1581]],[[64862,64862],"mapped",[1587,1580,1609]],[[64863,64864],"mapped",[1587,1605,1581]],[[64865,64865],"mapped",[1587,1605,1580]],[[64866,64867],"mapped",[1587,1605,1605]],[[64868,64869],"mapped",[1589,1581,1581]],[[64870,64870],"mapped",[1589,1605,1605]],[[64871,64872],"mapped",[1588,1581,1605]],[[64873,64873],"mapped",[1588,1580,1610]],[[64874,64875],"mapped",[1588,1605,1582]],[[64876,64877],"mapped",[1588,1605,1605]],[[64878,64878],"mapped",[1590,1581,1609]],[[64879,64880],"mapped",[1590,1582,1605]],[[64881,64882],"mapped",[1591,1605,1581]],[[64883,64883],"mapped",[1591,1605,1605]],[[64884,64884],"mapped",[1591,1605,1610]],[[64885,64885],"mapped",[1593,1580,1605]],[[64886,64887],"mapped",[1593,1605,1605]],[[64888,64888],"mapped",[1593,1605,1609]],[[64889,64889],"mapped",[1594,1605,1605]],[[64890,64890],"mapped",[1594,1605,1610]],[[64891,64891],"mapped",[1594,1605,1609]],[[64892,64893],"mapped",[1601,1582,1605]],[[64894,64894],"mapped",[1602,1605,1581]],[[64895,64895],"mapped",[1602,1605,1605]],[[64896,64896],"mapped",[1604,1581,1605]],[[64897,64897],"mapped",[1604,1581,1610]],[[64898,64898],"mapped",[1604,1581,1609]],[[64899,64900],"mapped",[1604,1580,1580]],[[64901,64902],"mapped",[1604,1582,1605]],[[64903,64904],"mapped",[1604,1605,1581]],[[64905,64905],"mapped",[1605,1581,1580]],[[64906,64906],"mapped",[1605,1581,1605]],[[64907,64907],"mapped",[1605,1581,1610]],[[64908,64908],"mapped",[1605,1580,1581]],[[64909,64909],"mapped",[1605,1580,1605]],[[64910,64910],"mapped",[1605,1582,1580]],[[64911,64911],"mapped",[1605,1582,1605]],[[64912,64913],"disallowed"],[[64914,64914],"mapped",[1605,1580,1582]],[[64915,64915],"mapped",[1607,1605,1580]],[[64916,64916],"mapped",[1607,1605,1605]],[[64917,64917],"mapped",[1606,1581,1605]],[[64918,6
4918],"mapped",[1606,1581,1609]],[[64919,64920],"mapped",[1606,1580,1605]],[[64921,64921],"mapped",[1606,1580,1609]],[[64922,64922],"mapped",[1606,1605,1610]],[[64923,64923],"mapped",[1606,1605,1609]],[[64924,64925],"mapped",[1610,1605,1605]],[[64926,64926],"mapped",[1576,1582,1610]],[[64927,64927],"mapped",[1578,1580,1610]],[[64928,64928],"mapped",[1578,1580,1609]],[[64929,64929],"mapped",[1578,1582,1610]],[[64930,64930],"mapped",[1578,1582,1609]],[[64931,64931],"mapped",[1578,1605,1610]],[[64932,64932],"mapped",[1578,1605,1609]],[[64933,64933],"mapped",[1580,1605,1610]],[[64934,64934],"mapped",[1580,1581,1609]],[[64935,64935],"mapped",[1580,1605,1609]],[[64936,64936],"mapped",[1587,1582,1609]],[[64937,64937],"mapped",[1589,1581,1610]],[[64938,64938],"mapped",[1588,1581,1610]],[[64939,64939],"mapped",[1590,1581,1610]],[[64940,64940],"mapped",[1604,1580,1610]],[[64941,64941],"mapped",[1604,1605,1610]],[[64942,64942],"mapped",[1610,1581,1610]],[[64943,64943],"mapped",[1610,1580,1610]],[[64944,64944],"mapped",[1610,1605,1610]],[[64945,64945],"mapped",[1605,1605,1610]],[[64946,64946],"mapped",[1602,1605,1610]],[[64947,64947],"mapped",[1606,1581,1610]],[[64948,64948],"mapped",[1602,1605,1581]],[[64949,64949],"mapped",[1604,1581,1605]],[[64950,64950],"mapped",[1593,1605,1610]],[[64951,64951],"mapped",[1603,1605,1610]],[[64952,64952],"mapped",[1606,1580,1581]],[[64953,64953],"mapped",[1605,1582,1610]],[[64954,64954],"mapped",[1604,1580,1605]],[[64955,64955],"mapped",[1603,1605,1605]],[[64956,64956],"mapped",[1604,1580,1605]],[[64957,64957],"mapped",[1606,1580,1581]],[[64958,64958],"mapped",[1580,1581,1610]],[[64959,64959],"mapped",[1581,1580,1610]],[[64960,64960],"mapped",[1605,1580,1610]],[[64961,64961],"mapped",[1601,1605,1610]],[[64962,64962],"mapped",[1576,1581,1610]],[[64963,64963],"mapped",[1603,1605,1605]],[[64964,64964],"mapped",[1593,1580,1605]],[[64965,64965],"mapped",[1589,1605,1605]],[[64966,64966],"mapped",[1587,1582,1610]],[[64967,64967],"mapped",[1606,1580,
1610]],[[64968,64975],"disallowed"],[[64976,65007],"disallowed"],[[65008,65008],"mapped",[1589,1604,1746]],[[65009,65009],"mapped",[1602,1604,1746]],[[65010,65010],"mapped",[1575,1604,1604,1607]],[[65011,65011],"mapped",[1575,1603,1576,1585]],[[65012,65012],"mapped",[1605,1581,1605,1583]],[[65013,65013],"mapped",[1589,1604,1593,1605]],[[65014,65014],"mapped",[1585,1587,1608,1604]],[[65015,65015],"mapped",[1593,1604,1610,1607]],[[65016,65016],"mapped",[1608,1587,1604,1605]],[[65017,65017],"mapped",[1589,1604,1609]],[[65018,65018],"disallowed_STD3_mapped",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],"disallowed_STD3_mapped",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],"mapped",[1585,1740,1575,1604]],[[65021,65021],"valid",[],"NV8"],[[65022,65023],"disallowed"],[[65024,65039],"ignored"],[[65040,65040],"disallowed_STD3_mapped",[44]],[[65041,65041],"mapped",[12289]],[[65042,65042],"disallowed"],[[65043,65043],"disallowed_STD3_mapped",[58]],[[65044,65044],"disallowed_STD3_mapped",[59]],[[65045,65045],"disallowed_STD3_mapped",[33]],[[65046,65046],"disallowed_STD3_mapped",[63]],[[65047,65047],"mapped",[12310]],[[65048,65048],"mapped",[12311]],[[65049,65049],"disallowed"],[[65050,65055],"disallowed"],[[65056,65059],"valid"],[[65060,65062],"valid"],[[65063,65069],"valid"],[[65070,65071],"valid"],[[65072,65072],"disallowed"],[[65073,65073],"mapped",[8212]],[[65074,65074],"mapped",[8211]],[[65075,65076],"disallowed_STD3_mapped",[95]],[[65077,65077],"disallowed_STD3_mapped",[40]],[[65078,65078],"disallowed_STD3_mapped",[41]],[[65079,65079],"disallowed_STD3_mapped",[123]],[[65080,65080],"disallowed_STD3_mapped",[125]],[[65081,65081],"mapped",[12308]],[[65082,65082],"mapped",[12309]],[[65083,65083],"mapped",[12304]],[[65084,65084],"mapped",[12305]],[[65085,65085],"mapped",[12298]],[[65086,65086],"mapped",[12299]],[[65087,65087],"mapped",[12296]],[[65088,65088],"mapped",[12297]],[[65089,65089],"mapped",[12300]],[[6
5090,65090],"mapped",[12301]],[[65091,65091],"mapped",[12302]],[[65092,65092],"mapped",[12303]],[[65093,65094],"valid",[],"NV8"],[[65095,65095],"disallowed_STD3_mapped",[91]],[[65096,65096],"disallowed_STD3_mapped",[93]],[[65097,65100],"disallowed_STD3_mapped",[32,773]],[[65101,65103],"disallowed_STD3_mapped",[95]],[[65104,65104],"disallowed_STD3_mapped",[44]],[[65105,65105],"mapped",[12289]],[[65106,65106],"disallowed"],[[65107,65107],"disallowed"],[[65108,65108],"disallowed_STD3_mapped",[59]],[[65109,65109],"disallowed_STD3_mapped",[58]],[[65110,65110],"disallowed_STD3_mapped",[63]],[[65111,65111],"disallowed_STD3_mapped",[33]],[[65112,65112],"mapped",[8212]],[[65113,65113],"disallowed_STD3_mapped",[40]],[[65114,65114],"disallowed_STD3_mapped",[41]],[[65115,65115],"disallowed_STD3_mapped",[123]],[[65116,65116],"disallowed_STD3_mapped",[125]],[[65117,65117],"mapped",[12308]],[[65118,65118],"mapped",[12309]],[[65119,65119],"disallowed_STD3_mapped",[35]],[[65120,65120],"disallowed_STD3_mapped",[38]],[[65121,65121],"disallowed_STD3_mapped",[42]],[[65122,65122],"disallowed_STD3_mapped",[43]],[[65123,65123],"mapped",[45]],[[65124,65124],"disallowed_STD3_mapped",[60]],[[65125,65125],"disallowed_STD3_mapped",[62]],[[65126,65126],"disallowed_STD3_mapped",[61]],[[65127,65127],"disallowed"],[[65128,65128],"disallowed_STD3_mapped",[92]],[[65129,65129],"disallowed_STD3_mapped",[36]],[[65130,65130],"disallowed_STD3_mapped",[37]],[[65131,65131],"disallowed_STD3_mapped",[64]],[[65132,65135],"disallowed"],[[65136,65136],"disallowed_STD3_mapped",[32,1611]],[[65137,65137],"mapped",[1600,1611]],[[65138,65138],"disallowed_STD3_mapped",[32,1612]],[[65139,65139],"valid"],[[65140,65140],"disallowed_STD3_mapped",[32,1613]],[[65141,65141],"disallowed"],[[65142,65142],"disallowed_STD3_mapped",[32,1614]],[[65143,65143],"mapped",[1600,1614]],[[65144,65144],"disallowed_STD3_mapped",[32,1615]],[[65145,65145],"mapped",[1600,1615]],[[65146,65146],"disallowed_STD3_mapped",[32,1616]],[[65147,65147]
,"mapped",[1600,1616]],[[65148,65148],"disallowed_STD3_mapped",[32,1617]],[[65149,65149],"mapped",[1600,1617]],[[65150,65150],"disallowed_STD3_mapped",[32,1618]],[[65151,65151],"mapped",[1600,1618]],[[65152,65152],"mapped",[1569]],[[65153,65154],"mapped",[1570]],[[65155,65156],"mapped",[1571]],[[65157,65158],"mapped",[1572]],[[65159,65160],"mapped",[1573]],[[65161,65164],"mapped",[1574]],[[65165,65166],"mapped",[1575]],[[65167,65170],"mapped",[1576]],[[65171,65172],"mapped",[1577]],[[65173,65176],"mapped",[1578]],[[65177,65180],"mapped",[1579]],[[65181,65184],"mapped",[1580]],[[65185,65188],"mapped",[1581]],[[65189,65192],"mapped",[1582]],[[65193,65194],"mapped",[1583]],[[65195,65196],"mapped",[1584]],[[65197,65198],"mapped",[1585]],[[65199,65200],"mapped",[1586]],[[65201,65204],"mapped",[1587]],[[65205,65208],"mapped",[1588]],[[65209,65212],"mapped",[1589]],[[65213,65216],"mapped",[1590]],[[65217,65220],"mapped",[1591]],[[65221,65224],"mapped",[1592]],[[65225,65228],"mapped",[1593]],[[65229,65232],"mapped",[1594]],[[65233,65236],"mapped",[1601]],[[65237,65240],"mapped",[1602]],[[65241,65244],"mapped",[1603]],[[65245,65248],"mapped",[1604]],[[65249,65252],"mapped",[1605]],[[65253,65256],"mapped",[1606]],[[65257,65260],"mapped",[1607]],[[65261,65262],"mapped",[1608]],[[65263,65264],"mapped",[1609]],[[65265,65268],"mapped",[1610]],[[65269,65270],"mapped",[1604,1570]],[[65271,65272],"mapped",[1604,1571]],[[65273,65274],"mapped",[1604,1573]],[[65275,65276],"mapped",[1604,1575]],[[65277,65278],"disallowed"],[[65279,65279],"ignored"],[[65280,65280],"disallowed"],[[65281,65281],"disallowed_STD3_mapped",[33]],[[65282,65282],"disallowed_STD3_mapped",[34]],[[65283,65283],"disallowed_STD3_mapped",[35]],[[65284,65284],"disallowed_STD3_mapped",[36]],[[65285,65285],"disallowed_STD3_mapped",[37]],[[65286,65286],"disallowed_STD3_mapped",[38]],[[65287,65287],"disallowed_STD3_mapped",[39]],[[65288,65288],"disallowed_STD3_mapped",[40]],[[65289,65289],"disallowed_STD3_mapped",[41]],[[6
5290,65290],"disallowed_STD3_mapped",[42]],[[65291,65291],"disallowed_STD3_mapped",[43]],[[65292,65292],"disallowed_STD3_mapped",[44]],[[65293,65293],"mapped",[45]],[[65294,65294],"mapped",[46]],[[65295,65295],"disallowed_STD3_mapped",[47]],[[65296,65296],"mapped",[48]],[[65297,65297],"mapped",[49]],[[65298,65298],"mapped",[50]],[[65299,65299],"mapped",[51]],[[65300,65300],"mapped",[52]],[[65301,65301],"mapped",[53]],[[65302,65302],"mapped",[54]],[[65303,65303],"mapped",[55]],[[65304,65304],"mapped",[56]],[[65305,65305],"mapped",[57]],[[65306,65306],"disallowed_STD3_mapped",[58]],[[65307,65307],"disallowed_STD3_mapped",[59]],[[65308,65308],"disallowed_STD3_mapped",[60]],[[65309,65309],"disallowed_STD3_mapped",[61]],[[65310,65310],"disallowed_STD3_mapped",[62]],[[65311,65311],"disallowed_STD3_mapped",[63]],[[65312,65312],"disallowed_STD3_mapped",[64]],[[65313,65313],"mapped",[97]],[[65314,65314],"mapped",[98]],[[65315,65315],"mapped",[99]],[[65316,65316],"mapped",[100]],[[65317,65317],"mapped",[101]],[[65318,65318],"mapped",[102]],[[65319,65319],"mapped",[103]],[[65320,65320],"mapped",[104]],[[65321,65321],"mapped",[105]],[[65322,65322],"mapped",[106]],[[65323,65323],"mapped",[107]],[[65324,65324],"mapped",[108]],[[65325,65325],"mapped",[109]],[[65326,65326],"mapped",[110]],[[65327,65327],"mapped",[111]],[[65328,65328],"mapped",[112]],[[65329,65329],"mapped",[113]],[[65330,65330],"mapped",[114]],[[65331,65331],"mapped",[115]],[[65332,65332],"mapped",[116]],[[65333,65333],"mapped",[117]],[[65334,65334],"mapped",[118]],[[65335,65335],"mapped",[119]],[[65336,65336],"mapped",[120]],[[65337,65337],"mapped",[121]],[[65338,65338],"mapped",[122]],[[65339,65339],"disallowed_STD3_mapped",[91]],[[65340,65340],"disallowed_STD3_mapped",[92]],[[65341,65341],"disallowed_STD3_mapped",[93]],[[65342,65342],"disallowed_STD3_mapped",[94]],[[65343,65343],"disallowed_STD3_mapped",[95]],[[65344,65344],"disallowed_STD3_mapped",[96]],[[65345,65345],"mapped",[97]],[[65346,65346],"mapped",[98]
],[[65347,65347],"mapped",[99]],[[65348,65348],"mapped",[100]],[[65349,65349],"mapped",[101]],[[65350,65350],"mapped",[102]],[[65351,65351],"mapped",[103]],[[65352,65352],"mapped",[104]],[[65353,65353],"mapped",[105]],[[65354,65354],"mapped",[106]],[[65355,65355],"mapped",[107]],[[65356,65356],"mapped",[108]],[[65357,65357],"mapped",[109]],[[65358,65358],"mapped",[110]],[[65359,65359],"mapped",[111]],[[65360,65360],"mapped",[112]],[[65361,65361],"mapped",[113]],[[65362,65362],"mapped",[114]],[[65363,65363],"mapped",[115]],[[65364,65364],"mapped",[116]],[[65365,65365],"mapped",[117]],[[65366,65366],"mapped",[118]],[[65367,65367],"mapped",[119]],[[65368,65368],"mapped",[120]],[[65369,65369],"mapped",[121]],[[65370,65370],"mapped",[122]],[[65371,65371],"disallowed_STD3_mapped",[123]],[[65372,65372],"disallowed_STD3_mapped",[124]],[[65373,65373],"disallowed_STD3_mapped",[125]],[[65374,65374],"disallowed_STD3_mapped",[126]],[[65375,65375],"mapped",[10629]],[[65376,65376],"mapped",[10630]],[[65377,65377],"mapped",[46]],[[65378,65378],"mapped",[12300]],[[65379,65379],"mapped",[12301]],[[65380,65380],"mapped",[12289]],[[65381,65381],"mapped",[12539]],[[65382,65382],"mapped",[12530]],[[65383,65383],"mapped",[12449]],[[65384,65384],"mapped",[12451]],[[65385,65385],"mapped",[12453]],[[65386,65386],"mapped",[12455]],[[65387,65387],"mapped",[12457]],[[65388,65388],"mapped",[12515]],[[65389,65389],"mapped",[12517]],[[65390,65390],"mapped",[12519]],[[65391,65391],"mapped",[12483]],[[65392,65392],"mapped",[12540]],[[65393,65393],"mapped",[12450]],[[65394,65394],"mapped",[12452]],[[65395,65395],"mapped",[12454]],[[65396,65396],"mapped",[12456]],[[65397,65397],"mapped",[12458]],[[65398,65398],"mapped",[12459]],[[65399,65399],"mapped",[12461]],[[65400,65400],"mapped",[12463]],[[65401,65401],"mapped",[12465]],[[65402,65402],"mapped",[12467]],[[65403,65403],"mapped",[12469]],[[65404,65404],"mapped",[12471]],[[65405,65405],"mapped",[12473]],[[65406,65406],"mapped",[12475]],[[65407,65407]
,"mapped",[12477]],[[65408,65408],"mapped",[12479]],[[65409,65409],"mapped",[12481]],[[65410,65410],"mapped",[12484]],[[65411,65411],"mapped",[12486]],[[65412,65412],"mapped",[12488]],[[65413,65413],"mapped",[12490]],[[65414,65414],"mapped",[12491]],[[65415,65415],"mapped",[12492]],[[65416,65416],"mapped",[12493]],[[65417,65417],"mapped",[12494]],[[65418,65418],"mapped",[12495]],[[65419,65419],"mapped",[12498]],[[65420,65420],"mapped",[12501]],[[65421,65421],"mapped",[12504]],[[65422,65422],"mapped",[12507]],[[65423,65423],"mapped",[12510]],[[65424,65424],"mapped",[12511]],[[65425,65425],"mapped",[12512]],[[65426,65426],"mapped",[12513]],[[65427,65427],"mapped",[12514]],[[65428,65428],"mapped",[12516]],[[65429,65429],"mapped",[12518]],[[65430,65430],"mapped",[12520]],[[65431,65431],"mapped",[12521]],[[65432,65432],"mapped",[12522]],[[65433,65433],"mapped",[12523]],[[65434,65434],"mapped",[12524]],[[65435,65435],"mapped",[12525]],[[65436,65436],"mapped",[12527]],[[65437,65437],"mapped",[12531]],[[65438,65438],"mapped",[12441]],[[65439,65439],"mapped",[12442]],[[65440,65440],"disallowed"],[[65441,65441],"mapped",[4352]],[[65442,65442],"mapped",[4353]],[[65443,65443],"mapped",[4522]],[[65444,65444],"mapped",[4354]],[[65445,65445],"mapped",[4524]],[[65446,65446],"mapped",[4525]],[[65447,65447],"mapped",[4355]],[[65448,65448],"mapped",[4356]],[[65449,65449],"mapped",[4357]],[[65450,65450],"mapped",[4528]],[[65451,65451],"mapped",[4529]],[[65452,65452],"mapped",[4530]],[[65453,65453],"mapped",[4531]],[[65454,65454],"mapped",[4532]],[[65455,65455],"mapped",[4533]],[[65456,65456],"mapped",[4378]],[[65457,65457],"mapped",[4358]],[[65458,65458],"mapped",[4359]],[[65459,65459],"mapped",[4360]],[[65460,65460],"mapped",[4385]],[[65461,65461],"mapped",[4361]],[[65462,65462],"mapped",[4362]],[[65463,65463],"mapped",[4363]],[[65464,65464],"mapped",[4364]],[[65465,65465],"mapped",[4365]],[[65466,65466],"mapped",[4366]],[[65467,65467],"mapped",[4367]],[[65468,65468],"mapped",[4368]],
[[65469,65469],"mapped",[4369]],[[65470,65470],"mapped",[4370]],[[65471,65473],"disallowed"],[[65474,65474],"mapped",[4449]],[[65475,65475],"mapped",[4450]],[[65476,65476],"mapped",[4451]],[[65477,65477],"mapped",[4452]],[[65478,65478],"mapped",[4453]],[[65479,65479],"mapped",[4454]],[[65480,65481],"disallowed"],[[65482,65482],"mapped",[4455]],[[65483,65483],"mapped",[4456]],[[65484,65484],"mapped",[4457]],[[65485,65485],"mapped",[4458]],[[65486,65486],"mapped",[4459]],[[65487,65487],"mapped",[4460]],[[65488,65489],"disallowed"],[[65490,65490],"mapped",[4461]],[[65491,65491],"mapped",[4462]],[[65492,65492],"mapped",[4463]],[[65493,65493],"mapped",[4464]],[[65494,65494],"mapped",[4465]],[[65495,65495],"mapped",[4466]],[[65496,65497],"disallowed"],[[65498,65498],"mapped",[4467]],[[65499,65499],"mapped",[4468]],[[65500,65500],"mapped",[4469]],[[65501,65503],"disallowed"],[[65504,65504],"mapped",[162]],[[65505,65505],"mapped",[163]],[[65506,65506],"mapped",[172]],[[65507,65507],"disallowed_STD3_mapped",[32,772]],[[65508,65508],"mapped",[166]],[[65509,65509],"mapped",[165]],[[65510,65510],"mapped",[8361]],[[65511,65511],"disallowed"],[[65512,65512],"mapped",[9474]],[[65513,65513],"mapped",[8592]],[[65514,65514],"mapped",[8593]],[[65515,65515],"mapped",[8594]],[[65516,65516],"mapped",[8595]],[[65517,65517],"mapped",[9632]],[[65518,65518],"mapped",[9675]],[[65519,65528],"disallowed"],[[65529,65531],"disallowed"],[[65532,65532],"disallowed"],[[65533,65533],"disallowed"],[[65534,65535],"disallowed"],[[65536,65547],"valid"],[[65548,65548],"disallowed"],[[65549,65574],"valid"],[[65575,65575],"disallowed"],[[65576,65594],"valid"],[[65595,65595],"disallowed"],[[65596,65597],"valid"],[[65598,65598],"disallowed"],[[65599,65613],"valid"],[[65614,65615],"disallowed"],[[65616,65629],"valid"],[[65630,65663],"disallowed"],[[65664,65786],"valid"],[[65787,65791],"disallowed"],[[65792,65794],"valid",[],"NV8"],[[65795,65798],"disallowed"],[[65799,65843],"valid",[],"NV8"],[[65844,65846],"di
sallowed"],[[65847,65855],"valid",[],"NV8"],[[65856,65930],"valid",[],"NV8"],[[65931,65932],"valid",[],"NV8"],[[65933,65935],"disallowed"],[[65936,65947],"valid",[],"NV8"],[[65948,65951],"disallowed"],[[65952,65952],"valid",[],"NV8"],[[65953,65999],"disallowed"],[[66000,66044],"valid",[],"NV8"],[[66045,66045],"valid"],[[66046,66175],"disallowed"],[[66176,66204],"valid"],[[66205,66207],"disallowed"],[[66208,66256],"valid"],[[66257,66271],"disallowed"],[[66272,66272],"valid"],[[66273,66299],"valid",[],"NV8"],[[66300,66303],"disallowed"],[[66304,66334],"valid"],[[66335,66335],"valid"],[[66336,66339],"valid",[],"NV8"],[[66340,66351],"disallowed"],[[66352,66368],"valid"],[[66369,66369],"valid",[],"NV8"],[[66370,66377],"valid"],[[66378,66378],"valid",[],"NV8"],[[66379,66383],"disallowed"],[[66384,66426],"valid"],[[66427,66431],"disallowed"],[[66432,66461],"valid"],[[66462,66462],"disallowed"],[[66463,66463],"valid",[],"NV8"],[[66464,66499],"valid"],[[66500,66503],"disallowed"],[[66504,66511],"valid"],[[66512,66517],"valid",[],"NV8"],[[66518,66559],"disallowed"],[[66560,66560],"mapped",[66600]],[[66561,66561],"mapped",[66601]],[[66562,66562],"mapped",[66602]],[[66563,66563],"mapped",[66603]],[[66564,66564],"mapped",[66604]],[[66565,66565],"mapped",[66605]],[[66566,66566],"mapped",[66606]],[[66567,66567],"mapped",[66607]],[[66568,66568],"mapped",[66608]],[[66569,66569],"mapped",[66609]],[[66570,66570],"mapped",[66610]],[[66571,66571],"mapped",[66611]],[[66572,66572],"mapped",[66612]],[[66573,66573],"mapped",[66613]],[[66574,66574],"mapped",[66614]],[[66575,66575],"mapped",[66615]],[[66576,66576],"mapped",[66616]],[[66577,66577],"mapped",[66617]],[[66578,66578],"mapped",[66618]],[[66579,66579],"mapped",[66619]],[[66580,66580],"mapped",[66620]],[[66581,66581],"mapped",[66621]],[[66582,66582],"mapped",[66622]],[[66583,66583],"mapped",[66623]],[[66584,66584],"mapped",[66624]],[[66585,66585],"mapped",[66625]],[[66586,66586],"mapped",[66626]],[[66587,66587],"mapped",[66627]],[[66
588,66588],"mapped",[66628]],[[66589,66589],"mapped",[66629]],[[66590,66590],"mapped",[66630]],[[66591,66591],"mapped",[66631]],[[66592,66592],"mapped",[66632]],[[66593,66593],"mapped",[66633]],[[66594,66594],"mapped",[66634]],[[66595,66595],"mapped",[66635]],[[66596,66596],"mapped",[66636]],[[66597,66597],"mapped",[66637]],[[66598,66598],"mapped",[66638]],[[66599,66599],"mapped",[66639]],[[66600,66637],"valid"],[[66638,66717],"valid"],[[66718,66719],"disallowed"],[[66720,66729],"valid"],[[66730,66815],"disallowed"],[[66816,66855],"valid"],[[66856,66863],"disallowed"],[[66864,66915],"valid"],[[66916,66926],"disallowed"],[[66927,66927],"valid",[],"NV8"],[[66928,67071],"disallowed"],[[67072,67382],"valid"],[[67383,67391],"disallowed"],[[67392,67413],"valid"],[[67414,67423],"disallowed"],[[67424,67431],"valid"],[[67432,67583],"disallowed"],[[67584,67589],"valid"],[[67590,67591],"disallowed"],[[67592,67592],"valid"],[[67593,67593],"disallowed"],[[67594,67637],"valid"],[[67638,67638],"disallowed"],[[67639,67640],"valid"],[[67641,67643],"disallowed"],[[67644,67644],"valid"],[[67645,67646],"disallowed"],[[67647,67647],"valid"],[[67648,67669],"valid"],[[67670,67670],"disallowed"],[[67671,67679],"valid",[],"NV8"],[[67680,67702],"valid"],[[67703,67711],"valid",[],"NV8"],[[67712,67742],"valid"],[[67743,67750],"disallowed"],[[67751,67759],"valid",[],"NV8"],[[67760,67807],"disallowed"],[[67808,67826],"valid"],[[67827,67827],"disallowed"],[[67828,67829],"valid"],[[67830,67834],"disallowed"],[[67835,67839],"valid",[],"NV8"],[[67840,67861],"valid"],[[67862,67865],"valid",[],"NV8"],[[67866,67867],"valid",[],"NV8"],[[67868,67870],"disallowed"],[[67871,67871],"valid",[],"NV8"],[[67872,67897],"valid"],[[67898,67902],"disallowed"],[[67903,67903],"valid",[],"NV8"],[[67904,67967],"disallowed"],[[67968,68023],"valid"],[[68024,68027],"disallowed"],[[68028,68029],"valid",[],"NV8"],[[68030,68031],"valid"],[[68032,68047],"valid",[],"NV8"],[[68048,68049],"disallowed"],[[68050,68095],"valid",[],
"NV8"],[[68096,68099],"valid"],[[68100,68100],"disallowed"],[[68101,68102],"valid"],[[68103,68107],"disallowed"],[[68108,68115],"valid"],[[68116,68116],"disallowed"],[[68117,68119],"valid"],[[68120,68120],"disallowed"],[[68121,68147],"valid"],[[68148,68151],"disallowed"],[[68152,68154],"valid"],[[68155,68158],"disallowed"],[[68159,68159],"valid"],[[68160,68167],"valid",[],"NV8"],[[68168,68175],"disallowed"],[[68176,68184],"valid",[],"NV8"],[[68185,68191],"disallowed"],[[68192,68220],"valid"],[[68221,68223],"valid",[],"NV8"],[[68224,68252],"valid"],[[68253,68255],"valid",[],"NV8"],[[68256,68287],"disallowed"],[[68288,68295],"valid"],[[68296,68296],"valid",[],"NV8"],[[68297,68326],"valid"],[[68327,68330],"disallowed"],[[68331,68342],"valid",[],"NV8"],[[68343,68351],"disallowed"],[[68352,68405],"valid"],[[68406,68408],"disallowed"],[[68409,68415],"valid",[],"NV8"],[[68416,68437],"valid"],[[68438,68439],"disallowed"],[[68440,68447],"valid",[],"NV8"],[[68448,68466],"valid"],[[68467,68471],"disallowed"],[[68472,68479],"valid",[],"NV8"],[[68480,68497],"valid"],[[68498,68504],"disallowed"],[[68505,68508],"valid",[],"NV8"],[[68509,68520],"disallowed"],[[68521,68527],"valid",[],"NV8"],[[68528,68607],"disallowed"],[[68608,68680],"valid"],[[68681,68735],"disallowed"],[[68736,68736],"mapped",[68800]],[[68737,68737],"mapped",[68801]],[[68738,68738],"mapped",[68802]],[[68739,68739],"mapped",[68803]],[[68740,68740],"mapped",[68804]],[[68741,68741],"mapped",[68805]],[[68742,68742],"mapped",[68806]],[[68743,68743],"mapped",[68807]],[[68744,68744],"mapped",[68808]],[[68745,68745],"mapped",[68809]],[[68746,68746],"mapped",[68810]],[[68747,68747],"mapped",[68811]],[[68748,68748],"mapped",[68812]],[[68749,68749],"mapped",[68813]],[[68750,68750],"mapped",[68814]],[[68751,68751],"mapped",[68815]],[[68752,68752],"mapped",[68816]],[[68753,68753],"mapped",[68817]],[[68754,68754],"mapped",[68818]],[[68755,68755],"mapped",[68819]],[[68756,68756],"mapped",[68820]],[[68757,68757],"mapped",[68821]
],[[68758,68758],"mapped",[68822]],[[68759,68759],"mapped",[68823]],[[68760,68760],"mapped",[68824]],[[68761,68761],"mapped",[68825]],[[68762,68762],"mapped",[68826]],[[68763,68763],"mapped",[68827]],[[68764,68764],"mapped",[68828]],[[68765,68765],"mapped",[68829]],[[68766,68766],"mapped",[68830]],[[68767,68767],"mapped",[68831]],[[68768,68768],"mapped",[68832]],[[68769,68769],"mapped",[68833]],[[68770,68770],"mapped",[68834]],[[68771,68771],"mapped",[68835]],[[68772,68772],"mapped",[68836]],[[68773,68773],"mapped",[68837]],[[68774,68774],"mapped",[68838]],[[68775,68775],"mapped",[68839]],[[68776,68776],"mapped",[68840]],[[68777,68777],"mapped",[68841]],[[68778,68778],"mapped",[68842]],[[68779,68779],"mapped",[68843]],[[68780,68780],"mapped",[68844]],[[68781,68781],"mapped",[68845]],[[68782,68782],"mapped",[68846]],[[68783,68783],"mapped",[68847]],[[68784,68784],"mapped",[68848]],[[68785,68785],"mapped",[68849]],[[68786,68786],"mapped",[68850]],[[68787,68799],"disallowed"],[[68800,68850],"valid"],[[68851,68857],"disallowed"],[[68858,68863],"valid",[],"NV8"],[[68864,69215],"disallowed"],[[69216,69246],"valid",[],"NV8"],[[69247,69631],"disallowed"],[[69632,69702],"valid"],[[69703,69709],"valid",[],"NV8"],[[69710,69713],"disallowed"],[[69714,69733],"valid",[],"NV8"],[[69734,69743],"valid"],[[69744,69758],"disallowed"],[[69759,69759],"valid"],[[69760,69818],"valid"],[[69819,69820],"valid",[],"NV8"],[[69821,69821],"disallowed"],[[69822,69825],"valid",[],"NV8"],[[69826,69839],"disallowed"],[[69840,69864],"valid"],[[69865,69871],"disallowed"],[[69872,69881],"valid"],[[69882,69887],"disallowed"],[[69888,69940],"valid"],[[69941,69941],"disallowed"],[[69942,69951],"valid"],[[69952,69955],"valid",[],"NV8"],[[69956,69967],"disallowed"],[[69968,70003],"valid"],[[70004,70005],"valid",[],"NV8"],[[70006,70006],"valid"],[[70007,70015],"disallowed"],[[70016,70084],"valid"],[[70085,70088],"valid",[],"NV8"],[[70089,70089],"valid",[],"NV8"],[[70090,70092],"valid"],[[70093,70093],"valid"
,[],"NV8"],[[70094,70095],"disallowed"],[[70096,70105],"valid"],[[70106,70106],"valid"],[[70107,70107],"valid",[],"NV8"],[[70108,70108],"valid"],[[70109,70111],"valid",[],"NV8"],[[70112,70112],"disallowed"],[[70113,70132],"valid",[],"NV8"],[[70133,70143],"disallowed"],[[70144,70161],"valid"],[[70162,70162],"disallowed"],[[70163,70199],"valid"],[[70200,70205],"valid",[],"NV8"],[[70206,70271],"disallowed"],[[70272,70278],"valid"],[[70279,70279],"disallowed"],[[70280,70280],"valid"],[[70281,70281],"disallowed"],[[70282,70285],"valid"],[[70286,70286],"disallowed"],[[70287,70301],"valid"],[[70302,70302],"disallowed"],[[70303,70312],"valid"],[[70313,70313],"valid",[],"NV8"],[[70314,70319],"disallowed"],[[70320,70378],"valid"],[[70379,70383],"disallowed"],[[70384,70393],"valid"],[[70394,70399],"disallowed"],[[70400,70400],"valid"],[[70401,70403],"valid"],[[70404,70404],"disallowed"],[[70405,70412],"valid"],[[70413,70414],"disallowed"],[[70415,70416],"valid"],[[70417,70418],"disallowed"],[[70419,70440],"valid"],[[70441,70441],"disallowed"],[[70442,70448],"valid"],[[70449,70449],"disallowed"],[[70450,70451],"valid"],[[70452,70452],"disallowed"],[[70453,70457],"valid"],[[70458,70459],"disallowed"],[[70460,70468],"valid"],[[70469,70470],"disallowed"],[[70471,70472],"valid"],[[70473,70474],"disallowed"],[[70475,70477],"valid"],[[70478,70479],"disallowed"],[[70480,70480],"valid"],[[70481,70486],"disallowed"],[[70487,70487],"valid"],[[70488,70492],"disallowed"],[[70493,70499],"valid"],[[70500,70501],"disallowed"],[[70502,70508],"valid"],[[70509,70511],"disallowed"],[[70512,70516],"valid"],[[70517,70783],"disallowed"],[[70784,70853],"valid"],[[70854,70854],"valid",[],"NV8"],[[70855,70855],"valid"],[[70856,70863],"disallowed"],[[70864,70873],"valid"],[[70874,71039],"disallowed"],[[71040,71093],"valid"],[[71094,71095],"disallowed"],[[71096,71104],"valid"],[[71105,71113],"valid",[],"NV8"],[[71114,71127],"valid",[],"NV8"],[[71128,71133],"valid"],[[71134,71167],"disallowed"],[[71168,71
232],"valid"],[[71233,71235],"valid",[],"NV8"],[[71236,71236],"valid"],[[71237,71247],"disallowed"],[[71248,71257],"valid"],[[71258,71295],"disallowed"],[[71296,71351],"valid"],[[71352,71359],"disallowed"],[[71360,71369],"valid"],[[71370,71423],"disallowed"],[[71424,71449],"valid"],[[71450,71452],"disallowed"],[[71453,71467],"valid"],[[71468,71471],"disallowed"],[[71472,71481],"valid"],[[71482,71487],"valid",[],"NV8"],[[71488,71839],"disallowed"],[[71840,71840],"mapped",[71872]],[[71841,71841],"mapped",[71873]],[[71842,71842],"mapped",[71874]],[[71843,71843],"mapped",[71875]],[[71844,71844],"mapped",[71876]],[[71845,71845],"mapped",[71877]],[[71846,71846],"mapped",[71878]],[[71847,71847],"mapped",[71879]],[[71848,71848],"mapped",[71880]],[[71849,71849],"mapped",[71881]],[[71850,71850],"mapped",[71882]],[[71851,71851],"mapped",[71883]],[[71852,71852],"mapped",[71884]],[[71853,71853],"mapped",[71885]],[[71854,71854],"mapped",[71886]],[[71855,71855],"mapped",[71887]],[[71856,71856],"mapped",[71888]],[[71857,71857],"mapped",[71889]],[[71858,71858],"mapped",[71890]],[[71859,71859],"mapped",[71891]],[[71860,71860],"mapped",[71892]],[[71861,71861],"mapped",[71893]],[[71862,71862],"mapped",[71894]],[[71863,71863],"mapped",[71895]],[[71864,71864],"mapped",[71896]],[[71865,71865],"mapped",[71897]],[[71866,71866],"mapped",[71898]],[[71867,71867],"mapped",[71899]],[[71868,71868],"mapped",[71900]],[[71869,71869],"mapped",[71901]],[[71870,71870],"mapped",[71902]],[[71871,71871],"mapped",[71903]],[[71872,71913],"valid"],[[71914,71922],"valid",[],"NV8"],[[71923,71934],"disallowed"],[[71935,71935],"valid"],[[71936,72383],"disallowed"],[[72384,72440],"valid"],[[72441,73727],"disallowed"],[[73728,74606],"valid"],[[74607,74648],"valid"],[[74649,74649],"valid"],[[74650,74751],"disallowed"],[[74752,74850],"valid",[],"NV8"],[[74851,74862],"valid",[],"NV8"],[[74863,74863],"disallowed"],[[74864,74867],"valid",[],"NV8"],[[74868,74868],"valid",[],"NV8"],[[74869,74879],"disallowed"],[[74880,75
075],"valid"],[[75076,77823],"disallowed"],[[77824,78894],"valid"],[[78895,82943],"disallowed"],[[82944,83526],"valid"],[[83527,92159],"disallowed"],[[92160,92728],"valid"],[[92729,92735],"disallowed"],[[92736,92766],"valid"],[[92767,92767],"disallowed"],[[92768,92777],"valid"],[[92778,92781],"disallowed"],[[92782,92783],"valid",[],"NV8"],[[92784,92879],"disallowed"],[[92880,92909],"valid"],[[92910,92911],"disallowed"],[[92912,92916],"valid"],[[92917,92917],"valid",[],"NV8"],[[92918,92927],"disallowed"],[[92928,92982],"valid"],[[92983,92991],"valid",[],"NV8"],[[92992,92995],"valid"],[[92996,92997],"valid",[],"NV8"],[[92998,93007],"disallowed"],[[93008,93017],"valid"],[[93018,93018],"disallowed"],[[93019,93025],"valid",[],"NV8"],[[93026,93026],"disallowed"],[[93027,93047],"valid"],[[93048,93052],"disallowed"],[[93053,93071],"valid"],[[93072,93951],"disallowed"],[[93952,94020],"valid"],[[94021,94031],"disallowed"],[[94032,94078],"valid"],[[94079,94094],"disallowed"],[[94095,94111],"valid"],[[94112,110591],"disallowed"],[[110592,110593],"valid"],[[110594,113663],"disallowed"],[[113664,113770],"valid"],[[113771,113775],"disallowed"],[[113776,113788],"valid"],[[113789,113791],"disallowed"],[[113792,113800],"valid"],[[113801,113807],"disallowed"],[[113808,113817],"valid"],[[113818,113819],"disallowed"],[[113820,113820],"valid",[],"NV8"],[[113821,113822],"valid"],[[113823,113823],"valid",[],"NV8"],[[113824,113827],"ignored"],[[113828,118783],"disallowed"],[[118784,119029],"valid",[],"NV8"],[[119030,119039],"disallowed"],[[119040,119078],"valid",[],"NV8"],[[119079,119080],"disallowed"],[[119081,119081],"valid",[],"NV8"],[[119082,119133],"valid",[],"NV8"],[[119134,119134],"mapped",[119127,119141]],[[119135,119135],"mapped",[119128,119141]],[[119136,119136],"mapped",[119128,119141,119150]],[[119137,119137],"mapped",[119128,119141,119151]],[[119138,119138],"mapped",[119128,119141,119152]],[[119139,119139],"mapped",[119128,119141,119153]],[[119140,119140],"mapped",[119128,11914
1,119154]],[[119141,119154],"valid",[],"NV8"],[[119155,119162],"disallowed"],[[119163,119226],"valid",[],"NV8"],[[119227,119227],"mapped",[119225,119141]],[[119228,119228],"mapped",[119226,119141]],[[119229,119229],"mapped",[119225,119141,119150]],[[119230,119230],"mapped",[119226,119141,119150]],[[119231,119231],"mapped",[119225,119141,119151]],[[119232,119232],"mapped",[119226,119141,119151]],[[119233,119261],"valid",[],"NV8"],[[119262,119272],"valid",[],"NV8"],[[119273,119295],"disallowed"],[[119296,119365],"valid",[],"NV8"],[[119366,119551],"disallowed"],[[119552,119638],"valid",[],"NV8"],[[119639,119647],"disallowed"],[[119648,119665],"valid",[],"NV8"],[[119666,119807],"disallowed"],[[119808,119808],"mapped",[97]],[[119809,119809],"mapped",[98]],[[119810,119810],"mapped",[99]],[[119811,119811],"mapped",[100]],[[119812,119812],"mapped",[101]],[[119813,119813],"mapped",[102]],[[119814,119814],"mapped",[103]],[[119815,119815],"mapped",[104]],[[119816,119816],"mapped",[105]],[[119817,119817],"mapped",[106]],[[119818,119818],"mapped",[107]],[[119819,119819],"mapped",[108]],[[119820,119820],"mapped",[109]],[[119821,119821],"mapped",[110]],[[119822,119822],"mapped",[111]],[[119823,119823],"mapped",[112]],[[119824,119824],"mapped",[113]],[[119825,119825],"mapped",[114]],[[119826,119826],"mapped",[115]],[[119827,119827],"mapped",[116]],[[119828,119828],"mapped",[117]],[[119829,119829],"mapped",[118]],[[119830,119830],"mapped",[119]],[[119831,119831],"mapped",[120]],[[119832,119832],"mapped",[121]],[[119833,119833],"mapped",[122]],[[119834,119834],"mapped",[97]],[[119835,119835],"mapped",[98]],[[119836,119836],"mapped",[99]],[[119837,119837],"mapped",[100]],[[119838,119838],"mapped",[101]],[[119839,119839],"mapped",[102]],[[119840,119840],"mapped",[103]],[[119841,119841],"mapped",[104]],[[119842,119842],"mapped",[105]],[[119843,119843],"mapped",[106]],[[119844,119844],"mapped",[107]],[[119845,119845],"mapped",[108]],[[119846,119846],"mapped",[109]],[[119847,119847],"mapp
ed",[110]],[[119848,119848],"mapped",[111]],[[119849,119849],"mapped",[112]],[[119850,119850],"mapped",[113]],[[119851,119851],"mapped",[114]],[[119852,119852],"mapped",[115]],[[119853,119853],"mapped",[116]],[[119854,119854],"mapped",[117]],[[119855,119855],"mapped",[118]],[[119856,119856],"mapped",[119]],[[119857,119857],"mapped",[120]],[[119858,119858],"mapped",[121]],[[119859,119859],"mapped",[122]],[[119860,119860],"mapped",[97]],[[119861,119861],"mapped",[98]],[[119862,119862],"mapped",[99]],[[119863,119863],"mapped",[100]],[[119864,119864],"mapped",[101]],[[119865,119865],"mapped",[102]],[[119866,119866],"mapped",[103]],[[119867,119867],"mapped",[104]],[[119868,119868],"mapped",[105]],[[119869,119869],"mapped",[106]],[[119870,119870],"mapped",[107]],[[119871,119871],"mapped",[108]],[[119872,119872],"mapped",[109]],[[119873,119873],"mapped",[110]],[[119874,119874],"mapped",[111]],[[119875,119875],"mapped",[112]],[[119876,119876],"mapped",[113]],[[119877,119877],"mapped",[114]],[[119878,119878],"mapped",[115]],[[119879,119879],"mapped",[116]],[[119880,119880],"mapped",[117]],[[119881,119881],"mapped",[118]],[[119882,119882],"mapped",[119]],[[119883,119883],"mapped",[120]],[[119884,119884],"mapped",[121]],[[119885,119885],"mapped",[122]],[[119886,119886],"mapped",[97]],[[119887,119887],"mapped",[98]],[[119888,119888],"mapped",[99]],[[119889,119889],"mapped",[100]],[[119890,119890],"mapped",[101]],[[119891,119891],"mapped",[102]],[[119892,119892],"mapped",[103]],[[119893,119893],"disallowed"],[[119894,119894],"mapped",[105]],[[119895,119895],"mapped",[106]],[[119896,119896],"mapped",[107]],[[119897,119897],"mapped",[108]],[[119898,119898],"mapped",[109]],[[119899,119899],"mapped",[110]],[[119900,119900],"mapped",[111]],[[119901,119901],"mapped",[112]],[[119902,119902],"mapped",[113]],[[119903,119903],"mapped",[114]],[[119904,119904],"mapped",[115]],[[119905,119905],"mapped",[116]],[[119906,119906],"mapped",[117]],[[119907,119907],"mapped",[118]],[[119908,119908],
"mapped",[119]],[[119909,119909],"mapped",[120]],[[119910,119910],"mapped",[121]],[[119911,119911],"mapped",[122]],[[119912,119912],"mapped",[97]],[[119913,119913],"mapped",[98]],[[119914,119914],"mapped",[99]],[[119915,119915],"mapped",[100]],[[119916,119916],"mapped",[101]],[[119917,119917],"mapped",[102]],[[119918,119918],"mapped",[103]],[[119919,119919],"mapped",[104]],[[119920,119920],"mapped",[105]],[[119921,119921],"mapped",[106]],[[119922,119922],"mapped",[107]],[[119923,119923],"mapped",[108]],[[119924,119924],"mapped",[109]],[[119925,119925],"mapped",[110]],[[119926,119926],"mapped",[111]],[[119927,119927],"mapped",[112]],[[119928,119928],"mapped",[113]],[[119929,119929],"mapped",[114]],[[119930,119930],"mapped",[115]],[[119931,119931],"mapped",[116]],[[119932,119932],"mapped",[117]],[[119933,119933],"mapped",[118]],[[119934,119934],"mapped",[119]],[[119935,119935],"mapped",[120]],[[119936,119936],"mapped",[121]],[[119937,119937],"mapped",[122]],[[119938,119938],"mapped",[97]],[[119939,119939],"mapped",[98]],[[119940,119940],"mapped",[99]],[[119941,119941],"mapped",[100]],[[119942,119942],"mapped",[101]],[[119943,119943],"mapped",[102]],[[119944,119944],"mapped",[103]],[[119945,119945],"mapped",[104]],[[119946,119946],"mapped",[105]],[[119947,119947],"mapped",[106]],[[119948,119948],"mapped",[107]],[[119949,119949],"mapped",[108]],[[119950,119950],"mapped",[109]],[[119951,119951],"mapped",[110]],[[119952,119952],"mapped",[111]],[[119953,119953],"mapped",[112]],[[119954,119954],"mapped",[113]],[[119955,119955],"mapped",[114]],[[119956,119956],"mapped",[115]],[[119957,119957],"mapped",[116]],[[119958,119958],"mapped",[117]],[[119959,119959],"mapped",[118]],[[119960,119960],"mapped",[119]],[[119961,119961],"mapped",[120]],[[119962,119962],"mapped",[121]],[[119963,119963],"mapped",[122]],[[119964,119964],"mapped",[97]],[[119965,119965],"disallowed"],[[119966,119966],"mapped",[99]],[[119967,119967],"mapped",[100]],[[119968,119969],"disallowed"],[[119970,119970]
,"mapped",[103]],[[119971,119972],"disallowed"],[[119973,119973],"mapped",[106]],[[119974,119974],"mapped",[107]],[[119975,119976],"disallowed"],[[119977,119977],"mapped",[110]],[[119978,119978],"mapped",[111]],[[119979,119979],"mapped",[112]],[[119980,119980],"mapped",[113]],[[119981,119981],"disallowed"],[[119982,119982],"mapped",[115]],[[119983,119983],"mapped",[116]],[[119984,119984],"mapped",[117]],[[119985,119985],"mapped",[118]],[[119986,119986],"mapped",[119]],[[119987,119987],"mapped",[120]],[[119988,119988],"mapped",[121]],[[119989,119989],"mapped",[122]],[[119990,119990],"mapped",[97]],[[119991,119991],"mapped",[98]],[[119992,119992],"mapped",[99]],[[119993,119993],"mapped",[100]],[[119994,119994],"disallowed"],[[119995,119995],"mapped",[102]],[[119996,119996],"disallowed"],[[119997,119997],"mapped",[104]],[[119998,119998],"mapped",[105]],[[119999,119999],"mapped",[106]],[[120000,120000],"mapped",[107]],[[120001,120001],"mapped",[108]],[[120002,120002],"mapped",[109]],[[120003,120003],"mapped",[110]],[[120004,120004],"disallowed"],[[120005,120005],"mapped",[112]],[[120006,120006],"mapped",[113]],[[120007,120007],"mapped",[114]],[[120008,120008],"mapped",[115]],[[120009,120009],"mapped",[116]],[[120010,120010],"mapped",[117]],[[120011,120011],"mapped",[118]],[[120012,120012],"mapped",[119]],[[120013,120013],"mapped",[120]],[[120014,120014],"mapped",[121]],[[120015,120015],"mapped",[122]],[[120016,120016],"mapped",[97]],[[120017,120017],"mapped",[98]],[[120018,120018],"mapped",[99]],[[120019,120019],"mapped",[100]],[[120020,120020],"mapped",[101]],[[120021,120021],"mapped",[102]],[[120022,120022],"mapped",[103]],[[120023,120023],"mapped",[104]],[[120024,120024],"mapped",[105]],[[120025,120025],"mapped",[106]],[[120026,120026],"mapped",[107]],[[120027,120027],"mapped",[108]],[[120028,120028],"mapped",[109]],[[120029,120029],"mapped",[110]],[[120030,120030],"mapped",[111]],[[120031,120031],"mapped",[112]],[[120032,120032],"mapped",[113]],[[120033,120033],"map
ped",[114]],[[120034,120034],"mapped",[115]],[[120035,120035],"mapped",[116]],[[120036,120036],"mapped",[117]],[[120037,120037],"mapped",[118]],[[120038,120038],"mapped",[119]],[[120039,120039],"mapped",[120]],[[120040,120040],"mapped",[121]],[[120041,120041],"mapped",[122]],[[120042,120042],"mapped",[97]],[[120043,120043],"mapped",[98]],[[120044,120044],"mapped",[99]],[[120045,120045],"mapped",[100]],[[120046,120046],"mapped",[101]],[[120047,120047],"mapped",[102]],[[120048,120048],"mapped",[103]],[[120049,120049],"mapped",[104]],[[120050,120050],"mapped",[105]],[[120051,120051],"mapped",[106]],[[120052,120052],"mapped",[107]],[[120053,120053],"mapped",[108]],[[120054,120054],"mapped",[109]],[[120055,120055],"mapped",[110]],[[120056,120056],"mapped",[111]],[[120057,120057],"mapped",[112]],[[120058,120058],"mapped",[113]],[[120059,120059],"mapped",[114]],[[120060,120060],"mapped",[115]],[[120061,120061],"mapped",[116]],[[120062,120062],"mapped",[117]],[[120063,120063],"mapped",[118]],[[120064,120064],"mapped",[119]],[[120065,120065],"mapped",[120]],[[120066,120066],"mapped",[121]],[[120067,120067],"mapped",[122]],[[120068,120068],"mapped",[97]],[[120069,120069],"mapped",[98]],[[120070,120070],"disallowed"],[[120071,120071],"mapped",[100]],[[120072,120072],"mapped",[101]],[[120073,120073],"mapped",[102]],[[120074,120074],"mapped",[103]],[[120075,120076],"disallowed"],[[120077,120077],"mapped",[106]],[[120078,120078],"mapped",[107]],[[120079,120079],"mapped",[108]],[[120080,120080],"mapped",[109]],[[120081,120081],"mapped",[110]],[[120082,120082],"mapped",[111]],[[120083,120083],"mapped",[112]],[[120084,120084],"mapped",[113]],[[120085,120085],"disallowed"],[[120086,120086],"mapped",[115]],[[120087,120087],"mapped",[116]],[[120088,120088],"mapped",[117]],[[120089,120089],"mapped",[118]],[[120090,120090],"mapped",[119]],[[120091,120091],"mapped",[120]],[[120092,120092],"mapped",[121]],[[120093,120093],"disallowed"],[[120094,120094],"mapped",[97]],[[120095,120095],"mapp
ed",[98]],[[120096,120096],"mapped",[99]],[[120097,120097],"mapped",[100]],[[120098,120098],"mapped",[101]],[[120099,120099],"mapped",[102]],[[120100,120100],"mapped",[103]],[[120101,120101],"mapped",[104]],[[120102,120102],"mapped",[105]],[[120103,120103],"mapped",[106]],[[120104,120104],"mapped",[107]],[[120105,120105],"mapped",[108]],[[120106,120106],"mapped",[109]],[[120107,120107],"mapped",[110]],[[120108,120108],"mapped",[111]],[[120109,120109],"mapped",[112]],[[120110,120110],"mapped",[113]],[[120111,120111],"mapped",[114]],[[120112,120112],"mapped",[115]],[[120113,120113],"mapped",[116]],[[120114,120114],"mapped",[117]],[[120115,120115],"mapped",[118]],[[120116,120116],"mapped",[119]],[[120117,120117],"mapped",[120]],[[120118,120118],"mapped",[121]],[[120119,120119],"mapped",[122]],[[120120,120120],"mapped",[97]],[[120121,120121],"mapped",[98]],[[120122,120122],"disallowed"],[[120123,120123],"mapped",[100]],[[120124,120124],"mapped",[101]],[[120125,120125],"mapped",[102]],[[120126,120126],"mapped",[103]],[[120127,120127],"disallowed"],[[120128,120128],"mapped",[105]],[[120129,120129],"mapped",[106]],[[120130,120130],"mapped",[107]],[[120131,120131],"mapped",[108]],[[120132,120132],"mapped",[109]],[[120133,120133],"disallowed"],[[120134,120134],"mapped",[111]],[[120135,120137],"disallowed"],[[120138,120138],"mapped",[115]],[[120139,120139],"mapped",[116]],[[120140,120140],"mapped",[117]],[[120141,120141],"mapped",[118]],[[120142,120142],"mapped",[119]],[[120143,120143],"mapped",[120]],[[120144,120144],"mapped",[121]],[[120145,120145],"disallowed"],[[120146,120146],"mapped",[97]],[[120147,120147],"mapped",[98]],[[120148,120148],"mapped",[99]],[[120149,120149],"mapped",[100]],[[120150,120150],"mapped",[101]],[[120151,120151],"mapped",[102]],[[120152,120152],"mapped",[103]],[[120153,120153],"mapped",[104]],[[120154,120154],"mapped",[105]],[[120155,120155],"mapped",[106]],[[120156,120156],"mapped",[107]],[[120157,120157],"mapped",[108]],[[120158,120158],"mapped",
[109]],[[120159,120159],"mapped",[110]],[[120160,120160],"mapped",[111]],[[120161,120161],"mapped",[112]],[[120162,120162],"mapped",[113]],[[120163,120163],"mapped",[114]],[[120164,120164],"mapped",[115]],[[120165,120165],"mapped",[116]],[[120166,120166],"mapped",[117]],[[120167,120167],"mapped",[118]],[[120168,120168],"mapped",[119]],[[120169,120169],"mapped",[120]],[[120170,120170],"mapped",[121]],[[120171,120171],"mapped",[122]],[[120172,120172],"mapped",[97]],[[120173,120173],"mapped",[98]],[[120174,120174],"mapped",[99]],[[120175,120175],"mapped",[100]],[[120176,120176],"mapped",[101]],[[120177,120177],"mapped",[102]],[[120178,120178],"mapped",[103]],[[120179,120179],"mapped",[104]],[[120180,120180],"mapped",[105]],[[120181,120181],"mapped",[106]],[[120182,120182],"mapped",[107]],[[120183,120183],"mapped",[108]],[[120184,120184],"mapped",[109]],[[120185,120185],"mapped",[110]],[[120186,120186],"mapped",[111]],[[120187,120187],"mapped",[112]],[[120188,120188],"mapped",[113]],[[120189,120189],"mapped",[114]],[[120190,120190],"mapped",[115]],[[120191,120191],"mapped",[116]],[[120192,120192],"mapped",[117]],[[120193,120193],"mapped",[118]],[[120194,120194],"mapped",[119]],[[120195,120195],"mapped",[120]],[[120196,120196],"mapped",[121]],[[120197,120197],"mapped",[122]],[[120198,120198],"mapped",[97]],[[120199,120199],"mapped",[98]],[[120200,120200],"mapped",[99]],[[120201,120201],"mapped",[100]],[[120202,120202],"mapped",[101]],[[120203,120203],"mapped",[102]],[[120204,120204],"mapped",[103]],[[120205,120205],"mapped",[104]],[[120206,120206],"mapped",[105]],[[120207,120207],"mapped",[106]],[[120208,120208],"mapped",[107]],[[120209,120209],"mapped",[108]],[[120210,120210],"mapped",[109]],[[120211,120211],"mapped",[110]],[[120212,120212],"mapped",[111]],[[120213,120213],"mapped",[112]],[[120214,120214],"mapped",[113]],[[120215,120215],"mapped",[114]],[[120216,120216],"mapped",[115]],[[120217,120217],"mapped",[116]],[[120218,120218],"mapped",[117]],[[120219,120219],"m
apped",[118]],[[120220,120220],"mapped",[119]],[[120221,120221],"mapped",[120]],[[120222,120222],"mapped",[121]],[[120223,120223],"mapped",[122]],[[120224,120224],"mapped",[97]],[[120225,120225],"mapped",[98]],[[120226,120226],"mapped",[99]],[[120227,120227],"mapped",[100]],[[120228,120228],"mapped",[101]],[[120229,120229],"mapped",[102]],[[120230,120230],"mapped",[103]],[[120231,120231],"mapped",[104]],[[120232,120232],"mapped",[105]],[[120233,120233],"mapped",[106]],[[120234,120234],"mapped",[107]],[[120235,120235],"mapped",[108]],[[120236,120236],"mapped",[109]],[[120237,120237],"mapped",[110]],[[120238,120238],"mapped",[111]],[[120239,120239],"mapped",[112]],[[120240,120240],"mapped",[113]],[[120241,120241],"mapped",[114]],[[120242,120242],"mapped",[115]],[[120243,120243],"mapped",[116]],[[120244,120244],"mapped",[117]],[[120245,120245],"mapped",[118]],[[120246,120246],"mapped",[119]],[[120247,120247],"mapped",[120]],[[120248,120248],"mapped",[121]],[[120249,120249],"mapped",[122]],[[120250,120250],"mapped",[97]],[[120251,120251],"mapped",[98]],[[120252,120252],"mapped",[99]],[[120253,120253],"mapped",[100]],[[120254,120254],"mapped",[101]],[[120255,120255],"mapped",[102]],[[120256,120256],"mapped",[103]],[[120257,120257],"mapped",[104]],[[120258,120258],"mapped",[105]],[[120259,120259],"mapped",[106]],[[120260,120260],"mapped",[107]],[[120261,120261],"mapped",[108]],[[120262,120262],"mapped",[109]],[[120263,120263],"mapped",[110]],[[120264,120264],"mapped",[111]],[[120265,120265],"mapped",[112]],[[120266,120266],"mapped",[113]],[[120267,120267],"mapped",[114]],[[120268,120268],"mapped",[115]],[[120269,120269],"mapped",[116]],[[120270,120270],"mapped",[117]],[[120271,120271],"mapped",[118]],[[120272,120272],"mapped",[119]],[[120273,120273],"mapped",[120]],[[120274,120274],"mapped",[121]],[[120275,120275],"mapped",[122]],[[120276,120276],"mapped",[97]],[[120277,120277],"mapped",[98]],[[120278,120278],"mapped",[99]],[[120279,120279],"mapped",[100]],[[120280,120280
],"mapped",[101]],[[120281,120281],"mapped",[102]],[[120282,120282],"mapped",[103]],[[120283,120283],"mapped",[104]],[[120284,120284],"mapped",[105]],[[120285,120285],"mapped",[106]],[[120286,120286],"mapped",[107]],[[120287,120287],"mapped",[108]],[[120288,120288],"mapped",[109]],[[120289,120289],"mapped",[110]],[[120290,120290],"mapped",[111]],[[120291,120291],"mapped",[112]],[[120292,120292],"mapped",[113]],[[120293,120293],"mapped",[114]],[[120294,120294],"mapped",[115]],[[120295,120295],"mapped",[116]],[[120296,120296],"mapped",[117]],[[120297,120297],"mapped",[118]],[[120298,120298],"mapped",[119]],[[120299,120299],"mapped",[120]],[[120300,120300],"mapped",[121]],[[120301,120301],"mapped",[122]],[[120302,120302],"mapped",[97]],[[120303,120303],"mapped",[98]],[[120304,120304],"mapped",[99]],[[120305,120305],"mapped",[100]],[[120306,120306],"mapped",[101]],[[120307,120307],"mapped",[102]],[[120308,120308],"mapped",[103]],[[120309,120309],"mapped",[104]],[[120310,120310],"mapped",[105]],[[120311,120311],"mapped",[106]],[[120312,120312],"mapped",[107]],[[120313,120313],"mapped",[108]],[[120314,120314],"mapped",[109]],[[120315,120315],"mapped",[110]],[[120316,120316],"mapped",[111]],[[120317,120317],"mapped",[112]],[[120318,120318],"mapped",[113]],[[120319,120319],"mapped",[114]],[[120320,120320],"mapped",[115]],[[120321,120321],"mapped",[116]],[[120322,120322],"mapped",[117]],[[120323,120323],"mapped",[118]],[[120324,120324],"mapped",[119]],[[120325,120325],"mapped",[120]],[[120326,120326],"mapped",[121]],[[120327,120327],"mapped",[122]],[[120328,120328],"mapped",[97]],[[120329,120329],"mapped",[98]],[[120330,120330],"mapped",[99]],[[120331,120331],"mapped",[100]],[[120332,120332],"mapped",[101]],[[120333,120333],"mapped",[102]],[[120334,120334],"mapped",[103]],[[120335,120335],"mapped",[104]],[[120336,120336],"mapped",[105]],[[120337,120337],"mapped",[106]],[[120338,120338],"mapped",[107]],[[120339,120339],"mapped",[108]],[[120340,120340],"mapped",[109]],[[120341
,120341],"mapped",[110]],[[120342,120342],"mapped",[111]],[[120343,120343],"mapped",[112]],[[120344,120344],"mapped",[113]],[[120345,120345],"mapped",[114]],[[120346,120346],"mapped",[115]],[[120347,120347],"mapped",[116]],[[120348,120348],"mapped",[117]],[[120349,120349],"mapped",[118]],[[120350,120350],"mapped",[119]],[[120351,120351],"mapped",[120]],[[120352,120352],"mapped",[121]],[[120353,120353],"mapped",[122]],[[120354,120354],"mapped",[97]],[[120355,120355],"mapped",[98]],[[120356,120356],"mapped",[99]],[[120357,120357],"mapped",[100]],[[120358,120358],"mapped",[101]],[[120359,120359],"mapped",[102]],[[120360,120360],"mapped",[103]],[[120361,120361],"mapped",[104]],[[120362,120362],"mapped",[105]],[[120363,120363],"mapped",[106]],[[120364,120364],"mapped",[107]],[[120365,120365],"mapped",[108]],[[120366,120366],"mapped",[109]],[[120367,120367],"mapped",[110]],[[120368,120368],"mapped",[111]],[[120369,120369],"mapped",[112]],[[120370,120370],"mapped",[113]],[[120371,120371],"mapped",[114]],[[120372,120372],"mapped",[115]],[[120373,120373],"mapped",[116]],[[120374,120374],"mapped",[117]],[[120375,120375],"mapped",[118]],[[120376,120376],"mapped",[119]],[[120377,120377],"mapped",[120]],[[120378,120378],"mapped",[121]],[[120379,120379],"mapped",[122]],[[120380,120380],"mapped",[97]],[[120381,120381],"mapped",[98]],[[120382,120382],"mapped",[99]],[[120383,120383],"mapped",[100]],[[120384,120384],"mapped",[101]],[[120385,120385],"mapped",[102]],[[120386,120386],"mapped",[103]],[[120387,120387],"mapped",[104]],[[120388,120388],"mapped",[105]],[[120389,120389],"mapped",[106]],[[120390,120390],"mapped",[107]],[[120391,120391],"mapped",[108]],[[120392,120392],"mapped",[109]],[[120393,120393],"mapped",[110]],[[120394,120394],"mapped",[111]],[[120395,120395],"mapped",[112]],[[120396,120396],"mapped",[113]],[[120397,120397],"mapped",[114]],[[120398,120398],"mapped",[115]],[[120399,120399],"mapped",[116]],[[120400,120400],"mapped",[117]],[[120401,120401],"mapped",[118]],[
[120402,120402],"mapped",[119]],[[120403,120403],"mapped",[120]],[[120404,120404],"mapped",[121]],[[120405,120405],"mapped",[122]],[[120406,120406],"mapped",[97]],[[120407,120407],"mapped",[98]],[[120408,120408],"mapped",[99]],[[120409,120409],"mapped",[100]],[[120410,120410],"mapped",[101]],[[120411,120411],"mapped",[102]],[[120412,120412],"mapped",[103]],[[120413,120413],"mapped",[104]],[[120414,120414],"mapped",[105]],[[120415,120415],"mapped",[106]],[[120416,120416],"mapped",[107]],[[120417,120417],"mapped",[108]],[[120418,120418],"mapped",[109]],[[120419,120419],"mapped",[110]],[[120420,120420],"mapped",[111]],[[120421,120421],"mapped",[112]],[[120422,120422],"mapped",[113]],[[120423,120423],"mapped",[114]],[[120424,120424],"mapped",[115]],[[120425,120425],"mapped",[116]],[[120426,120426],"mapped",[117]],[[120427,120427],"mapped",[118]],[[120428,120428],"mapped",[119]],[[120429,120429],"mapped",[120]],[[120430,120430],"mapped",[121]],[[120431,120431],"mapped",[122]],[[120432,120432],"mapped",[97]],[[120433,120433],"mapped",[98]],[[120434,120434],"mapped",[99]],[[120435,120435],"mapped",[100]],[[120436,120436],"mapped",[101]],[[120437,120437],"mapped",[102]],[[120438,120438],"mapped",[103]],[[120439,120439],"mapped",[104]],[[120440,120440],"mapped",[105]],[[120441,120441],"mapped",[106]],[[120442,120442],"mapped",[107]],[[120443,120443],"mapped",[108]],[[120444,120444],"mapped",[109]],[[120445,120445],"mapped",[110]],[[120446,120446],"mapped",[111]],[[120447,120447],"mapped",[112]],[[120448,120448],"mapped",[113]],[[120449,120449],"mapped",[114]],[[120450,120450],"mapped",[115]],[[120451,120451],"mapped",[116]],[[120452,120452],"mapped",[117]],[[120453,120453],"mapped",[118]],[[120454,120454],"mapped",[119]],[[120455,120455],"mapped",[120]],[[120456,120456],"mapped",[121]],[[120457,120457],"mapped",[122]],[[120458,120458],"mapped",[97]],[[120459,120459],"mapped",[98]],[[120460,120460],"mapped",[99]],[[120461,120461],"mapped",[100]],[[120462,120462],"mapped",[101
]],[[120463,120463],"mapped",[102]],[[120464,120464],"mapped",[103]],[[120465,120465],"mapped",[104]],[[120466,120466],"mapped",[105]],[[120467,120467],"mapped",[106]],[[120468,120468],"mapped",[107]],[[120469,120469],"mapped",[108]],[[120470,120470],"mapped",[109]],[[120471,120471],"mapped",[110]],[[120472,120472],"mapped",[111]],[[120473,120473],"mapped",[112]],[[120474,120474],"mapped",[113]],[[120475,120475],"mapped",[114]],[[120476,120476],"mapped",[115]],[[120477,120477],"mapped",[116]],[[120478,120478],"mapped",[117]],[[120479,120479],"mapped",[118]],[[120480,120480],"mapped",[119]],[[120481,120481],"mapped",[120]],[[120482,120482],"mapped",[121]],[[120483,120483],"mapped",[122]],[[120484,120484],"mapped",[305]],[[120485,120485],"mapped",[567]],[[120486,120487],"disallowed"],[[120488,120488],"mapped",[945]],[[120489,120489],"mapped",[946]],[[120490,120490],"mapped",[947]],[[120491,120491],"mapped",[948]],[[120492,120492],"mapped",[949]],[[120493,120493],"mapped",[950]],[[120494,120494],"mapped",[951]],[[120495,120495],"mapped",[952]],[[120496,120496],"mapped",[953]],[[120497,120497],"mapped",[954]],[[120498,120498],"mapped",[955]],[[120499,120499],"mapped",[956]],[[120500,120500],"mapped",[957]],[[120501,120501],"mapped",[958]],[[120502,120502],"mapped",[959]],[[120503,120503],"mapped",[960]],[[120504,120504],"mapped",[961]],[[120505,120505],"mapped",[952]],[[120506,120506],"mapped",[963]],[[120507,120507],"mapped",[964]],[[120508,120508],"mapped",[965]],[[120509,120509],"mapped",[966]],[[120510,120510],"mapped",[967]],[[120511,120511],"mapped",[968]],[[120512,120512],"mapped",[969]],[[120513,120513],"mapped",[8711]],[[120514,120514],"mapped",[945]],[[120515,120515],"mapped",[946]],[[120516,120516],"mapped",[947]],[[120517,120517],"mapped",[948]],[[120518,120518],"mapped",[949]],[[120519,120519],"mapped",[950]],[[120520,120520],"mapped",[951]],[[120521,120521],"mapped",[952]],[[120522,120522],"mapped",[953]],[[120523,120523],"mapped",[954]],[[120524,120524],"
mapped",[955]],[[120525,120525],"mapped",[956]],[[120526,120526],"mapped",[957]],[[120527,120527],"mapped",[958]],[[120528,120528],"mapped",[959]],[[120529,120529],"mapped",[960]],[[120530,120530],"mapped",[961]],[[120531,120532],"mapped",[963]],[[120533,120533],"mapped",[964]],[[120534,120534],"mapped",[965]],[[120535,120535],"mapped",[966]],[[120536,120536],"mapped",[967]],[[120537,120537],"mapped",[968]],[[120538,120538],"mapped",[969]],[[120539,120539],"mapped",[8706]],[[120540,120540],"mapped",[949]],[[120541,120541],"mapped",[952]],[[120542,120542],"mapped",[954]],[[120543,120543],"mapped",[966]],[[120544,120544],"mapped",[961]],[[120545,120545],"mapped",[960]],[[120546,120546],"mapped",[945]],[[120547,120547],"mapped",[946]],[[120548,120548],"mapped",[947]],[[120549,120549],"mapped",[948]],[[120550,120550],"mapped",[949]],[[120551,120551],"mapped",[950]],[[120552,120552],"mapped",[951]],[[120553,120553],"mapped",[952]],[[120554,120554],"mapped",[953]],[[120555,120555],"mapped",[954]],[[120556,120556],"mapped",[955]],[[120557,120557],"mapped",[956]],[[120558,120558],"mapped",[957]],[[120559,120559],"mapped",[958]],[[120560,120560],"mapped",[959]],[[120561,120561],"mapped",[960]],[[120562,120562],"mapped",[961]],[[120563,120563],"mapped",[952]],[[120564,120564],"mapped",[963]],[[120565,120565],"mapped",[964]],[[120566,120566],"mapped",[965]],[[120567,120567],"mapped",[966]],[[120568,120568],"mapped",[967]],[[120569,120569],"mapped",[968]],[[120570,120570],"mapped",[969]],[[120571,120571],"mapped",[8711]],[[120572,120572],"mapped",[945]],[[120573,120573],"mapped",[946]],[[120574,120574],"mapped",[947]],[[120575,120575],"mapped",[948]],[[120576,120576],"mapped",[949]],[[120577,120577],"mapped",[950]],[[120578,120578],"mapped",[951]],[[120579,120579],"mapped",[952]],[[120580,120580],"mapped",[953]],[[120581,120581],"mapped",[954]],[[120582,120582],"mapped",[955]],[[120583,120583],"mapped",[956]],[[120584,120584],"mapped",[957]],[[120585,120585],"mapped",[958]],[[1
20586,120586],"mapped",[959]],[[120587,120587],"mapped",[960]],[[120588,120588],"mapped",[961]],[[120589,120590],"mapped",[963]],[[120591,120591],"mapped",[964]],[[120592,120592],"mapped",[965]],[[120593,120593],"mapped",[966]],[[120594,120594],"mapped",[967]],[[120595,120595],"mapped",[968]],[[120596,120596],"mapped",[969]],[[120597,120597],"mapped",[8706]],[[120598,120598],"mapped",[949]],[[120599,120599],"mapped",[952]],[[120600,120600],"mapped",[954]],[[120601,120601],"mapped",[966]],[[120602,120602],"mapped",[961]],[[120603,120603],"mapped",[960]],[[120604,120604],"mapped",[945]],[[120605,120605],"mapped",[946]],[[120606,120606],"mapped",[947]],[[120607,120607],"mapped",[948]],[[120608,120608],"mapped",[949]],[[120609,120609],"mapped",[950]],[[120610,120610],"mapped",[951]],[[120611,120611],"mapped",[952]],[[120612,120612],"mapped",[953]],[[120613,120613],"mapped",[954]],[[120614,120614],"mapped",[955]],[[120615,120615],"mapped",[956]],[[120616,120616],"mapped",[957]],[[120617,120617],"mapped",[958]],[[120618,120618],"mapped",[959]],[[120619,120619],"mapped",[960]],[[120620,120620],"mapped",[961]],[[120621,120621],"mapped",[952]],[[120622,120622],"mapped",[963]],[[120623,120623],"mapped",[964]],[[120624,120624],"mapped",[965]],[[120625,120625],"mapped",[966]],[[120626,120626],"mapped",[967]],[[120627,120627],"mapped",[968]],[[120628,120628],"mapped",[969]],[[120629,120629],"mapped",[8711]],[[120630,120630],"mapped",[945]],[[120631,120631],"mapped",[946]],[[120632,120632],"mapped",[947]],[[120633,120633],"mapped",[948]],[[120634,120634],"mapped",[949]],[[120635,120635],"mapped",[950]],[[120636,120636],"mapped",[951]],[[120637,120637],"mapped",[952]],[[120638,120638],"mapped",[953]],[[120639,120639],"mapped",[954]],[[120640,120640],"mapped",[955]],[[120641,120641],"mapped",[956]],[[120642,120642],"mapped",[957]],[[120643,120643],"mapped",[958]],[[120644,120644],"mapped",[959]],[[120645,120645],"mapped",[960]],[[120646,120646],"mapped",[961]],[[120647,120648],"map
ped",[963]],[[120649,120649],"mapped",[964]],[[120650,120650],"mapped",[965]],[[120651,120651],"mapped",[966]],[[120652,120652],"mapped",[967]],[[120653,120653],"mapped",[968]],[[120654,120654],"mapped",[969]],[[120655,120655],"mapped",[8706]],[[120656,120656],"mapped",[949]],[[120657,120657],"mapped",[952]],[[120658,120658],"mapped",[954]],[[120659,120659],"mapped",[966]],[[120660,120660],"mapped",[961]],[[120661,120661],"mapped",[960]],[[120662,120662],"mapped",[945]],[[120663,120663],"mapped",[946]],[[120664,120664],"mapped",[947]],[[120665,120665],"mapped",[948]],[[120666,120666],"mapped",[949]],[[120667,120667],"mapped",[950]],[[120668,120668],"mapped",[951]],[[120669,120669],"mapped",[952]],[[120670,120670],"mapped",[953]],[[120671,120671],"mapped",[954]],[[120672,120672],"mapped",[955]],[[120673,120673],"mapped",[956]],[[120674,120674],"mapped",[957]],[[120675,120675],"mapped",[958]],[[120676,120676],"mapped",[959]],[[120677,120677],"mapped",[960]],[[120678,120678],"mapped",[961]],[[120679,120679],"mapped",[952]],[[120680,120680],"mapped",[963]],[[120681,120681],"mapped",[964]],[[120682,120682],"mapped",[965]],[[120683,120683],"mapped",[966]],[[120684,120684],"mapped",[967]],[[120685,120685],"mapped",[968]],[[120686,120686],"mapped",[969]],[[120687,120687],"mapped",[8711]],[[120688,120688],"mapped",[945]],[[120689,120689],"mapped",[946]],[[120690,120690],"mapped",[947]],[[120691,120691],"mapped",[948]],[[120692,120692],"mapped",[949]],[[120693,120693],"mapped",[950]],[[120694,120694],"mapped",[951]],[[120695,120695],"mapped",[952]],[[120696,120696],"mapped",[953]],[[120697,120697],"mapped",[954]],[[120698,120698],"mapped",[955]],[[120699,120699],"mapped",[956]],[[120700,120700],"mapped",[957]],[[120701,120701],"mapped",[958]],[[120702,120702],"mapped",[959]],[[120703,120703],"mapped",[960]],[[120704,120704],"mapped",[961]],[[120705,120706],"mapped",[963]],[[120707,120707],"mapped",[964]],[[120708,120708],"mapped",[965]],[[120709,120709],"mapped",[966]],[[1207
10,120710],"mapped",[967]],[[120711,120711],"mapped",[968]],[[120712,120712],"mapped",[969]],[[120713,120713],"mapped",[8706]],[[120714,120714],"mapped",[949]],[[120715,120715],"mapped",[952]],[[120716,120716],"mapped",[954]],[[120717,120717],"mapped",[966]],[[120718,120718],"mapped",[961]],[[120719,120719],"mapped",[960]],[[120720,120720],"mapped",[945]],[[120721,120721],"mapped",[946]],[[120722,120722],"mapped",[947]],[[120723,120723],"mapped",[948]],[[120724,120724],"mapped",[949]],[[120725,120725],"mapped",[950]],[[120726,120726],"mapped",[951]],[[120727,120727],"mapped",[952]],[[120728,120728],"mapped",[953]],[[120729,120729],"mapped",[954]],[[120730,120730],"mapped",[955]],[[120731,120731],"mapped",[956]],[[120732,120732],"mapped",[957]],[[120733,120733],"mapped",[958]],[[120734,120734],"mapped",[959]],[[120735,120735],"mapped",[960]],[[120736,120736],"mapped",[961]],[[120737,120737],"mapped",[952]],[[120738,120738],"mapped",[963]],[[120739,120739],"mapped",[964]],[[120740,120740],"mapped",[965]],[[120741,120741],"mapped",[966]],[[120742,120742],"mapped",[967]],[[120743,120743],"mapped",[968]],[[120744,120744],"mapped",[969]],[[120745,120745],"mapped",[8711]],[[120746,120746],"mapped",[945]],[[120747,120747],"mapped",[946]],[[120748,120748],"mapped",[947]],[[120749,120749],"mapped",[948]],[[120750,120750],"mapped",[949]],[[120751,120751],"mapped",[950]],[[120752,120752],"mapped",[951]],[[120753,120753],"mapped",[952]],[[120754,120754],"mapped",[953]],[[120755,120755],"mapped",[954]],[[120756,120756],"mapped",[955]],[[120757,120757],"mapped",[956]],[[120758,120758],"mapped",[957]],[[120759,120759],"mapped",[958]],[[120760,120760],"mapped",[959]],[[120761,120761],"mapped",[960]],[[120762,120762],"mapped",[961]],[[120763,120764],"mapped",[963]],[[120765,120765],"mapped",[964]],[[120766,120766],"mapped",[965]],[[120767,120767],"mapped",[966]],[[120768,120768],"mapped",[967]],[[120769,120769],"mapped",[968]],[[120770,120770],"mapped",[969]],[[120771,120771],"mapped
",[8706]],[[120772,120772],"mapped",[949]],[[120773,120773],"mapped",[952]],[[120774,120774],"mapped",[954]],[[120775,120775],"mapped",[966]],[[120776,120776],"mapped",[961]],[[120777,120777],"mapped",[960]],[[120778,120779],"mapped",[989]],[[120780,120781],"disallowed"],[[120782,120782],"mapped",[48]],[[120783,120783],"mapped",[49]],[[120784,120784],"mapped",[50]],[[120785,120785],"mapped",[51]],[[120786,120786],"mapped",[52]],[[120787,120787],"mapped",[53]],[[120788,120788],"mapped",[54]],[[120789,120789],"mapped",[55]],[[120790,120790],"mapped",[56]],[[120791,120791],"mapped",[57]],[[120792,120792],"mapped",[48]],[[120793,120793],"mapped",[49]],[[120794,120794],"mapped",[50]],[[120795,120795],"mapped",[51]],[[120796,120796],"mapped",[52]],[[120797,120797],"mapped",[53]],[[120798,120798],"mapped",[54]],[[120799,120799],"mapped",[55]],[[120800,120800],"mapped",[56]],[[120801,120801],"mapped",[57]],[[120802,120802],"mapped",[48]],[[120803,120803],"mapped",[49]],[[120804,120804],"mapped",[50]],[[120805,120805],"mapped",[51]],[[120806,120806],"mapped",[52]],[[120807,120807],"mapped",[53]],[[120808,120808],"mapped",[54]],[[120809,120809],"mapped",[55]],[[120810,120810],"mapped",[56]],[[120811,120811],"mapped",[57]],[[120812,120812],"mapped",[48]],[[120813,120813],"mapped",[49]],[[120814,120814],"mapped",[50]],[[120815,120815],"mapped",[51]],[[120816,120816],"mapped",[52]],[[120817,120817],"mapped",[53]],[[120818,120818],"mapped",[54]],[[120819,120819],"mapped",[55]],[[120820,120820],"mapped",[56]],[[120821,120821],"mapped",[57]],[[120822,120822],"mapped",[48]],[[120823,120823],"mapped",[49]],[[120824,120824],"mapped",[50]],[[120825,120825],"mapped",[51]],[[120826,120826],"mapped",[52]],[[120827,120827],"mapped",[53]],[[120828,120828],"mapped",[54]],[[120829,120829],"mapped",[55]],[[120830,120830],"mapped",[56]],[[120831,120831],"mapped",[57]],[[120832,121343],"valid",[],"NV8"],[[121344,121398],"valid"],[[121399,121402],"valid",[],"NV8"],[[121403,121452],"valid"],[[1214
53,121460],"valid",[],"NV8"],[[121461,121461],"valid"],[[121462,121475],"valid",[],"NV8"],[[121476,121476],"valid"],[[121477,121483],"valid",[],"NV8"],[[121484,121498],"disallowed"],[[121499,121503],"valid"],[[121504,121504],"disallowed"],[[121505,121519],"valid"],[[121520,124927],"disallowed"],[[124928,125124],"valid"],[[125125,125126],"disallowed"],[[125127,125135],"valid",[],"NV8"],[[125136,125142],"valid"],[[125143,126463],"disallowed"],[[126464,126464],"mapped",[1575]],[[126465,126465],"mapped",[1576]],[[126466,126466],"mapped",[1580]],[[126467,126467],"mapped",[1583]],[[126468,126468],"disallowed"],[[126469,126469],"mapped",[1608]],[[126470,126470],"mapped",[1586]],[[126471,126471],"mapped",[1581]],[[126472,126472],"mapped",[1591]],[[126473,126473],"mapped",[1610]],[[126474,126474],"mapped",[1603]],[[126475,126475],"mapped",[1604]],[[126476,126476],"mapped",[1605]],[[126477,126477],"mapped",[1606]],[[126478,126478],"mapped",[1587]],[[126479,126479],"mapped",[1593]],[[126480,126480],"mapped",[1601]],[[126481,126481],"mapped",[1589]],[[126482,126482],"mapped",[1602]],[[126483,126483],"mapped",[1585]],[[126484,126484],"mapped",[1588]],[[126485,126485],"mapped",[1578]],[[126486,126486],"mapped",[1579]],[[126487,126487],"mapped",[1582]],[[126488,126488],"mapped",[1584]],[[126489,126489],"mapped",[1590]],[[126490,126490],"mapped",[1592]],[[126491,126491],"mapped",[1594]],[[126492,126492],"mapped",[1646]],[[126493,126493],"mapped",[1722]],[[126494,126494],"mapped",[1697]],[[126495,126495],"mapped",[1647]],[[126496,126496],"disallowed"],[[126497,126497],"mapped",[1576]],[[126498,126498],"mapped",[1580]],[[126499,126499],"disallowed"],[[126500,126500],"mapped",[1607]],[[126501,126502],"disallowed"],[[126503,126503],"mapped",[1581]],[[126504,126504],"disallowed"],[[126505,126505],"mapped",[1610]],[[126506,126506],"mapped",[1603]],[[126507,126507],"mapped",[1604]],[[126508,126508],"mapped",[1605]],[[126509,126509],"mapped",[1606]],[[126510,126510],"mapped",[1587]],[[1265
11,126511],"mapped",[1593]],[[126512,126512],"mapped",[1601]],[[126513,126513],"mapped",[1589]],[[126514,126514],"mapped",[1602]],[[126515,126515],"disallowed"],[[126516,126516],"mapped",[1588]],[[126517,126517],"mapped",[1578]],[[126518,126518],"mapped",[1579]],[[126519,126519],"mapped",[1582]],[[126520,126520],"disallowed"],[[126521,126521],"mapped",[1590]],[[126522,126522],"disallowed"],[[126523,126523],"mapped",[1594]],[[126524,126529],"disallowed"],[[126530,126530],"mapped",[1580]],[[126531,126534],"disallowed"],[[126535,126535],"mapped",[1581]],[[126536,126536],"disallowed"],[[126537,126537],"mapped",[1610]],[[126538,126538],"disallowed"],[[126539,126539],"mapped",[1604]],[[126540,126540],"disallowed"],[[126541,126541],"mapped",[1606]],[[126542,126542],"mapped",[1587]],[[126543,126543],"mapped",[1593]],[[126544,126544],"disallowed"],[[126545,126545],"mapped",[1589]],[[126546,126546],"mapped",[1602]],[[126547,126547],"disallowed"],[[126548,126548],"mapped",[1588]],[[126549,126550],"disallowed"],[[126551,126551],"mapped",[1582]],[[126552,126552],"disallowed"],[[126553,126553],"mapped",[1590]],[[126554,126554],"disallowed"],[[126555,126555],"mapped",[1594]],[[126556,126556],"disallowed"],[[126557,126557],"mapped",[1722]],[[126558,126558],"disallowed"],[[126559,126559],"mapped",[1647]],[[126560,126560],"disallowed"],[[126561,126561],"mapped",[1576]],[[126562,126562],"mapped",[1580]],[[126563,126563],"disallowed"],[[126564,126564],"mapped",[1607]],[[126565,126566],"disallowed"],[[126567,126567],"mapped",[1581]],[[126568,126568],"mapped",[1591]],[[126569,126569],"mapped",[1610]],[[126570,126570],"mapped",[1603]],[[126571,126571],"disallowed"],[[126572,126572],"mapped",[1605]],[[126573,126573],"mapped",[1606]],[[126574,126574],"mapped",[1587]],[[126575,126575],"mapped",[1593]],[[126576,126576],"mapped",[1601]],[[126577,126577],"mapped",[1589]],[[126578,126578],"mapped",[1602]],[[126579,126579],"disallowed"],[[126580,126580],"mapped",[1588]],[[126581,126581],"mapped",
[1578]],[[126582,126582],"mapped",[1579]],[[126583,126583],"mapped",[1582]],[[126584,126584],"disallowed"],[[126585,126585],"mapped",[1590]],[[126586,126586],"mapped",[1592]],[[126587,126587],"mapped",[1594]],[[126588,126588],"mapped",[1646]],[[126589,126589],"disallowed"],[[126590,126590],"mapped",[1697]],[[126591,126591],"disallowed"],[[126592,126592],"mapped",[1575]],[[126593,126593],"mapped",[1576]],[[126594,126594],"mapped",[1580]],[[126595,126595],"mapped",[1583]],[[126596,126596],"mapped",[1607]],[[126597,126597],"mapped",[1608]],[[126598,126598],"mapped",[1586]],[[126599,126599],"mapped",[1581]],[[126600,126600],"mapped",[1591]],[[126601,126601],"mapped",[1610]],[[126602,126602],"disallowed"],[[126603,126603],"mapped",[1604]],[[126604,126604],"mapped",[1605]],[[126605,126605],"mapped",[1606]],[[126606,126606],"mapped",[1587]],[[126607,126607],"mapped",[1593]],[[126608,126608],"mapped",[1601]],[[126609,126609],"mapped",[1589]],[[126610,126610],"mapped",[1602]],[[126611,126611],"mapped",[1585]],[[126612,126612],"mapped",[1588]],[[126613,126613],"mapped",[1578]],[[126614,126614],"mapped",[1579]],[[126615,126615],"mapped",[1582]],[[126616,126616],"mapped",[1584]],[[126617,126617],"mapped",[1590]],[[126618,126618],"mapped",[1592]],[[126619,126619],"mapped",[1594]],[[126620,126624],"disallowed"],[[126625,126625],"mapped",[1576]],[[126626,126626],"mapped",[1580]],[[126627,126627],"mapped",[1583]],[[126628,126628],"disallowed"],[[126629,126629],"mapped",[1608]],[[126630,126630],"mapped",[1586]],[[126631,126631],"mapped",[1581]],[[126632,126632],"mapped",[1591]],[[126633,126633],"mapped",[1610]],[[126634,126634],"disallowed"],[[126635,126635],"mapped",[1604]],[[126636,126636],"mapped",[1605]],[[126637,126637],"mapped",[1606]],[[126638,126638],"mapped",[1587]],[[126639,126639],"mapped",[1593]],[[126640,126640],"mapped",[1601]],[[126641,126641],"mapped",[1589]],[[126642,126642],"mapped",[1602]],[[126643,126643],"mapped",[1585]],[[126644,126644],"mapped",[1588]],[[12664
5,126645],"mapped",[1578]],[[126646,126646],"mapped",[1579]],[[126647,126647],"mapped",[1582]],[[126648,126648],"mapped",[1584]],[[126649,126649],"mapped",[1590]],[[126650,126650],"mapped",[1592]],[[126651,126651],"mapped",[1594]],[[126652,126703],"disallowed"],[[126704,126705],"valid",[],"NV8"],[[126706,126975],"disallowed"],[[126976,127019],"valid",[],"NV8"],[[127020,127023],"disallowed"],[[127024,127123],"valid",[],"NV8"],[[127124,127135],"disallowed"],[[127136,127150],"valid",[],"NV8"],[[127151,127152],"disallowed"],[[127153,127166],"valid",[],"NV8"],[[127167,127167],"valid",[],"NV8"],[[127168,127168],"disallowed"],[[127169,127183],"valid",[],"NV8"],[[127184,127184],"disallowed"],[[127185,127199],"valid",[],"NV8"],[[127200,127221],"valid",[],"NV8"],[[127222,127231],"disallowed"],[[127232,127232],"disallowed"],[[127233,127233],"disallowed_STD3_mapped",[48,44]],[[127234,127234],"disallowed_STD3_mapped",[49,44]],[[127235,127235],"disallowed_STD3_mapped",[50,44]],[[127236,127236],"disallowed_STD3_mapped",[51,44]],[[127237,127237],"disallowed_STD3_mapped",[52,44]],[[127238,127238],"disallowed_STD3_mapped",[53,44]],[[127239,127239],"disallowed_STD3_mapped",[54,44]],[[127240,127240],"disallowed_STD3_mapped",[55,44]],[[127241,127241],"disallowed_STD3_mapped",[56,44]],[[127242,127242],"disallowed_STD3_mapped",[57,44]],[[127243,127244],"valid",[],"NV8"],[[127245,127247],"disallowed"],[[127248,127248],"disallowed_STD3_mapped",[40,97,41]],[[127249,127249],"disallowed_STD3_mapped",[40,98,41]],[[127250,127250],"disallowed_STD3_mapped",[40,99,41]],[[127251,127251],"disallowed_STD3_mapped",[40,100,41]],[[127252,127252],"disallowed_STD3_mapped",[40,101,41]],[[127253,127253],"disallowed_STD3_mapped",[40,102,41]],[[127254,127254],"disallowed_STD3_mapped",[40,103,41]],[[127255,127255],"disallowed_STD3_mapped",[40,104,41]],[[127256,127256],"disallowed_STD3_mapped",[40,105,41]],[[127257,127257],"disallowed_STD3_mapped",[40,106,41]],[[127258,127258],"disallowed_STD3_mapped",[40,107,41
]],[[127259,127259],"disallowed_STD3_mapped",[40,108,41]],[[127260,127260],"disallowed_STD3_mapped",[40,109,41]],[[127261,127261],"disallowed_STD3_mapped",[40,110,41]],[[127262,127262],"disallowed_STD3_mapped",[40,111,41]],[[127263,127263],"disallowed_STD3_mapped",[40,112,41]],[[127264,127264],"disallowed_STD3_mapped",[40,113,41]],[[127265,127265],"disallowed_STD3_mapped",[40,114,41]],[[127266,127266],"disallowed_STD3_mapped",[40,115,41]],[[127267,127267],"disallowed_STD3_mapped",[40,116,41]],[[127268,127268],"disallowed_STD3_mapped",[40,117,41]],[[127269,127269],"disallowed_STD3_mapped",[40,118,41]],[[127270,127270],"disallowed_STD3_mapped",[40,119,41]],[[127271,127271],"disallowed_STD3_mapped",[40,120,41]],[[127272,127272],"disallowed_STD3_mapped",[40,121,41]],[[127273,127273],"disallowed_STD3_mapped",[40,122,41]],[[127274,127274],"mapped",[12308,115,12309]],[[127275,127275],"mapped",[99]],[[127276,127276],"mapped",[114]],[[127277,127277],"mapped",[99,100]],[[127278,127278],"mapped",[119,122]],[[127279,127279],"disallowed"],[[127280,127280],"mapped",[97]],[[127281,127281],"mapped",[98]],[[127282,127282],"mapped",[99]],[[127283,127283],"mapped",[100]],[[127284,127284],"mapped",[101]],[[127285,127285],"mapped",[102]],[[127286,127286],"mapped",[103]],[[127287,127287],"mapped",[104]],[[127288,127288],"mapped",[105]],[[127289,127289],"mapped",[106]],[[127290,127290],"mapped",[107]],[[127291,127291],"mapped",[108]],[[127292,127292],"mapped",[109]],[[127293,127293],"mapped",[110]],[[127294,127294],"mapped",[111]],[[127295,127295],"mapped",[112]],[[127296,127296],"mapped",[113]],[[127297,127297],"mapped",[114]],[[127298,127298],"mapped",[115]],[[127299,127299],"mapped",[116]],[[127300,127300],"mapped",[117]],[[127301,127301],"mapped",[118]],[[127302,127302],"mapped",[119]],[[127303,127303],"mapped",[120]],[[127304,127304],"mapped",[121]],[[127305,127305],"mapped",[122]],[[127306,127306],"mapped",[104,118]],[[127307,127307],"mapped",[109,118]],[[127308,127308],"mapped",[11
5,100]],[[127309,127309],"mapped",[115,115]],[[127310,127310],"mapped",[112,112,118]],[[127311,127311],"mapped",[119,99]],[[127312,127318],"valid",[],"NV8"],[[127319,127319],"valid",[],"NV8"],[[127320,127326],"valid",[],"NV8"],[[127327,127327],"valid",[],"NV8"],[[127328,127337],"valid",[],"NV8"],[[127338,127338],"mapped",[109,99]],[[127339,127339],"mapped",[109,100]],[[127340,127343],"disallowed"],[[127344,127352],"valid",[],"NV8"],[[127353,127353],"valid",[],"NV8"],[[127354,127354],"valid",[],"NV8"],[[127355,127356],"valid",[],"NV8"],[[127357,127358],"valid",[],"NV8"],[[127359,127359],"valid",[],"NV8"],[[127360,127369],"valid",[],"NV8"],[[127370,127373],"valid",[],"NV8"],[[127374,127375],"valid",[],"NV8"],[[127376,127376],"mapped",[100,106]],[[127377,127386],"valid",[],"NV8"],[[127387,127461],"disallowed"],[[127462,127487],"valid",[],"NV8"],[[127488,127488],"mapped",[12411,12363]],[[127489,127489],"mapped",[12467,12467]],[[127490,127490],"mapped",[12469]],[[127491,127503],"disallowed"],[[127504,127504],"mapped",[25163]],[[127505,127505],"mapped",[23383]],[[127506,127506],"mapped",[21452]],[[127507,127507],"mapped",[12487]],[[127508,127508],"mapped",[20108]],[[127509,127509],"mapped",[22810]],[[127510,127510],"mapped",[35299]],[[127511,127511],"mapped",[22825]],[[127512,127512],"mapped",[20132]],[[127513,127513],"mapped",[26144]],[[127514,127514],"mapped",[28961]],[[127515,127515],"mapped",[26009]],[[127516,127516],"mapped",[21069]],[[127517,127517],"mapped",[24460]],[[127518,127518],"mapped",[20877]],[[127519,127519],"mapped",[26032]],[[127520,127520],"mapped",[21021]],[[127521,127521],"mapped",[32066]],[[127522,127522],"mapped",[29983]],[[127523,127523],"mapped",[36009]],[[127524,127524],"mapped",[22768]],[[127525,127525],"mapped",[21561]],[[127526,127526],"mapped",[28436]],[[127527,127527],"mapped",[25237]],[[127528,127528],"mapped",[25429]],[[127529,127529],"mapped",[19968]],[[127530,127530],"mapped",[19977]],[[127531,127531],"mapped",[36938]],[[127532,127532],"
mapped",[24038]],[[127533,127533],"mapped",[20013]],[[127534,127534],"mapped",[21491]],[[127535,127535],"mapped",[25351]],[[127536,127536],"mapped",[36208]],[[127537,127537],"mapped",[25171]],[[127538,127538],"mapped",[31105]],[[127539,127539],"mapped",[31354]],[[127540,127540],"mapped",[21512]],[[127541,127541],"mapped",[28288]],[[127542,127542],"mapped",[26377]],[[127543,127543],"mapped",[26376]],[[127544,127544],"mapped",[30003]],[[127545,127545],"mapped",[21106]],[[127546,127546],"mapped",[21942]],[[127547,127551],"disallowed"],[[127552,127552],"mapped",[12308,26412,12309]],[[127553,127553],"mapped",[12308,19977,12309]],[[127554,127554],"mapped",[12308,20108,12309]],[[127555,127555],"mapped",[12308,23433,12309]],[[127556,127556],"mapped",[12308,28857,12309]],[[127557,127557],"mapped",[12308,25171,12309]],[[127558,127558],"mapped",[12308,30423,12309]],[[127559,127559],"mapped",[12308,21213,12309]],[[127560,127560],"mapped",[12308,25943,12309]],[[127561,127567],"disallowed"],[[127568,127568],"mapped",[24471]],[[127569,127569],"mapped",[21487]],[[127570,127743],"disallowed"],[[127744,127776],"valid",[],"NV8"],[[127777,127788],"valid",[],"NV8"],[[127789,127791],"valid",[],"NV8"],[[127792,127797],"valid",[],"NV8"],[[127798,127798],"valid",[],"NV8"],[[127799,127868],"valid",[],"NV8"],[[127869,127869],"valid",[],"NV8"],[[127870,127871],"valid",[],"NV8"],[[127872,127891],"valid",[],"NV8"],[[127892,127903],"valid",[],"NV8"],[[127904,127940],"valid",[],"NV8"],[[127941,127941],"valid",[],"NV8"],[[127942,127946],"valid",[],"NV8"],[[127947,127950],"valid",[],"NV8"],[[127951,127955],"valid",[],"NV8"],[[127956,127967],"valid",[],"NV8"],[[127968,127984],"valid",[],"NV8"],[[127985,127991],"valid",[],"NV8"],[[127992,127999],"valid",[],"NV8"],[[128000,128062],"valid",[],"NV8"],[[128063,128063],"valid",[],"NV8"],[[128064,128064],"valid",[],"NV8"],[[128065,128065],"valid",[],"NV8"],[[128066,128247],"valid",[],"NV8"],[[128248,128248],"valid",[],"NV8"],[[128249,128252],"valid",[],"NV8
"],[[128253,128254],"valid",[],"NV8"],[[128255,128255],"valid",[],"NV8"],[[128256,128317],"valid",[],"NV8"],[[128318,128319],"valid",[],"NV8"],[[128320,128323],"valid",[],"NV8"],[[128324,128330],"valid",[],"NV8"],[[128331,128335],"valid",[],"NV8"],[[128336,128359],"valid",[],"NV8"],[[128360,128377],"valid",[],"NV8"],[[128378,128378],"disallowed"],[[128379,128419],"valid",[],"NV8"],[[128420,128420],"disallowed"],[[128421,128506],"valid",[],"NV8"],[[128507,128511],"valid",[],"NV8"],[[128512,128512],"valid",[],"NV8"],[[128513,128528],"valid",[],"NV8"],[[128529,128529],"valid",[],"NV8"],[[128530,128532],"valid",[],"NV8"],[[128533,128533],"valid",[],"NV8"],[[128534,128534],"valid",[],"NV8"],[[128535,128535],"valid",[],"NV8"],[[128536,128536],"valid",[],"NV8"],[[128537,128537],"valid",[],"NV8"],[[128538,128538],"valid",[],"NV8"],[[128539,128539],"valid",[],"NV8"],[[128540,128542],"valid",[],"NV8"],[[128543,128543],"valid",[],"NV8"],[[128544,128549],"valid",[],"NV8"],[[128550,128551],"valid",[],"NV8"],[[128552,128555],"valid",[],"NV8"],[[128556,128556],"valid",[],"NV8"],[[128557,128557],"valid",[],"NV8"],[[128558,128559],"valid",[],"NV8"],[[128560,128563],"valid",[],"NV8"],[[128564,128564],"valid",[],"NV8"],[[128565,128576],"valid",[],"NV8"],[[128577,128578],"valid",[],"NV8"],[[128579,128580],"valid",[],"NV8"],[[128581,128591],"valid",[],"NV8"],[[128592,128639],"valid",[],"NV8"],[[128640,128709],"valid",[],"NV8"],[[128710,128719],"valid",[],"NV8"],[[128720,128720],"valid",[],"NV8"],[[128721,128735],"disallowed"],[[128736,128748],"valid",[],"NV8"],[[128749,128751],"disallowed"],[[128752,128755],"valid",[],"NV8"],[[128756,128767],"disallowed"],[[128768,128883],"valid",[],"NV8"],[[128884,128895],"disallowed"],[[128896,128980],"valid",[],"NV8"],[[128981,129023],"disallowed"],[[129024,129035],"valid",[],"NV8"],[[129036,129039],"disallowed"],[[129040,129095],"valid",[],"NV8"],[[129096,129103],"disallowed"],[[129104,129113],"valid",[],"NV8"],[[129114,129119],"disallowed"],[[12912
0,129159],"valid",[],"NV8"],[[129160,129167],"disallowed"],[[129168,129197],"valid",[],"NV8"],[[129198,129295],"disallowed"],[[129296,129304],"valid",[],"NV8"],[[129305,129407],"disallowed"],[[129408,129412],"valid",[],"NV8"],[[129413,129471],"disallowed"],[[129472,129472],"valid",[],"NV8"],[[129473,131069],"disallowed"],[[131070,131071],"disallowed"],[[131072,173782],"valid"],[[173783,173823],"disallowed"],[[173824,177972],"valid"],[[177973,177983],"disallowed"],[[177984,178205],"valid"],[[178206,178207],"disallowed"],[[178208,183969],"valid"],[[183970,194559],"disallowed"],[[194560,194560],"mapped",[20029]],[[194561,194561],"mapped",[20024]],[[194562,194562],"mapped",[20033]],[[194563,194563],"mapped",[131362]],[[194564,194564],"mapped",[20320]],[[194565,194565],"mapped",[20398]],[[194566,194566],"mapped",[20411]],[[194567,194567],"mapped",[20482]],[[194568,194568],"mapped",[20602]],[[194569,194569],"mapped",[20633]],[[194570,194570],"mapped",[20711]],[[194571,194571],"mapped",[20687]],[[194572,194572],"mapped",[13470]],[[194573,194573],"mapped",[132666]],[[194574,194574],"mapped",[20813]],[[194575,194575],"mapped",[20820]],[[194576,194576],"mapped",[20836]],[[194577,194577],"mapped",[20855]],[[194578,194578],"mapped",[132380]],[[194579,194579],"mapped",[13497]],[[194580,194580],"mapped",[20839]],[[194581,194581],"mapped",[20877]],[[194582,194582],"mapped",[132427]],[[194583,194583],"mapped",[20887]],[[194584,194584],"mapped",[20900]],[[194585,194585],"mapped",[20172]],[[194586,194586],"mapped",[20908]],[[194587,194587],"mapped",[20917]],[[194588,194588],"mapped",[168415]],[[194589,194589],"mapped",[20981]],[[194590,194590],"mapped",[20995]],[[194591,194591],"mapped",[13535]],[[194592,194592],"mapped",[21051]],[[194593,194593],"mapped",[21062]],[[194594,194594],"mapped",[21106]],[[194595,194595],"mapped",[21111]],[[194596,194596],"mapped",[13589]],[[194597,194597],"mapped",[21191]],[[194598,194598],"mapped",[21193]],[[194599,194599],"mapped",[21220]],[[194600,1946
00],"mapped",[21242]],[[194601,194601],"mapped",[21253]],[[194602,194602],"mapped",[21254]],[[194603,194603],"mapped",[21271]],[[194604,194604],"mapped",[21321]],[[194605,194605],"mapped",[21329]],[[194606,194606],"mapped",[21338]],[[194607,194607],"mapped",[21363]],[[194608,194608],"mapped",[21373]],[[194609,194611],"mapped",[21375]],[[194612,194612],"mapped",[133676]],[[194613,194613],"mapped",[28784]],[[194614,194614],"mapped",[21450]],[[194615,194615],"mapped",[21471]],[[194616,194616],"mapped",[133987]],[[194617,194617],"mapped",[21483]],[[194618,194618],"mapped",[21489]],[[194619,194619],"mapped",[21510]],[[194620,194620],"mapped",[21662]],[[194621,194621],"mapped",[21560]],[[194622,194622],"mapped",[21576]],[[194623,194623],"mapped",[21608]],[[194624,194624],"mapped",[21666]],[[194625,194625],"mapped",[21750]],[[194626,194626],"mapped",[21776]],[[194627,194627],"mapped",[21843]],[[194628,194628],"mapped",[21859]],[[194629,194630],"mapped",[21892]],[[194631,194631],"mapped",[21913]],[[194632,194632],"mapped",[21931]],[[194633,194633],"mapped",[21939]],[[194634,194634],"mapped",[21954]],[[194635,194635],"mapped",[22294]],[[194636,194636],"mapped",[22022]],[[194637,194637],"mapped",[22295]],[[194638,194638],"mapped",[22097]],[[194639,194639],"mapped",[22132]],[[194640,194640],"mapped",[20999]],[[194641,194641],"mapped",[22766]],[[194642,194642],"mapped",[22478]],[[194643,194643],"mapped",[22516]],[[194644,194644],"mapped",[22541]],[[194645,194645],"mapped",[22411]],[[194646,194646],"mapped",[22578]],[[194647,194647],"mapped",[22577]],[[194648,194648],"mapped",[22700]],[[194649,194649],"mapped",[136420]],[[194650,194650],"mapped",[22770]],[[194651,194651],"mapped",[22775]],[[194652,194652],"mapped",[22790]],[[194653,194653],"mapped",[22810]],[[194654,194654],"mapped",[22818]],[[194655,194655],"mapped",[22882]],[[194656,194656],"mapped",[136872]],[[194657,194657],"mapped",[136938]],[[194658,194658],"mapped",[23020]],[[194659,194659],"mapped",[23067]],[[194660,1946
60],"mapped",[23079]],[[194661,194661],"mapped",[23000]],[[194662,194662],"mapped",[23142]],[[194663,194663],"mapped",[14062]],[[194664,194664],"disallowed"],[[194665,194665],"mapped",[23304]],[[194666,194667],"mapped",[23358]],[[194668,194668],"mapped",[137672]],[[194669,194669],"mapped",[23491]],[[194670,194670],"mapped",[23512]],[[194671,194671],"mapped",[23527]],[[194672,194672],"mapped",[23539]],[[194673,194673],"mapped",[138008]],[[194674,194674],"mapped",[23551]],[[194675,194675],"mapped",[23558]],[[194676,194676],"disallowed"],[[194677,194677],"mapped",[23586]],[[194678,194678],"mapped",[14209]],[[194679,194679],"mapped",[23648]],[[194680,194680],"mapped",[23662]],[[194681,194681],"mapped",[23744]],[[194682,194682],"mapped",[23693]],[[194683,194683],"mapped",[138724]],[[194684,194684],"mapped",[23875]],[[194685,194685],"mapped",[138726]],[[194686,194686],"mapped",[23918]],[[194687,194687],"mapped",[23915]],[[194688,194688],"mapped",[23932]],[[194689,194689],"mapped",[24033]],[[194690,194690],"mapped",[24034]],[[194691,194691],"mapped",[14383]],[[194692,194692],"mapped",[24061]],[[194693,194693],"mapped",[24104]],[[194694,194694],"mapped",[24125]],[[194695,194695],"mapped",[24169]],[[194696,194696],"mapped",[14434]],[[194697,194697],"mapped",[139651]],[[194698,194698],"mapped",[14460]],[[194699,194699],"mapped",[24240]],[[194700,194700],"mapped",[24243]],[[194701,194701],"mapped",[24246]],[[194702,194702],"mapped",[24266]],[[194703,194703],"mapped",[172946]],[[194704,194704],"mapped",[24318]],[[194705,194706],"mapped",[140081]],[[194707,194707],"mapped",[33281]],[[194708,194709],"mapped",[24354]],[[194710,194710],"mapped",[14535]],[[194711,194711],"mapped",[144056]],[[194712,194712],"mapped",[156122]],[[194713,194713],"mapped",[24418]],[[194714,194714],"mapped",[24427]],[[194715,194715],"mapped",[14563]],[[194716,194716],"mapped",[24474]],[[194717,194717],"mapped",[24525]],[[194718,194718],"mapped",[24535]],[[194719,194719],"mapped",[24569]],[[194720,194720],
"mapped",[24705]],[[194721,194721],"mapped",[14650]],[[194722,194722],"mapped",[14620]],[[194723,194723],"mapped",[24724]],[[194724,194724],"mapped",[141012]],[[194725,194725],"mapped",[24775]],[[194726,194726],"mapped",[24904]],[[194727,194727],"mapped",[24908]],[[194728,194728],"mapped",[24910]],[[194729,194729],"mapped",[24908]],[[194730,194730],"mapped",[24954]],[[194731,194731],"mapped",[24974]],[[194732,194732],"mapped",[25010]],[[194733,194733],"mapped",[24996]],[[194734,194734],"mapped",[25007]],[[194735,194735],"mapped",[25054]],[[194736,194736],"mapped",[25074]],[[194737,194737],"mapped",[25078]],[[194738,194738],"mapped",[25104]],[[194739,194739],"mapped",[25115]],[[194740,194740],"mapped",[25181]],[[194741,194741],"mapped",[25265]],[[194742,194742],"mapped",[25300]],[[194743,194743],"mapped",[25424]],[[194744,194744],"mapped",[142092]],[[194745,194745],"mapped",[25405]],[[194746,194746],"mapped",[25340]],[[194747,194747],"mapped",[25448]],[[194748,194748],"mapped",[25475]],[[194749,194749],"mapped",[25572]],[[194750,194750],"mapped",[142321]],[[194751,194751],"mapped",[25634]],[[194752,194752],"mapped",[25541]],[[194753,194753],"mapped",[25513]],[[194754,194754],"mapped",[14894]],[[194755,194755],"mapped",[25705]],[[194756,194756],"mapped",[25726]],[[194757,194757],"mapped",[25757]],[[194758,194758],"mapped",[25719]],[[194759,194759],"mapped",[14956]],[[194760,194760],"mapped",[25935]],[[194761,194761],"mapped",[25964]],[[194762,194762],"mapped",[143370]],[[194763,194763],"mapped",[26083]],[[194764,194764],"mapped",[26360]],[[194765,194765],"mapped",[26185]],[[194766,194766],"mapped",[15129]],[[194767,194767],"mapped",[26257]],[[194768,194768],"mapped",[15112]],[[194769,194769],"mapped",[15076]],[[194770,194770],"mapped",[20882]],[[194771,194771],"mapped",[20885]],[[194772,194772],"mapped",[26368]],[[194773,194773],"mapped",[26268]],[[194774,194774],"mapped",[32941]],[[194775,194775],"mapped",[17369]],[[194776,194776],"mapped",[26391]],[[194777,194777],"
mapped",[26395]],[[194778,194778],"mapped",[26401]],[[194779,194779],"mapped",[26462]],[[194780,194780],"mapped",[26451]],[[194781,194781],"mapped",[144323]],[[194782,194782],"mapped",[15177]],[[194783,194783],"mapped",[26618]],[[194784,194784],"mapped",[26501]],[[194785,194785],"mapped",[26706]],[[194786,194786],"mapped",[26757]],[[194787,194787],"mapped",[144493]],[[194788,194788],"mapped",[26766]],[[194789,194789],"mapped",[26655]],[[194790,194790],"mapped",[26900]],[[194791,194791],"mapped",[15261]],[[194792,194792],"mapped",[26946]],[[194793,194793],"mapped",[27043]],[[194794,194794],"mapped",[27114]],[[194795,194795],"mapped",[27304]],[[194796,194796],"mapped",[145059]],[[194797,194797],"mapped",[27355]],[[194798,194798],"mapped",[15384]],[[194799,194799],"mapped",[27425]],[[194800,194800],"mapped",[145575]],[[194801,194801],"mapped",[27476]],[[194802,194802],"mapped",[15438]],[[194803,194803],"mapped",[27506]],[[194804,194804],"mapped",[27551]],[[194805,194805],"mapped",[27578]],[[194806,194806],"mapped",[27579]],[[194807,194807],"mapped",[146061]],[[194808,194808],"mapped",[138507]],[[194809,194809],"mapped",[146170]],[[194810,194810],"mapped",[27726]],[[194811,194811],"mapped",[146620]],[[194812,194812],"mapped",[27839]],[[194813,194813],"mapped",[27853]],[[194814,194814],"mapped",[27751]],[[194815,194815],"mapped",[27926]],[[194816,194816],"mapped",[27966]],[[194817,194817],"mapped",[28023]],[[194818,194818],"mapped",[27969]],[[194819,194819],"mapped",[28009]],[[194820,194820],"mapped",[28024]],[[194821,194821],"mapped",[28037]],[[194822,194822],"mapped",[146718]],[[194823,194823],"mapped",[27956]],[[194824,194824],"mapped",[28207]],[[194825,194825],"mapped",[28270]],[[194826,194826],"mapped",[15667]],[[194827,194827],"mapped",[28363]],[[194828,194828],"mapped",[28359]],[[194829,194829],"mapped",[147153]],[[194830,194830],"mapped",[28153]],[[194831,194831],"mapped",[28526]],[[194832,194832],"mapped",[147294]],[[194833,194833],"mapped",[147342]],[[194834,19
4834],"mapped",[28614]],[[194835,194835],"mapped",[28729]],[[194836,194836],"mapped",[28702]],[[194837,194837],"mapped",[28699]],[[194838,194838],"mapped",[15766]],[[194839,194839],"mapped",[28746]],[[194840,194840],"mapped",[28797]],[[194841,194841],"mapped",[28791]],[[194842,194842],"mapped",[28845]],[[194843,194843],"mapped",[132389]],[[194844,194844],"mapped",[28997]],[[194845,194845],"mapped",[148067]],[[194846,194846],"mapped",[29084]],[[194847,194847],"disallowed"],[[194848,194848],"mapped",[29224]],[[194849,194849],"mapped",[29237]],[[194850,194850],"mapped",[29264]],[[194851,194851],"mapped",[149000]],[[194852,194852],"mapped",[29312]],[[194853,194853],"mapped",[29333]],[[194854,194854],"mapped",[149301]],[[194855,194855],"mapped",[149524]],[[194856,194856],"mapped",[29562]],[[194857,194857],"mapped",[29579]],[[194858,194858],"mapped",[16044]],[[194859,194859],"mapped",[29605]],[[194860,194861],"mapped",[16056]],[[194862,194862],"mapped",[29767]],[[194863,194863],"mapped",[29788]],[[194864,194864],"mapped",[29809]],[[194865,194865],"mapped",[29829]],[[194866,194866],"mapped",[29898]],[[194867,194867],"mapped",[16155]],[[194868,194868],"mapped",[29988]],[[194869,194869],"mapped",[150582]],[[194870,194870],"mapped",[30014]],[[194871,194871],"mapped",[150674]],[[194872,194872],"mapped",[30064]],[[194873,194873],"mapped",[139679]],[[194874,194874],"mapped",[30224]],[[194875,194875],"mapped",[151457]],[[194876,194876],"mapped",[151480]],[[194877,194877],"mapped",[151620]],[[194878,194878],"mapped",[16380]],[[194879,194879],"mapped",[16392]],[[194880,194880],"mapped",[30452]],[[194881,194881],"mapped",[151795]],[[194882,194882],"mapped",[151794]],[[194883,194883],"mapped",[151833]],[[194884,194884],"mapped",[151859]],[[194885,194885],"mapped",[30494]],[[194886,194887],"mapped",[30495]],[[194888,194888],"mapped",[30538]],[[194889,194889],"mapped",[16441]],[[194890,194890],"mapped",[30603]],[[194891,194891],"mapped",[16454]],[[194892,194892],"mapped",[16534]],[[194
893,194893],"mapped",[152605]],[[194894,194894],"mapped",[30798]],[[194895,194895],"mapped",[30860]],[[194896,194896],"mapped",[30924]],[[194897,194897],"mapped",[16611]],[[194898,194898],"mapped",[153126]],[[194899,194899],"mapped",[31062]],[[194900,194900],"mapped",[153242]],[[194901,194901],"mapped",[153285]],[[194902,194902],"mapped",[31119]],[[194903,194903],"mapped",[31211]],[[194904,194904],"mapped",[16687]],[[194905,194905],"mapped",[31296]],[[194906,194906],"mapped",[31306]],[[194907,194907],"mapped",[31311]],[[194908,194908],"mapped",[153980]],[[194909,194910],"mapped",[154279]],[[194911,194911],"disallowed"],[[194912,194912],"mapped",[16898]],[[194913,194913],"mapped",[154539]],[[194914,194914],"mapped",[31686]],[[194915,194915],"mapped",[31689]],[[194916,194916],"mapped",[16935]],[[194917,194917],"mapped",[154752]],[[194918,194918],"mapped",[31954]],[[194919,194919],"mapped",[17056]],[[194920,194920],"mapped",[31976]],[[194921,194921],"mapped",[31971]],[[194922,194922],"mapped",[32000]],[[194923,194923],"mapped",[155526]],[[194924,194924],"mapped",[32099]],[[194925,194925],"mapped",[17153]],[[194926,194926],"mapped",[32199]],[[194927,194927],"mapped",[32258]],[[194928,194928],"mapped",[32325]],[[194929,194929],"mapped",[17204]],[[194930,194930],"mapped",[156200]],[[194931,194931],"mapped",[156231]],[[194932,194932],"mapped",[17241]],[[194933,194933],"mapped",[156377]],[[194934,194934],"mapped",[32634]],[[194935,194935],"mapped",[156478]],[[194936,194936],"mapped",[32661]],[[194937,194937],"mapped",[32762]],[[194938,194938],"mapped",[32773]],[[194939,194939],"mapped",[156890]],[[194940,194940],"mapped",[156963]],[[194941,194941],"mapped",[32864]],[[194942,194942],"mapped",[157096]],[[194943,194943],"mapped",[32880]],[[194944,194944],"mapped",[144223]],[[194945,194945],"mapped",[17365]],[[194946,194946],"mapped",[32946]],[[194947,194947],"mapped",[33027]],[[194948,194948],"mapped",[17419]],[[194949,194949],"mapped",[33086]],[[194950,194950],"mapped",[23221
]],[[194951,194951],"mapped",[157607]],[[194952,194952],"mapped",[157621]],[[194953,194953],"mapped",[144275]],[[194954,194954],"mapped",[144284]],[[194955,194955],"mapped",[33281]],[[194956,194956],"mapped",[33284]],[[194957,194957],"mapped",[36766]],[[194958,194958],"mapped",[17515]],[[194959,194959],"mapped",[33425]],[[194960,194960],"mapped",[33419]],[[194961,194961],"mapped",[33437]],[[194962,194962],"mapped",[21171]],[[194963,194963],"mapped",[33457]],[[194964,194964],"mapped",[33459]],[[194965,194965],"mapped",[33469]],[[194966,194966],"mapped",[33510]],[[194967,194967],"mapped",[158524]],[[194968,194968],"mapped",[33509]],[[194969,194969],"mapped",[33565]],[[194970,194970],"mapped",[33635]],[[194971,194971],"mapped",[33709]],[[194972,194972],"mapped",[33571]],[[194973,194973],"mapped",[33725]],[[194974,194974],"mapped",[33767]],[[194975,194975],"mapped",[33879]],[[194976,194976],"mapped",[33619]],[[194977,194977],"mapped",[33738]],[[194978,194978],"mapped",[33740]],[[194979,194979],"mapped",[33756]],[[194980,194980],"mapped",[158774]],[[194981,194981],"mapped",[159083]],[[194982,194982],"mapped",[158933]],[[194983,194983],"mapped",[17707]],[[194984,194984],"mapped",[34033]],[[194985,194985],"mapped",[34035]],[[194986,194986],"mapped",[34070]],[[194987,194987],"mapped",[160714]],[[194988,194988],"mapped",[34148]],[[194989,194989],"mapped",[159532]],[[194990,194990],"mapped",[17757]],[[194991,194991],"mapped",[17761]],[[194992,194992],"mapped",[159665]],[[194993,194993],"mapped",[159954]],[[194994,194994],"mapped",[17771]],[[194995,194995],"mapped",[34384]],[[194996,194996],"mapped",[34396]],[[194997,194997],"mapped",[34407]],[[194998,194998],"mapped",[34409]],[[194999,194999],"mapped",[34473]],[[195000,195000],"mapped",[34440]],[[195001,195001],"mapped",[34574]],[[195002,195002],"mapped",[34530]],[[195003,195003],"mapped",[34681]],[[195004,195004],"mapped",[34600]],[[195005,195005],"mapped",[34667]],[[195006,195006],"mapped",[34694]],[[195007,195007],"disallo
wed"],[[195008,195008],"mapped",[34785]],[[195009,195009],"mapped",[34817]],[[195010,195010],"mapped",[17913]],[[195011,195011],"mapped",[34912]],[[195012,195012],"mapped",[34915]],[[195013,195013],"mapped",[161383]],[[195014,195014],"mapped",[35031]],[[195015,195015],"mapped",[35038]],[[195016,195016],"mapped",[17973]],[[195017,195017],"mapped",[35066]],[[195018,195018],"mapped",[13499]],[[195019,195019],"mapped",[161966]],[[195020,195020],"mapped",[162150]],[[195021,195021],"mapped",[18110]],[[195022,195022],"mapped",[18119]],[[195023,195023],"mapped",[35488]],[[195024,195024],"mapped",[35565]],[[195025,195025],"mapped",[35722]],[[195026,195026],"mapped",[35925]],[[195027,195027],"mapped",[162984]],[[195028,195028],"mapped",[36011]],[[195029,195029],"mapped",[36033]],[[195030,195030],"mapped",[36123]],[[195031,195031],"mapped",[36215]],[[195032,195032],"mapped",[163631]],[[195033,195033],"mapped",[133124]],[[195034,195034],"mapped",[36299]],[[195035,195035],"mapped",[36284]],[[195036,195036],"mapped",[36336]],[[195037,195037],"mapped",[133342]],[[195038,195038],"mapped",[36564]],[[195039,195039],"mapped",[36664]],[[195040,195040],"mapped",[165330]],[[195041,195041],"mapped",[165357]],[[195042,195042],"mapped",[37012]],[[195043,195043],"mapped",[37105]],[[195044,195044],"mapped",[37137]],[[195045,195045],"mapped",[165678]],[[195046,195046],"mapped",[37147]],[[195047,195047],"mapped",[37432]],[[195048,195048],"mapped",[37591]],[[195049,195049],"mapped",[37592]],[[195050,195050],"mapped",[37500]],[[195051,195051],"mapped",[37881]],[[195052,195052],"mapped",[37909]],[[195053,195053],"mapped",[166906]],[[195054,195054],"mapped",[38283]],[[195055,195055],"mapped",[18837]],[[195056,195056],"mapped",[38327]],[[195057,195057],"mapped",[167287]],[[195058,195058],"mapped",[18918]],[[195059,195059],"mapped",[38595]],[[195060,195060],"mapped",[23986]],[[195061,195061],"mapped",[38691]],[[195062,195062],"mapped",[168261]],[[195063,195063],"mapped",[168474]],[[195064,195064],"ma
pped",[19054]],[[195065,195065],"mapped",[19062]],[[195066,195066],"mapped",[38880]],[[195067,195067],"mapped",[168970]],[[195068,195068],"mapped",[19122]],[[195069,195069],"mapped",[169110]],[[195070,195071],"mapped",[38923]],[[195072,195072],"mapped",[38953]],[[195073,195073],"mapped",[169398]],[[195074,195074],"mapped",[39138]],[[195075,195075],"mapped",[19251]],[[195076,195076],"mapped",[39209]],[[195077,195077],"mapped",[39335]],[[195078,195078],"mapped",[39362]],[[195079,195079],"mapped",[39422]],[[195080,195080],"mapped",[19406]],[[195081,195081],"mapped",[170800]],[[195082,195082],"mapped",[39698]],[[195083,195083],"mapped",[40000]],[[195084,195084],"mapped",[40189]],[[195085,195085],"mapped",[19662]],[[195086,195086],"mapped",[19693]],[[195087,195087],"mapped",[40295]],[[195088,195088],"mapped",[172238]],[[195089,195089],"mapped",[19704]],[[195090,195090],"mapped",[172293]],[[195091,195091],"mapped",[172558]],[[195092,195092],"mapped",[172689]],[[195093,195093],"mapped",[40635]],[[195094,195094],"mapped",[19798]],[[195095,195095],"mapped",[40697]],[[195096,195096],"mapped",[40702]],[[195097,195097],"mapped",[40709]],[[195098,195098],"mapped",[40719]],[[195099,195099],"mapped",[40726]],[[195100,195100],"mapped",[40763]],[[195101,195101],"mapped",[173568]],[[195102,196605],"disallowed"],[[196606,196607],"disallowed"],[[196608,262141],"disallowed"],[[262142,262143],"disallowed"],[[262144,327677],"disallowed"],[[327678,327679],"disallowed"],[[327680,393213],"disallowed"],[[393214,393215],"disallowed"],[[393216,458749],"disallowed"],[[458750,458751],"disallowed"],[[458752,524285],"disallowed"],[[524286,524287],"disallowed"],[[524288,589821],"disallowed"],[[589822,589823],"disallowed"],[[589824,655357],"disallowed"],[[655358,655359],"disallowed"],[[655360,720893],"disallowed"],[[720894,720895],"disallowed"],[[720896,786429],"disallowed"],[[786430,786431],"disallowed"],[[786432,851965],"disallowed"],[[851966,851967],"disallowed"],[[851968,917501],"disallowed"],[[9
17502,917503],"disallowed"],[[917504,917504],"disallowed"],[[917505,917505],"disallowed"],[[917506,917535],"disallowed"],[[917536,917631],"disallowed"],[[917632,917759],"disallowed"],[[917760,917999],"ignored"],[[918000,983037],"disallowed"],[[983038,983039],"disallowed"],[[983040,1048573],"disallowed"],[[1048574,1048575],"disallowed"],[[1048576,1114109],"disallowed"],[[1114110,1114111],"disallowed"]] \ No newline at end of file diff --git a/node_modules/tr46/package.json b/node_modules/tr46/package.json new file mode 100644 index 0000000..b6826da --- /dev/null +++ b/node_modules/tr46/package.json @@ -0,0 +1,31 @@ +{ + "name": "tr46", + "version": "0.0.3", + "description": "An implementation of the Unicode TR46 spec", + "main": "index.js", + "scripts": { + "test": "mocha", + "pretest": "node scripts/getLatestUnicodeTests.js", + "prepublish": "node scripts/generateMappingTable.js" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/Sebmaster/tr46.js.git" + }, + "keywords": [ + "unicode", + "tr46", + "url", + "whatwg" + ], + "author": "Sebastian Mayr ", + "license": "MIT", + "bugs": { + "url": "https://github.com/Sebmaster/tr46.js/issues" + }, + "homepage": "https://github.com/Sebmaster/tr46.js#readme", + "devDependencies": { + "mocha": "^2.2.5", + "request": "^2.57.0" + } +} diff --git a/node_modules/uid-safe/HISTORY.md b/node_modules/uid-safe/HISTORY.md new file mode 100644 index 0000000..3ec249f --- /dev/null +++ b/node_modules/uid-safe/HISTORY.md @@ -0,0 +1,61 @@ +2.1.5 / 2017-08-02 +================== + + * perf: remove only trailing `=` + +2.1.4 / 2017-03-02 +================== + + * Remove `base64-url` dependency + +2.1.3 / 2016-10-30 +================== + + * deps: base64-url@1.3.3 + +2.1.2 / 2016-08-15 +================== + + * deps: base64-url@1.3.2 + +2.1.1 / 2016-05-04 +================== + + * deps: base64-url@1.2.2 + +2.1.0 / 2016-01-17 +================== + + * Use `random-bytes` for byte source + +2.0.0 / 2015-05-08 
+================== + + * Use global `Promise` when returning a promise + +1.1.0 / 2015-02-01 +================== + + * Use `crypto.randomBytes`, if available + * deps: base64-url@1.2.1 + +1.0.3 / 2015-01-31 +================== + + * Fix error branch that would throw + * deps: base64-url@1.2.0 + +1.0.2 / 2015-01-08 +================== + + * Remove dependency on `mz` + +1.0.1 / 2014-06-18 +================== + + * Remove direct `bluebird` dependency + +1.0.0 / 2014-06-18 +================== + + * Initial release diff --git a/node_modules/uid-safe/LICENSE b/node_modules/uid-safe/LICENSE new file mode 100644 index 0000000..c4b8a49 --- /dev/null +++ b/node_modules/uid-safe/LICENSE @@ -0,0 +1,22 @@ +The MIT License (MIT) + +Copyright (c) 2014 Jonathan Ong +Copyright (c) 2015-2017 Douglas Christopher Wilson + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/uid-safe/README.md b/node_modules/uid-safe/README.md new file mode 100644 index 0000000..fa02be8 --- /dev/null +++ b/node_modules/uid-safe/README.md @@ -0,0 +1,77 @@ +# uid-safe + +[![NPM Version][npm-image]][npm-url] +[![NPM Downloads][downloads-image]][downloads-url] +[![Node.js Version][node-version-image]][node-version-url] +[![Build Status][travis-image]][travis-url] +[![Test Coverage][coveralls-image]][coveralls-url] + +URL and cookie safe UIDs + +Create cryptographically secure UIDs safe for both cookie and URL usage. +This is in contrast to modules such as [rand-token](https://www.npmjs.com/package/rand-token) +and [uid2](https://www.npmjs.com/package/uid2) whose UIDs are actually skewed +due to the use of `%` and unnecessarily truncate the UID. +Use this if you could still use UIDs with `-` and `_` in them. + +## Installation + +```sh +$ npm install uid-safe +``` + +## API + +```js +var uid = require('uid-safe') +``` + +### uid(byteLength, callback) + +Asynchronously create a UID with a specific byte length. Because `base64` +encoding is used underneath, this is not the string length. For example, +to create a UID of length 24, you want a byte length of 18. + +```js +uid(18, function (err, string) { + if (err) throw err + // do something with the string +}) +``` + +### uid(byteLength) + +Asynchronously create a UID with a specific byte length and return a +`Promise`. + +**Note**: To use promises in Node.js _prior to 0.12_, promises must be +"polyfilled" using `global.Promise = require('bluebird')`. + +```js +uid(18).then(function (string) { + // do something with the string +}) +``` + +### uid.sync(byteLength) + +A synchronous version of above. 
+ +```js +var string = uid.sync(18) +``` + +## License + +[MIT](LICENSE) + +[npm-image]: https://img.shields.io/npm/v/uid-safe.svg +[npm-url]: https://npmjs.org/package/uid-safe +[node-version-image]: https://img.shields.io/node/v/uid-safe.svg +[node-version-url]: https://nodejs.org/en/download/ +[travis-image]: https://img.shields.io/travis/crypto-utils/uid-safe/master.svg +[travis-url]: https://travis-ci.org/crypto-utils/uid-safe +[coveralls-image]: https://img.shields.io/coveralls/crypto-utils/uid-safe/master.svg +[coveralls-url]: https://coveralls.io/r/crypto-utils/uid-safe?branch=master +[downloads-image]: https://img.shields.io/npm/dm/uid-safe.svg +[downloads-url]: https://npmjs.org/package/uid-safe diff --git a/node_modules/uid-safe/index.js b/node_modules/uid-safe/index.js new file mode 100644 index 0000000..18e8492 --- /dev/null +++ b/node_modules/uid-safe/index.js @@ -0,0 +1,107 @@ +/*! + * uid-safe + * Copyright(c) 2014 Jonathan Ong + * Copyright(c) 2015-2017 Douglas Christopher Wilson + * MIT Licensed + */ + +'use strict' + +/** + * Module dependencies. + * @private + */ + +var randomBytes = require('random-bytes') + +/** + * Module variables. + * @private + */ + +var EQUAL_END_REGEXP = /=+$/ +var PLUS_GLOBAL_REGEXP = /\+/g +var SLASH_GLOBAL_REGEXP = /\//g + +/** + * Module exports. + * @public + */ + +module.exports = uid +module.exports.sync = uidSync + +/** + * Create a unique ID. 
+ * + * @param {number} length + * @param {function} [callback] + * @return {Promise} + * @public + */ + +function uid (length, callback) { + // validate callback is a function, if provided + if (callback !== undefined && typeof callback !== 'function') { + throw new TypeError('argument callback must be a function') + } + + // require the callback without promises + if (!callback && !global.Promise) { + throw new TypeError('argument callback is required') + } + + if (callback) { + // classic callback style + return generateUid(length, callback) + } + + return new Promise(function executor (resolve, reject) { + generateUid(length, function onUid (err, str) { + if (err) return reject(err) + resolve(str) + }) + }) +} + +/** + * Create a unique ID sync. + * + * @param {number} length + * @return {string} + * @public + */ + +function uidSync (length) { + return toString(randomBytes.sync(length)) +} + +/** + * Generate a unique ID string. + * + * @param {number} length + * @param {function} callback + * @private + */ + +function generateUid (length, callback) { + randomBytes(length, function (err, buf) { + if (err) return callback(err) + callback(null, toString(buf)) + }) +} + +/** + * Change a Buffer into a string. 
+ * + * @param {Buffer} buf + * @return {string} + * @private + */ + +function toString (buf) { + return buf.toString('base64') + .replace(EQUAL_END_REGEXP, '') + .replace(PLUS_GLOBAL_REGEXP, '-') + .replace(SLASH_GLOBAL_REGEXP, '_') +} diff --git a/node_modules/uid-safe/package.json b/node_modules/uid-safe/package.json new file mode 100644 index 0000000..9d9ea4c --- /dev/null +++ b/node_modules/uid-safe/package.json @@ -0,0 +1,46 @@ +{ + "name": "uid-safe", + "description": "URL and cookie safe UIDs", + "version": "2.1.5", + "contributors": [ + "Douglas Christopher Wilson ", + "Jonathan Ong (http://jongleberry.com)" + ], + "license": "MIT", + "repository": "crypto-utils/uid-safe", + "dependencies": { + "random-bytes": "~1.0.0" + }, + "devDependencies": { + "bluebird": "3.5.0", + "eslint": "3.19.0", + "eslint-config-standard": "10.2.1", + "eslint-plugin-import": "2.7.0", + "eslint-plugin-node": "5.1.1", + "eslint-plugin-promise": "3.5.0", + "eslint-plugin-standard": "3.0.1", + "istanbul": "0.4.5", + "mocha": "2.5.3" + }, + "files": [ + "LICENSE", + "HISTORY.md", + "README.md", + "index.js" + ], + "engines": { + "node": ">= 0.8" + }, + "scripts": { + "lint": "eslint .", + "test": "mocha --trace-deprecation --reporter spec --bail --check-leaks test/", + "test-cov": "istanbul cover node_modules/mocha/bin/_mocha -- --trace-deprecation --reporter dot --check-leaks test/", + "test-travis": "istanbul cover node_modules/mocha/bin/_mocha --report lcovonly -- --trace-deprecation --reporter spec --check-leaks test/" + }, + "keywords": [ + "random", + "generator", + "uid", + "safe" + ] +} diff --git a/node_modules/util-deprecate/History.md b/node_modules/util-deprecate/History.md new file mode 100644 index 0000000..acc8675 --- /dev/null +++ b/node_modules/util-deprecate/History.md @@ -0,0 +1,16 @@ + +1.0.2 / 2015-10-07 +================== + + * use try/catch when checking `localStorage` (#3, @kumavis) + +1.0.1 / 2014-11-25 +================== + + * browser: use 
`console.warn()` for deprecation calls + * browser: more jsdocs + +1.0.0 / 2014-04-30 +================== + + * initial commit diff --git a/node_modules/util-deprecate/LICENSE b/node_modules/util-deprecate/LICENSE new file mode 100644 index 0000000..6a60e8c --- /dev/null +++ b/node_modules/util-deprecate/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/README.md b/node_modules/util-deprecate/README.md new file mode 100644 index 0000000..75622fa --- /dev/null +++ b/node_modules/util-deprecate/README.md @@ -0,0 +1,53 @@ +util-deprecate +============== +### The Node.js `util.deprecate()` function with browser support + +In Node.js, this module simply re-exports the `util.deprecate()` function. + +In the web browser (i.e. via browserify), a browser-specific implementation +of the `util.deprecate()` function is used. 
+ + +## API + +A `deprecate()` function is the only thing exposed by this module. + +``` javascript +// setup: +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); + + +// users see: +foo(); +// foo() is deprecated, use bar() instead +foo(); +foo(); +``` + + +## License + +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/browser.js b/node_modules/util-deprecate/browser.js new file mode 100644 index 0000000..549ae2f --- /dev/null +++ b/node_modules/util-deprecate/browser.js @@ -0,0 +1,67 @@ + +/** + * Module exports. + */ + +module.exports = deprecate; + +/** + * Mark that a method should not be used. + * Returns a modified function which warns once by default. + * + * If `localStorage.noDeprecation = true` is set, then it is a no-op. + * + * If `localStorage.throwDeprecation = true` is set, then deprecated functions + * will throw an Error when invoked. 
+ * + * If `localStorage.traceDeprecation = true` is set, then deprecated functions + * will invoke `console.trace()` instead of `console.error()`. + * + * @param {Function} fn - the function to deprecate + * @param {String} msg - the string to print to the console when `fn` is invoked + * @returns {Function} a new "deprecated" version of `fn` + * @api public + */ + +function deprecate (fn, msg) { + if (config('noDeprecation')) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (config('throwDeprecation')) { + throw new Error(msg); + } else if (config('traceDeprecation')) { + console.trace(msg); + } else { + console.warn(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +} + +/** + * Checks `localStorage` for boolean values for the given `name`. + * + * @param {String} name + * @returns {Boolean} + * @api private + */ + +function config (name) { + // accessing global.localStorage can trigger a DOMException in sandboxed iframes + try { + if (!global.localStorage) return false; + } catch (_) { + return false; + } + var val = global.localStorage[name]; + if (null == val) return false; + return String(val).toLowerCase() === 'true'; +} diff --git a/node_modules/util-deprecate/node.js b/node_modules/util-deprecate/node.js new file mode 100644 index 0000000..5e6fcff --- /dev/null +++ b/node_modules/util-deprecate/node.js @@ -0,0 +1,6 @@ + +/** + * For Node.js, simply re-export the core `util.deprecate` function. 
+ */ + +module.exports = require('util').deprecate; diff --git a/node_modules/util-deprecate/package.json b/node_modules/util-deprecate/package.json new file mode 100644 index 0000000..2e79f89 --- /dev/null +++ b/node_modules/util-deprecate/package.json @@ -0,0 +1,27 @@ +{ + "name": "util-deprecate", + "version": "1.0.2", + "description": "The Node.js `util.deprecate()` function with browser support", + "main": "node.js", + "browser": "browser.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" + }, + "keywords": [ + "util", + "deprecate", + "browserify", + "browser", + "node" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/util-deprecate/issues" + }, + "homepage": "https://github.com/TooTallNate/util-deprecate" +} diff --git a/node_modules/webidl-conversions/LICENSE.md b/node_modules/webidl-conversions/LICENSE.md new file mode 100644 index 0000000..d4a994f --- /dev/null +++ b/node_modules/webidl-conversions/LICENSE.md @@ -0,0 +1,12 @@ +# The BSD 2-Clause License + +Copyright (c) 2014, Domenic Denicola +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/node_modules/webidl-conversions/README.md b/node_modules/webidl-conversions/README.md new file mode 100644 index 0000000..3657890 --- /dev/null +++ b/node_modules/webidl-conversions/README.md @@ -0,0 +1,53 @@ +# WebIDL Type Conversions on JavaScript Values + +This package implements, in JavaScript, the algorithms to convert a given JavaScript value according to a given [WebIDL](http://heycam.github.io/webidl/) [type](http://heycam.github.io/webidl/#idl-types). + +The goal is that you should be able to write code like + +```js +const conversions = require("webidl-conversions"); + +function doStuff(x, y) { + x = conversions["boolean"](x); + y = conversions["unsigned long"](y); + // actual algorithm code here +} +``` + +and your function `doStuff` will behave the same as a WebIDL operation declared as + +```webidl +void doStuff(boolean x, unsigned long y); +``` + +## API + +This package's main module's default export is an object with a variety of methods, each corresponding to a different WebIDL type. Each method, when invoked on a JavaScript value, will give back the new JavaScript value that results after passing through the WebIDL conversion rules. (See below for more details on what that means.) Alternately, the method could throw an error, if the WebIDL algorithm is specified to do so: for example `conversions["float"](NaN)` [will throw a `TypeError`](http://heycam.github.io/webidl/#es-float). 
+ +## Status + +All of the numeric types are implemented (float being implemented as double) and some others are as well - check the source for all of them. This list will grow over time in service of the [HTML as Custom Elements](https://github.com/dglazkov/html-as-custom-elements) project, but in the meantime, pull requests welcome! + +I'm not sure yet what the strategy will be for modifiers, e.g. [`[Clamp]`](http://heycam.github.io/webidl/#Clamp). Maybe something like `conversions["unsigned long"](x, { clamp: true })`? We'll see. + +We might also want to extend the API to give better error messages, e.g. "Argument 1 of HTMLMediaElement.fastSeek is not a finite floating-point value" instead of "Argument is not a finite floating-point value." This would require passing in more information to the conversion functions than we currently do. + +## Background + +What's actually going on here, conceptually, is pretty weird. Let's try to explain. + +WebIDL, as part of its madness-inducing design, has its own type system. When people write algorithms in web platform specs, they usually operate on WebIDL values, i.e. instances of WebIDL types. For example, if they were specifying the algorithm for our `doStuff` operation above, they would treat `x` as a WebIDL value of [WebIDL type `boolean`](http://heycam.github.io/webidl/#idl-boolean). Crucially, they would _not_ treat `x` as a JavaScript variable whose value is either the JavaScript `true` or `false`. They're instead working in a different type system altogether, with its own rules. + +Separately from its type system, WebIDL defines a ["binding"](http://heycam.github.io/webidl/#ecmascript-binding) of the type system into JavaScript. This contains rules like: when you pass a JavaScript value to the JavaScript method that manifests a given WebIDL operation, how does that get converted into a WebIDL value? For example, a JavaScript `true` passed in the position of a WebIDL `boolean` argument becomes a WebIDL `true`. 
But, a JavaScript `true` passed in the position of a [WebIDL `unsigned long`](http://heycam.github.io/webidl/#idl-unsigned-long) becomes a WebIDL `1`. And so on. + +Finally, we have the actual implementation code. This is usually C++, although these days [some smart people are using Rust](https://github.com/servo/servo). The implementation, of course, has its own type system. So when they implement the WebIDL algorithms, they don't actually use WebIDL values, since those aren't "real" outside of specs. Instead, implementations apply the WebIDL binding rules in such a way as to convert incoming JavaScript values into C++ values. For example, if code in the browser called `doStuff(true, true)`, then the implementation code would eventually receive a C++ `bool` containing `true` and a C++ `uint32_t` containing `1`. + +The upside of all this is that implementations can abstract all the conversion logic away, letting WebIDL handle it, and focus on implementing the relevant methods in C++ with values of the correct type already provided. That is payoff of WebIDL, in a nutshell. + +And getting to that payoff is the goal of _this_ project—but for JavaScript implementations, instead of C++ ones. That is, this library is designed to make it easier for JavaScript developers to write functions that behave like a given WebIDL operation. So conceptually, the conversion pipeline, which in its general form is JavaScript values ↦ WebIDL values ↦ implementation-language values, in this case becomes JavaScript values ↦ WebIDL values ↦ JavaScript values. And that intermediate step is where all the logic is performed: a JavaScript `true` becomes a WebIDL `1` in an unsigned long context, which then becomes a JavaScript `1`. + +## Don't Use This + +Seriously, why would you ever use this? You really shouldn't. WebIDL is … not great, and you shouldn't be emulating its semantics. 
If you're looking for a generic argument-processing library, you should find one with better rules than those from WebIDL. In general, your JavaScript should not be trying to become more like WebIDL; if anything, we should fix WebIDL to make it more like JavaScript. + +The _only_ people who should use this are those trying to create faithful implementations (or polyfills) of web platform interfaces defined in WebIDL. diff --git a/node_modules/webidl-conversions/lib/index.js b/node_modules/webidl-conversions/lib/index.js new file mode 100644 index 0000000..c5153a3 --- /dev/null +++ b/node_modules/webidl-conversions/lib/index.js @@ -0,0 +1,189 @@ +"use strict"; + +var conversions = {}; +module.exports = conversions; + +function sign(x) { + return x < 0 ? -1 : 1; +} + +function evenRound(x) { + // Round x to the nearest integer, choosing the even integer if it lies halfway between two. + if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor) + return Math.floor(x); + } else { + return Math.round(x); + } +} + +function createNumberConversion(bitLength, typeOpts) { + if (!typeOpts.unsigned) { + --bitLength; + } + const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength); + const upperBound = Math.pow(2, bitLength) - 1; + + const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength); + const moduloBound = typeOpts.moduloBitLength ? 
Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1); + + return function(V, opts) { + if (!opts) opts = {}; + + let x = +V; + + if (opts.enforceRange) { + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite number"); + } + + x = sign(x) * Math.floor(Math.abs(x)); + if (x < lowerBound || x > upperBound) { + throw new TypeError("Argument is not in byte range"); + } + + return x; + } + + if (!isNaN(x) && opts.clamp) { + x = evenRound(x); + + if (x < lowerBound) x = lowerBound; + if (x > upperBound) x = upperBound; + return x; + } + + if (!Number.isFinite(x) || x === 0) { + return 0; + } + + x = sign(x) * Math.floor(Math.abs(x)); + x = x % moduloVal; + + if (!typeOpts.unsigned && x >= moduloBound) { + return x - moduloVal; + } else if (typeOpts.unsigned) { + if (x < 0) { + x += moduloVal; + } else if (x === -0) { // don't return negative zero + return 0; + } + } + + return x; + } +} + +conversions["void"] = function () { + return undefined; +}; + +conversions["boolean"] = function (val) { + return !!val; +}; + +conversions["byte"] = createNumberConversion(8, { unsigned: false }); +conversions["octet"] = createNumberConversion(8, { unsigned: true }); + +conversions["short"] = createNumberConversion(16, { unsigned: false }); +conversions["unsigned short"] = createNumberConversion(16, { unsigned: true }); + +conversions["long"] = createNumberConversion(32, { unsigned: false }); +conversions["unsigned long"] = createNumberConversion(32, { unsigned: true }); + +conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 }); +conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 }); + +conversions["double"] = function (V) { + const x = +V; + + if (!Number.isFinite(x)) { + throw new TypeError("Argument is not a finite floating-point value"); + } + + return x; +}; + +conversions["unrestricted double"] = function (V) { + const x = +V; + + if (isNaN(x)) { + 
throw new TypeError("Argument is NaN"); + } + + return x; +}; + +// not quite valid, but good enough for JS +conversions["float"] = conversions["double"]; +conversions["unrestricted float"] = conversions["unrestricted double"]; + +conversions["DOMString"] = function (V, opts) { + if (!opts) opts = {}; + + if (opts.treatNullAsEmptyString && V === null) { + return ""; + } + + return String(V); +}; + +conversions["ByteString"] = function (V, opts) { + const x = String(V); + let c = undefined; + for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) { + if (c > 255) { + throw new TypeError("Argument is not a valid bytestring"); + } + } + + return x; +}; + +conversions["USVString"] = function (V) { + const S = String(V); + const n = S.length; + const U = []; + for (let i = 0; i < n; ++i) { + const c = S.charCodeAt(i); + if (c < 0xD800 || c > 0xDFFF) { + U.push(String.fromCodePoint(c)); + } else if (0xDC00 <= c && c <= 0xDFFF) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + if (i === n - 1) { + U.push(String.fromCodePoint(0xFFFD)); + } else { + const d = S.charCodeAt(i + 1); + if (0xDC00 <= d && d <= 0xDFFF) { + const a = c & 0x3FF; + const b = d & 0x3FF; + U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b)); + ++i; + } else { + U.push(String.fromCodePoint(0xFFFD)); + } + } + } + } + + return U.join(''); +}; + +conversions["Date"] = function (V, opts) { + if (!(V instanceof Date)) { + throw new TypeError("Argument is not a Date object"); + } + if (isNaN(V)) { + return undefined; + } + + return V; +}; + +conversions["RegExp"] = function (V, opts) { + if (!(V instanceof RegExp)) { + V = new RegExp(V); + } + + return V; +}; diff --git a/node_modules/webidl-conversions/package.json b/node_modules/webidl-conversions/package.json new file mode 100644 index 0000000..c31bc07 --- /dev/null +++ b/node_modules/webidl-conversions/package.json @@ -0,0 +1,23 @@ +{ + "name": "webidl-conversions", + "version": "3.0.1", + "description": "Implements the WebIDL algorithms 
for converting to and from JavaScript values", + "main": "lib/index.js", + "scripts": { + "test": "mocha test/*.js" + }, + "repository": "jsdom/webidl-conversions", + "keywords": [ + "webidl", + "web", + "types" + ], + "files": [ + "lib/" + ], + "author": "Domenic Denicola (https://domenic.me/)", + "license": "BSD-2-Clause", + "devDependencies": { + "mocha": "^1.21.4" + } +} diff --git a/node_modules/whatwg-url/LICENSE.txt b/node_modules/whatwg-url/LICENSE.txt new file mode 100644 index 0000000..54dfac3 --- /dev/null +++ b/node_modules/whatwg-url/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015–2016 Sebastian Mayr + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/whatwg-url/README.md b/node_modules/whatwg-url/README.md new file mode 100644 index 0000000..4347a7f --- /dev/null +++ b/node_modules/whatwg-url/README.md @@ -0,0 +1,67 @@ +# whatwg-url + +whatwg-url is a full implementation of the WHATWG [URL Standard](https://url.spec.whatwg.org/). 
It can be used standalone, but it also exposes a lot of the internal algorithms that are useful for integrating a URL parser into a project like [jsdom](https://github.com/tmpvar/jsdom). + +## Current Status + +whatwg-url is currently up to date with the URL spec up to commit [a62223](https://github.com/whatwg/url/commit/a622235308342c9adc7fc2fd1659ff059f7d5e2a). + +## API + +### The `URL` Constructor + +The main API is the [`URL`](https://url.spec.whatwg.org/#url) export, which follows the spec's behavior in all ways (including e.g. `USVString` conversion). Most consumers of this library will want to use this. + +### Low-level URL Standard API + +The following methods are exported for use by places like jsdom that need to implement things like [`HTMLHyperlinkElementUtils`](https://html.spec.whatwg.org/#htmlhyperlinkelementutils). They operate on or return an "internal URL" or ["URL record"](https://url.spec.whatwg.org/#concept-url) type. + +- [URL parser](https://url.spec.whatwg.org/#concept-url-parser): `parseURL(input, { baseURL, encodingOverride })` +- [Basic URL parser](https://url.spec.whatwg.org/#concept-basic-url-parser): `basicURLParse(input, { baseURL, encodingOverride, url, stateOverride })` +- [URL serializer](https://url.spec.whatwg.org/#concept-url-serializer): `serializeURL(urlRecord, excludeFragment)` +- [Host serializer](https://url.spec.whatwg.org/#concept-host-serializer): `serializeHost(hostFromURLRecord)` +- [Serialize an integer](https://url.spec.whatwg.org/#serialize-an-integer): `serializeInteger(number)` +- [Origin](https://url.spec.whatwg.org/#concept-url-origin) [serializer](https://html.spec.whatwg.org/multipage/browsers.html#serialization-of-an-origin): `serializeURLOrigin(urlRecord)` +- [Set the username](https://url.spec.whatwg.org/#set-the-username): `setTheUsername(urlRecord, usernameString)` +- [Set the password](https://url.spec.whatwg.org/#set-the-password): `setThePassword(urlRecord, passwordString)` +- [Cannot have a 
username/password/port](https://url.spec.whatwg.org/#cannot-have-a-username-password-port): `cannotHaveAUsernamePasswordPort(urlRecord)` + +The `stateOverride` parameter is one of the following strings: + +- [`"scheme start"`](https://url.spec.whatwg.org/#scheme-start-state) +- [`"scheme"`](https://url.spec.whatwg.org/#scheme-state) +- [`"no scheme"`](https://url.spec.whatwg.org/#no-scheme-state) +- [`"special relative or authority"`](https://url.spec.whatwg.org/#special-relative-or-authority-state) +- [`"path or authority"`](https://url.spec.whatwg.org/#path-or-authority-state) +- [`"relative"`](https://url.spec.whatwg.org/#relative-state) +- [`"relative slash"`](https://url.spec.whatwg.org/#relative-slash-state) +- [`"special authority slashes"`](https://url.spec.whatwg.org/#special-authority-slashes-state) +- [`"special authority ignore slashes"`](https://url.spec.whatwg.org/#special-authority-ignore-slashes-state) +- [`"authority"`](https://url.spec.whatwg.org/#authority-state) +- [`"host"`](https://url.spec.whatwg.org/#host-state) +- [`"hostname"`](https://url.spec.whatwg.org/#hostname-state) +- [`"port"`](https://url.spec.whatwg.org/#port-state) +- [`"file"`](https://url.spec.whatwg.org/#file-state) +- [`"file slash"`](https://url.spec.whatwg.org/#file-slash-state) +- [`"file host"`](https://url.spec.whatwg.org/#file-host-state) +- [`"path start"`](https://url.spec.whatwg.org/#path-start-state) +- [`"path"`](https://url.spec.whatwg.org/#path-state) +- [`"cannot-be-a-base-URL path"`](https://url.spec.whatwg.org/#cannot-be-a-base-url-path-state) +- [`"query"`](https://url.spec.whatwg.org/#query-state) +- [`"fragment"`](https://url.spec.whatwg.org/#fragment-state) + +The URL record type has the following API: + +- [`scheme`](https://url.spec.whatwg.org/#concept-url-scheme) +- [`username`](https://url.spec.whatwg.org/#concept-url-username) +- [`password`](https://url.spec.whatwg.org/#concept-url-password) +- [`host`](https://url.spec.whatwg.org/#concept-url-host) 
+- [`port`](https://url.spec.whatwg.org/#concept-url-port) +- [`path`](https://url.spec.whatwg.org/#concept-url-path) (as an array) +- [`query`](https://url.spec.whatwg.org/#concept-url-query) +- [`fragment`](https://url.spec.whatwg.org/#concept-url-fragment) +- [`cannotBeABaseURL`](https://url.spec.whatwg.org/#url-cannot-be-a-base-url-flag) (as a boolean) + +These properties should be treated with care, as in general changing them will cause the URL record to be in an inconsistent state until the appropriate invocation of `basicURLParse` is used to fix it up. You can see examples of this in the URL Standard, where there are many step sequences like "4. Set context object’s url’s fragment to the empty string. 5. Basic URL parse _input_ with context object’s url as _url_ and fragment state as _state override_." In between those two steps, a URL record is in an unusable state. + +The return value of "failure" in the spec is represented by the string `"failure"`. That is, functions like `parseURL` and `basicURLParse` can return _either_ a URL record _or_ the string `"failure"`. 
diff --git a/node_modules/whatwg-url/lib/URL-impl.js b/node_modules/whatwg-url/lib/URL-impl.js new file mode 100644 index 0000000..dc7452c --- /dev/null +++ b/node_modules/whatwg-url/lib/URL-impl.js @@ -0,0 +1,200 @@ +"use strict"; +const usm = require("./url-state-machine"); + +exports.implementation = class URLImpl { + constructor(constructorArgs) { + const url = constructorArgs[0]; + const base = constructorArgs[1]; + + let parsedBase = null; + if (base !== undefined) { + parsedBase = usm.basicURLParse(base); + if (parsedBase === "failure") { + throw new TypeError("Invalid base URL"); + } + } + + const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase }); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } + + this._url = parsedURL; + + // TODO: query stuff + } + + get href() { + return usm.serializeURL(this._url); + } + + set href(v) { + const parsedURL = usm.basicURLParse(v); + if (parsedURL === "failure") { + throw new TypeError("Invalid URL"); + } + + this._url = parsedURL; + } + + get origin() { + return usm.serializeURLOrigin(this._url); + } + + get protocol() { + return this._url.scheme + ":"; + } + + set protocol(v) { + usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" }); + } + + get username() { + return this._url.username; + } + + set username(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + usm.setTheUsername(this._url, v); + } + + get password() { + return this._url.password; + } + + set password(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + usm.setThePassword(this._url, v); + } + + get host() { + const url = this._url; + + if (url.host === null) { + return ""; + } + + if (url.port === null) { + return usm.serializeHost(url.host); + } + + return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port); + } + + set host(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + usm.basicURLParse(v, { url: this._url, 
stateOverride: "host" }); + } + + get hostname() { + if (this._url.host === null) { + return ""; + } + + return usm.serializeHost(this._url.host); + } + + set hostname(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" }); + } + + get port() { + if (this._url.port === null) { + return ""; + } + + return usm.serializeInteger(this._url.port); + } + + set port(v) { + if (usm.cannotHaveAUsernamePasswordPort(this._url)) { + return; + } + + if (v === "") { + this._url.port = null; + } else { + usm.basicURLParse(v, { url: this._url, stateOverride: "port" }); + } + } + + get pathname() { + if (this._url.cannotBeABaseURL) { + return this._url.path[0]; + } + + if (this._url.path.length === 0) { + return ""; + } + + return "/" + this._url.path.join("/"); + } + + set pathname(v) { + if (this._url.cannotBeABaseURL) { + return; + } + + this._url.path = []; + usm.basicURLParse(v, { url: this._url, stateOverride: "path start" }); + } + + get search() { + if (this._url.query === null || this._url.query === "") { + return ""; + } + + return "?" + this._url.query; + } + + set search(v) { + // TODO: query stuff + + const url = this._url; + + if (v === "") { + url.query = null; + return; + } + + const input = v[0] === "?" ? v.substring(1) : v; + url.query = ""; + usm.basicURLParse(input, { url, stateOverride: "query" }); + } + + get hash() { + if (this._url.fragment === null || this._url.fragment === "") { + return ""; + } + + return "#" + this._url.fragment; + } + + set hash(v) { + if (v === "") { + this._url.fragment = null; + return; + } + + const input = v[0] === "#" ? 
v.substring(1) : v; + this._url.fragment = ""; + usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" }); + } + + toJSON() { + return this.href; + } +}; diff --git a/node_modules/whatwg-url/lib/URL.js b/node_modules/whatwg-url/lib/URL.js new file mode 100644 index 0000000..78c7207 --- /dev/null +++ b/node_modules/whatwg-url/lib/URL.js @@ -0,0 +1,196 @@ +"use strict"; + +const conversions = require("webidl-conversions"); +const utils = require("./utils.js"); +const Impl = require(".//URL-impl.js"); + +const impl = utils.implSymbol; + +function URL(url) { + if (!this || this[impl] || !(this instanceof URL)) { + throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function."); + } + if (arguments.length < 1) { + throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present."); + } + const args = []; + for (let i = 0; i < arguments.length && i < 2; ++i) { + args[i] = arguments[i]; + } + args[0] = conversions["USVString"](args[0]); + if (args[1] !== undefined) { + args[1] = conversions["USVString"](args[1]); + } + + module.exports.setup(this, args); +} + +URL.prototype.toJSON = function toJSON() { + if (!this || !module.exports.is(this)) { + throw new TypeError("Illegal invocation"); + } + const args = []; + for (let i = 0; i < arguments.length && i < 0; ++i) { + args[i] = arguments[i]; + } + return this[impl].toJSON.apply(this[impl], args); +}; +Object.defineProperty(URL.prototype, "href", { + get() { + return this[impl].href; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].href = V; + }, + enumerable: true, + configurable: true +}); + +URL.prototype.toString = function () { + if (!this || !module.exports.is(this)) { + throw new TypeError("Illegal invocation"); + } + return this.href; +}; + +Object.defineProperty(URL.prototype, "origin", { + get() { + return this[impl].origin; + }, + enumerable: true, + 
configurable: true +}); + +Object.defineProperty(URL.prototype, "protocol", { + get() { + return this[impl].protocol; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].protocol = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "username", { + get() { + return this[impl].username; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].username = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "password", { + get() { + return this[impl].password; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].password = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "host", { + get() { + return this[impl].host; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].host = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "hostname", { + get() { + return this[impl].hostname; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].hostname = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "port", { + get() { + return this[impl].port; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].port = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "pathname", { + get() { + return this[impl].pathname; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].pathname = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "search", { + get() { + return this[impl].search; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].search = V; + }, + enumerable: true, + configurable: true +}); + +Object.defineProperty(URL.prototype, "hash", { + get() { + return this[impl].hash; + }, + set(V) { + V = conversions["USVString"](V); + this[impl].hash = V; + }, + enumerable: true, + configurable: true +}); + + 
+module.exports = { + is(obj) { + return !!obj && obj[impl] instanceof Impl.implementation; + }, + create(constructorArgs, privateData) { + let obj = Object.create(URL.prototype); + this.setup(obj, constructorArgs, privateData); + return obj; + }, + setup(obj, constructorArgs, privateData) { + if (!privateData) privateData = {}; + privateData.wrapper = obj; + + obj[impl] = new Impl.implementation(constructorArgs, privateData); + obj[impl][utils.wrapperSymbol] = obj; + }, + interface: URL, + expose: { + Window: { URL: URL }, + Worker: { URL: URL } + } +}; + diff --git a/node_modules/whatwg-url/lib/public-api.js b/node_modules/whatwg-url/lib/public-api.js new file mode 100644 index 0000000..932dcad --- /dev/null +++ b/node_modules/whatwg-url/lib/public-api.js @@ -0,0 +1,11 @@ +"use strict"; + +exports.URL = require("./URL").interface; +exports.serializeURL = require("./url-state-machine").serializeURL; +exports.serializeURLOrigin = require("./url-state-machine").serializeURLOrigin; +exports.basicURLParse = require("./url-state-machine").basicURLParse; +exports.setTheUsername = require("./url-state-machine").setTheUsername; +exports.setThePassword = require("./url-state-machine").setThePassword; +exports.serializeHost = require("./url-state-machine").serializeHost; +exports.serializeInteger = require("./url-state-machine").serializeInteger; +exports.parseURL = require("./url-state-machine").parseURL; diff --git a/node_modules/whatwg-url/lib/url-state-machine.js b/node_modules/whatwg-url/lib/url-state-machine.js new file mode 100644 index 0000000..27d977a --- /dev/null +++ b/node_modules/whatwg-url/lib/url-state-machine.js @@ -0,0 +1,1297 @@ +"use strict"; +const punycode = require("punycode"); +const tr46 = require("tr46"); + +const specialSchemes = { + ftp: 21, + file: null, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443 +}; + +const failure = Symbol("failure"); + +function countSymbols(str) { + return punycode.ucs2.decode(str).length; +} + +function 
at(input, idx) { + const c = input[idx]; + return isNaN(c) ? undefined : String.fromCodePoint(c); +} + +function isASCIIDigit(c) { + return c >= 0x30 && c <= 0x39; +} + +function isASCIIAlpha(c) { + return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); +} + +function isASCIIAlphanumeric(c) { + return isASCIIAlpha(c) || isASCIIDigit(c); +} + +function isASCIIHex(c) { + return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); +} + +function isSingleDot(buffer) { + return buffer === "." || buffer.toLowerCase() === "%2e"; +} + +function isDoubleDot(buffer) { + buffer = buffer.toLowerCase(); + return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e"; +} + +function isWindowsDriveLetterCodePoints(cp1, cp2) { + return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); +} + +function isWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); +} + +function isNormalizedWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; +} + +function containsForbiddenHostCodePoint(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function containsForbiddenHostCodePointExcludingPercent(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function isSpecialScheme(scheme) { + return specialSchemes[scheme] !== undefined; +} + +function isSpecial(url) { + return isSpecialScheme(url.scheme); +} + +function defaultPort(scheme) { + return specialSchemes[scheme]; +} + +function percentEncode(c) { + let hex = c.toString(16).toUpperCase(); + if (hex.length === 1) { + hex = "0" + hex; + } + + return "%" + hex; +} + +function utf8PercentEncode(c) { + const buf = new Buffer(c); + + let str = ""; + + for (let i = 0; i < buf.length; ++i) { + str += percentEncode(buf[i]); + } + 
+ return str; +} + +function utf8PercentDecode(str) { + const input = new Buffer(str); + const output = []; + for (let i = 0; i < input.length; ++i) { + if (input[i] !== 37) { + output.push(input[i]); + } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { + output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); + i += 2; + } else { + output.push(input[i]); + } + } + return new Buffer(output).toString(); +} + +function isC0ControlPercentEncode(c) { + return c <= 0x1F || c > 0x7E; +} + +const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); +function isPathPercentEncode(c) { + return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); +} + +const extraUserinfoPercentEncodeSet = + new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); +function isUserinfoPercentEncode(c) { + return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); +} + +function percentEncodeChar(c, encodeSetPredicate) { + const cStr = String.fromCodePoint(c); + + if (encodeSetPredicate(c)) { + return utf8PercentEncode(cStr); + } + + return cStr; +} + +function parseIPv4Number(input) { + let R = 10; + + if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { + input = input.substring(2); + R = 16; + } else if (input.length >= 2 && input.charAt(0) === "0") { + input = input.substring(1); + R = 8; + } + + if (input === "") { + return 0; + } + + const regex = R === 10 ? /[^0-9]/ : (R === 16 ? 
/[^0-9A-Fa-f]/ : /[^0-7]/); + if (regex.test(input)) { + return failure; + } + + return parseInt(input, R); +} + +function parseIPv4(input) { + const parts = input.split("."); + if (parts[parts.length - 1] === "") { + if (parts.length > 1) { + parts.pop(); + } + } + + if (parts.length > 4) { + return input; + } + + const numbers = []; + for (const part of parts) { + if (part === "") { + return input; + } + const n = parseIPv4Number(part); + if (n === failure) { + return input; + } + + numbers.push(n); + } + + for (let i = 0; i < numbers.length - 1; ++i) { + if (numbers[i] > 255) { + return failure; + } + } + if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { + return failure; + } + + let ipv4 = numbers.pop(); + let counter = 0; + + for (const n of numbers) { + ipv4 += n * Math.pow(256, 3 - counter); + ++counter; + } + + return ipv4; +} + +function serializeIPv4(address) { + let output = ""; + let n = address; + + for (let i = 1; i <= 4; ++i) { + output = String(n % 256) + output; + if (i !== 4) { + output = "." 
+ output; + } + n = Math.floor(n / 256); + } + + return output; +} + +function parseIPv6(input) { + const address = [0, 0, 0, 0, 0, 0, 0, 0]; + let pieceIndex = 0; + let compress = null; + let pointer = 0; + + input = punycode.ucs2.decode(input); + + if (input[pointer] === 58) { + if (input[pointer + 1] !== 58) { + return failure; + } + + pointer += 2; + ++pieceIndex; + compress = pieceIndex; + } + + while (pointer < input.length) { + if (pieceIndex === 8) { + return failure; + } + + if (input[pointer] === 58) { + if (compress !== null) { + return failure; + } + ++pointer; + ++pieceIndex; + compress = pieceIndex; + continue; + } + + let value = 0; + let length = 0; + + while (length < 4 && isASCIIHex(input[pointer])) { + value = value * 0x10 + parseInt(at(input, pointer), 16); + ++pointer; + ++length; + } + + if (input[pointer] === 46) { + if (length === 0) { + return failure; + } + + pointer -= length; + + if (pieceIndex > 6) { + return failure; + } + + let numbersSeen = 0; + + while (input[pointer] !== undefined) { + let ipv4Piece = null; + + if (numbersSeen > 0) { + if (input[pointer] === 46 && numbersSeen < 4) { + ++pointer; + } else { + return failure; + } + } + + if (!isASCIIDigit(input[pointer])) { + return failure; + } + + while (isASCIIDigit(input[pointer])) { + const number = parseInt(at(input, pointer)); + if (ipv4Piece === null) { + ipv4Piece = number; + } else if (ipv4Piece === 0) { + return failure; + } else { + ipv4Piece = ipv4Piece * 10 + number; + } + if (ipv4Piece > 255) { + return failure; + } + ++pointer; + } + + address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; + + ++numbersSeen; + + if (numbersSeen === 2 || numbersSeen === 4) { + ++pieceIndex; + } + } + + if (numbersSeen !== 4) { + return failure; + } + + break; + } else if (input[pointer] === 58) { + ++pointer; + if (input[pointer] === undefined) { + return failure; + } + } else if (input[pointer] !== undefined) { + return failure; + } + + address[pieceIndex] = value; + 
++pieceIndex; + } + + if (compress !== null) { + let swaps = pieceIndex - compress; + pieceIndex = 7; + while (pieceIndex !== 0 && swaps > 0) { + const temp = address[compress + swaps - 1]; + address[compress + swaps - 1] = address[pieceIndex]; + address[pieceIndex] = temp; + --pieceIndex; + --swaps; + } + } else if (compress === null && pieceIndex !== 8) { + return failure; + } + + return address; +} + +function serializeIPv6(address) { + let output = ""; + const seqResult = findLongestZeroSequence(address); + const compress = seqResult.idx; + let ignore0 = false; + + for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { + if (ignore0 && address[pieceIndex] === 0) { + continue; + } else if (ignore0) { + ignore0 = false; + } + + if (compress === pieceIndex) { + const separator = pieceIndex === 0 ? "::" : ":"; + output += separator; + ignore0 = true; + continue; + } + + output += address[pieceIndex].toString(16); + + if (pieceIndex !== 7) { + output += ":"; + } + } + + return output; +} + +function parseHost(input, isSpecialArg) { + if (input[0] === "[") { + if (input[input.length - 1] !== "]") { + return failure; + } + + return parseIPv6(input.substring(1, input.length - 1)); + } + + if (!isSpecialArg) { + return parseOpaqueHost(input); + } + + const domain = utf8PercentDecode(input); + const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); + if (asciiDomain === null) { + return failure; + } + + if (containsForbiddenHostCodePoint(asciiDomain)) { + return failure; + } + + const ipv4Host = parseIPv4(asciiDomain); + if (typeof ipv4Host === "number" || ipv4Host === failure) { + return ipv4Host; + } + + return asciiDomain; +} + +function parseOpaqueHost(input) { + if (containsForbiddenHostCodePointExcludingPercent(input)) { + return failure; + } + + let output = ""; + const decoded = punycode.ucs2.decode(input); + for (let i = 0; i < decoded.length; ++i) { + output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); 
+ } + return output; +} + +function findLongestZeroSequence(arr) { + let maxIdx = null; + let maxLen = 1; // only find elements > 1 + let currStart = null; + let currLen = 0; + + for (let i = 0; i < arr.length; ++i) { + if (arr[i] !== 0) { + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + currStart = null; + currLen = 0; + } else { + if (currStart === null) { + currStart = i; + } + ++currLen; + } + } + + // if trailing zeros + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + return { + idx: maxIdx, + len: maxLen + }; +} + +function serializeHost(host) { + if (typeof host === "number") { + return serializeIPv4(host); + } + + // IPv6 serializer + if (host instanceof Array) { + return "[" + serializeIPv6(host) + "]"; + } + + return host; +} + +function trimControlChars(url) { + return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); +} + +function trimTabAndNewline(url) { + return url.replace(/\u0009|\u000A|\u000D/g, ""); +} + +function shortenPath(url) { + const path = url.path; + if (path.length === 0) { + return; + } + if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { + return; + } + + path.pop(); +} + +function includesCredentials(url) { + return url.username !== "" || url.password !== ""; +} + +function cannotHaveAUsernamePasswordPort(url) { + return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; +} + +function isNormalizedWindowsDriveLetter(string) { + return /^[A-Za-z]:$/.test(string); +} + +function URLStateMachine(input, base, encodingOverride, url, stateOverride) { + this.pointer = 0; + this.input = input; + this.base = base || null; + this.encodingOverride = encodingOverride || "utf-8"; + this.stateOverride = stateOverride; + this.url = url; + this.failure = false; + this.parseError = false; + + if (!this.url) { + this.url = { + scheme: "", + username: "", + password: "", + host: null, + port: null, + 
path: [], + query: null, + fragment: null, + + cannotBeABaseURL: false + }; + + const res = trimControlChars(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + } + + const res = trimTabAndNewline(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + + this.state = stateOverride || "scheme start"; + + this.buffer = ""; + this.atFlag = false; + this.arrFlag = false; + this.passwordTokenSeenFlag = false; + + this.input = punycode.ucs2.decode(this.input); + + for (; this.pointer <= this.input.length; ++this.pointer) { + const c = this.input[this.pointer]; + const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); + + // exec state machine + const ret = this["parse " + this.state](c, cStr); + if (!ret) { + break; // terminate algorithm + } else if (ret === failure) { + this.failure = true; + break; + } + } +} + +URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { + if (isASCIIAlpha(c)) { + this.buffer += cStr.toLowerCase(); + this.state = "scheme"; + } else if (!this.stateOverride) { + this.state = "no scheme"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { + if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { + this.buffer += cStr.toLowerCase(); + } else if (c === 58) { + if (this.stateOverride) { + if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { + return false; + } + + if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { + return false; + } + + if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { + return false; + } + + if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { + return false; + } + } + this.url.scheme = this.buffer; + this.buffer = ""; + if (this.stateOverride) { + return false; + } + if (this.url.scheme === 
"file") { + if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { + this.parseError = true; + } + this.state = "file"; + } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { + this.state = "special relative or authority"; + } else if (isSpecial(this.url)) { + this.state = "special authority slashes"; + } else if (this.input[this.pointer + 1] === 47) { + this.state = "path or authority"; + ++this.pointer; + } else { + this.url.cannotBeABaseURL = true; + this.url.path.push(""); + this.state = "cannot-be-a-base-URL path"; + } + } else if (!this.stateOverride) { + this.buffer = ""; + this.state = "no scheme"; + this.pointer = -1; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { + if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { + return failure; + } else if (this.base.cannotBeABaseURL && c === 35) { + this.url.scheme = this.base.scheme; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.url.cannotBeABaseURL = true; + this.state = "fragment"; + } else if (this.base.scheme === "file") { + this.state = "file"; + --this.pointer; + } else { + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { + if (c === 47) { + this.state = "authority"; + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative"] = 
function parseRelative(c) { + this.url.scheme = this.base.scheme; + if (isNaN(c)) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 47) { + this.state = "relative slash"; + } else if (c === 63) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else if (isSpecial(this.url) && c === 92) { + this.parseError = true; + this.state = "relative slash"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(0, this.base.path.length - 1); + + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { + if (isSpecial(this.url) && (c === 47 || c === 92)) { + if (c === 92) { + this.parseError = true; + } + this.state = "special authority ignore slashes"; + } else if (c === 47) { + this.state = "authority"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { + if (c === 
47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "special authority ignore slashes"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { + if (c !== 47 && c !== 92) { + this.state = "authority"; + --this.pointer; + } else { + this.parseError = true; + } + + return true; +}; + +URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { + if (c === 64) { + this.parseError = true; + if (this.atFlag) { + this.buffer = "%40" + this.buffer; + } + this.atFlag = true; + + // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars + const len = countSymbols(this.buffer); + for (let pointer = 0; pointer < len; ++pointer) { + const codePoint = this.buffer.codePointAt(pointer); + + if (codePoint === 58 && !this.passwordTokenSeenFlag) { + this.passwordTokenSeenFlag = true; + continue; + } + const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); + if (this.passwordTokenSeenFlag) { + this.url.password += encodedCodePoints; + } else { + this.url.username += encodedCodePoints; + } + } + this.buffer = ""; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + if (this.atFlag && this.buffer === "") { + this.parseError = true; + return failure; + } + this.pointer -= countSymbols(this.buffer) + 1; + this.buffer = ""; + this.state = "host"; + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse hostname"] = +URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { + if (this.stateOverride && this.url.scheme === "file") { + --this.pointer; + this.state = "file host"; + } else if (c === 58 && !this.arrFlag) { + if (this.buffer === "") { + this.parseError = true; + 
return failure; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "port"; + if (this.stateOverride === "hostname") { + return false; + } + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + --this.pointer; + if (isSpecial(this.url) && this.buffer === "") { + this.parseError = true; + return failure; + } else if (this.stateOverride && this.buffer === "" && + (includesCredentials(this.url) || this.url.port !== null)) { + this.parseError = true; + return false; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "path start"; + if (this.stateOverride) { + return false; + } + } else { + if (c === 91) { + this.arrFlag = true; + } else if (c === 93) { + this.arrFlag = false; + } + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { + if (isASCIIDigit(c)) { + this.buffer += cStr; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92) || + this.stateOverride) { + if (this.buffer !== "") { + const port = parseInt(this.buffer); + if (port > Math.pow(2, 16) - 1) { + this.parseError = true; + return failure; + } + this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; + this.buffer = ""; + } + if (this.stateOverride) { + return false; + } + this.state = "path start"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); + +URLStateMachine.prototype["parse file"] = function parseFile(c) { + this.url.scheme = "file"; + + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file slash"; + } else if (this.base !== null && this.base.scheme === "file") { + if (isNaN(c)) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 63) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else { + if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points + !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || + (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points + !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + shortenPath(this.url); + } else { + this.parseError = true; + } + + this.state = "path"; + --this.pointer; + } + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file host"; + } else { + if (this.base !== null && this.base.scheme === "file") { + if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { + this.url.path.push(this.base.path[0]); + } else { + this.url.host = 
this.base.host; + } + } + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { + if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { + --this.pointer; + if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { + this.parseError = true; + this.state = "path"; + } else if (this.buffer === "") { + this.url.host = ""; + if (this.stateOverride) { + return false; + } + this.state = "path start"; + } else { + let host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + if (host === "localhost") { + host = ""; + } + this.url.host = host; + + if (this.stateOverride) { + return false; + } + + this.buffer = ""; + this.state = "path start"; + } + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { + if (isSpecial(this.url)) { + if (c === 92) { + this.parseError = true; + } + this.state = "path"; + + if (c !== 47 && c !== 92) { + --this.pointer; + } + } else if (!this.stateOverride && c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (!this.stateOverride && c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else if (c !== undefined) { + this.state = "path"; + if (c !== 47) { + --this.pointer; + } + } + + return true; +}; + +URLStateMachine.prototype["parse path"] = function parsePath(c) { + if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || + (!this.stateOverride && (c === 63 || c === 35))) { + if (isSpecial(this.url) && c === 92) { + this.parseError = true; + } + + if (isDoubleDot(this.buffer)) { + shortenPath(this.url); + if (c !== 47 && !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } + } else if (isSingleDot(this.buffer) && c !== 47 && + !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } else if (!isSingleDot(this.buffer)) { + if 
(this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { + if (this.url.host !== "" && this.url.host !== null) { + this.parseError = true; + this.url.host = ""; + } + this.buffer = this.buffer[0] + ":"; + } + this.url.path.push(this.buffer); + } + this.buffer = ""; + if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { + while (this.url.path.length > 1 && this.url.path[0] === "") { + this.parseError = true; + this.url.path.shift(); + } + } + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += percentEncodeChar(c, isPathPercentEncode); + } + + return true; +}; + +URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else { + // TODO: Add: not a URL code point + if (!isNaN(c) && c !== 37) { + this.parseError = true; + } + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + if (!isNaN(c)) { + this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); + } + } + + return true; +}; + +URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { + if (isNaN(c) || (!this.stateOverride && c === 35)) { + if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { + this.encodingOverride = "utf-8"; + } + + const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead + for (let i = 0; i < buffer.length; ++i) { + 
if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || + buffer[i] === 0x3C || buffer[i] === 0x3E) { + this.url.query += percentEncode(buffer[i]); + } else { + this.url.query += String.fromCodePoint(buffer[i]); + } + } + + this.buffer = ""; + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { + if (isNaN(c)) { // do nothing + } else if (c === 0x0) { + this.parseError = true; + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); + } + + return true; +}; + +function serializeURL(url, excludeFragment) { + let output = url.scheme + ":"; + if (url.host !== null) { + output += "//"; + + if (url.username !== "" || url.password !== "") { + output += url.username; + if (url.password !== "") { + output += ":" + url.password; + } + output += "@"; + } + + output += serializeHost(url.host); + + if (url.port !== null) { + output += ":" + url.port; + } + } else if (url.host === null && url.scheme === "file") { + output += "//"; + } + + if (url.cannotBeABaseURL) { + output += url.path[0]; + } else { + for (const string of url.path) { + output += "/" + string; + } + } + + if (url.query !== null) { + output += "?" 
+ url.query; + } + + if (!excludeFragment && url.fragment !== null) { + output += "#" + url.fragment; + } + + return output; +} + +function serializeOrigin(tuple) { + let result = tuple.scheme + "://"; + result += serializeHost(tuple.host); + + if (tuple.port !== null) { + result += ":" + tuple.port; + } + + return result; +} + +module.exports.serializeURL = serializeURL; + +module.exports.serializeURLOrigin = function (url) { + // https://url.spec.whatwg.org/#concept-url-origin + switch (url.scheme) { + case "blob": + try { + return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); + } catch (e) { + // serializing an opaque origin returns "null" + return "null"; + } + case "ftp": + case "gopher": + case "http": + case "https": + case "ws": + case "wss": + return serializeOrigin({ + scheme: url.scheme, + host: url.host, + port: url.port + }); + case "file": + // spec says "exercise to the reader", chrome says "file://" + return "file://"; + default: + // serializing an opaque origin returns "null" + return "null"; + } +}; + +module.exports.basicURLParse = function (input, options) { + if (options === undefined) { + options = {}; + } + + const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); + if (usm.failure) { + return "failure"; + } + + return usm.url; +}; + +module.exports.setTheUsername = function (url, username) { + url.username = ""; + const decoded = punycode.ucs2.decode(username); + for (let i = 0; i < decoded.length; ++i) { + url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.setThePassword = function (url, password) { + url.password = ""; + const decoded = punycode.ucs2.decode(password); + for (let i = 0; i < decoded.length; ++i) { + url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.serializeHost = serializeHost; + +module.exports.cannotHaveAUsernamePasswordPort = 
cannotHaveAUsernamePasswordPort; + +module.exports.serializeInteger = function (integer) { + return String(integer); +}; + +module.exports.parseURL = function (input, options) { + if (options === undefined) { + options = {}; + } + + // We don't handle blobs, so this just delegates: + return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); +}; diff --git a/node_modules/whatwg-url/lib/utils.js b/node_modules/whatwg-url/lib/utils.js new file mode 100644 index 0000000..a562009 --- /dev/null +++ b/node_modules/whatwg-url/lib/utils.js @@ -0,0 +1,20 @@ +"use strict"; + +module.exports.mixin = function mixin(target, source) { + const keys = Object.getOwnPropertyNames(source); + for (let i = 0; i < keys.length; ++i) { + Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); + } +}; + +module.exports.wrapperSymbol = Symbol("wrapper"); +module.exports.implSymbol = Symbol("impl"); + +module.exports.wrapperForImpl = function (impl) { + return impl[module.exports.wrapperSymbol]; +}; + +module.exports.implForWrapper = function (wrapper) { + return wrapper[module.exports.implSymbol]; +}; + diff --git a/node_modules/whatwg-url/package.json b/node_modules/whatwg-url/package.json new file mode 100644 index 0000000..fce35ae --- /dev/null +++ b/node_modules/whatwg-url/package.json @@ -0,0 +1,32 @@ +{ + "name": "whatwg-url", + "version": "5.0.0", + "description": "An implementation of the WHATWG URL Standard's URL API and parsing machinery", + "main": "lib/public-api.js", + "files": [ + "lib/" + ], + "author": "Sebastian Mayr ", + "license": "MIT", + "repository": "jsdom/whatwg-url", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + }, + "devDependencies": { + "eslint": "^2.6.0", + "istanbul": "~0.4.3", + "mocha": "^2.2.4", + "recast": "~0.10.29", + "request": "^2.55.0", + "webidl2js": "^3.0.2" + }, + "scripts": { + "build": "node scripts/transform.js && node 
scripts/convert-idl.js", + "coverage": "istanbul cover node_modules/mocha/bin/_mocha", + "lint": "eslint .", + "prepublish": "npm run build", + "pretest": "node scripts/get-latest-platform-tests.js && npm run build", + "test": "mocha" + } +} diff --git a/node_modules/wide-align/LICENSE b/node_modules/wide-align/LICENSE new file mode 100644 index 0000000..f4be44d --- /dev/null +++ b/node_modules/wide-align/LICENSE @@ -0,0 +1,14 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF +OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + diff --git a/node_modules/wide-align/README.md b/node_modules/wide-align/README.md new file mode 100644 index 0000000..32f1be0 --- /dev/null +++ b/node_modules/wide-align/README.md @@ -0,0 +1,47 @@ +wide-align +---------- + +A wide-character aware text alignment function for use in terminals / on the +console. + +### Usage + +``` +var align = require('wide-align') + +// Note that if you view this on a unicode console, all of the slashes are +// aligned. This is because on a console, all narrow characters are +// an en wide and all wide characters are an em. In browsers, this isn't +// held to and wide characters like "古" can be less than two narrow +// characters even with a fixed width font. 
+ +console.log(align.center('abc', 10)) // ' abc ' +console.log(align.center('古古古', 10)) // ' 古古古 ' +console.log(align.left('abc', 10)) // 'abc ' +console.log(align.left('古古古', 10)) // '古古古 ' +console.log(align.right('abc', 10)) // ' abc' +console.log(align.right('古古古', 10)) // ' 古古古' +``` + +### Functions + +#### `align.center(str, length)` → `str` + +Returns *str* with spaces added to both sides such that that it is *length* +chars long and centered in the spaces. + +#### `align.left(str, length)` → `str` + +Returns *str* with spaces to the right such that it is *length* chars long. + +### `align.right(str, length)` → `str` + +Returns *str* with spaces to the left such that it is *length* chars long. + +### Origins + +These functions were originally taken from +[cliui](https://npmjs.com/package/cliui). Changes include switching to the +MUCH faster pad generation function from +[lodash](https://npmjs.com/package/lodash), making center alignment pad +both sides and adding left alignment. diff --git a/node_modules/wide-align/align.js b/node_modules/wide-align/align.js new file mode 100644 index 0000000..4f94ca4 --- /dev/null +++ b/node_modules/wide-align/align.js @@ -0,0 +1,65 @@ +'use strict' +var stringWidth = require('string-width') + +exports.center = alignCenter +exports.left = alignLeft +exports.right = alignRight + +// lodash's way of generating pad characters. 
+ +function createPadding (width) { + var result = '' + var string = ' ' + var n = width + do { + if (n % 2) { + result += string; + } + n = Math.floor(n / 2); + string += string; + } while (n); + + return result; +} + +function alignLeft (str, width) { + var trimmed = str.trimRight() + if (trimmed.length === 0 && str.length >= width) return str + var padding = '' + var strWidth = stringWidth(trimmed) + + if (strWidth < width) { + padding = createPadding(width - strWidth) + } + + return trimmed + padding +} + +function alignRight (str, width) { + var trimmed = str.trimLeft() + if (trimmed.length === 0 && str.length >= width) return str + var padding = '' + var strWidth = stringWidth(trimmed) + + if (strWidth < width) { + padding = createPadding(width - strWidth) + } + + return padding + trimmed +} + +function alignCenter (str, width) { + var trimmed = str.trim() + if (trimmed.length === 0 && str.length >= width) return str + var padLeft = '' + var padRight = '' + var strWidth = stringWidth(trimmed) + + if (strWidth < width) { + var padLeftBy = parseInt((width - strWidth) / 2, 10) + padLeft = createPadding(padLeftBy) + padRight = createPadding(width - (strWidth + padLeftBy)) + } + + return padLeft + trimmed + padRight +} diff --git a/node_modules/wide-align/package.json b/node_modules/wide-align/package.json new file mode 100644 index 0000000..2dd2707 --- /dev/null +++ b/node_modules/wide-align/package.json @@ -0,0 +1,33 @@ +{ + "name": "wide-align", + "version": "1.1.5", + "description": "A wide-character aware text alignment function for use on the console or with fixed width fonts.", + "main": "align.js", + "scripts": { + "test": "tap --coverage test/*.js" + }, + "keywords": [ + "wide", + "double", + "unicode", + "cjkv", + "pad", + "align" + ], + "author": "Rebecca Turner (http://re-becca.org/)", + "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/iarna/wide-align" + }, + "//": "But not version 5 of string-width, as that's ESM 
only", + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + }, + "devDependencies": { + "tap": "*" + }, + "files": [ + "align.js" + ] +} diff --git a/node_modules/wrappy/LICENSE b/node_modules/wrappy/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/wrappy/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/wrappy/README.md b/node_modules/wrappy/README.md new file mode 100644 index 0000000..98eab25 --- /dev/null +++ b/node_modules/wrappy/README.md @@ -0,0 +1,36 @@ +# wrappy + +Callback wrapping utility + +## USAGE + +```javascript +var wrappy = require("wrappy") + +// var wrapper = wrappy(wrapperFunction) + +// make sure a cb is called only once +// See also: http://npm.im/once for this specific use case +var once = wrappy(function (cb) { + var called = false + return function () { + if (called) return + called = true + return cb.apply(this, arguments) + } +}) + +function printBoo () { + console.log('boo') +} +// has some rando property +printBoo.iAmBooPrinter = true + +var onlyPrintOnce = once(printBoo) + +onlyPrintOnce() // prints 'boo' +onlyPrintOnce() // does nothing + +// random property is retained! 
+assert.equal(onlyPrintOnce.iAmBooPrinter, true) +``` diff --git a/node_modules/wrappy/package.json b/node_modules/wrappy/package.json new file mode 100644 index 0000000..1307520 --- /dev/null +++ b/node_modules/wrappy/package.json @@ -0,0 +1,29 @@ +{ + "name": "wrappy", + "version": "1.0.2", + "description": "Callback wrapping utility", + "main": "wrappy.js", + "files": [ + "wrappy.js" + ], + "directories": { + "test": "test" + }, + "dependencies": {}, + "devDependencies": { + "tap": "^2.3.1" + }, + "scripts": { + "test": "tap --coverage test/*.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/wrappy" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/wrappy/issues" + }, + "homepage": "https://github.com/npm/wrappy" +} diff --git a/node_modules/wrappy/wrappy.js b/node_modules/wrappy/wrappy.js new file mode 100644 index 0000000..bb7e7d6 --- /dev/null +++ b/node_modules/wrappy/wrappy.js @@ -0,0 +1,33 @@ +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. 
+module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} diff --git a/node_modules/yallist/LICENSE b/node_modules/yallist/LICENSE new file mode 100644 index 0000000..19129e3 --- /dev/null +++ b/node_modules/yallist/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/yallist/README.md b/node_modules/yallist/README.md new file mode 100644 index 0000000..f586101 --- /dev/null +++ b/node_modules/yallist/README.md @@ -0,0 +1,204 @@ +# yallist + +Yet Another Linked List + +There are many doubly-linked list implementations like it, but this +one is mine. + +For when an array would be too big, and a Map can't be iterated in +reverse order. 
+ + +[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist) + +## basic usage + +```javascript +var yallist = require('yallist') +var myList = yallist.create([1, 2, 3]) +myList.push('foo') +myList.unshift('bar') +// of course pop() and shift() are there, too +console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo'] +myList.forEach(function (k) { + // walk the list head to tail +}) +myList.forEachReverse(function (k, index, list) { + // walk the list tail to head +}) +var myDoubledList = myList.map(function (k) { + return k + k +}) +// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo'] +// mapReverse is also a thing +var myDoubledListReverse = myList.mapReverse(function (k) { + return k + k +}) // ['foofoo', 6, 4, 2, 'barbar'] + +var reduced = myList.reduce(function (set, entry) { + set += entry + return set +}, 'start') +console.log(reduced) // 'startfoo123bar' +``` + +## api + +The whole API is considered "public". + +Functions with the same name as an Array method work more or less the +same way. + +There's reverse versions of most things because that's the point. + +### Yallist + +Default export, the class that holds and manages a list. + +Call it with either a forEach-able (like an array) or a set of +arguments, to initialize the list. + +The Array-ish methods all act like you'd expect. No magic length, +though, so if you change that it won't automatically prune or add +empty spots. + +### Yallist.create(..) + +Alias for Yallist function. Some people like factories. + +#### yallist.head + +The first node in the list + +#### yallist.tail + +The last node in the list + +#### yallist.length + +The number of nodes in the list. (Change this at your peril. It is +not magic like Array length.) + +#### yallist.toArray() + +Convert the list to an array. 
+ +#### yallist.forEach(fn, [thisp]) + +Call a function on each item in the list. + +#### yallist.forEachReverse(fn, [thisp]) + +Call a function on each item in the list, in reverse order. + +#### yallist.get(n) + +Get the data at position `n` in the list. If you use this a lot, +probably better off just using an Array. + +#### yallist.getReverse(n) + +Get the data at position `n`, counting from the tail. + +#### yallist.map(fn, thisp) + +Create a new Yallist with the result of calling the function on each +item. + +#### yallist.mapReverse(fn, thisp) + +Same as `map`, but in reverse. + +#### yallist.pop() + +Get the data from the list tail, and remove the tail from the list. + +#### yallist.push(item, ...) + +Insert one or more items to the tail of the list. + +#### yallist.reduce(fn, initialValue) + +Like Array.reduce. + +#### yallist.reduceReverse + +Like Array.reduce, but in reverse. + +#### yallist.reverse + +Reverse the list in place. + +#### yallist.shift() + +Get the data from the list head, and remove the head from the list. + +#### yallist.slice([from], [to]) + +Just like Array.slice, but returns a new Yallist. + +#### yallist.sliceReverse([from], [to]) + +Just like yallist.slice, but the result is returned in reverse. + +#### yallist.toArray() + +Create an array representation of the list. + +#### yallist.toArrayReverse() + +Create a reversed array representation of the list. + +#### yallist.unshift(item, ...) + +Insert one or more items to the head of the list. + +#### yallist.unshiftNode(node) + +Move a Node object to the front of the list. (That is, pull it out of +wherever it lives, and make it the new head.) + +If the node belongs to a different list, then that list will remove it +first. + +#### yallist.pushNode(node) + +Move a Node object to the end of the list. (That is, pull it out of +wherever it lives, and make it the new tail.) + +If the node belongs to a list already, then that list will remove it +first. 
+ +#### yallist.removeNode(node) + +Remove a node from the list, preserving referential integrity of head +and tail and other nodes. + +Will throw an error if you try to have a list remove a node that +doesn't belong to it. + +### Yallist.Node + +The class that holds the data and is actually the list. + +Call with `var n = new Node(value, previousNode, nextNode)` + +Note that if you do direct operations on Nodes themselves, it's very +easy to get into weird states where the list is broken. Be careful :) + +#### node.next + +The next node in the list. + +#### node.prev + +The previous node in the list. + +#### node.value + +The data the node contains. + +#### node.list + +The list to which this node belongs. (Null if it does not belong to +any list.) diff --git a/node_modules/yallist/iterator.js b/node_modules/yallist/iterator.js new file mode 100644 index 0000000..d41c97a --- /dev/null +++ b/node_modules/yallist/iterator.js @@ -0,0 +1,8 @@ +'use strict' +module.exports = function (Yallist) { + Yallist.prototype[Symbol.iterator] = function* () { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value + } + } +} diff --git a/node_modules/yallist/package.json b/node_modules/yallist/package.json new file mode 100644 index 0000000..8a08386 --- /dev/null +++ b/node_modules/yallist/package.json @@ -0,0 +1,29 @@ +{ + "name": "yallist", + "version": "4.0.0", + "description": "Yet Another Linked List", + "main": "yallist.js", + "directories": { + "test": "test" + }, + "files": [ + "yallist.js", + "iterator.js" + ], + "dependencies": {}, + "devDependencies": { + "tap": "^12.1.0" + }, + "scripts": { + "test": "tap test/*.js --100", + "preversion": "npm test", + "postversion": "npm publish", + "postpublish": "git push origin --all; git push origin --tags" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/yallist.git" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC" +} diff --git a/node_modules/yallist/yallist.js b/node_modules/yallist/yallist.js new file mode 100644 index 0000000..4e83ab1 --- /dev/null +++ b/node_modules/yallist/yallist.js @@ -0,0 +1,426 @@ +'use strict' +module.exports = Yallist + +Yallist.Node = Node +Yallist.create = Yallist + +function Yallist (list) { + var self = this + if (!(self instanceof Yallist)) { + self = new Yallist() + } + + self.tail = null + self.head = null + self.length = 0 + + if (list && typeof list.forEach === 'function') { + list.forEach(function (item) { + self.push(item) + }) + } else if (arguments.length > 0) { + for (var i = 0, l = arguments.length; i < l; i++) { + self.push(arguments[i]) + } + } + + return self +} + +Yallist.prototype.removeNode = function (node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list') + } + + var next = node.next + var prev = node.prev + + if (next) { + next.prev = prev + } + + if (prev) { + prev.next = next + } + + if (node === this.head) { + this.head = next + } + if (node === this.tail) { + this.tail = prev + } + + node.list.length-- + node.next = null + node.prev = null + node.list = null + + return next +} + +Yallist.prototype.unshiftNode = function (node) { + if (node === this.head) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var head = this.head + node.list = this + node.next = head + if (head) { + head.prev = node + } + + this.head = node + if (!this.tail) { + this.tail = node + } + this.length++ +} + +Yallist.prototype.pushNode = function (node) { + if (node === this.tail) { + return + } + + if (node.list) { + node.list.removeNode(node) + } + + var tail = this.tail + node.list = this + node.prev = tail + if (tail) { + tail.next = node + } + + this.tail = node + if (!this.head) { + this.head = node + } + this.length++ +} + +Yallist.prototype.push = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + 
push(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.unshift = function () { + for (var i = 0, l = arguments.length; i < l; i++) { + unshift(this, arguments[i]) + } + return this.length +} + +Yallist.prototype.pop = function () { + if (!this.tail) { + return undefined + } + + var res = this.tail.value + this.tail = this.tail.prev + if (this.tail) { + this.tail.next = null + } else { + this.head = null + } + this.length-- + return res +} + +Yallist.prototype.shift = function () { + if (!this.head) { + return undefined + } + + var res = this.head.value + this.head = this.head.next + if (this.head) { + this.head.prev = null + } else { + this.tail = null + } + this.length-- + return res +} + +Yallist.prototype.forEach = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.head, i = 0; walker !== null; i++) { + fn.call(thisp, walker.value, i, this) + walker = walker.next + } +} + +Yallist.prototype.forEachReverse = function (fn, thisp) { + thisp = thisp || this + for (var walker = this.tail, i = this.length - 1; walker !== null; i--) { + fn.call(thisp, walker.value, i, this) + walker = walker.prev + } +} + +Yallist.prototype.get = function (n) { + for (var i = 0, walker = this.head; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.next + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.getReverse = function (n) { + for (var i = 0, walker = this.tail; walker !== null && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.prev + } + if (i === n && walker !== null) { + return walker.value + } +} + +Yallist.prototype.map = function (fn, thisp) { + thisp = thisp || this + var res = new Yallist() + for (var walker = this.head; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.next + } + return res +} + +Yallist.prototype.mapReverse = function (fn, thisp) { + thisp = thisp || this + 
var res = new Yallist() + for (var walker = this.tail; walker !== null;) { + res.push(fn.call(thisp, walker.value, this)) + walker = walker.prev + } + return res +} + +Yallist.prototype.reduce = function (fn, initial) { + var acc + var walker = this.head + if (arguments.length > 1) { + acc = initial + } else if (this.head) { + walker = this.head.next + acc = this.head.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = 0; walker !== null; i++) { + acc = fn(acc, walker.value, i) + walker = walker.next + } + + return acc +} + +Yallist.prototype.reduceReverse = function (fn, initial) { + var acc + var walker = this.tail + if (arguments.length > 1) { + acc = initial + } else if (this.tail) { + walker = this.tail.prev + acc = this.tail.value + } else { + throw new TypeError('Reduce of empty list with no initial value') + } + + for (var i = this.length - 1; walker !== null; i--) { + acc = fn(acc, walker.value, i) + walker = walker.prev + } + + return acc +} + +Yallist.prototype.toArray = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.head; walker !== null; i++) { + arr[i] = walker.value + walker = walker.next + } + return arr +} + +Yallist.prototype.toArrayReverse = function () { + var arr = new Array(this.length) + for (var i = 0, walker = this.tail; walker !== null; i++) { + arr[i] = walker.value + walker = walker.prev + } + return arr +} + +Yallist.prototype.slice = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = 0, walker = this.head; walker !== null && i < from; i++) { + walker = walker.next + } + for (; walker !== null && i < to; i++, walker = walker.next) { + ret.push(walker.value) + } + return ret +} + 
+Yallist.prototype.sliceReverse = function (from, to) { + to = to || this.length + if (to < 0) { + to += this.length + } + from = from || 0 + if (from < 0) { + from += this.length + } + var ret = new Yallist() + if (to < from || to < 0) { + return ret + } + if (from < 0) { + from = 0 + } + if (to > this.length) { + to = this.length + } + for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) { + walker = walker.prev + } + for (; walker !== null && i > from; i--, walker = walker.prev) { + ret.push(walker.value) + } + return ret +} + +Yallist.prototype.splice = function (start, deleteCount, ...nodes) { + if (start > this.length) { + start = this.length - 1 + } + if (start < 0) { + start = this.length + start; + } + + for (var i = 0, walker = this.head; walker !== null && i < start; i++) { + walker = walker.next + } + + var ret = [] + for (var i = 0; walker && i < deleteCount; i++) { + ret.push(walker.value) + walker = this.removeNode(walker) + } + if (walker === null) { + walker = this.tail + } + + if (walker !== this.head && walker !== this.tail) { + walker = walker.prev + } + + for (var i = 0; i < nodes.length; i++) { + walker = insert(this, walker, nodes[i]) + } + return ret; +} + +Yallist.prototype.reverse = function () { + var head = this.head + var tail = this.tail + for (var walker = head; walker !== null; walker = walker.prev) { + var p = walker.prev + walker.prev = walker.next + walker.next = p + } + this.head = tail + this.tail = head + return this +} + +function insert (self, node, value) { + var inserted = node === self.head ? 
+ new Node(value, null, node, self) : + new Node(value, node, node.next, self) + + if (inserted.next === null) { + self.tail = inserted + } + if (inserted.prev === null) { + self.head = inserted + } + + self.length++ + + return inserted +} + +function push (self, item) { + self.tail = new Node(item, self.tail, null, self) + if (!self.head) { + self.head = self.tail + } + self.length++ +} + +function unshift (self, item) { + self.head = new Node(item, null, self.head, self) + if (!self.tail) { + self.tail = self.head + } + self.length++ +} + +function Node (value, prev, next, list) { + if (!(this instanceof Node)) { + return new Node(value, prev, next, list) + } + + this.list = list + this.value = value + + if (prev) { + prev.next = this + this.prev = prev + } else { + this.prev = null + } + + if (next) { + next.prev = this + this.next = next + } else { + this.next = null + } +} + +try { + // add if support for Symbol.iterator is present + require('./iterator.js')(Yallist) +} catch (er) {} diff --git a/package-lock.json b/package-lock.json index 72e6c9e..2ffc729 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,10 +8,93 @@ "name": "star-wars-hearthstone", "version": "1.0.0", "dependencies": { + "bcrypt": "^5.1.1", + "connect-redis": "^7.1.0", "express": "^4.18.2", + "express-session": "^1.17.3", + "redis": "^4.6.12", "socket.io": "^4.7.2" } }, + "node_modules/@mapbox/node-pre-gyp": { + "version": "1.0.11", + "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", + "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", + "license": "BSD-3-Clause", + "dependencies": { + "detect-libc": "^2.0.0", + "https-proxy-agent": "^5.0.0", + "make-dir": "^3.1.0", + "node-fetch": "^2.6.7", + "nopt": "^5.0.0", + "npmlog": "^5.0.1", + "rimraf": "^3.0.2", + "semver": "^7.3.5", + "tar": "^6.1.11" + }, + "bin": { + "node-pre-gyp": "bin/node-pre-gyp" + } + }, + 
"node_modules/@redis/bloom": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@redis/bloom/-/bloom-1.2.0.tgz", + "integrity": "sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/client": { + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.6.1.tgz", + "integrity": "sha512-/KCsg3xSlR+nCK8/8ZYSknYxvXHwubJrU82F3Lm1Fp6789VQ0/3RJKfsmRXjqfaTA++23CvC3hqmqe/2GEt6Kw==", + "license": "MIT", + "dependencies": { + "cluster-key-slot": "1.1.2", + "generic-pool": "3.9.0", + "yallist": "4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@redis/graph": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@redis/graph/-/graph-1.1.1.tgz", + "integrity": "sha512-FEMTcTHZozZciLRl6GiiIB4zGm5z5F3F6a6FZCyrfxdKOhFlGkiAqlexWMBzCi4DcRoyiOsuLfW+cjlGWyExOw==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/json": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@redis/json/-/json-1.0.7.tgz", + "integrity": "sha512-6UyXfjVaTBTJtKNG4/9Z8PSpKE6XgSyEb8iwaqDcy+uKrd/DGYHTWkUdnQDyzm727V7p21WUMhsqz5oy65kPcQ==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/search": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.2.0.tgz", + "integrity": "sha512-tYoDBbtqOVigEDMAcTGsRlMycIIjwMCgD8eR2t0NANeQmgK/lvxNAvYyb6bZDD4frHRhIHkJu2TBRvB0ERkOmw==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/time-series": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@redis/time-series/-/time-series-1.1.0.tgz", + "integrity": "sha512-c1Q99M5ljsIuc4YdaCwfUEXsofakb9c8+Zse2qxTadu8TalLXuAESzLvFAvNVbkmSlvlzIQOLpBCmWI9wTOt+g==", + "license": "MIT", + 
"peerDependencies": { + "@redis/client": "^1.0.0" + } + }, "node_modules/@socket.io/component-emitter": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/@socket.io/component-emitter/-/component-emitter-3.1.2.tgz", @@ -36,6 +119,12 @@ "undici-types": "~7.16.0" } }, + "node_modules/abbrev": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", + "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", + "license": "ISC" + }, "node_modules/accepts": { "version": "1.3.8", "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", @@ -49,12 +138,82 @@ "node": ">= 0.6" } }, + "node_modules/agent-base": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", + "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", + "license": "MIT", + "dependencies": { + "debug": "4" + }, + "engines": { + "node": ">= 6.0.0" + } + }, + "node_modules/agent-base/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/agent-base/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + 
"license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/aproba": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.1.0.tgz", + "integrity": "sha512-tLIEcj5GuR2RSTnxNKdkK0dJ/GrC7P38sUkiDmDuHfsHmbagTFAxDVIBltoklXEVIQ/f14IL8IMJ5pn9Hez1Ew==", + "license": "ISC" + }, + "node_modules/are-we-there-yet": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", + "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "dependencies": { + "delegates": "^1.0.0", + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==", "license": "MIT" }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, "node_modules/base64id": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/base64id/-/base64id-2.0.0.tgz", @@ -64,6 +223,20 @@ "node": "^4.5.0 || >= 5.9" } }, + "node_modules/bcrypt": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-5.1.1.tgz", + "integrity": "sha512-AGBHOG5hPYZ5Xl9KXzU5iKq9516yEmvCKDg3ecP5kX2aB6UqTeXZxk2ELnDgDm6BQSMlLt9rDB4LoSMx0rYwww==", + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "@mapbox/node-pre-gyp": "^1.0.11", + "node-addon-api": "^5.0.0" + }, + "engines": { + "node": ">= 10.0.0" + } + }, "node_modules/body-parser": { "version": "1.20.4", "resolved": 
"https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz", @@ -88,6 +261,16 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, "node_modules/bytes": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", @@ -126,6 +309,57 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/cluster-key-slot": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", + "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==", + "license": "Apache-2.0", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/color-support": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", + "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", + "license": "ISC", + "bin": { + "color-support": "bin.js" + } + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "license": "MIT" + }, + "node_modules/connect-redis": { + "version": "7.1.1", + "resolved": 
"https://registry.npmjs.org/connect-redis/-/connect-redis-7.1.1.tgz", + "integrity": "sha512-M+z7alnCJiuzKa8/1qAYdGUXHYfDnLolOGAUjOioB07pP39qxjG+X9ibsud7qUBc4jMV5Mcy3ugGv8eFcgamJQ==", + "license": "MIT", + "engines": { + "node": ">=16" + }, + "peerDependencies": { + "express-session": ">=1" + } + }, + "node_modules/console-control-strings": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", + "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", + "license": "ISC" + }, "node_modules/content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", @@ -188,6 +422,12 @@ "ms": "2.0.0" } }, + "node_modules/delegates": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", + "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", + "license": "MIT" + }, "node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -207,6 +447,15 @@ "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/detect-libc": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.1.2.tgz", + "integrity": "sha512-Btj2BOOO83o3WyH59e8MgXsxEQVcarkUOpEYrubB0urwnN10yQ364rsiByU11nZlqWYZm05i/of7io4mzihBtQ==", + "license": "Apache-2.0", + "engines": { + "node": ">=8" + } + }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -227,6 +476,12 @@ "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==", "license": "MIT" }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": 
"sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "license": "MIT" + }, "node_modules/encodeurl": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", @@ -379,6 +634,29 @@ "url": "https://opencollective.com/express" } }, + "node_modules/express-session": { + "version": "1.19.0", + "resolved": "https://registry.npmjs.org/express-session/-/express-session-1.19.0.tgz", + "integrity": "sha512-0csaMkGq+vaiZTmSMMGkfdCOabYv192VbytFypcvI0MANrp+4i/7yEkJ0sbAEhycQjntaKGzYfjfXQyVb7BHMA==", + "license": "MIT", + "dependencies": { + "cookie": "~0.7.2", + "cookie-signature": "~1.0.7", + "debug": "~2.6.9", + "depd": "~2.0.0", + "on-headers": "~1.1.0", + "parseurl": "~1.3.3", + "safe-buffer": "~5.2.1", + "uid-safe": "~2.1.5" + }, + "engines": { + "node": ">= 0.8.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" + } + }, "node_modules/finalhandler": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.2.tgz", @@ -415,6 +693,36 @@ "node": ">= 0.6" } }, + "node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + 
"integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "license": "ISC" + }, "node_modules/function-bind": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", @@ -424,6 +732,36 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/gauge": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", + "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "dependencies": { + "aproba": "^1.0.3 || ^2.0.0", + "color-support": "^1.1.2", + "console-control-strings": "^1.0.0", + "has-unicode": "^2.0.1", + "object-assign": "^4.1.1", + "signal-exit": "^3.0.0", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wide-align": "^1.1.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/generic-pool": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-3.9.0.tgz", + "integrity": "sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==", + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, "node_modules/get-intrinsic": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", @@ -461,6 +799,27 @@ "node": ">= 0.4" } }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { 
+ "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/gopd": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", @@ -485,6 +844,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/has-unicode": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", + "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", + "license": "ISC" + }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ -517,6 +882,42 @@ "url": "https://opencollective.com/express" } }, + "node_modules/https-proxy-agent": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", + "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", + "license": "MIT", + "dependencies": { + "agent-base": "6", + "debug": "4" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/https-proxy-agent/node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/https-proxy-agent/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, "node_modules/iconv-lite": { "version": "0.4.24", "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", @@ -529,6 +930,17 @@ "node": ">=0.10.0" } }, + "node_modules/inflight": { + 
"version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -544,6 +956,39 @@ "node": ">= 0.10" } }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/make-dir": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", + "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", + "license": "MIT", + "dependencies": { + "semver": "^6.0.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-dir/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -613,6 +1058,64 @@ "node": ">= 0.6" } }, + "node_modules/minimatch": { + "version": 
"3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "license": "ISC", + "engines": { + "node": ">=8" + } + }, + "node_modules/minizlib": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", + "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", + "license": "MIT", + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "license": "MIT", + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/ms": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", @@ -628,6 +1131,60 @@ "node": ">= 0.6" } }, + "node_modules/node-addon-api": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-5.1.0.tgz", + "integrity": 
"sha512-eh0GgfEkpnoWDq+VY8OyvYhFEzBk6jIYbRKdIlyTiAXIVJ8PyBaKb0rp7oDtoddbdoHWhq8wwr+XZ81F1rpNdA==", + "license": "MIT" + }, + "node_modules/node-fetch": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", + "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", + "dependencies": { + "whatwg-url": "^5.0.0" + }, + "engines": { + "node": "4.x || >=6.0.0" + }, + "peerDependencies": { + "encoding": "^0.1.0" + }, + "peerDependenciesMeta": { + "encoding": { + "optional": true + } + } + }, + "node_modules/nopt": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", + "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", + "license": "ISC", + "dependencies": { + "abbrev": "1" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/npmlog": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", + "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", + "deprecated": "This package is no longer supported.", + "license": "ISC", + "dependencies": { + "are-we-there-yet": "^2.0.0", + "console-control-strings": "^1.1.0", + "gauge": "^3.0.0", + "set-blocking": "^2.0.0" + } + }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", @@ -661,6 +1218,24 @@ "node": ">= 0.8" } }, + "node_modules/on-headers": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/on-headers/-/on-headers-1.1.0.tgz", + "integrity": "sha512-737ZY3yNnXy37FHkQxPzt4UZ2UWPWiCZWLvFZ4fu5cueciegX0zGPnrlY6bwRg4FdQOe9YU8MkmJwGhoMybl8A==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "resolved": 
"https://registry.npmjs.org/once/-/once-1.4.0.tgz", + "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, "node_modules/parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", @@ -670,6 +1245,15 @@ "node": ">= 0.8" } }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/path-to-regexp": { "version": "0.1.12", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz", @@ -704,6 +1288,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/random-bytes": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/random-bytes/-/random-bytes-1.0.0.tgz", + "integrity": "sha512-iv7LhNVO047HzYR3InF6pUcUsPQiHTM1Qal51DcGSuZFBil1aBBWG5eHPNek7bvILMaYJ/8RU1e8w1AMdHmLQQ==", + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, "node_modules/range-parser": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", @@ -728,6 +1321,53 @@ "node": ">= 0.8" } }, + "node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/redis": { + "version": "4.7.1", + "resolved": "https://registry.npmjs.org/redis/-/redis-4.7.1.tgz", + "integrity": 
"sha512-S1bJDnqLftzHXHP8JsT5II/CtHWQrASX5K96REjWjlmWKrviSOLWmM7QnRLstAWsu1VBBV1ffV6DzCvxNP0UJQ==", + "license": "MIT", + "workspaces": [ + "./packages/*" + ], + "dependencies": { + "@redis/bloom": "1.2.0", + "@redis/client": "1.6.1", + "@redis/graph": "1.1.1", + "@redis/json": "1.0.7", + "@redis/search": "1.2.0", + "@redis/time-series": "1.1.0" + } + }, + "node_modules/rimraf": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", + "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "deprecated": "Rimraf versions prior to v4 are no longer supported", + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", @@ -754,6 +1394,18 @@ "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", "license": "MIT" }, + "node_modules/semver": { + "version": "7.7.3", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.3.tgz", + "integrity": "sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/send": { "version": "0.19.2", "resolved": "https://registry.npmjs.org/send/-/send-0.19.2.tgz", @@ -799,6 +1451,12 @@ "node": ">= 0.8.0" } }, + "node_modules/set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", + "license": "ISC" + }, "node_modules/setprototypeof": { "version": "1.2.0", "resolved": 
"https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", @@ -877,6 +1535,12 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "license": "ISC" + }, "node_modules/socket.io": { "version": "4.8.3", "resolved": "https://registry.npmjs.org/socket.io/-/socket.io-4.8.3.tgz", @@ -996,6 +1660,59 @@ "node": ">= 0.8" } }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "deprecated": "Old versions of tar are not supported, and contain widely 
publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exhorbitant rates) by contacting i@izs.me", + "license": "ISC", + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/toidentifier": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", @@ -1005,6 +1722,12 @@ "node": ">=0.6" } }, + "node_modules/tr46": { + "version": "0.0.3", + "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", + "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", + "license": "MIT" + }, "node_modules/type-is": { "version": "1.6.18", "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", @@ -1018,6 +1741,18 @@ "node": ">= 0.6" } }, + "node_modules/uid-safe": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/uid-safe/-/uid-safe-2.1.5.tgz", + "integrity": "sha512-KPHm4VL5dDXKz01UuEd88Df+KzynaohSL9fBh096KWAxSKZQDI2uBrVqtvRM4rwrIrRRKsdLNML/lnaaVSRioA==", + "license": "MIT", + "dependencies": { + "random-bytes": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/undici-types": { "version": "7.16.0", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", @@ -1033,6 +1768,12 @@ "node": ">= 0.8" } }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "license": "MIT" + }, "node_modules/utils-merge": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", @@ -1051,6 +1792,37 @@ "node": ">= 0.8" } }, + 
"node_modules/webidl-conversions": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", + "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", + "license": "BSD-2-Clause" + }, + "node_modules/whatwg-url": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", + "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", + "dependencies": { + "tr46": "~0.0.3", + "webidl-conversions": "^3.0.0" + } + }, + "node_modules/wide-align": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", + "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", + "license": "ISC", + "dependencies": { + "string-width": "^1.0.2 || 2 || 3 || 4" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", + "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "license": "ISC" + }, "node_modules/ws": { "version": "8.18.3", "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz", @@ -1071,6 +1843,12 @@ "optional": true } } + }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "license": "ISC" } } }

Bo%tW({voZD**0xr`r=# zgT?^z73>6`GjH3SOd@d!o!fGevco((d>h(^Qcl*4b?u3|fkN@GilqtWTRmyw&CHj< zVv1_og#?7J7s6gJ#3_CZ1&Kid@k@VYS1cGr@d8ND8EM6hoD*vp4-`g948?LksDjM~ zPKQ5|KW=aP+-C3Tb6fe>&c9XsTg|_F%{vu6<>tiVR5Ne2-hctM))oMWAY|~mGxnAA z)Pe{Ov-M4T#yiyJ!$#VA8w-3&CqQEx_u^ay?M& zvn8H1zoYWPc#7VpBC$LJKHs7N0Uz_5Jb;X4FcE#JK$A=Y&?J4q!q!^D`&oKfG55a) zrAJQ{P`W+^N^7N(zKryqVoX><9=w%>%GhX8wr> z`rb!J>AU$$UQ?22fuxpA0xJNM*tHT9#r*a((b!hT0?{!yO-Lre^vZ$OgRN?38rfs^ z?L@zW_msVxc>`J3|mQ4egS8+Q}EVq-0o%^w-0zsc^8 z&QR;3R^Do;Nnzt$y3n5_OquA>SyKKsk*0WZ~eve_V=?u;GDC_+zFr{*N z%41@@4|Y)0p77zUB`$xMIGUl+<~@4_DBO}+Q3F;v zWqboWh~=P+J% z%ZIeWXW&1&U#`c%LFK30GJ71nDrOX-Q?2g>YWvN82&JUELUbmr2^mVwC)!=F^B^gA z9Z!dAZiCYp&O@N-kU5jO(2zT)M6>rBwRWY7@(fA~FSjG2JH?zZC{1s=)}C#iEGdqS zYRGJ&qCB>@5vsR{FA($TX5r|UR*st(C0ju<2 zic@&Bw3(p@)FzCL_L=jP zNmObRZ=1pSPe@ZUuvc1|i8jo24>G!`s1Pvtj- zhs;qt%PKE}UvukCL|k%QnIajXA>S-ZCdGf;BC+7yhA717`NNDykKC&$lSEPYLFDrgO*{hGq|K#Xw4Q8faH8!zft1|gX}#`B65F@Hce4N-h~kq^7ECD=kFnXZhmlZRK} z>~4FnZ417=J+ho`F%`WndPx>u5mArCxLbUpziEr^nC{GguX)f3F@Av@Yjbs@2&@q* zWaz7CIbdWLC5C&`Z;lP=vg-u&&95Vx6`+rA0q9EvzUMPXUIp!n;$_|qjV0>~ zp(?uvaRhpe6A&V1-pzhmuj##QoEK~+Ie~Grh++gD8OSu8ghbK6`$h_hEh%!eG!kT_ zZVMB#|I+;e7b43ZZ2d(45U$6S?dFpV%Xcm|+STW3ciqj4`J*0Y!r19Id=-JFxds04 zrtE;@b+dIK8PX8F;KY2~&TqS1ZzAqlcC!#mv&=bU@*A@u(EI&O*`>Op->g#QfOe6_ zG&N+CdA00eM~}JdZ6PE=BRt9N(hAkBd@AAaJZ?{R)!>G_>YK7A0BxhEgP9xTJm<{+6VbWESkV3~q zUqcagB#E=pU%=8}=hKs$^>EkmHD6l(2!`P5)RD;-VM=P&F%*9bn`(%dUzW=H?g|*s zrnHZON{%|DW8wk3$3*fQ$qK*bz&67ZnR2{`zz~FR3Z4wm6BG2XBQ%AW0a%2KDN97M zlyKw?P2duR&!K_*k&#qs9AE3f&`E-5*wtIjUBXy&*VL@YRU19gCF~6Pm~5!6V5w6pa%dw_U{3{M ziG5ycX*2MNaXgf!^%39lseHhhM;y!r@-u*;jO9`=feohkD*%-k@-$Vke} ze5D+K6~x)2y570u&BO)V6yw2df-|lXDOrOtCcxLk7+gpURZ?q--mN!c1ie&Lh-opJ?h9-EU zb-CnX1|Q|PHP5V5lW+o)qe>@>pBgB4DpzOG-|x8S@13oxZ9Tj>+h6le{HqGXZRBfc z@K@$Dj}4M6{wlSgpH$CHQX^jqF$DMDxk?Q+k>7CVqw0xO5lks)a+ZM3+#$PxMbG#N zp=c|i&*}z?Tpk#MQ^ug;L{q?vt z=|z4_4;z9ysH-4d_=G_fb*PH0{uolvAEX|@2w_OhkF9!sBJ~_*e;ifMcA11!=gw92 
z4Dbj-*+&bPQxY;VTD(YVg_Mz6=SX$GQQK_~t47bbbnbTQN)S!#fJU7Pol~*eZA0;f zizpvdTq70JLeAMdaIVlqnKS_bWHYBpSFyxabwizWC5y+Us$Z6=o@GIeo@&F|pXtAY z_A`4-_OwMb%AR)19XUp)IqDOjWv5xgkN5&$FT>e0%ClBx&iwFWQo|~Oj@0?@)F=0W zQD0(cN!jBA?H|?3-9z(=f8Egh!h9i-g@)lskxtK+65u#DEw!j;tLLe1Uo^DB^mGjB z|CLwBTpI_Vrrv)Lsj!u0(D)4E$rPVIMvy_jrrlWp6*$;`uijX7dm~KMyb)-c_Qnw#`OY^oJT@htZ}a%*ZP#(mdpv($#JO>` zkx$vl+46l@M9R6d`94g_rRyD3r4%FUAQ)mH9}srFM%tJlXSXRF$?Aq)rthy+u{@C!B%2hQg|?D z3M5gM)^XQu2ymrSX*>T1efB3c2{NcFEU%tg(zDpeF(;0qF zhE#RhofoB6YhXR8GoOE#y7+Uon?GNfyZO_VdHkE<;?GrX{(N~Z=^f2f#P7Esuzr6S zvwnZpC%>Prj5J%n_dH;>wA2%6?o3+hiL|+ywA9lrf&6Bv zERZKZyggvO$q#Qa>rMUKZ13BAWTkC$|B{XKRx<;iov*i;ueW5r-jexx>))BLw^H-< zR%*W9O3l|>|MU5J^qBLt;;qllm!!F@8Oh9-q#Zq9k~U<%R=hQMzT|CSzT|E2eBs`! zrcAKI0K5q(a?&pbvKM@!AiD#c#D2+={c>~Gelj!zG1?t>@z;WE4ahDU4B3)l@i&!! zui9zaEteMYaJ7p+UlA5NkAE|Y_;VG6gQ&=AQaoi41J0JxbPI^yYP`F&eX{Lc=Z?g!)DRJPA!O# z5RdTJ_G?9a%O0tXbRyqvDBqZN4ZlS@%bqxX$ok;Y_(*vLM;B`%Xd`U34N z|6KV%?5My2SV^V>yS=vG`4WEif$qtwaI<_po@Rr^{VoKR4#-c8|wy z!IrqT5{s0Hqx9-^PLORVPj5M5<6&{0Uh@ZeSdt9|jof3qS#crgNj!36!7;iKmF#9; zHEEF_%QH7O=#f835dsDDknR>9d8lEHA~n#_V6^ERVo#ItM+_JjdW408A&NJkJ{o1p zUD(|=!qM8ypPfO6&&87P5>M29B-}rC@uV(Kg&h%3w`aWMfZ7$MokUkh_~Q~!?4OiM zucu0E7Ok~aFsy-g)gHf;Jd}s>JRKGO zB-Lhl)Lm}0p698FYi$ywGvS!<&3Wn=3y0nKjM~`<_ZmHR+eY=zJb2aaIjRV^EV#9knU&{(_ zaSlJM=ERZszCVeyz~OE^7%cWwRRoL7-(QgF_knRh!NG24=mb>$&<-^MJB~o43itdw z=`t*h&DQLT>Pd3pio{=g@CyEB1#^8h`xnnhox;*ne=aXaAg3Bg7P%~+y*$a`nvWL2xfa|lO$e39vgmD$N)kLFWg%I8D5 zroC&Av9Dm<_qjXXHvALAhjX;MJLwUDtpP9*z=z{FwA?`w55cR|+XwP?7)zwbdBB?D z8SuMJw=XL(Iy=qmHRxdLN_1mEf9f?j0{7Qj^YlZngtxdOQ_GaUSM(*ybk~V7kuCn$ zR6Rmz>*2Z*=b(y$xw`hug8sxK+T;BFoirz92C}(Z*_d@DlJ9A9bZC$9w>2CernSmZ zrh0s(ddSjRr|}TKfIl*C;&U$yW#tyZBA^i2bZSDOvdgSJXO2HznCh;+i@IfvQBWc8_E9*3K*Vuz&!po4s!>&$sH&<6Mm&vJadFirOB+-DYh(#%{@*(Pvvv03aP_~k&UJ@BXPvKOdDS}q0(k#_>->LtKVLiM ze`%esIrKT}JQm@IT4%2B_+MCOqh%I=Bl`MqXQP)mS{%T+Yi+<#LW!)5=0?8jyG#`L zNZARC+X!AD$gy70wq<3GXoX7A2YEWSpkc3QehvAW+uht*;pvP!+MWKS?v{ka4o^Ad 
zk}*%DQAZo@w&aiw0QO&ocnh!+X$$$gj91j&DgZHPJ-yNJL%||>H zYqvq)c@Dka`L?5DL#!_=T2Pd=#e4QHPxA&2>v1Q3;Vbt@M|}|w9rZ{@15v;9&Tq5`33pT{C4|2*xRWr=V&6t5jTsef zyJd=Y2djwG>*?6t{JQ4pd^?*C(cg{@%^y`SrZ;<<57=X`=QMA~&Dx2zguA^+r#JGH zc+zY2#!GK7sBj}H7`$F6Dn2t||9ZW|7yg**MU1Eac)f&U_|Mj>fw{OmwO+bfFcA#- z552Dc^Hrmj&t0|qk6yK*Yj#veKYz`np95>=|NpyYS-Pn z8=j7Apk+gQ7TMlUN!esd-73_R9J3Y|++^_-${=$Wf}lzWhFnV)YJT@@(coFSC;a3HIUHG} z0>x4CYfiZum$0%}2XJMpQ&0AAlk0we^fI`X+2cI~*Hh%UXr$1r@?)C$?_(K~3sZuJ zWR+eK92Q<>B^WZ*Oy~;A)Zxg7hA;Ovt^W$W6Mt{iafHhYfT>kmn+AdtJ3pdh4|<_;n!Ef zhGk#TURPS{)WTn(sL>-8BBr+}YH_Ytr0EZ>hv0R*1!u^gvGV6QMPPFeBR$+|?v+UNnGn`?mrt+FnSr2w$(fxQDFjI7KF1 zS7>b~%4nvRIpZU&hwONc^J!Hk-=0T6#RK`?SMHbPqi97}3dnDOd z=9CwA)h%V%uwy862evid4pyO~N9MYwYdIc_JF!B9v2bdSXU{mp)(n?&wQd*!C$ju{ zwFfgXch%QQf;q`LS7IDco;Z~!Jt^QFw(=n*1~9Fcq*{IUd1l$Cvh46Z!tVS}YG%_7 ztvL-)3?mIwZ4aT!NQMVoa;D6K9@~Vz<$T^(&IXU z*M63fvDh57DTLQCuO_e2mAD5P3s>=Vc&ckJ37v4zP9UkTv3gyeQb8P5@g`nsr-eM~ z%XI4t@a4&{O+(sZ`?2o^*rpHhUx1t*E%0#bL|dq<)~~gSNl$G+YrS87FSq>V5t4|X{Urc=hbgfTg#?PunH=wV#7v1iX zX1H`_!=hYDiw0L|Vyje~Im@I%Ils=#MA1>Vqcx&R-mnKWr6RQKXR$zzo~NWT_v1_5 zkInKt=Bd4Y(%ahLZ0ijyM80Tmj$nJU)U8qv-axXDYakL zT7{*pZPMDl%P*p(BvTX@$T!auYy)C@!Mq<-p9s}{^|qU9zZSfv_8Y;*+FOF-YZnAd zY8M3yYeT`Q?hCs^vnqEdo;NRGNfQ2UPel=RFh-e%#Cn1Y3p1bNY26&m)z-&zVTVD~ zV68<&65txM04tmr750}E(T94x6%#pT86Du5%msi}g72D_RpHdMP>?8vWAh@uzRKP0 zzSotMKq@Ymr(yCmtjpIoAyMLK-KHMP31Zl}%T$f?#U;L|U=>eC%zbv(npZt-Wa$bl+7`Ja+kNbufogR<@he_3W4j7xZ9T(zeYsUEMw95@&o0I z^n;xRp*id{Om=1&Q(XZI5h#0yYIjixgt@fD7R*j|D$x`MAPI;&JJ$;|*l}dcV5crS zSU};BfW3@*B;2Ia=h%i%%<|vL2G^s@uoF|!RNyWDGn=Pqb7= zHhV$EPc$zJ*n&5UVB)rU0i(%{xY6p{%r|}7;{~NSzxIW1=(9CMuP@E=tff8N2?dhAU37>&YB2;~-AP~J< zxLd5L5~Ts-ZW|T(<-ViuNK{n?a)|@LY*`9mrT-;4UU`o7dDbxL>GiNf__1J!46U zzW#I?mVfG?afX}94t3R4FOnd!G}>G{QleEuUn~%@A<>y5m+3-th_|RQM40XYy z$xx;9nTxR@`Y3LuY=;K*7evSPP5rF5-Y-h8mBwWgy)r2*x8RXl!6Tyuk60@+e#pw4 zih3wC95XV_4(UJ8fRW9&a;D{Y?9~aMC!Ci| zT&$N1`48Nq1MJqd$#l5O{K8+vkan0Z2ONJFaNrHIPH1~gf(Y}6xPW_jGLdDkO!X{d 
zh>AJ|R?G6HPsgJY1N`XkO81L|zvZ1G&LqgT;m@YXwl=|Bgxy%c558LxQHLX>*V}%* zm}wr6-=$dQd)sel=3hI=GMK{^H;E!|^%;x%?6%M-tte;3b9MA~5H$le|u0B4A?IR|61a&_c+|*zklE@K$3~a71 zZIC`pm-7RwD5M8ciQ5zYTUIDvzUZR1$f0%Jx`@&nx#3|wLX0(na7Z62==P&}q-{Sd zD{e(b+fo^E#J7exTWiT3qm>&lPQQUc-|%M??`?f#B_O_XE?zy1^2d#Y{hD{i{xhfxwAqA zQm`@nfPnF*-l`j#gCl|6<(9SnmvFo#h#AT4!6IXJtCc!hrIw0$dQ`yz#Kh*?1XeZ) ztSFmWanrKmPp0k4Ev8_wU3o>o7>hdDr#%)|D&@lsxRZ@C^XXekr953|@Xu8#;~fgK zmR2cGh1}v;0dvFo{!xI$QYo+Dk&|s=sh=fA+Hw%FANjg{3H%7h`c_J@oDsu7g8!{?j`$l&8ISZkk6??oJnCliIMxcw0?#N+UE8~#qk2ALP`{M!`S?`7hA5v=aHD@d=kxT>(g8~c zO*e)gPJEjL9Q-T|v|DMQ%Ln?s0BAw;o6$gz6I_7?+VwxsK-Z@<(DnakG|*tSOdtdi zW@N6FJBI$*r8ga}eAze9;+kaf6>oA}gL&Om%j6=B$TDYFU6Ea+!^@yx_gVttrzWx=m4J9Z zAeAgWi$ZdoRjJth8Lx`W+@$RIiKK%x7`brO)kPuhUw``zs_D@qbDScDP8Pq;yR>vQ z2YWUK2t!Zqfqu`tk_%Kg7K^N7aldseey|~ZEcS6o;CE;o;FeX`+#Iv;4(r-xJk+ea zlFc3Wr7m*jE}NZXH=ia!j+_N@I?y5ef36z{H{^{#NZ~VzAI1JAL__OV>g3020 zNSB>*zTiB$lqn!$HZr7db(TOOB+eDPTqsd06;f4VU0o+djW(zBJjE z(Y6Mxk;&t0#?>##J<}Ad++y@6i_4`7@j6Ntm+%OH7hwrh5dJtB%tZ`R(k3UFb=NfK z!tb$-l&B3dTfjqEI%30hbJX3(6hyRg0 zxXdZ^U%*#@*B48jY(s(f%D0XIypvU8sRVs%mMOq}UG;Hjj1x<;AIDuo%#`Qjwg5E5cs?0zgjy&Mnvl-X&x3 zAj>%)g7>Z$Fhrj~{n|i77@H-&EhqZilrNaf zh~-6QUdo`@8^}nlv(h54ln8Yh6awR|61;M(@7JjBP-5b+2Mnr0S?T)9QWuw>)u%Y1 z+;yxq{vXzLP3uq0w#7VA-EKYtT1ggv@Tv^CMNz#fLPrT7=!RC3nW3r^%~wWdsv4KBDltRqc~iW$*rqP? 
zXR>&JL>c^yu>IO&x!m=n!gOhmIrQX49t&*72Kvk;YN0;p!YUiC7{IJ6h57|XMb*VE zbb6%FjsQ31^9=?!nSBNCrviqqJ*T{?y^D|l;J&)g0tY368RVs%_6dQO|1B7OwQh`g z2nn#UHT>GedgYj(?*`2GY~|}~e6`|8z`@0*GgRT3ir^3#+G&P7Brs7pKlMQgs|p9Tk^eSAS;Co@DX&r~?E?;O&ia zd3<7@_T$EiDU&JH*TPyK2xO2zBHBd%RocYec_F&s37gz(_xm9NNOgPJwnI>>@EFLU4tvUw8`9g?|N@MR46 zb3(Wx5Nsgo{v@%DcY7!Y&0%7LZrp@n=OSl#X*SxCO|*giLxmo_M>b6Y6F|R{gd(Om z^nOcK)Mkqr%Zh^EYN^7MITpOBrRr|^y|$%_{IOs|OVvH{dr?c(x8?UsEmhxdp&j866dd{9$Vr|>)Yg~Y<-9PoW8zQeok87B0tBize9cs)`#UM zXZ`K`5NwAvsoYaB{j~dxbwZS5;*c!<7>!C~SyAF787dhT?QQhm(b>f%)9Uk|`HXtb z8P8%XrTKm(vQY~Uj}7WJ^3Aey6$tK7cNncUhMPB$FbG_fECYnFuX&Pdr|h1#U(*8x z(|6`1cL++h8%{PO(YA8l;?g?ZDBds0H9Lc2jUENl8}vOr^ssE-Qr{QtKS<%vWO220 zF#?F+T>4lC<1P`nn{Ul7tXQvXAJ> zSxWQZvAUuzev}-^JRix=>gT+i1|`Z<(;^ugxx}^)>;Q$%eT~LGc+AigArtO@iJk#W zlh!p{jeAVBGgPTJ>9siFwp2Izz+|Oshl7^pReMT8qr>ZD1xG9{3a=AEp-qtiPw~*@ zD`R{<9EyD=zb=0F^NZxt*RS+b5#RF>A2$&-JRgWkmYKZ}D?7~@bZK>Xh0BJC0*GfW zV`!Yc5!wQZNw!$Vr_4N)1SEGOlf^&YC#%+DuxWqDBbjUXCTQp;#vSU9XD+iGitz)6Mcwt-jW zXyxWPdwe?0(r=)}6K%|jyLmNo?vgCe9ZQ6ix%ymO*XJrH$6j^K-(i3FOq;(O*_iZc zH+H3wllam{VMcx*_+(boOgShsfXW=@5~1^#5N$P;oqa@*x>(4M}4WVqmfuCV#2 zqrUS|tcy>fme9kPWV@7Mc3$W~sU)wfE@?|p%?|gu2Rm{_6aPHDsC5OiYx5*PWOqky z;ypJh;izcB+1W)@9?GNzwCB5@CM}ziI4<~|tXs5rXbMYyD#}+j!=BXUI>ew(p zLHEibIplm*$D?>-iy%)?)!bz6H3+JpwO+=orot$kVBSyR=$sQGCFWm01iU_upT}48 z50j}fI%e`#J2v3iq!8_d$DC8h~C_7sx60)1^>{f)h z&@Yh{x@bqYn|?*Zt{?!CtSZriMek+44^XGPWlPLabmj@siC1o+L#4dMZ)9etMYAEX z_o(V6phT*6^AJeS6Fy=OonZLftaxP1ub~_wIX~iW<(9ydO!qv7Ze& z8h*CQ?BNh0S-zT2@@yPItZI;5RB;>h>IYEmAz07qe|p z72Ib@b%H^=Wi!hvTbHj4kSXxG)sqe*K~=F(r<>C}%(LebDJLgCCQ)0=bE!s-Y2^7^ z>fI_>hCBGciJsJ%iHr;wJ2(&TaSk(dz2-dmWLKx$uKI>4crUqQbF-UrnV zp%*zBp%)J)H0i_BNY3==N~kWd9#}#emiu=H&!}~FD0zF~v@I-Qj`=G=0z$D5{&u$Y z%^32nN;G)t1mSG&?*Vci;`?e|-j|oB)r(PhP+n$8Nu%&}o~gVl?&1Bp_~YyxIlqMd zKL-6W|Ev`Kaw78uMKj#|4Jk*#gp?J783X|m>q6lLC4^xz-jhp75lTCOf;a~fW3LGq zIj1~AFSu;jU)k@k?B!Nmp=9>#fHK#k^$foMQ|-=QlbbIDhvj2{Tj-1g;h}|6UN{Hp z1;6^>Km6kKQ)v-E8QT0}jseB_`Bk#Y0S@CV z{8%Eix66oKcjOloX6c3xCi>o0kia 
zMGJo+&pOZPvWh;M3aq6PQaU_i%yBXXw#9U${_^{=Xv_G$(l4FgYH9)Nw+8!hRn}6s zRc?vj8Ub?C^%N%1_%J{FwN7rfMA^u|dr9J;i3q6GYz-EeUlP$tWK4|obXCw)``#io zTDbdfoCwi+;irU_d+Rh(QL6n{OzlLKcH*g-$&h#4wNBuI>Aua(5xe%aX#-!Q#bK+~ z<H6b&bG z<&)b-o3AjkC!`%t%&{yG_z|eS28iAgDDSN6$aZ?#?S5t4`=*YL6$}T9zmJj+O3AU5 zOl*2CzT*swu%<2RzXAV7JTrx@yzx+{bT&8!Gl2`U^Fy;a8E`HOUJCM`rduCFZ}^{R zPW|U!pFCpy7tPK;BdOVJllu9yBnG>r*HFRZaGFm1Je`ZR1{}(T!Wq};Z`&B$!7kBH zW>Mt^<>#K0k(Q98J#L>zKaQJMlZCBYxHC(vl_+ly{gqF+&6js>lz4!Jzx}H)h`YQT z;5GTd@??w{ui^d)3lBs+?}qzFE*Z8~EaEn{bb5jQwK8|%!y`P}C9#bj!`qoHZCdk< zXd^sEtz{^WI3!p89yTo-UcA)U%51NuG=Z7EEIFcu=krm3gK$Ns%=ViaYC~AqvtpeddyeBRdo1w(uESM^>qK>Z4UP>sRg_&>P++Y zCW_7VQx%jV7};u$0Jaz&6&M?whvS-h%$N_$~WL_+Xu0Na3pxj=J)=wW%GwYJlXelw#D$=X12ne zE~F@-H@ReXWTK$C+Ga&}7Q&9?f5ac8GLVtBs!bkV)0E;&FNfaVcXYqF$Tt69bSyL{ z0OhDUrCmuhgTDQ#evQUg+dfYVatrp}m|8+4EYhjBRHvMD3JH(Zq4yIfpn@0)_0RZ8 zR}?5&-)Hb}0UM2+@BT*Sb2rndrj(UMtsJulVLh{F$fajQg>L;RTbsCQ)%jZMcX^bJ zf;1xZ_jo&R#92@)z$isRCtFs|3XE74H1r$L+iQstJZ(d=0PrgQ{VV0fP@e!yS8 zQ{VNGzh;lt=A$BZEo%P_{&GOHua6rbxR2BwK$12_R$^0UC^UE}{450@LvK(yRy6ZG z)p@}@60;a=hzRq$;)(QHbPX94oun^Xs|+HepK3I!RA8dLd3 zR**66PmL*2{Jux6VJNePbi7DL^EWk`5i%OBRopPsBYHP}7H_gBR{w)Vs_&nfP1p?( zPI@2a%)u8_#Jkmm^0MU9Z8^W?{3U+ne0C#Ruk_YRmx<<-TV7{t(aWZy=8#$=QG+=um6@9;sa7`7r5ef3L=N zY}qNWoKI^fNCDgwoJ#QEH%4?=p&kR#W%h}7&r?apfIVKHXD#w7DC~E9tJ(|a&bEkj3 z>WNK5dHpPRevMa+Z;yE@UFUuOE zJjQb%qU}UcGylqx(FrIDWYW`;9V4TY*JDj6X1h2cCH~>B*%3Sr*R{|vck|{86OQPO zc6a+(3C9r+!VPPJ=*1Wnb#%M&>N7HV?vB0YRV&$wj`yiLk)slQRi_0s-w{vj%^Xkj zrbb`;nGMnD7&;CUXgNb5N6wpV;L}@&5gbx4N1n8MZ8_C77@Y{?G<}1ooyY|3)q2Ze zA^w(6)YtAfblDrf_|Ya;gD}>I;(F&tXrwk! 
z{qjnE?eM)nUoa_u@6V-_V;2QsUWODzs1(z0+Hl{Xg89qK^|fDBkuT=Wvzo)`>{z7^ zQp%mR!_)5cwa>B(iDa4lRuDixW%ipf`9-H;`BlOD+=Oq{B7Ccsxlns!ge$wDnJJKf zc)v@9hDG<1XGwJ6m39&bum={&jLx*1KW&i1{!s4nc7^$1No$NMDc)ViKioMGHoe~bDe3u?Rg14FXMWkN3V~2HNeRaLsd^ugYw#0c}HrZ;r zBpP(L3O?#JHe=Vty;GuVrBWC~TKYbDcypOp7V$|$%BWE0iwb3d3i1h7m1`}IHTF#G z3pmmyuC0&xI^tZzhy+2KzBwGr)4$Kzx&*_WRccNYNewgE$AJCerd|LVZRr4(`aM@* zSV9aY^k&i9OYF6yVj46*S=vlb=_*F8!Ltb)anS_r-VM5AFYVuk^cEpuGB&xnHp`ON z=+P;5bKIYB!?|dJZoC@G?aG~?bLB+aySJUwQOEtVkE*^YY~yLZP0WJjrik|lTkI!z zJX+KA9(T46mrF;abt0GD_hE$1TD*Q0sob(s_HSb=t2@#9L%oDv1 zK>@OS1Z@P{_{5d28Ec;_iA|sFfk?kK*K?@1tIj?FBhc%YpS78T+@DVd0d18bE0e)-(}sg&@Y?FKfuaRjq2mc4fO}PR22Oo!LgKYQ zig`sW{u3zOp|T*3hs&9G;cbpuV^IbT0`30PR4E;69>#wH*DRrcw(+6>x7c&L%?%hC zcv1U#S>{0?t$-186*h>#oPf5#F_M=GucJ!=B-5wh+#f9%4RG(Z!1^oR6j(bX=%JW* zt8@ax0nRP}fjY6RkZC>&e1Tjs2TJSDKO^Hon>&|6`}|G*>;~gK?CpXliytid#N$vc zgsXw97(qTTM* zUXBLo*+sVkDk4!7>Chk(3?-^p^Ew1xmJ#c8kqe>v5TG$dHfVfg+^crkP@owAnY$4L z$lVNpOe_U5JsB*>tuPj@1Y}|m+6q?+yl^w7gvWE~;ZNCir&LKG<96x1=6ykdh`>eZ zEky0$88c)B-~F_QYDQ}OkV^Ev^Rkdo6Hl6**+aMa$L;cwkbsH`6Cs<(SCxyq$4wd z2FGm<@Oh7!7&$ zbh4b=Qcj79)LMz7V`SFu3N>644+oB1wMo;|5kEPU7#ER1@dAW;9}9CUXHhx;*v zs`n|Zc5oxc14zx-Kqp!xheb2#lZ0~8+P2E40m77!4QoX#ppV181jWn*eI0N7=Y(k8 zeQUlBDU{$L2fZeg13JHo}6riSy)iu|`NF4;1*Ys-Njd`QlP<(p= zL>e@non~ZjG{5_fb(jfr%O1rMNL|qchtT|ni+*H>U$=*!%{ut<$uEz)Pca7JxJPT< z&QQYfY^~K$5BA_p@sl_%96w2GeZ=|}T!eN%I4QjnY#dn!d6feL;k+Bl2U={myB!iq zwv)ugbTt^HjB2E;Ix()v9C`C)*88SkS3~f87t@gOOUA`Vi@`9Gb@lV;9 z=fS@05>@_u<*0|tkkCV0W?HppMTUj@v$eY(qQ0s*6+wqFXFM9#3-s#mS5bTDnzc#3 z%;N^G#pjFOca%=pvBH3{Sz`HaH{V`}T=8sjMC!+PVV0aZu@^8cZsi6orLb#zH+ZYB zU7|?^>$`_hvkf(*bP9-=uNGOlxKw7V!n01k3t>=+N1a?YdKqdS=4WIznL~F=CG;De z-0EbplN}cB$LTo4%p9YKIz4=Ptkkz;JOqroP~D8`xPTAPl&;E}HQMO1Mte64hf zSkWs(8cx6i7M{{tiE%5q#JZi$TXimq5Lv`MvI*QDOT5OV z1)i3V)asmx6nD&jBb|_bFs(w&vzXg2XH;8+-qbteA|wh_Kcd#-c)g8_H$;}FJvI#g zrT7ojBM^`^&s#lFU-4zHF~!NffA-tT2@bdX3ldeV4f>*MeV*h5ksHs-uCs21|H+x^ zAURI4zubGK*TffU-m1lR2|__Q0=F|URV28eq3*izve1bdYP43!s+anxRjTwvXQ297 
z&V1IZYJ;1Vh&7QHh@4lA(D<`jsg$vRrO&=PyW$D_?#_$YT!2RD7KrE{P3v>rFjMw;Rb>OD1z`p$OQ}8$BX#zo0?A zddx3*4t>eWHt8N-WFdZ<&e<+GmsvTNOHO5G77{gJ^t`Fa9tlb@o*#d7S_SZB1pvPV zJ{r|nfc&r+D^#5?2-6#gOvH|0GnTD-v>--U_njEl^psh$e(r%8);$cuUbM_8i)Sg5 zJoWnzd*~0E*D07=>FAO=AT#+#Ds_-pF~vdPl<6 zS&=M$_fI5>)nEAIk`VTch4=B@|#y+|x&C5CjKd0ncWEB@_zdh-X3BO>Df zf`g|5v1!fUD3loA1uBqL2Zx!pSY1m`KcL*;ak^7(4UKxF0kbe17O*12`opCtH?bF6 zn?vOhpDThzne|<)%lgRIzcyQa+KGV~jpZRUC$%p!gN%iV9%)GGbMjUw9pNpqTC9Ps zbY(dloibUyMQ4`Hi?oZ8n7s0c-7&mw16(t8gdcc7wwCz?Ad}8Xkxs;hEh=%V`E^-4s`;^8c5ChQ43C|*QO8d54=@phGoh(R$ojTjiDNd;@Te9I) zKn{r@QB7u~7bnn-1RU&Mv&-zx4)ZHwK5f^p6uM z$XO;B#K2N0W7Td>QxcK%ysV-&8NkWr(`RJn1@zaS3@%N4-6*`7@2dO1;xkj$G^ppR zq5F-(DU!o7XOf<;Bp+S5maZhuhln=6p$dlRxy$PLY}L?K>G@W(!0Pz~vS)0*&ENJZ zf>8(oPSJsRKl5Q}aNp{|^Pa0}`|P~`kzA_de=j}uyzd?+m93I&1M}WVW_tgV*VFU< zuvMDge?^+zZQf4;Q*O=sG{|(a-$Hho_q!n-k1^#(jl(EBQL1A22dtU*sw&kGteL-6 zX8yq4sR2w-T?k%3Xy#8)ecvfL2Kt^uK05O{>npPUz2*nw)X3@kFUd*Y-yk8=_iw|l zll_lm*T43Pz@zSX)dCbtQkgz_EZdc6;W-S}Y^RDy&K1-Lh=j6nD=#)LmrVoU=24xn ze2MtOqURPfDOJ`e{DR~M$_mUz%7+V!c~4uM4BlUv0}3hiVI$wXJzGwd=$MNzTY$EB z>n?tTIs_S1N)r6vXuZ0U7*awjV#8IzC`1#r(V2MB(9G{rUKYeH;}W_+1r=4K*qu4A z{T!oOb{Nw502n&mJ+=j788U%gXTG`@DPB4~j2s z({?>N>EWMA!<}L&*4d9(@RTjOQ47xTzFLOe1Qr|I-Id=Qc(tlIZqQbj_tmNvV>_J7 z3BoexL!htw017Wruns9BnEPPpF@8ZXbz#2G3HE4)|BfKG5r&TjPMg7k+&Vh-zemq;?YDGU(9M;uz) zIVzVzm&YO|Eqtjgcg%Mn1u9<#*dK3H8?34;H4+~qC7d~+Oi=;6{D{y=uNd~5?`7w%J7MC~FRP6R;BC2;rR4hT9=IXfwr z$R6`C*}#bY_&^=9>jvsA;hn75|GqBxH>^q7mDO(qeTR=%uc zSW5sf-%f~9{8MjDti(!or_tTn@AN1#Onaef5cWQb2TT@EqGz&sc46}KWh9M9oBW7> z)*@vG$s}X1^F2%+k16j>%MUz8Tv2_>p{>+gwXjTXESgeej-z&O`yxbPi;DPH!atV^ z%;&8-Cp0rqy|5(Y8bJjQl6%ZvWLf4M z8Z~T*6(YgVYYr^-c&iqv{$m^3l$6~=zV)I_-Vw$aUEu*~{BtVdJ_n*iH++gj0I`_X~aG#lb7vRNVc24N> z&rJT3RDO$){RMBI8SF78e^+Yqz3t4sQYpyD<$=f$-D)8!cN^O*ao6=o_pCJ-oh&}{ zmud|z?_~{M=8s{Ex2nplWpEE#hClOJh9cUYf@5{;7^_f|UIo>u6fjg6+hkGLMzSL2 z^TnCfxcTVSXcRfbEFm@Krd7Mh{ZjaN@GW^(OgxktTt_Bm=?FbfH1p}PCpufaGNx~0 zY#ZZynR(PofyjmMdg8s`N34=XLw^voGdg^tP{Rtid>( 
zxa{EeAyrtk&2aOUp;d%c%`5u+;paG+LM9i@1`OhE%xnylAA%e4cOF4%-=#y*j13sj z9?xM2=c}r9iVML8NCv0aL)3JIUY9Z{Lj%_FNhLJwmEoNWiav})OpWG0k?S?r;!AfB z3lLuX<PV2yYtdaG?Z!+{2`(f2jO_g04M)k&)3*ErHdWB++t836t~`iEJ`g3NOKx^o zO*|`fNFCQ)ET=o(#?)4?F&E>xoN})NSmz6{ihO!dWQ&IAQhUJhsi$ofRM$W6+jTbaj8FAVDgKV zp7~osxuY@);Z6LuJUn~MxJyMkjovc`X@cEXZ;Zh0V>+|M^IEE1p?2JJ6_VQacWuXqWt z*4%$#=u{jou$kT&EN(Ew9Z)2EJIoJgF#dCD_cWVd5?oFk+a(1mkT118Y+vZ`bad(E zdj*9NSZ#y+>IZw}r%@<}1@bftD>3<<8yNSt7v0l*Q7%CBn%{`Y%%OZ(CldK;y&!a= zw<^>wLMDP$q@x8HTM|`f4TzBWKIyaHEs1!d__JSp;>e{a)g3Cbh56DUX)jRPC>RHW zK>EbN&!qb`A4=aXVr~5T12Q+r+~7CQ9y}FsLxy=wZmu)%>pIA4oOS!@r&0_E4|6%q*cxA4PGef zq>~v+qN4VTp`5B2SwWM5v;BmtKL8um}yyzfyfYD2wX7MF5*_6MY@p#M5GQQzi6=JP7?^>DpKr9$e;M z3+aE;P9_@?a1F={67Yo`kRR7lH6&nwkW-L=yA~I?t8OX_Mx~L&ooPBkRZg9%?#kUr z|I4Iep(9pF#X{8ue>H@n8%#qWE6v#WFkHpTfpg}HxTWQt2V`oEoST-XaSlQyj7IyJ zFwfc?o`HqtWbq<8P4|ya7RT>X-9LfuV=eU{(;}zMeDcvR5fZWl;;C_7@@$0zCODod3Ti8!5CAUi( z>EV(vy@K}&-rc-&jhNoZdn502d7rC0o+U{x8&8Ded0u^%9npaoMEVq5aJnE#s%hEK=dU-?MsLAw9lMdK~!Q=_DRCk0r)P2gQcP zJViRVgd43>9ei?~06-m+$p0fPwN4g-ozThY)M(CH0mW*&2;%e1YbdM6!2y>lc6+