diff --git a/.adiorc.js b/.adiorc.js index 60b1d8a9..0a80e66e 100644 --- a/.adiorc.js +++ b/.adiorc.js @@ -1,11 +1,10 @@ - module.exports = { packages: [ - 'packages/*', - 'images/*', - 'launcher' + "packages/*", + "images/*", + "launcher", ], // * Setting the cwd ensure adio runs from root of project, ie. pre-commit checks run via 'git commit' in a packages/* directory cwd: __dirname, - ignoreDirs: ['node_modules'] -} + ignoreDirs: ["node_modules"], +}; diff --git a/.eslintignore b/.eslintignore deleted file mode 100644 index b141ec6f..00000000 --- a/.eslintignore +++ /dev/null @@ -1,4 +0,0 @@ -docs/ -packages/port-queue/lib/ -packages/adapter-beequeue/lib/ -packages/adapter-zmq/types/ \ No newline at end of file diff --git a/.eslintrc.js b/.eslintrc.js deleted file mode 100644 index 9ee1d6f3..00000000 --- a/.eslintrc.js +++ /dev/null @@ -1,32 +0,0 @@ - -module.exports = { - parser: '@typescript-eslint/parser', - parserOptions: { - ecmaVersion: 2018, - sourceType: 'module' - }, - extends: [ - 'standard' // Out of the box StandardJS rules - ], - plugins: [ - '@typescript-eslint' // Let's us override rules below. - ], - rules: { - // Prevent unused vars errors when variables are only used as TS types - // see: https://github.com/typescript-eslint/typescript-eslint/blob/master/packages/eslint-plugin/docs/rules/no-unused-vars.md#options - '@typescript-eslint/no-unused-vars': [ - 'error', - { - vars: 'all', - args: 'after-used', - ignoreRestSiblings: false - } - ], - 'no-unused-vars': 'off', - /** - * hyper rejects promises with a lot of !instanceof Error, - * so we disable this rule - */ - 'prefer-promise-reject-errors': 'off' - } -} diff --git a/.github/workflows/lint-pr.yml b/.github/workflows/lint-pr.yml deleted file mode 100644 index 6892d177..00000000 --- a/.github/workflows/lint-pr.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: lint repo -on: - pull_request: - branches: [ main ] - -jobs: - build: - runs-on: ubuntu-latest - strategy: - matrix: - node-version: [14.x] - steps: - - uses: actions/checkout@v2 - - name: Use NodeJS ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - run: yarn - - run: yarn lint - env: - CI: true \ No newline at end of file diff --git a/.github/workflows/publish-adapter-beequeue.yml b/.github/workflows/publish-adapter-beequeue.yml deleted file mode 100644 index 82f4a477..00000000 --- a/.github/workflows/publish-adapter-beequeue.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: publish adapter beequeue -on: - push: - branches: - - main - paths: - - "packages/adapter-beequeue" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/adapter-beequeue && yarn - - run: cd packages/adapter-beequeue && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-adapter-couchdb.yml b/.github/workflows/publish-adapter-couchdb.yml deleted file mode 100644 index e297b478..00000000 --- a/.github/workflows/publish-adapter-couchdb.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Adapter Couchdb -on: - push: - branches: - - main - paths: - - "packages/adapter-couchdb/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd 
packages/adapter-couchdb && yarn - - run: cd packages/adapter-couchdb && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-adapter-elasticsearch.yml b/.github/workflows/publish-adapter-elasticsearch.yml deleted file mode 100644 index 827cda0f..00000000 --- a/.github/workflows/publish-adapter-elasticsearch.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Adapter ElasticSearch -on: - push: - branches: - - main - paths: - - "packages/adapter-elasticsearch/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/adapter-elasticsearch && yarn - - run: cd packages/adapter-elasticsearch && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-adapter-fs.yml b/.github/workflows/publish-adapter-fs.yml deleted file mode 100644 index dd52115d..00000000 --- a/.github/workflows/publish-adapter-fs.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Adapter FS -on: - push: - branches: - - main - paths: - - "packages/adapter-fs/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/adapter-fs && yarn - - run: cd packages/adapter-fs && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-adapter-memory.yml b/.github/workflows/publish-adapter-memory.yml deleted file mode 100644 index 9a57e26a..00000000 --- a/.github/workflows/publish-adapter-memory.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Adapter Memory -on: - push: - branches: - - main - paths: - - "packages/adapter-memory/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/adapter-memory && yarn - - run: cd packages/adapter-memory && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-adapter-minio.yml b/.github/workflows/publish-adapter-minio.yml deleted file mode 100644 index 0a622c03..00000000 --- a/.github/workflows/publish-adapter-minio.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Adapter Minio -on: - push: - branches: - - main - paths: - - "packages/adapter-minio/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/adapter-minio && yarn - - run: cd packages/adapter-minio && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-adapter-minisearch.yml b/.github/workflows/publish-adapter-minisearch.yml deleted file mode 100644 index a643be4f..00000000 --- a/.github/workflows/publish-adapter-minisearch.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Adapter Minisearch -on: - push: - branches: - - main - paths: - - "packages/adapter-minisearch/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: 
"https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/adapter-minisearch && yarn - - run: cd packages/adapter-minisearch && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-adapter-pouchdb.yml b/.github/workflows/publish-adapter-pouchdb.yml deleted file mode 100644 index e6e62ef2..00000000 --- a/.github/workflows/publish-adapter-pouchdb.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Adapter Pouchdb -on: - push: - branches: - - main - paths: - - "packages/adapter-pouchdb/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/adapter-pouchdb && yarn - - run: cd packages/adapter-pouchdb && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-adapter-redis.yml b/.github/workflows/publish-adapter-redis.yml deleted file mode 100644 index c160042b..00000000 --- a/.github/workflows/publish-adapter-redis.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Adapter Redis -on: - push: - branches: - - main - paths: - - "packages/adapter-redis/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/adapter-redis && yarn - - run: cd packages/adapter-redis && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-adapter-zmq.yml b/.github/workflows/publish-adapter-zmq.yml deleted file mode 100644 index 1912c5db..00000000 --- a/.github/workflows/publish-adapter-zmq.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: publish adapter zmq -on: - push: - branches: - - main - paths: - - "packages/adapter-zmq" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/adapter-zmq && yarn - - run: cd packages/adapter-zmq && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-app-express.yml b/.github/workflows/publish-app-express.yml deleted file mode 100644 index 30c59c86..00000000 --- a/.github/workflows/publish-app-express.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish App Express -on: - push: - branches: - - main - paths: - - "packages/app-express/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/app-express && yarn - - run: cd packages/app-express && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-core.yml b/.github/workflows/publish-core.yml deleted file mode 100644 index a9acc698..00000000 --- a/.github/workflows/publish-core.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Core -on: - push: - branches: - - main - paths: - - "packages/core/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/core 
&& yarn - - run: cd packages/core && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-image-test.yml b/.github/workflows/publish-image-test.yml deleted file mode 100644 index 208e0dd5..00000000 --- a/.github/workflows/publish-image-test.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Test Image -on: - push: - branches: - - main - paths: - - "images/test/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper.io" - - run: cd images/test && yarn - - run: cd images/test && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-nodejs-client.yml b/.github/workflows/publish-nodejs-client.yml deleted file mode 100644 index 0acdbf03..00000000 --- a/.github/workflows/publish-nodejs-client.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish NodeJS Client -on: - push: - branches: - - main - paths: - - "packages/nodejs-client/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/nodejs-client && yarn - - run: cd packages/nodejs-client && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-port-cache.yml b/.github/workflows/publish-port-cache.yml deleted file mode 100644 index 838a6e8a..00000000 --- a/.github/workflows/publish-port-cache.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Port Cache -on: - push: - branches: - - main - paths: - - "packages/port-cache/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/port-cache && yarn - - run: cd packages/port-cache && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-port-data.yml b/.github/workflows/publish-port-data.yml deleted file mode 100644 index c61f6127..00000000 --- a/.github/workflows/publish-port-data.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Port Data -on: - push: - branches: - - main - paths: - - "packages/port-data/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/port-data && yarn - - run: cd packages/port-data && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-port-queue.yml b/.github/workflows/publish-port-queue.yml deleted file mode 100644 index d19fab61..00000000 --- a/.github/workflows/publish-port-queue.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: publish port queue -on: - push: - branches: - - main - paths: - - "packages/port-queue/**" - -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: '14.x' - registry-url: 'https://registry.npmjs.org' - scope: '@hyper63' - - run: cd packages/port-queue && yarn - - run: cd packages/port-queue && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git 
a/.github/workflows/publish-port-search.yml b/.github/workflows/publish-port-search.yml deleted file mode 100644 index 53299c03..00000000 --- a/.github/workflows/publish-port-search.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Port Search -on: - push: - branches: - - main - paths: - - "packages/port-search/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/port-search && yarn - - run: cd packages/port-search && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/publish-port-storage.yml b/.github/workflows/publish-port-storage.yml deleted file mode 100644 index 284af62c..00000000 --- a/.github/workflows/publish-port-storage.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Publish Port Storage -on: - push: - branches: - - main - paths: - - "packages/port-storage/**" -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - with: - node-version: "14.x" - registry-url: "https://registry.npmjs.org" - scope: "@hyper63" - - run: cd packages/port-storage && yarn - - run: cd packages/port-storage && npm publish --access public - env: - NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} diff --git a/.github/workflows/test-adapter-beequeue.yml b/.github/workflows/test-adapter-beequeue.yml deleted file mode 100644 index 6ec508e5..00000000 --- a/.github/workflows/test-adapter-beequeue.yml +++ /dev/null @@ -1,27 +0,0 @@ -name: test bee-queue adapter -on: - push: - branches-ignore: main - paths: - - "packages/adapter-beequeue/**" -jobs: - build: - runs-on: ubuntu-latest - strategy: - matrix: - node-version: [14.x] - redis-version: [6] - steps: - - uses: actions/checkout@v2 - - name: Use NodeJS ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - name: Start Redis - uses: supercharge/redis-github-action@1.2.0 - with: - redis-version: ${{ matrix.redis_version }} - - run: cd packages/adapter-beequeue && yarn - - run: cd packages/adapter-beequeue && yarn test - env: - CI: true diff --git a/.github/workflows/test-adapter-couchdb.yml b/.github/workflows/test-adapter-couchdb.yml index b8fba8a9..4f5623fa 100644 --- a/.github/workflows/test-adapter-couchdb.yml +++ b/.github/workflows/test-adapter-couchdb.yml @@ -9,15 +9,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [14.x] + deno-version: [1.11.x] steps: - uses: actions/checkout@v2 - - uses: actions/setup-node@v1 - name: Use NodeJS ${{ matrix.node-version }} + - name: Use Deno ${{ matrix.deno-version}} + uses: denolib/setup-deno@master with: - node-version: ${{ matrix.node-version }} - - run: cd packages/adapter-couchdb && yarn - - run: cd packages/adapter-couchdb && yarn test + deno-version: ${{ matrix.deno-version }} + - run: cd packages/adapter-couchdb && ./scripts/test.sh env: CI: true - diff --git a/.github/workflows/test-adapter-dndb.yml b/.github/workflows/test-adapter-dndb.yml new file mode 100644 index 00000000..3b4c38e9 --- /dev/null +++ b/.github/workflows/test-adapter-dndb.yml @@ -0,0 +1,21 @@ +name: test dndb adapter +on: + push: + branches-ignore: main + paths: + - "packages/adapter-dndb/**" +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + deno-version: [1.11.x] + steps: + - uses: actions/checkout@v2 + - name: Use Deno ${{ matrix.deno-version}} + uses: 
denolib/setup-deno@master + with: + deno-version: ${{ matrix.deno-version }} + - run: cd packages/adapter-dndb && ./scripts/test.sh + env: + CI: true diff --git a/.github/workflows/test-adapter-elasticsearch.yml b/.github/workflows/test-adapter-elasticsearch.yml index e19037f2..a1957a4c 100644 --- a/.github/workflows/test-adapter-elasticsearch.yml +++ b/.github/workflows/test-adapter-elasticsearch.yml @@ -9,14 +9,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [14.x] + deno-version: [1.11.x] steps: - uses: actions/checkout@v2 - - name: Use NodeJS ${{ matrix.node-version }} - uses: actions/setup-node@v1 + - name: Use Deno ${{ matrix.deno-version}} + uses: denolib/setup-deno@master with: - node-version: ${{ matrix.node-version }} - - run: cd packages/adapter-elasticsearch && yarn - - run: cd packages/adapter-elasticsearch && yarn test + deno-version: ${{ matrix.deno-version }} + - run: cd packages/adapter-elasticsearch && ./scripts/test.sh env: CI: true diff --git a/.github/workflows/test-adapter-redis.yml b/.github/workflows/test-adapter-redis.yml index c19fe626..3ae00124 100644 --- a/.github/workflows/test-adapter-redis.yml +++ b/.github/workflows/test-adapter-redis.yml @@ -9,14 +9,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [14.x] + deno-version: [1.11.x] steps: - uses: actions/checkout@v2 - - name: Use NodeJS ${{ matrix.node-version }} - uses: actions/setup-node@v1 + - name: Use Deno ${{ matrix.deno-version}} + uses: denolib/setup-deno@master with: - node-version: ${{ matrix.node-version }} - - run: cd packages/adapter-redis && yarn - - run: cd packages/adapter-redis && yarn test + deno-version: ${{ matrix.deno-version }} + - run: cd packages/adapter-redis && ./scripts/test.sh env: CI: true diff --git a/.github/workflows/test-adapter-zmq.yml b/.github/workflows/test-adapter-zmq.yml deleted file mode 100644 index 46e8addc..00000000 --- a/.github/workflows/test-adapter-zmq.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: test adapter zmq -on: - push: - branches-ignore: main - paths: - - "packages/adapter-zmq/**" -jobs: - build: - runs-on: ubuntu-latest - strategy: - matrix: - node-version: [14.x] - steps: - - uses: actions/checkout@v2 - - name: Use NodeJS ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: ${{ matrix.node-version }} - - run: cd packages/adapter-zmq && yarn - - run: cd packages/adapter-zmq && yarn test - env: - CI: true diff --git a/.github/workflows/test-port-cache.yml b/.github/workflows/test-port-cache.yml new file mode 100644 index 00000000..661d1b3c --- /dev/null +++ b/.github/workflows/test-port-cache.yml @@ -0,0 +1,21 @@ +name: test cache port +on: + push: + branches-ignore: main + paths: + - "packages/port-cache/**" +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + deno-version: [1.11.x] + steps: + - uses: actions/checkout@v2 + - name: Use Deno ${{ matrix.deno-version }} + uses: denolib/setup-deno@master + with: + deno-version: ${{ matrix.deno-version }} + - run: cd packages/port-cache && ./scripts/test.sh + env: + CI: true diff --git a/.github/workflows/test-port-data.yml b/.github/workflows/test-port-data.yml new file mode 100644 index 00000000..fb520867 --- /dev/null +++ b/.github/workflows/test-port-data.yml @@ -0,0 +1,21 @@ +name: test data port +on: + push: + branches-ignore: main + paths: + - "packages/port-data/**" +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + deno-version: [1.11.x] + steps: + - uses: actions/checkout@v2 + - name: Use Deno ${{ 
matrix.deno-version }} + uses: denolib/setup-deno@master + with: + deno-version: ${{ matrix.deno-version }} + - run: cd packages/port-data && ./scripts/test.sh + env: + CI: true diff --git a/.github/workflows/test-port-queue.yml b/.github/workflows/test-port-queue.yml index 634f456f..c8b45bde 100644 --- a/.github/workflows/test-port-queue.yml +++ b/.github/workflows/test-port-queue.yml @@ -1,6 +1,6 @@ name: test queue port -on: - push: +on: + push: branches-ignore: main paths: - "packages/port-queue/**" @@ -9,14 +9,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [14.x] + deno-version: [1.11.x] steps: - uses: actions/checkout@v2 - - name: Use NodeJS ${{ matrix.node-version}} - uses: actions/setup-node@v1 + - name: Use Deno ${{ matrix.deno-version}} + uses: denolib/setup-deno@master with: - node-version: ${{ matrix.node-version }} - - run: cd packages/port-queue && yarn - - run: cd packages/port-queue && yarn test - env: + deno-version: ${{ matrix.deno-version }} + - run: cd packages/port-queue && ./scripts/test.sh + env: CI: true diff --git a/.github/workflows/test-port-search.yml b/.github/workflows/test-port-search.yml index 93efdf7b..ccace3d5 100644 --- a/.github/workflows/test-port-search.yml +++ b/.github/workflows/test-port-search.yml @@ -9,14 +9,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [14.x] + deno-version: [1.11.x] steps: - uses: actions/checkout@v2 - - name: Use NodeJS ${{ matrix.node-version }} - uses: actions/setup-node@v1 + - name: Use Deno ${{ matrix.deno-version }} + uses: denolib/setup-deno@master with: - node-version: ${{ matrix.node-version }} - - run: cd packages/port-search && yarn - - run: cd packages/port-search && yarn test + deno-version: ${{ matrix.deno-version }} + - run: cd packages/port-search && ./scripts/test.sh env: CI: true diff --git a/.github/workflows/test-port-storage.yml b/.github/workflows/test-port-storage.yml new file mode 100644 index 00000000..a5c347e3 --- /dev/null +++ b/.github/workflows/test-port-storage.yml @@ -0,0 +1,21 @@ +name: test storage port +on: + push: + branches-ignore: main + paths: + - "packages/port-storage/**" +jobs: + build: + runs-on: ubuntu-latest + strategy: + matrix: + deno-version: [1.11.x] + steps: + - uses: actions/checkout@v2 + - name: Use Deno ${{ matrix.deno-version }} + uses: denolib/setup-deno@master + with: + deno-version: ${{ matrix.deno-version }} + - run: cd packages/port-storage && ./scripts/test.sh + env: + CI: true diff --git a/.github/workflows/tests-adapter-fs.yml b/.github/workflows/tests-adapter-fs.yml index 41b29ed1..33b010e8 100644 --- a/.github/workflows/tests-adapter-fs.yml +++ b/.github/workflows/tests-adapter-fs.yml @@ -9,14 +9,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [14.x] + deno-version: [1.11.x] steps: - uses: actions/checkout@v2 - - name: Use NodeJS ${{ matrix.node-version }} - uses: actions/setup-node@v1 + - name: Use Deno ${{ matrix.deno-version}} + uses: denolib/setup-deno@master with: - node-version: ${{ matrix.node-version }} - - run: cd packages/adapter-fs && yarn - - run: cd packages/adapter-fs && yarn test + deno-version: ${{ matrix.deno-version }} + - run: cd packages/adapter-fs && ./scripts/test.sh env: CI: true diff --git a/.github/workflows/tests-adapter-memory.yml b/.github/workflows/tests-adapter-memory.yml index 0bb3be9b..96d61ab5 100644 --- a/.github/workflows/tests-adapter-memory.yml +++ b/.github/workflows/tests-adapter-memory.yml @@ -9,14 +9,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - 
node-version: [14.x] + deno-version: [1.11.x] steps: - uses: actions/checkout@v2 - - name: Use NodeJS ${{ matrix.node-version }} - uses: actions/setup-node@v1 + - name: Use Deno ${{ matrix.deno-version}} + uses: denolib/setup-deno@master with: - node-version: ${{ matrix.node-version }} - - run: cd packages/adapter-memory && yarn - - run: cd packages/adapter-memory && yarn test + deno-version: ${{ matrix.deno-version }} + - run: cd packages/adapter-memory && ./scripts/test.sh env: CI: true diff --git a/.github/workflows/tests-adapter-minisearch.yml b/.github/workflows/tests-adapter-minisearch.yml index 45db0823..66c21cd6 100644 --- a/.github/workflows/tests-adapter-minisearch.yml +++ b/.github/workflows/tests-adapter-minisearch.yml @@ -9,14 +9,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [14.x] + deno-version: [1.11.x] steps: - uses: actions/checkout@v2 - - name: Use NodeJS ${{ matrix.node-version }} - uses: actions/setup-node@v1 + - name: Use Deno ${{ matrix.deno-version}} + uses: denolib/setup-deno@master with: - node-version: ${{ matrix.node-version }} - - run: cd packages/adapter-minisearch && yarn - - run: cd packages/adapter-minisearch && yarn test + deno-version: ${{ matrix.deno-version }} + - run: cd packages/adapter-minisearch && ./scripts/test.sh env: CI: true diff --git a/.github/workflows/tests-core.yml b/.github/workflows/tests-core.yml index 8c644b44..0585611e 100644 --- a/.github/workflows/tests-core.yml +++ b/.github/workflows/tests-core.yml @@ -9,14 +9,13 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node-version: [14.x] + deno: [1.11.x] steps: - uses: actions/checkout@v2 - - name: Use NodeJS ${{ matrix.node-version }} - uses: actions/setup-node@v1 + - name: Use Deno ${{ matrix.deno }} + uses: denolib/setup-deno@master with: - node-version: ${{ matrix.node-version }} - - run: cd packages/core && yarn - - run: cd packages/core && yarn test + deno-version: ${{ matrix.deno }} + - run: cd packages/core && ./scripts/test.sh env: CI: true diff --git a/.gitpod.Dockerfile b/.gitpod.Dockerfile index c0864ef6..c44280fb 100644 --- a/.gitpod.Dockerfile +++ b/.gitpod.Dockerfile @@ -10,5 +10,13 @@ USER gitpod # sudo rm -rf /var/lib/apt/lists/* # # More information: https://www.gitpod.io/docs/config-docker/ +# install deno +RUN curl -fsSL https://deno.land/x/install/install.sh | sh +RUN /home/gitpod/.deno/bin/deno completions bash > /home/gitpod/.bashrc.d/90-deno && echo 'export DENO_INSTALL="/home/gitpod/.deno"' >> /home/gitpod/.bashrc.d/90-deno && echo 'export PATH="$DENO_INSTALL/bin:$PATH"' >> /home/gitpod/.bashrc.d/90-deno + # install redis -RUN sudo apt-get update && sudo apt-get install -y redis-server && sudo rm -rf /var/lib/apt/lists/* \ No newline at end of file +RUN sudo apt-get update && sudo apt-get install -y redis-server && sudo rm -rf /var/lib/apt/lists/* + +# install vim plug +RUN curl -fLo ~/.vim/autoload/plug.vim --create-dirs \ + https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim diff --git a/.gitpod.yml b/.gitpod.yml index 970eca60..2426adca 100644 --- a/.gitpod.yml +++ b/.gitpod.yml @@ -10,3 +10,20 @@ ports: tasks: - init: echo 'init script' # runs during prebuild command: echo 'start script' + +github: + prebuilds: + # enable for the default branch (defaults to true) + master: true + # enable for all branches in this repo (defaults to false) + branches: true + # enable for pull requests coming from this repo (defaults to true) + pullRequests: true + # enable for pull requests coming from forks (defaults to false) + 
pullRequestsFromForks: true + # add a check to pull requests (defaults to true) + addCheck: true + # add a "Review in Gitpod" button as a comment to pull requests (defaults to false) + addComment: true + # add a "Review in Gitpod" button to the pull request's description (defaults to false) + addBadge: false diff --git a/.husky/pre-commit b/.husky/pre-commit index cdd9fefb..36af2198 100755 --- a/.husky/pre-commit +++ b/.husky/pre-commit @@ -1,4 +1,4 @@ #!/bin/sh . "$(dirname "$0")/_/husky.sh" -npx adio && npx lint-staged +npx lint-staged diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..e40716fd --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,5 @@ +{ + "deno.enable": true, + "deno.lint": true, + "deno.unstable": true +} diff --git a/README.md b/README.md index ad2afe7c..882b7af4 100644 --- a/README.md +++ b/README.md @@ -27,9 +27,13 @@ 👋 Hey 👋 -Welcome to ⚡hyper63 open source project, above is plenty of links that can give you the why and what of hyper63, check them out! Also, if you are a hands on kind of developer, try out hyper63 using our playground and gitpod. You don't have to install anything to kick the tires ⚙️. +Welcome to the ⚡hyper63 open source project. Above are plenty of links that +can give you the why and what of hyper63, so check them out! Also, if you are a +hands-on kind of developer, try out hyper63 using our playground and gitpod. You +don't have to install anything to kick the tires ⚙️. ## Status + - [x] Development - [ ] Alpha - [ ] Beta @@ -41,7 +45,7 @@ You can take a 🎫 tour of the api starting here https://github.com/hyper63/tou or watch a video here https://youtu.be/J75hYi6Gqgc -## Running Locally +## Running Locally To run hyper63 on your local machine in your terminal type: @@ -51,7 +55,8 @@ To run hyper63 on your local machine in your terminal type: npx @hyper63/x ``` -> This command will run a hyper63 service on PORT `6363` and store data in `${HOME}/.hyper63` > [Ctrl/Cmd] - C will stop the service. +> This command will run a hyper63 service on PORT `6363` and store data in +> `${HOME}/.hyper63`. [Ctrl/Cmd]+C will stop the service. This `nano` version of hyper63 implements the following ports and adapters: @@ -78,9 +83,7 @@ yarn dev ## Thank you -* OpenSource Community -* CharlestonJS Community -* JRS Coding School Team -* And everyone else that has helped this project become successful! - - +- OpenSource Community +- CharlestonJS Community +- JRS Coding School Team +- And everyone else that has helped this project become successful! diff --git a/commitlint.config.js b/commitlint.config.js index 75c26962..69b4242c 100644 --- a/commitlint.config.js +++ b/commitlint.config.js @@ -1,4 +1,3 @@ - module.exports = { - extends: ['@commitlint/config-conventional'] -} + extends: ["@commitlint/config-conventional"], +}; diff --git a/design.md b/design.md index 12f4cdcf..ce832055 100644 --- a/design.md +++ b/design.md @@ -2,23 +2,28 @@ A service gateway for creating future proof applications. -hyper63 is a service gateway that encourages separation between -common services and business logic of your application. This separation is -a function of a clean architecture leveraging the ports and adapters pattern. -As products grow in complexity over time with strong pressure to ship features -in a time sensitive way, the likely hood that business rules get spread between -architectural layers is extremely likely. 
+hyper63 is a service gateway that encourages separation between common services +and business logic of your application. This separation is a function of a clean +architecture leveraging the ports and adapters pattern. As products grow in +complexity over time with strong pressure to ship features in a time-sensitive +way, it becomes extremely likely that business rules get spread across +architectural layers. -> hyper63's goal is to encourage business rules to settle in a core area leveraging solid principles to keep the business logic highly maintainable as the product grows over time. +> hyper63's goal is to encourage business rules to settle in a core area +> leveraging solid principles to keep the business logic highly maintainable as +> the product grows over time. - interface/api - business rules - services What hyper63 wants to do is to generalize the services your application may need -so that you can keep your business rules cleanly separated as well as leveraging the ports -and adapter design so that your backend services can be replacable without having to modify -business rules. hyper63 is a docker container that gives you data, cache, storage, search and webhooks out of the box, without having to make any decisions, you simply `docker-compose up` and you have your backend end service up and running! +so that you can keep your business rules cleanly separated as well as leveraging +the ports and adapter design so that your backend services can be replaceable +without having to modify business rules. hyper63 is a docker container that +gives you data, cache, storage, search and webhooks out of the box, without +having to make any decisions: you simply `docker-compose up` and you have your +backend service up and running! [Inception Deck](inception.md) @@ -44,7 +49,8 @@ Currently, the services for micro are All apis use this basic pattern: -> designing the api to support multiple services currenlty only supporting data, cache, storage, search, hooks. +> designing the api to support multiple services, currently only supporting +> data, cache, storage, search, hooks. ``` /:service/:name ``` GET /cache/products All commands will start with an underscore -> Commands are built in urls that instruct the system to perform an action usually with a POST, PUT or DELETE method. +> Commands are built-in urls that instruct the system to perform an action, +> usually with a POST, PUT or DELETE method. queries a customer data store @@ -316,7 +323,6 @@ buckets.*.delete - when any deletion occurs for a file So the scope pattern would :service.:name.:action - asterisk equals all and actions are (read,write,delete) - ``` list hooks diff --git a/developers-guide.md b/developers-guide.md index e05667fd..5d813755 100644 --- a/developers-guide.md +++ b/developers-guide.md @@ -2,9 +2,8 @@ ## Requirements -* git -* node - https://nodejs.org - +- git +- node - https://nodejs.org ## Setup diff --git a/docs/README.md b/docs/README.md index 8f522696..13e7a9d8 100644 --- a/docs/README.md +++ b/docs/README.md @@ -1,11 +1,11 @@ # sapper-template -The default template for setting up a [Sapper](https://github.com/sveltejs/sapper) project. Can use either Rollup or webpack as bundler. - +The default template for setting up a +[Sapper](https://github.com/sveltejs/sapper) project. Can use either Rollup or +webpack as bundler. 
## Getting started - ### Using `degit` To create a new Sapper project based on Rollup locally, run ```bash npx degit "sveltejs/sapper-template#rollup" my-app ``` For a webpack-based project, instead run ```bash npx degit "sveltejs/sapper-template#webpack" my-app ``` -[`degit`](https://github.com/Rich-Harris/degit) is a scaffolding tool that lets you create a directory from a branch in a repository. +[`degit`](https://github.com/Rich-Harris/degit) is a scaffolding tool that lets +you create a directory from a branch in a repository. Replace `my-app` with the path where you wish to create the project. - ### Using GitHub templates -Alternatively, you can create the new project as a GitHub repository using GitHub's template feature. - -Go to either [sapper-template-rollup](https://github.com/sveltejs/sapper-template-rollup) or [sapper-template-webpack](https://github.com/sveltejs/sapper-template-webpack) and click on "Use this template" to create a new project repository initialized by the template. +Alternatively, you can create the new project as a GitHub repository using +GitHub's template feature. +Go to either +[sapper-template-rollup](https://github.com/sveltejs/sapper-template-rollup) or +[sapper-template-webpack](https://github.com/sveltejs/sapper-template-webpack) +and click on "Use this template" to create a new project repository initialized +by the template. ### Running the project Once you have created the project, install dependencies and run the project in +development mode: ```bash cd my-app npm install # or yarn npm run dev ``` -This will start the development server on [localhost:3000](http://localhost:3000). Open it and click around. +This will start the development server on +[localhost:3000](http://localhost:3000). Open it and click around. -You now have a fully functional Sapper project! To get started developing, consult [sapper.svelte.dev](https://sapper.svelte.dev). +You now have a fully functional Sapper project! To get started developing, +consult [sapper.svelte.dev](https://sapper.svelte.dev). ### Using TypeScript -By default, the template uses plain JavaScript. If you wish to use TypeScript instead, you need some changes to the project: +By default, the template uses plain JavaScript. If you wish to use TypeScript +instead, you need some changes to the project: - * Add `typescript` as well as typings as dependences in `package.json` - * Configure the bundler to use [`svelte-preprocess`](https://github.com/sveltejs/svelte-preprocess) and transpile the TypeScript code. - * Add a `tsconfig.json` file - * Update the project code to TypeScript +- Add `typescript` as well as typings as dependencies in `package.json` +- Configure the bundler to use + [`svelte-preprocess`](https://github.com/sveltejs/svelte-preprocess) and + transpile the TypeScript code. +- Add a `tsconfig.json` file +- Update the project code to TypeScript -The template comes with a script that will perform these changes for you by running +The template comes with a script that will perform these changes for you by +running ```bash node scripts/setupTypeScript.js ``` -`@sapper` dependencies are resolved through `src/node_modules/@sapper`, which is created during the build. You therefore need to run or build the project once to avoid warnings about missing dependencies. +`@sapper` dependencies are resolved through `src/node_modules/@sapper`, which is +created during the build. 
You therefore need to run or build the project once to +avoid warnings about missing dependencies. The script does not support webpack at the moment. ## Directory structure -Sapper expects to find two directories in the root of your project β€” `src` and `static`. - +Sapper expects to find two directories in the root of your project β€” `src` and +`static`. ### src -The [src](src) directory contains the entry points for your app β€” `client.js`, `server.js` and (optionally) a `service-worker.js` β€” along with a `template.html` file and a `routes` directory. - +The [src](src) directory contains the entry points for your app β€” `client.js`, +`server.js` and (optionally) a `service-worker.js` β€” along with a +`template.html` file and a `routes` directory. #### src/routes -This is the heart of your Sapper app. There are two kinds of routes β€” *pages*, and *server routes*. +This is the heart of your Sapper app. There are two kinds of routes β€” _pages_, +and _server routes_. -**Pages** are Svelte components written in `.svelte` files. When a user first visits the application, they will be served a server-rendered version of the route in question, plus some JavaScript that 'hydrates' the page and initialises a client-side router. From that point forward, navigating to other pages is handled entirely on the client for a fast, app-like feel. (Sapper will preload and cache the code for these subsequent pages, so that navigation is instantaneous.) +**Pages** are Svelte components written in `.svelte` files. When a user first +visits the application, they will be served a server-rendered version of the +route in question, plus some JavaScript that 'hydrates' the page and initialises +a client-side router. From that point forward, navigating to other pages is +handled entirely on the client for a fast, app-like feel. (Sapper will preload +and cache the code for these subsequent pages, so that navigation is +instantaneous.) -**Server routes** are modules written in `.js` files, that export functions corresponding to HTTP methods. Each function receives Express `request` and `response` objects as arguments, plus a `next` function. This is useful for creating a JSON API, for example. +**Server routes** are modules written in `.js` files, that export functions +corresponding to HTTP methods. Each function receives Express `request` and +`response` objects as arguments, plus a `next` function. This is useful for +creating a JSON API, for example. There are three simple rules for naming the files that define your routes: -* A file called `src/routes/about.svelte` corresponds to the `/about` route. A file called `src/routes/blog/[slug].svelte` corresponds to the `/blog/:slug` route, in which case `params.slug` is available to the route -* The file `src/routes/index.svelte` (or `src/routes/index.js`) corresponds to the root of your app. `src/routes/about/index.svelte` is treated the same as `src/routes/about.svelte`. -* Files and directories with a leading underscore do *not* create routes. This allows you to colocate helper modules and components with the routes that depend on them β€” for example you could have a file called `src/routes/_helpers/datetime.js` and it would *not* create a `/_helpers/datetime` route. - +- A file called `src/routes/about.svelte` corresponds to the `/about` route. 
A + file called `src/routes/blog/[slug].svelte` corresponds to the `/blog/:slug` + route, in which case `params.slug` is available to the route +- The file `src/routes/index.svelte` (or `src/routes/index.js`) corresponds to + the root of your app. `src/routes/about/index.svelte` is treated the same as + `src/routes/about.svelte`. +- Files and directories with a leading underscore do _not_ create routes. This + allows you to colocate helper modules and components with the routes that + depend on them β€” for example you could have a file called + `src/routes/_helpers/datetime.js` and it would _not_ create a + `/_helpers/datetime` route. #### src/node_modules/images -Images added to `src/node_modules/images` can be imported into your code using `import 'images/'`. They will be given a dynamically generated filename containing a hash, allowing for efficient caching and serving the images on a CDN. +Images added to `src/node_modules/images` can be imported into your code using +`import 'images/'`. They will be given a dynamically generated +filename containing a hash, allowing for efficient caching and serving the +images on a CDN. See [`index.svelte`](src/routes/index.svelte) for an example. - #### src/node_modules/@sapper -This directory is managed by Sapper and generated when building. It contains all the code you import from `@sapper` modules. - +This directory is managed by Sapper and generated when building. It contains all +the code you import from `@sapper` modules. ### static -The [static](static) directory contains static assets that should be served publicly. Files in this directory will be available directly under the root URL, e.g. an `image.jpg` will be available as `/image.jpg`. +The [static](static) directory contains static assets that should be served +publicly. Files in this directory will be available directly under the root URL, +e.g. an `image.jpg` will be available as `/image.jpg`. -The default [service-worker.js](src/service-worker.js) will preload and cache these files, by retrieving a list of `files` from the generated manifest: +The default [service-worker.js](src/service-worker.js) will preload and cache +these files, by retrieving a list of `files` from the generated manifest: ```js -import { files } from '@sapper/service-worker'; +import { files } from "@sapper/service-worker"; ``` -If you have static files you do not want to cache, you should exclude them from this list after importing it (and before passing it to `cache.addAll`). +If you have static files you do not want to cache, you should exclude them from +this list after importing it (and before passing it to `cache.addAll`). Static files are served using [sirv](https://github.com/lukeed/sirv). - ## Bundler configuration -Sapper uses Rollup or webpack to provide code-splitting and dynamic imports, as well as compiling your Svelte components. With webpack, it also provides hot module reloading. As long as you don't do anything daft, you can edit the configuration files to add whatever plugins you'd like. - +Sapper uses Rollup or webpack to provide code-splitting and dynamic imports, as +well as compiling your Svelte components. With webpack, it also provides hot +module reloading. As long as you don't do anything daft, you can edit the +configuration files to add whatever plugins you'd like. ## Production mode and deployment -To start a production version of your app, run `npm run build && npm start`. This will disable live reloading, and activate the appropriate bundler plugins. 
+To start a production version of your app, run `npm run build && npm start`. +This will disable live reloading, and activate the appropriate bundler plugins. -You can deploy your application to any environment that supports Node 10 or above. As an example, to deploy to [Vercel Now](https://vercel.com) when using `sapper export`, run these commands: +You can deploy your application to any environment that supports Node 10 or +above. As an example, to deploy to [Vercel Now](https://vercel.com) when using +`sapper export`, run these commands: ```bash npm install -g vercel vercel ``` -If your app can't be exported to a static site, you can use the [now-sapper](https://github.com/thgh/now-sapper) builder. You can find instructions on how to do so in its [README](https://github.com/thgh/now-sapper#basic-usage). - +If your app can't be exported to a static site, you can use the +[now-sapper](https://github.com/thgh/now-sapper) builder. You can find +instructions on how to do so in its +[README](https://github.com/thgh/now-sapper#basic-usage). ## Using external components -When using Svelte components installed from npm, such as [@sveltejs/svelte-virtual-list](https://github.com/sveltejs/svelte-virtual-list), Svelte needs the original component source (rather than any precompiled JavaScript that ships with the component). This allows the component to be rendered server-side, and also keeps your client-side app smaller. +When using Svelte components installed from npm, such as +[@sveltejs/svelte-virtual-list](https://github.com/sveltejs/svelte-virtual-list), +Svelte needs the original component source (rather than any precompiled +JavaScript that ships with the component). This allows the component to be +rendered server-side, and also keeps your client-side app smaller. -Because of that, it's essential that the bundler doesn't treat the package as an *external dependency*. You can either modify the `external` option under `server` in [rollup.config.js](rollup.config.js) or the `externals` option in [webpack.config.js](webpack.config.js), or simply install the package to `devDependencies` rather than `dependencies`, which will cause it to get bundled (and therefore compiled) with your app: +Because of that, it's essential that the bundler doesn't treat the package as an +_external dependency_. You can either modify the `external` option under +`server` in [rollup.config.js](rollup.config.js) or the `externals` option in +[webpack.config.js](webpack.config.js), or simply install the package to +`devDependencies` rather than `dependencies`, which will cause it to get bundled +(and therefore compiled) with your app: ```bash npm install -D @sveltejs/svelte-virtual-list ``` - ## Bugs and feedback -Sapper is in early development, and may have the odd rough edge here and there. Please be vocal over on the [Sapper issue tracker](https://github.com/sveltejs/sapper/issues). +Sapper is in early development, and may have the odd rough edge here and there. +Please be vocal over on the +[Sapper issue tracker](https://github.com/sveltejs/sapper/issues). 
diff --git a/docs/rollup.config.js b/docs/rollup.config.js index 26b5e65e..83f4d4ee 100644 --- a/docs/rollup.config.js +++ b/docs/rollup.config.js @@ -1,107 +1,109 @@ -import path from 'path'; -import resolve from '@rollup/plugin-node-resolve'; -import replace from '@rollup/plugin-replace'; -import commonjs from '@rollup/plugin-commonjs'; -import url from '@rollup/plugin-url'; -import svelte from 'rollup-plugin-svelte'; -import babel from '@rollup/plugin-babel'; -import { terser } from 'rollup-plugin-terser'; -import config from 'sapper/config/rollup.js'; -import pkg from './package.json'; -import { mdsvex } from 'mdsvex'; +import path from "path"; +import resolve from "@rollup/plugin-node-resolve"; +import replace from "@rollup/plugin-replace"; +import commonjs from "@rollup/plugin-commonjs"; +import url from "@rollup/plugin-url"; +import svelte from "rollup-plugin-svelte"; +import babel from "@rollup/plugin-babel"; +import { terser } from "rollup-plugin-terser"; +import config from "sapper/config/rollup.js"; +import pkg from "./package.json"; +import { mdsvex } from "mdsvex"; const mode = process.env.NODE_ENV; -const dev = mode === 'development'; +const dev = mode === "development"; const legacy = !!process.env.SAPPER_LEGACY_BUILD; const onwarn = (warning, onwarn) => - (warning.code === 'MISSING_EXPORT' && /'preload'/.test(warning.message)) || - (warning.code === 'CIRCULAR_DEPENDENCY' && /[/\\]@sapper[/\\]/.test(warning.message)) || - onwarn(warning); + (warning.code === "MISSING_EXPORT" && /'preload'/.test(warning.message)) || + (warning.code === "CIRCULAR_DEPENDENCY" && + /[/\\]@sapper[/\\]/.test(warning.message)) || + onwarn(warning); -const extensions = ['.svelte', '.svx'] +const extensions = [".svelte", ".svx"]; export default { - client: { - input: config.client.input(), - output: config.client.output(), - plugins: [ - replace({ - 'process.browser': true, - 'process.env.NODE_ENV': JSON.stringify(mode) - }), - svelte({ - extensions, - preprocess: mdsvex(), - dev, - hydratable: true, - emitCss: true - }), - url({ - sourceDir: path.resolve(__dirname, 'src/node_modules/images'), - publicPath: '/client/' - }), - resolve({ - browser: true, - dedupe: ['svelte'] - }), - commonjs(), + client: { + input: config.client.input(), + output: config.client.output(), + plugins: [ + replace({ + "process.browser": true, + "process.env.NODE_ENV": JSON.stringify(mode), + }), + svelte({ + extensions, + preprocess: mdsvex(), + dev, + hydratable: true, + emitCss: true, + }), + url({ + sourceDir: path.resolve(__dirname, "src/node_modules/images"), + publicPath: "/client/", + }), + resolve({ + browser: true, + dedupe: ["svelte"], + }), + commonjs(), - legacy && babel({ - extensions: ['.js', '.mjs', '.html', '.svelte'], - babelHelpers: 'runtime', - exclude: ['node_modules/@babel/**'], - presets: [ - ['@babel/preset-env', { - targets: '> 0.25%, not dead' - }] - ], - plugins: [ - '@babel/plugin-syntax-dynamic-import', - ['@babel/plugin-transform-runtime', { - useESModules: true - }] - ] - }), + legacy && babel({ + extensions: [".js", ".mjs", ".html", ".svelte"], + babelHelpers: "runtime", + exclude: ["node_modules/@babel/**"], + presets: [ + ["@babel/preset-env", { + targets: "> 0.25%, not dead", + }], + ], + plugins: [ + "@babel/plugin-syntax-dynamic-import", + ["@babel/plugin-transform-runtime", { + useESModules: true, + }], + ], + }), - !dev && terser({ - module: true - }) - ], + !dev && terser({ + module: true, + }), + ], - preserveEntrySignatures: false, - onwarn, - }, + preserveEntrySignatures: false, + 
onwarn, + }, - server: { - input: config.server.input(), - output: config.server.output(), - plugins: [ - replace({ - 'process.browser': false, - 'process.env.NODE_ENV': JSON.stringify(mode) - }), - svelte({ - extensions, - preprocess: mdsvex(), - generate: 'ssr', - hydratable: true, - dev - }), - url({ - sourceDir: path.resolve(__dirname, 'src/node_modules/images'), - publicPath: '/client/', - emitFiles: false // already emitted by client build - }), - resolve({ - dedupe: ['svelte'] - }), - commonjs() - ], - external: Object.keys(pkg.dependencies).concat(require('module').builtinModules), - - preserveEntrySignatures: 'strict', - onwarn, - }, + server: { + input: config.server.input(), + output: config.server.output(), + plugins: [ + replace({ + "process.browser": false, + "process.env.NODE_ENV": JSON.stringify(mode), + }), + svelte({ + extensions, + preprocess: mdsvex(), + generate: "ssr", + hydratable: true, + dev, + }), + url({ + sourceDir: path.resolve(__dirname, "src/node_modules/images"), + publicPath: "/client/", + emitFiles: false, // already emitted by client build + }), + resolve({ + dedupe: ["svelte"], + }), + commonjs(), + ], + external: Object.keys(pkg.dependencies).concat( + require("module").builtinModules, + ), + preserveEntrySignatures: "strict", + onwarn, + }, }; diff --git a/docs/scripts/setupTypeScript.js b/docs/scripts/setupTypeScript.js index 1e714d74..6d6b830b 100644 --- a/docs/scripts/setupTypeScript.js +++ b/docs/scripts/setupTypeScript.js @@ -5,234 +5,280 @@ */ // @ts-check -const fs = require('fs'); -const path = require('path'); -const { argv } = require('process'); +const fs = require("fs"); +const path = require("path"); +const { argv } = require("process"); -const projectRoot = argv[2] || path.join(__dirname, '..'); +const projectRoot = argv[2] || path.join(__dirname, ".."); const isRollup = fs.existsSync(path.join(projectRoot, "rollup.config.js")); function warn(message) { - console.warn('Warning: ' + message); + console.warn("Warning: " + message); } function replaceInFile(fileName, replacements) { - if (fs.existsSync(fileName)) { - let contents = fs.readFileSync(fileName, 'utf8'); - let hadUpdates = false; - - replacements.forEach(([from, to]) => { - const newContents = contents.replace(from, to); - - const isAlreadyApplied = typeof to !== 'string' || contents.includes(to); - - if (newContents !== contents) { - contents = newContents; - hadUpdates = true; - } else if (!isAlreadyApplied) { - warn(`Wanted to update "${from}" in ${fileName}, but did not find it.`); - } - }); - - if (hadUpdates) { - fs.writeFileSync(fileName, contents); - } else { - console.log(`${fileName} had already been updated.`); - } - } else { - warn(`Wanted to update ${fileName} but the file did not exist.`); - } + if (fs.existsSync(fileName)) { + let contents = fs.readFileSync(fileName, "utf8"); + let hadUpdates = false; + + replacements.forEach(([from, to]) => { + const newContents = contents.replace(from, to); + + const isAlreadyApplied = typeof to !== "string" || contents.includes(to); + + if (newContents !== contents) { + contents = newContents; + hadUpdates = true; + } else if (!isAlreadyApplied) { + warn(`Wanted to update "${from}" in ${fileName}, but did not find it.`); + } + }); + + if (hadUpdates) { + fs.writeFileSync(fileName, contents); + } else { + console.log(`${fileName} had already been updated.`); + } + } else { + warn(`Wanted to update ${fileName} but the file did not exist.`); + } } function createFile(fileName, contents) { - if (fs.existsSync(fileName)) { - 
warn(`Wanted to create ${fileName}, but it already existed. Leaving existing file.`); - } else { - fs.writeFileSync(fileName, contents); - } + if (fs.existsSync(fileName)) { + warn( + `Wanted to create ${fileName}, but it already existed. Leaving existing file.`, + ); + } else { + fs.writeFileSync(fileName, contents); + } } function addDepsToPackageJson() { - const pkgJSONPath = path.join(projectRoot, 'package.json'); - const packageJSON = JSON.parse(fs.readFileSync(pkgJSONPath, 'utf8')); - packageJSON.devDependencies = Object.assign(packageJSON.devDependencies, { - ...(isRollup ? { '@rollup/plugin-typescript': '^6.0.0' } : { 'ts-loader': '^8.0.4' }), - '@tsconfig/svelte': '^1.0.10', - '@types/compression': '^1.7.0', - '@types/node': '^14.11.1', - '@types/polka': '^0.5.1', - 'svelte-check': '^1.0.46', - 'svelte-preprocess': '^4.3.0', - tslib: '^2.0.1', - typescript: '^4.0.3' - }); - - // Add script for checking - packageJSON.scripts = Object.assign(packageJSON.scripts, { - validate: 'svelte-check --ignore src/node_modules/@sapper' - }); - - // Write the package JSON - fs.writeFileSync(pkgJSONPath, JSON.stringify(packageJSON, null, ' ')); + const pkgJSONPath = path.join(projectRoot, "package.json"); + const packageJSON = JSON.parse(fs.readFileSync(pkgJSONPath, "utf8")); + packageJSON.devDependencies = Object.assign(packageJSON.devDependencies, { + ...(isRollup + ? { "@rollup/plugin-typescript": "^6.0.0" } + : { "ts-loader": "^8.0.4" }), + "@tsconfig/svelte": "^1.0.10", + "@types/compression": "^1.7.0", + "@types/node": "^14.11.1", + "@types/polka": "^0.5.1", + "svelte-check": "^1.0.46", + "svelte-preprocess": "^4.3.0", + tslib: "^2.0.1", + typescript: "^4.0.3", + }); + + // Add script for checking + packageJSON.scripts = Object.assign(packageJSON.scripts, { + validate: "svelte-check --ignore src/node_modules/@sapper", + }); + + // Write the package JSON + fs.writeFileSync(pkgJSONPath, JSON.stringify(packageJSON, null, " ")); } function changeJsExtensionToTs(dir) { - const elements = fs.readdirSync(dir, { withFileTypes: true }); - - for (let i = 0; i < elements.length; i++) { - if (elements[i].isDirectory()) { - changeJsExtensionToTs(path.join(dir, elements[i].name)); - } else if (elements[i].name.match(/^[^_]((?!json).)*js$/)) { - fs.renameSync(path.join(dir, elements[i].name), path.join(dir, elements[i].name).replace('.js', '.ts')); - } - } + const elements = fs.readdirSync(dir, { withFileTypes: true }); + + for (let i = 0; i < elements.length; i++) { + if (elements[i].isDirectory()) { + changeJsExtensionToTs(path.join(dir, elements[i].name)); + } else if (elements[i].name.match(/^[^_]((?!json).)*js$/)) { + fs.renameSync( + path.join(dir, elements[i].name), + path.join(dir, elements[i].name).replace(".js", ".ts"), + ); + } + } } function updateSingleSvelteFile({ view, vars, contextModule }) { - replaceInFile(path.join(projectRoot, 'src', `${view}.svelte`), [ - [/(?:/gm, (m, attrs) => ``], - ...(vars ? vars.map(({ name, type }) => [`export let ${name};`, `export let ${name}: ${type};`]) : []), - ...(contextModule ? contextModule.map(({ js, ts }) => [js, ts]) : []) - ]); + replaceInFile(path.join(projectRoot, "src", `${view}.svelte`), [ + [ + /(?:/gm, + (m, attrs) => + ``, + ], + ...(vars + ? vars.map(( + { name, type }, + ) => [`export let ${name};`, `export let ${name}: ${type};`]) + : []), + ...(contextModule ? 
contextModule.map(({ js, ts }) => [js, ts]) : []), + ]); } // Switch the *.svelte file to use TS function updateSvelteFiles() { - [ - { - view: 'components/Nav', - vars: [{ name: 'segment', type: 'string' }] - }, - { - view: 'routes/_layout', - vars: [{ name: 'segment', type: 'string' }] - }, - { - view: 'routes/_error', - vars: [ - { name: 'status', type: 'number' }, - { name: 'error', type: 'Error' } - ] - }, - { - view: 'routes/blog/index', - vars: [{ name: 'posts', type: '{ slug: string; title: string, html: any }[]' }], - contextModule: [ - { - js: '.then(r => r.json())', - ts: '.then((r: { json: () => any; }) => r.json())' - }, - { - js: '.then(posts => {', - ts: '.then((posts: { slug: string; title: string, html: any }[]) => {' - } - ] - }, - { - view: 'routes/blog/[slug]', - vars: [{ name: 'post', type: '{ slug: string; title: string, html: any }' }] - } - ].forEach(updateSingleSvelteFile); + [ + { + view: "components/Nav", + vars: [{ name: "segment", type: "string" }], + }, + { + view: "routes/_layout", + vars: [{ name: "segment", type: "string" }], + }, + { + view: "routes/_error", + vars: [ + { name: "status", type: "number" }, + { name: "error", type: "Error" }, + ], + }, + { + view: "routes/blog/index", + vars: [{ + name: "posts", + type: "{ slug: string; title: string, html: any }[]", + }], + contextModule: [ + { + js: ".then(r => r.json())", + ts: ".then((r: { json: () => any; }) => r.json())", + }, + { + js: ".then(posts => {", + ts: + ".then((posts: { slug: string; title: string, html: any }[]) => {", + }, + ], + }, + { + view: "routes/blog/[slug]", + vars: [{ + name: "post", + type: "{ slug: string; title: string, html: any }", + }], + }, + ].forEach(updateSingleSvelteFile); } function updateRollupConfig() { - // Edit rollup config - replaceInFile(path.join(projectRoot, 'rollup.config.js'), [ - // Edit imports - [ - /'rollup-plugin-terser';\n(?!import sveltePreprocess)/, - `'rollup-plugin-terser'; + // Edit rollup config + replaceInFile(path.join(projectRoot, "rollup.config.js"), [ + // Edit imports + [ + /'rollup-plugin-terser';\n(?!import sveltePreprocess)/, + `'rollup-plugin-terser'; import sveltePreprocess from 'svelte-preprocess'; import typescript from '@rollup/plugin-typescript'; -` - ], - // Edit inputs - [ - /(?`, `self.addEventListener('activate', (event: ExtendableEvent) =>`], - [`self.addEventListener('install', event =>`, `self.addEventListener('install', (event: ExtendableEvent) =>`], - [`addEventListener('fetch', event =>`, `addEventListener('fetch', (event: FetchEvent) =>`], - ]); + replaceInFile(path.join(projectRoot, "src", "service-worker.ts"), [ + [`shell.concat(files);`, `(shell as string[]).concat(files as string[]);`], + [ + `self.skipWaiting();`, + `((self as any) as ServiceWorkerGlobalScope).skipWaiting();`, + ], + [ + `self.clients.claim();`, + `((self as any) as ServiceWorkerGlobalScope).clients.claim();`, + ], + [`fetchAndCache(request)`, `fetchAndCache(request: Request)`], + [ + `self.addEventListener('activate', event =>`, + `self.addEventListener('activate', (event: ExtendableEvent) =>`, + ], + [ + `self.addEventListener('install', event =>`, + `self.addEventListener('install', (event: ExtendableEvent) =>`, + ], + [ + `addEventListener('fetch', event =>`, + `addEventListener('fetch', (event: FetchEvent) =>`, + ], + ]); } function createTsConfig() { - const tsconfig = `{ + const tsconfig = `{ "extends": "@tsconfig/svelte/tsconfig.json", "compilerOptions": { "lib": ["DOM", "ES2017", "WebWorker"] @@ -241,48 +287,51 @@ function createTsConfig() 
{ "exclude": ["node_modules/*", "__sapper__/*", "static/*"] }`; - createFile(path.join(projectRoot, 'tsconfig.json'), tsconfig); + createFile(path.join(projectRoot, "tsconfig.json"), tsconfig); } // Adds the extension recommendation function configureVsCode() { - const dir = path.join(projectRoot, '.vscode'); + const dir = path.join(projectRoot, ".vscode"); - if (!fs.existsSync(dir)) { - fs.mkdirSync(dir); - } + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir); + } - createFile(path.join(projectRoot, '.vscode', 'extensions.json'), `{"recommendations": ["svelte.svelte-vscode"]}`); + createFile( + path.join(projectRoot, ".vscode", "extensions.json"), + `{"recommendations": ["svelte.svelte-vscode"]}`, + ); } function deleteThisScript() { - fs.unlinkSync(path.join(__filename)); - - // Check for Mac's DS_store file, and if it's the only one left remove it - const remainingFiles = fs.readdirSync(path.join(__dirname)); - if (remainingFiles.length === 1 && remainingFiles[0] === '.DS_store') { - fs.unlinkSync(path.join(__dirname, '.DS_store')); - } - - // Check if the scripts folder is empty - if (fs.readdirSync(path.join(__dirname)).length === 0) { - // Remove the scripts folder - fs.rmdirSync(path.join(__dirname)); - } + fs.unlinkSync(path.join(__filename)); + + // Check for Mac's DS_store file, and if it's the only one left remove it + const remainingFiles = fs.readdirSync(path.join(__dirname)); + if (remainingFiles.length === 1 && remainingFiles[0] === ".DS_store") { + fs.unlinkSync(path.join(__dirname, ".DS_store")); + } + + // Check if the scripts folder is empty + if (fs.readdirSync(path.join(__dirname)).length === 0) { + // Remove the scripts folder + fs.rmdirSync(path.join(__dirname)); + } } -console.log(`Adding TypeScript with ${isRollup ? "Rollup" : "webpack" }...`); +console.log(`Adding TypeScript with ${isRollup ? "Rollup" : "webpack"}...`); addDepsToPackageJson(); -changeJsExtensionToTs(path.join(projectRoot, 'src')); +changeJsExtensionToTs(path.join(projectRoot, "src")); updateSvelteFiles(); if (isRollup) { - updateRollupConfig(); + updateRollupConfig(); } else { - updateWebpackConfig(); + updateWebpackConfig(); } updateServiceWorker(); @@ -293,13 +342,13 @@ configureVsCode(); // Delete this script, but not during testing if (!argv[2]) { - deleteThisScript(); + deleteThisScript(); } -console.log('Converted to TypeScript.'); +console.log("Converted to TypeScript."); -if (fs.existsSync(path.join(projectRoot, 'node_modules'))) { - console.log(` +if (fs.existsSync(path.join(projectRoot, "node_modules"))) { + console.log(` Next: 1. run 'npm install' again to install TypeScript dependencies 2. 
run 'npm run build' for the @sapper imports in your project to work diff --git a/docs/src/ambient.d.ts b/docs/src/ambient.d.ts index ec71cae9..d0023589 100644 --- a/docs/src/ambient.d.ts +++ b/docs/src/ambient.d.ts @@ -9,31 +9,31 @@ ``` */ declare module "*.gif" { - const value: string; - export = value; + const value: string; + export = value; } declare module "*.jpg" { - const value: string; - export = value; + const value: string; + export = value; } declare module "*.jpeg" { - const value: string; - export = value; + const value: string; + export = value; } declare module "*.png" { - const value: string; - export = value; + const value: string; + export = value; } declare module "*.svg" { - const value: string; - export = value; + const value: string; + export = value; } declare module "*.webp" { - const value: string; - export = value; + const value: string; + export = value; } diff --git a/docs/src/client.js b/docs/src/client.js index cec91725..daa9ccbd 100644 --- a/docs/src/client.js +++ b/docs/src/client.js @@ -1,5 +1,5 @@ -import * as sapper from '@sapper/app'; +import * as sapper from "@sapper/app"; sapper.start({ - target: document.querySelector('#sapper') -}); \ No newline at end of file + target: document.querySelector("#sapper"), +}); diff --git a/docs/src/routes/blog/[slug].json.js b/docs/src/routes/blog/[slug].json.js index 176890d8..63cd99aa 100644 --- a/docs/src/routes/blog/[slug].json.js +++ b/docs/src/routes/blog/[slug].json.js @@ -1,28 +1,28 @@ -import posts from './_posts.js'; +import posts from "./_posts.js"; const lookup = new Map(); -posts.forEach(post => { - lookup.set(post.slug, JSON.stringify(post)); +posts.forEach((post) => { + lookup.set(post.slug, JSON.stringify(post)); }); export function get(req, res, next) { - // the `slug` parameter is available because - // this file is called [slug].json.js - const { slug } = req.params; + // the `slug` parameter is available because + // this file is called [slug].json.js + const { slug } = req.params; - if (lookup.has(slug)) { - res.writeHead(200, { - 'Content-Type': 'application/json' - }); + if (lookup.has(slug)) { + res.writeHead(200, { + "Content-Type": "application/json", + }); - res.end(lookup.get(slug)); - } else { - res.writeHead(404, { - 'Content-Type': 'application/json' - }); + res.end(lookup.get(slug)); + } else { + res.writeHead(404, { + "Content-Type": "application/json", + }); - res.end(JSON.stringify({ - message: `Not found` - })); - } + res.end(JSON.stringify({ + message: `Not found`, + })); + } } diff --git a/docs/src/routes/blog/_posts.js b/docs/src/routes/blog/_posts.js index 7791a21e..9d9f74b6 100644 --- a/docs/src/routes/blog/_posts.js +++ b/docs/src/routes/blog/_posts.js @@ -8,10 +8,10 @@ // underscore tells Sapper not to do that. const posts = [ - { - title: 'What is Sapper?', - slug: 'what-is-sapper', - html: ` + { + title: "What is Sapper?", + slug: "what-is-sapper", + html: `

First, you have to know what Svelte is. Svelte is a UI framework with a bold new idea: rather than providing a library that you write code with (like React or Vue, for example), it's a compiler that turns your components into highly optimized vanilla JavaScript. If you haven't already read the introductory blog post, you should!

Sapper is a Next.js-style framework (more on that here) built around Svelte. It makes it embarrassingly easy to create extremely high performance web apps. Out of the box, you get:

@@ -24,13 +24,13 @@ const posts = [

It's implemented as Express middleware. Everything is set up and waiting for you to get started, but you keep complete control over the server, service worker, webpack config and everything else, so it's as flexible as you need it to be.

- ` - }, + `, + }, - { - title: 'How to use Sapper', - slug: 'how-to-use-sapper', - html: ` + { + title: "How to use Sapper", + slug: "how-to-use-sapper", + html: `

Step one

Create a new project, using degit:

@@ -48,23 +48,23 @@ const posts = [

Step four

Resist overdone joke formats.

- ` - }, + `, + }, - { - title: 'Why the name?', - slug: 'why-the-name', - html: ` + { + title: "Why the name?", + slug: "why-the-name", + html: `

In war, the soldiers who build bridges, repair roads, clear minefields and conduct demolitions β€” all under combat conditions β€” are known as sappers.

For web developers, the stakes are generally lower than those for combat engineers. But we face our own hostile environment: underpowered devices, poor network connections, and the complexity inherent in front-end engineering. Sapper, which is short for Svelte app maker, is your courageous and dutiful ally.

- ` - }, + `, + }, - { - title: 'How is Sapper different from Next.js?', - slug: 'how-is-sapper-different-from-next', - html: ` + { + title: "How is Sapper different from Next.js?", + slug: "how-is-sapper-different-from-next", + html: `

Next.js is a React framework from Vercel, and is the inspiration for Sapper. There are a few notable differences, however:

    @@ -73,20 +73,20 @@ const posts = [
  • As well as pages (Svelte components, which render on server or client), you can create server routes in your routes directory. These are just .js files that export functions corresponding to HTTP methods, and receive Express request and response objects as arguments. This makes it very easy to, for example, add a JSON API such as the one powering this very page
  • Links are just <a> elements, rather than framework-specific <Link> components. That means, for example, that this link right here, despite being inside a blob of HTML, works with the router as you'd expect.
- ` - }, + `, + }, - { - title: 'How can I get involved?', - slug: 'how-can-i-get-involved', - html: ` + { + title: "How can I get involved?", + slug: "how-can-i-get-involved", + html: `

We're so glad you asked! Come on over to the Svelte and Sapper repos, and join us in the Discord chatroom. Everyone is welcome, especially you!

- ` - } + `, + }, ]; -posts.forEach(post => { - post.html = post.html.replace(/^\t{3}/gm, ''); +posts.forEach((post) => { + post.html = post.html.replace(/^\t{3}/gm, ""); }); export default posts; diff --git a/docs/src/routes/blog/index.json.js b/docs/src/routes/blog/index.json.js index bfd9389a..f38025af 100644 --- a/docs/src/routes/blog/index.json.js +++ b/docs/src/routes/blog/index.json.js @@ -1,16 +1,16 @@ -import posts from './_posts.js'; +import posts from "./_posts.js"; -const contents = JSON.stringify(posts.map(post => { - return { - title: post.title, - slug: post.slug - }; +const contents = JSON.stringify(posts.map((post) => { + return { + title: post.title, + slug: post.slug, + }; })); export function get(req, res) { - res.writeHead(200, { - 'Content-Type': 'application/json' - }); + res.writeHead(200, { + "Content-Type": "application/json", + }); - res.end(contents); -} \ No newline at end of file + res.end(contents); +} diff --git a/docs/src/server.js b/docs/src/server.js index c77f593b..7ce02a4f 100644 --- a/docs/src/server.js +++ b/docs/src/server.js @@ -1,17 +1,17 @@ -import sirv from 'sirv'; -import polka from 'polka'; -import compression from 'compression'; -import * as sapper from '@sapper/server'; +import sirv from "sirv"; +import polka from "polka"; +import compression from "compression"; +import * as sapper from "@sapper/server"; const { PORT, NODE_ENV } = process.env; -const dev = NODE_ENV === 'development'; +const dev = NODE_ENV === "development"; polka() // You can also use Express - .use( - compression({ threshold: 0 }), - sirv('static', { dev }), - sapper.middleware() - ) - .listen(PORT, err => { - if (err) console.log('error', err); - }); + .use( + compression({ threshold: 0 }), + sirv("static", { dev }), + sapper.middleware(), + ) + .listen(PORT, (err) => { + if (err) console.log("error", err); + }); diff --git a/docs/src/service-worker.js b/docs/src/service-worker.js index 02ab1d2d..da7f1bf6 100644 --- a/docs/src/service-worker.js +++ b/docs/src/service-worker.js @@ -1,4 +1,4 @@ -import { timestamp, files, shell } from '@sapper/service-worker'; +import { files, shell, timestamp } from "@sapper/service-worker"; const ASSETS = `cache${timestamp}`; @@ -7,80 +7,84 @@ const ASSETS = `cache${timestamp}`; const to_cache = shell.concat(files); const staticAssets = new Set(to_cache); -self.addEventListener('install', event => { - event.waitUntil( - caches - .open(ASSETS) - .then(cache => cache.addAll(to_cache)) - .then(() => { - self.skipWaiting(); - }) - ); +self.addEventListener("install", (event) => { + event.waitUntil( + caches + .open(ASSETS) + .then((cache) => cache.addAll(to_cache)) + .then(() => { + self.skipWaiting(); + }), + ); }); -self.addEventListener('activate', event => { - event.waitUntil( - caches.keys().then(async keys => { - // delete old caches - for (const key of keys) { - if (key !== ASSETS) await caches.delete(key); - } +self.addEventListener("activate", (event) => { + event.waitUntil( + caches.keys().then(async (keys) => { + // delete old caches + for (const key of keys) { + if (key !== ASSETS) await caches.delete(key); + } - self.clients.claim(); - }) - ); + self.clients.claim(); + }), + ); }); - /** - * Fetch the asset from the network and store it in the cache. + * Fetch the asset from the network and store it in the cache. * Fall back to the cache if the user is offline. 
*/ async function fetchAndCache(request) { - const cache = await caches.open(`offline${timestamp}`) + const cache = await caches.open(`offline${timestamp}`); - try { - const response = await fetch(request); - cache.put(request, response.clone()); - return response; - } catch (err) { - const response = await cache.match(request); - if (response) return response; + try { + const response = await fetch(request); + cache.put(request, response.clone()); + return response; + } catch (err) { + const response = await cache.match(request); + if (response) return response; - throw err; - } + throw err; + } } -self.addEventListener('fetch', event => { - if (event.request.method !== 'GET' || event.request.headers.has('range')) return; +self.addEventListener("fetch", (event) => { + if (event.request.method !== "GET" || event.request.headers.has("range")) { + return; + } - const url = new URL(event.request.url); + const url = new URL(event.request.url); - // don't try to handle e.g. data: URIs - const isHttp = url.protocol.startsWith('http'); - const isDevServerRequest = url.hostname === self.location.hostname && url.port !== self.location.port; - const isStaticAsset = url.host === self.location.host && staticAssets.has(url.pathname); - const skipBecauseUncached = event.request.cache === 'only-if-cached' && !isStaticAsset; + // don't try to handle e.g. data: URIs + const isHttp = url.protocol.startsWith("http"); + const isDevServerRequest = url.hostname === self.location.hostname && + url.port !== self.location.port; + const isStaticAsset = url.host === self.location.host && + staticAssets.has(url.pathname); + const skipBecauseUncached = event.request.cache === "only-if-cached" && + !isStaticAsset; - if (isHttp && !isDevServerRequest && !skipBecauseUncached) { - event.respondWith( - (async () => { - // always serve static files and bundler-generated assets from cache. - // if your application has other URLs with data that will never change, - // set this variable to true for them and they will only be fetched once. - const cachedAsset = isStaticAsset && await caches.match(event.request); + if (isHttp && !isDevServerRequest && !skipBecauseUncached) { + event.respondWith( + (async () => { + // always serve static files and bundler-generated assets from cache. + // if your application has other URLs with data that will never change, + // set this variable to true for them and they will only be fetched once. + const cachedAsset = isStaticAsset && await caches.match(event.request); - // for pages, you might want to serve a shell `service-worker-index.html` file, - // which Sapper has generated for you. It's not right for every - // app, but if it's right for yours then uncomment this section - /* + // for pages, you might want to serve a shell `service-worker-index.html` file, + // which Sapper has generated for you. 
It's not right for every + // app, but if it's right for yours then uncomment this section + /* if (!cachedAsset && url.origin === self.origin && routes.find(route => route.pattern.test(url.pathname))) { return caches.match('/service-worker-index.html'); } */ - return cachedAsset || fetchAndCache(event.request); - })() - ); - } + return cachedAsset || fetchAndCache(event.request); + })(), + ); + } }); diff --git a/docs/static/manifest.json index 78ad114f..bb908fa1 100644 --- a/docs/static/manifest.json +++ b/docs/static/manifest.json @@ -1,20 +1,20 @@ { - "background_color": "#ffffff", - "theme_color": "#333333", - "name": "TODO", - "short_name": "TODO", - "display": "minimal-ui", - "start_url": "/", - "icons": [ - { - "src": "logo-192.png", - "sizes": "192x192", - "type": "image/png" - }, - { - "src": "logo-512.png", - "sizes": "512x512", - "type": "image/png" - } - ] + "background_color": "#ffffff", + "theme_color": "#333333", + "name": "TODO", + "short_name": "TODO", + "display": "minimal-ui", + "start_url": "/", + "icons": [ + { + "src": "logo-192.png", + "sizes": "192x192", + "type": "image/png" + }, + { + "src": "logo-512.png", + "sizes": "512x512", + "type": "image/png" + } + ] } diff --git a/images/couchdb/README.md index 9dc50e5b..efb49760 100644 --- a/images/couchdb/README.md +++ b/images/couchdb/README.md @@ -1,18 +1,20 @@ # hyper63 adapter for couchdb -This hyper63 image uses the `app-express` module and the `adapter-couchdb` module to instanciate an instance of hyper63 with only the data port. This -image secures the hyper63 instance using jwt middleware that supports a -shared secret. +This hyper63 image uses the `app-express` module and the `adapter-couchdb` +module to instantiate an instance of hyper63 with only the data port. This image +secures the hyper63 instance using jwt middleware that supports a shared secret. ## Setup This service depends on two environment variables: - SECRET {string} - this is the secret used to verify a signed JWT token -- COUCHDB_SERVER {url} - this is a full url used to connect to your couchdb server, it should have a key and secret which gives hyper63 full control of your - server. ex. 'https://[key]:[secret]@[host]:[port]' +- COUCHDB_SERVER {url} - this is a full url used to connect to your couchdb + server; it should have a key and secret which gives hyper63 full control of + your server. ex. 'https://[key]:[secret]@[host]:[port]' -> Need to install a couchdb server?
see +> [Create CouchDB Server](https://blog.hyper63.com/setup-couchdb) ## Run locally diff --git a/images/couchdb/hyper63.config.js b/images/couchdb/hyper63.config.js index 9f28eba5..e02dcc47 100644 --- a/images/couchdb/hyper63.config.js +++ b/images/couchdb/hyper63.config.js @@ -1,14 +1,14 @@ // app -const express = require('@hyper63/app-express') -const jwt = require('./middleware/jwt') +const express = require("@hyper63/app-express"); +const jwt = require("./middleware/jwt"); // adapters -const couchdb = require('@hyper63/adapter-couchdb') +const couchdb = require("@hyper63/adapter-couchdb"); module.exports = { app: express, adapters: [ - { port: 'data', plugins: [couchdb({ url: process.env.COUCHDB_SERVER })] } + { port: "data", plugins: [couchdb({ url: process.env.COUCHDB_SERVER })] }, ], - middleware: [jwt] -} + middleware: [jwt], +}; diff --git a/images/couchdb/index.js b/images/couchdb/index.js index fff98c20..014c5ee7 100644 --- a/images/couchdb/index.js +++ b/images/couchdb/index.js @@ -1,2 +1,2 @@ -require('dotenv').config() -require('@hyper63/core')() +require("dotenv").config(); +require("@hyper63/core")(); diff --git a/images/couchdb/middleware/jwt.js b/images/couchdb/middleware/jwt.js index 008cfa13..351a4d3d 100644 --- a/images/couchdb/middleware/jwt.js +++ b/images/couchdb/middleware/jwt.js @@ -1,9 +1,12 @@ -const jwt = require('express-jwt') +const jwt = require("express-jwt"); module.exports = (app) => { // only secure data endpoint - app.use('/data', jwt({ - secret: process.env.SECRET, - algorithms: ['HS256'] - })) - return app -} + app.use( + "/data", + jwt({ + secret: process.env.SECRET, + algorithms: ["HS256"], + }), + ); + return app; +}; diff --git a/images/dev/hyper63.config.js b/images/dev/hyper63.config.js index 5361da4c..69cb4e0c 100644 --- a/images/dev/hyper63.config.js +++ b/images/dev/hyper63.config.js @@ -1,17 +1,17 @@ -const memory = require('@hyper63/adapter-memory') -const pouchdb = require('@hyper63/adapter-pouchdb') -const jwt = require('./middleware/jwt') -const express = require('@hyper63/app-express') -const minisearch = require('@hyper63/adapter-minisearch') -const zmq = require('@hyper63/adapter-zmq') +const memory = require("@hyper63/adapter-memory"); +const pouchdb = require("@hyper63/adapter-pouchdb"); +const jwt = require("./middleware/jwt"); +const express = require("@hyper63/app-express"); +const minisearch = require("@hyper63/adapter-minisearch"); +const zmq = require("@hyper63/adapter-zmq"); module.exports = { app: express, adapters: [ - { port: 'cache', plugins: [memory()] }, - { port: 'data', plugins: [pouchdb({ dir: process.env.DATA })] }, - { port: 'search', plugins: [minisearch()] }, - { port: 'queue', plugins: [zmq('7373')] } + { port: "cache", plugins: [memory()] }, + { port: "data", plugins: [pouchdb({ dir: process.env.DATA })] }, + { port: "search", plugins: [minisearch()] }, + { port: "queue", plugins: [zmq("7373")] }, ], - middleware: [jwt] -} + middleware: [jwt], +}; diff --git a/images/dev/index.js b/images/dev/index.js index 1ce016cd..e7259e83 100644 --- a/images/dev/index.js +++ b/images/dev/index.js @@ -1,4 +1,4 @@ // load config and supply it to core -const path = require('path') -const config = require(path.join(__dirname, '/hyper63.config.js')) -require('@hyper63/core')(config) +const path = require("path"); +const config = require(path.join(__dirname, "/hyper63.config.js")); +require("@hyper63/core")(config); diff --git a/images/dev/middleware/jwt.js b/images/dev/middleware/jwt.js index 21e8e4e2..7d391062 100644 --- 
a/images/dev/middleware/jwt.js +++ b/images/dev/middleware/jwt.js @@ -1,8 +1,8 @@ -const jwt = require('express-jwt') +const jwt = require("express-jwt"); module.exports = (app) => { app.use(jwt({ secret: process.env.SECRET, - algorithms: ['HS256'] - })) - return app -} + algorithms: ["HS256"], + })); + return app; +}; diff --git a/images/dev/nodemon.json b/images/dev/nodemon.json index 117e476c..905036c4 100644 --- a/images/dev/nodemon.json +++ b/images/dev/nodemon.json @@ -6,4 +6,4 @@ "../../packages/adapter-minisearch", "." ] -} \ No newline at end of file +} diff --git a/images/graphql/hyper63.config.js b/images/graphql/hyper63.config.js index 063d3677..37cc374c 100644 --- a/images/graphql/hyper63.config.js +++ b/images/graphql/hyper63.config.js @@ -1,26 +1,26 @@ // app -const graphql = require('@hyper63/app-graphql') +const graphql = require("@hyper63/app-graphql"); // adapters -const memory = require('@hyper63/adapter-memory') -const pouchdb = require('@hyper63/adapter-pouchdb') -const fs = require('@hyper63/adapter-fs') -const hooks = require('@hyper63/adapter-hooks') +const memory = require("@hyper63/adapter-memory"); +const pouchdb = require("@hyper63/adapter-pouchdb"); +const fs = require("@hyper63/adapter-fs"); +const hooks = require("@hyper63/adapter-hooks"); module.exports = { app: graphql(), adapters: [ - { port: 'cache', plugins: [memory()] }, - { port: 'data', plugins: [pouchdb({ dir: process.env.DATA })] }, - { port: 'storage', plugins: [fs({ dir: process.env.DATA })] }, + { port: "cache", plugins: [memory()] }, + { port: "data", plugins: [pouchdb({ dir: process.env.DATA })] }, + { port: "storage", plugins: [fs({ dir: process.env.DATA })] }, { - port: 'hooks', + port: "hooks", plugins: [ hooks([{ - matcher: '*', - target: 'http://127.0.0.1:9200/log/_doc' - }]) - ] - } - ] -} + matcher: "*", + target: "http://127.0.0.1:9200/log/_doc", + }]), + ], + }, + ], +}; diff --git a/images/graphql/index.js b/images/graphql/index.js index b6bbed21..014c5ee7 100644 --- a/images/graphql/index.js +++ b/images/graphql/index.js @@ -1,3 +1,2 @@ - -require('dotenv').config() -require('@hyper63/core')() +require("dotenv").config(); +require("@hyper63/core")(); diff --git a/images/micro/README.md b/images/micro/README.md index 8cfc93ca..f635a630 100644 --- a/images/micro/README.md +++ b/images/micro/README.md @@ -2,12 +2,12 @@ ## hyper63 micro image -This image uses the following services in a docker compose +This image uses the following services in a docker compose -* couchdb - single-node -* redis -* minio -* elasticsearch +- couchdb - single-node +- redis +- minio +- elasticsearch ## Start Services @@ -28,4 +28,3 @@ yarn start yarn yarn dev ``` - diff --git a/images/micro/hyper63.config.js b/images/micro/hyper63.config.js index 24e38d9c..d1571069 100644 --- a/images/micro/hyper63.config.js +++ b/images/micro/hyper63.config.js @@ -1,17 +1,23 @@ -import express from '@hyper63/app-express' -import couchdb from '@hyper63/adapter-couchdb' -import redis from '@hyper63/adapter-redis' -import minio from '@hyper63/adapter-minio' -import es from '@hyper63/adapter-elasticsearch' -import bq from '@hyper63/adapter-beequeue' +import express from "@hyper63/app-express"; +import couchdb from "@hyper63/adapter-couchdb"; +import redis from "@hyper63/adapter-redis"; +import minio from "@hyper63/adapter-minio"; +import es from "@hyper63/adapter-elasticsearch"; +import bq from "@hyper63/adapter-beequeue"; export default { app: express, adapters: [ - { port: 'data', plugins: [couchdb({ url: 
'http://admin:password@0.0.0.0:5984' })] }, - { port: 'cache', plugins: [redis({ url: 'redis://0.0.0.0:6379' })] }, - { port: 'storage', plugins: [minio({ url: 'http://admin:password@0.0.0.0:9000' })] }, - { port: 'search', plugins: [es({ url: 'http://0.0.0.0:9200' })] }, - { port: 'queue', plugins: [bq.default({ redis: 'redis://0.0.0.0:6379' })] } - ] -} + { + port: "data", + plugins: [couchdb({ url: "http://admin:password@0.0.0.0:5984" })], + }, + { port: "cache", plugins: [redis({ url: "redis://0.0.0.0:6379" })] }, + { + port: "storage", + plugins: [minio({ url: "http://admin:password@0.0.0.0:9000" })], + }, + { port: "search", plugins: [es({ url: "http://0.0.0.0:9200" })] }, + { port: "queue", plugins: [bq.default({ redis: "redis://0.0.0.0:6379" })] }, + ], +}; diff --git a/images/micro/index.js b/images/micro/index.js index cdcb664f..d8e254dd 100644 --- a/images/micro/index.js +++ b/images/micro/index.js @@ -1,4 +1,4 @@ -import config from './hyper63.config.js' -import hyper63 from '@hyper63/core' +import config from "./hyper63.config.js"; +import hyper63 from "@hyper63/core"; -hyper63(config) +hyper63(config); diff --git a/images/nano/hyper63.config.js b/images/nano/hyper63.config.js index 15f646e3..90c453bc 100644 --- a/images/nano/hyper63.config.js +++ b/images/nano/hyper63.config.js @@ -1,33 +1,33 @@ // app -const express = require('@hyper63/app-express') +const express = require("@hyper63/app-express"); // adapters -const memory = require('@hyper63/adapter-memory') -const pouchdb = require('@hyper63/adapter-pouchdb') -const fs = require('@hyper63/adapter-fs') -const minisearch = require('@hyper63/adapter-minisearch') -const hooks = require('@hyper63/adapter-hooks') -const q = require('@hyper63/adapter-zmq') +const memory = require("@hyper63/adapter-memory"); +const pouchdb = require("@hyper63/adapter-pouchdb"); +const fs = require("@hyper63/adapter-fs"); +const minisearch = require("@hyper63/adapter-minisearch"); +const hooks = require("@hyper63/adapter-hooks"); +const q = require("@hyper63/adapter-zmq"); module.exports = { app: express, adapters: [ - { port: 'cache', plugins: [memory()] }, - { port: 'data', plugins: [pouchdb({ dir: process.env.DATA })] }, - { port: 'storage', plugins: [fs({ dir: process.env.DATA })] }, - { port: 'search', plugins: [minisearch()] }, - { port: 'queue', plugins: [q('7373')] }, + { port: "cache", plugins: [memory()] }, + { port: "data", plugins: [pouchdb({ dir: process.env.DATA })] }, + { port: "storage", plugins: [fs({ dir: process.env.DATA })] }, + { port: "search", plugins: [minisearch()] }, + { port: "queue", plugins: [q("7373")] }, { - port: 'hooks', + port: "hooks", plugins: [ hooks([{ - matcher: '*', - target: 'http://127.0.0.1:9200/log/_doc' - }]) - ] - } + matcher: "*", + target: "http://127.0.0.1:9200/log/_doc", + }]), + ], + }, ], logs: { - level: 'INFO' // ALL, TRACE, DEBUG, INFO, WARN, ERROR, FATAL - } -} + level: "INFO", // ALL, TRACE, DEBUG, INFO, WARN, ERROR, FATAL + }, +}; diff --git a/images/nano/index.js b/images/nano/index.js index fff98c20..014c5ee7 100644 --- a/images/nano/index.js +++ b/images/nano/index.js @@ -1,2 +1,2 @@ -require('dotenv').config() -require('@hyper63/core')() +require("dotenv").config(); +require("@hyper63/core")(); diff --git a/images/test/README.md b/images/test/README.md index 70c77460..9c7b1c5b 100644 --- a/images/test/README.md +++ b/images/test/README.md @@ -1,6 +1,6 @@ # README -hyper test service creates an in memory hyper instance, this instance provides +hyper test service creates an in memory 
hyper instance; this instance provides an in memory data service that can be used for testing purposes. ## Run Local @@ -8,4 +8,4 @@ an in memory data service that can be used for testing purposes. ``` npm install -g @hyper.io/test npx @hyper.io/test -``` \ No newline at end of file +``` diff --git a/images/test/cli.js index 223df7c0..06ecb202 100755 --- a/images/test/cli.js +++ b/images/test/cli.js @@ -1,6 +1,6 @@ #!/usr/bin/env node -const path = require('path') +const path = require("path"); -const config = require(path.join(__dirname, '/hyper63.config.js')) -require('@hyper63/core')(config) +const config = require(path.join(__dirname, "/hyper63.config.js")); +require("@hyper63/core")(config); diff --git a/images/test/hyper63.config.js index f0222279..00a5cea3 100644 --- a/images/test/hyper63.config.js +++ b/images/test/hyper63.config.js @@ -1,7 +1,7 @@ -const pouchdb = require('@hyper63/adapter-pouchdb') -const express = require('@hyper63/app-express') +const pouchdb = require("@hyper63/adapter-pouchdb"); +const express = require("@hyper63/app-express"); module.exports = { app: express, - adapters: [{ port: 'data', plugins: [pouchdb()] }] -} + adapters: [{ port: "data", plugins: [pouchdb()] }], +}; diff --git a/images/test/index.js index 7baa8c59..827ec148 100644 --- a/images/test/index.js +++ b/images/test/index.js @@ -1,5 +1,5 @@ -const path = require('path') +const path = require("path"); // load config and supply it to core -const config = require(path.join(__dirname, '/hyper63.config.js')) -require('@hyper63/core')(config) +const config = require(path.join(__dirname, "/hyper63.config.js")); +require("@hyper63/core")(config); diff --git a/inception.md index bdcea94c..13d64df8 100644 --- a/inception.md +++ b/inception.md @@ -5,20 +5,20 @@ hyper63 is a service gateway that provides -The mission of hyper63 is to take a different approach to software, this approach -focuses on shortening the gap between entry level developer to senior level. hyper63 -believes this can occur by reducing the surface area of application development and -generalizing common services. +The mission of hyper63 is to take a different approach to software; this +approach focuses on shortening the gap between entry-level and senior-level +developers. hyper63 believes this can occur by reducing the surface area of +application development and generalizing common services.
Elevator Pitch -atlas is a backend as a service platform that generalizes common services using a -ports and adapter model. Developers can swap out different implementations of common -services for their preferred backend stack. `atlas` is a standalone container -specifically built for micro-services. +atlas is a backend as a service platform that generalizes common services using +a ports and adapters model. Developers can swap out different implementations of +common services for their preferred backend stack. `atlas` is a standalone +container specifically built for micro-services. atlas is the fastest way to get going on your micro-service; it gives you common services for data, cache, files and notifications. @@ -59,7 +59,9 @@ What attributes do we need to make sure we account for to ship! - Security -Must address access, the purpose of this micro-service is to run within a secured environment like a kubernetes environment or a vpc. API Keys will be need to be managed for dynamic runtime environments. +Must address access; the purpose of this micro-service is to run within a +secured environment like a kubernetes environment or a vpc. API Keys will +need to be managed for dynamic runtime environments. - Compliance @@ -67,7 +69,8 @@ N/A for micro - Support -Opensource support will be managed by a community board. Professional support services will be offered on an annual license bases. +Open source support will be managed by a community board. Professional support +services will be offered on an annual license basis.
@@ -107,7 +110,9 @@ From design to delivery the rough guess is we have a 3 month project scope:
Be clear on what is going to give -In order to ship a viable project we must be clear on what is going to give, often times you can have all the things, you must make hard decisions on what can and cannot be accomplished within your constraints. +In order to ship a viable project we must be clear on what is going to give; +oftentimes you cannot have all the things, so you must make hard decisions on +what can and cannot be accomplished within your constraints. - Scope - flexible, but focus on keeping small - Quality - all business rule code needs to be tested and reviewed @@ -122,7 +127,8 @@ In order to ship a viable project we must be clear on what is going to give, oft
Show what it is going to take -This is an opensource project and it will take all of the following competencies: +This is an open source project and it will take all of the following +competencies: - Project manager - UX Designer @@ -131,6 +137,7 @@ This is an opensource project and it will take all of the following competencies - Customer - Tester -These competencies can happen over time and input and suggestions can occur via change requests and RFCs (Request for Change). +These competencies can happen over time, and input and suggestions can occur via +change requests and RFCs (Request for Change).
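The ports and adapters pitch in inception.md above is exactly what every `images/*/hyper63.config.js` in this diff instantiates. As a minimal sketch, composed only from modules that already appear in these diffs (`@hyper63/app-express`, `@hyper63/adapter-memory`, `@hyper63/adapter-pouchdb`), swapping a backend means swapping the plugin bound to a port:

```js
// hyper63.config.js — minimal sketch of the ports-and-adapters model described
// in inception.md; module names are taken from the images/* configs in this diff
const express = require("@hyper63/app-express");
const memory = require("@hyper63/adapter-memory");
const pouchdb = require("@hyper63/adapter-pouchdb");

module.exports = {
  app: express, // the delivery mechanism (HTTP here; images/graphql uses GraphQL)
  adapters: [
    // each named port gets a pluggable implementation; swapping memory() for a
    // redis adapter (as images/micro does) requires no application-code changes
    { port: "cache", plugins: [memory()] },
    { port: "data", plugins: [pouchdb({ dir: process.env.DATA })] },
  ],
};
```

This is the same shape as the `images/nano` and `images/dev` configs above, just trimmed to two ports.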
diff --git a/launcher/cli.js b/launcher/cli.js index 84541c3d..d66391ba 100755 --- a/launcher/cli.js +++ b/launcher/cli.js @@ -1,10 +1,10 @@ #!/usr/bin/env node -const start = require('./index') -const stop = require('./stop') -const args = process.argv.splice(2) +const start = require("./index"); +const stop = require("./stop"); +const args = process.argv.splice(2); -if (args[0] === 'stop') { - stop() +if (args[0] === "stop") { + stop(); } else { - start() + start(); } diff --git a/launcher/index.js b/launcher/index.js index 17686482..9b864baf 100644 --- a/launcher/index.js +++ b/launcher/index.js @@ -1,15 +1,15 @@ -const sh = require('shelljs') -const fs = require('fs') +const sh = require("shelljs"); +const fs = require("fs"); module.exports = () => { - if (!sh.which('docker-compose')) { + if (!sh.which("docker-compose")) { sh.echo( - 'Sorry, this script requires docker-compose which can be installed from docker desktop' - ) - sh.exit(1) + "Sorry, this script requires docker-compose which can be installed from docker desktop", + ); + sh.exit(1); } - if (!sh.test('-e', '~/.hyper63')) { - sh.mkdir('~/.hyper63') + if (!sh.test("-e", "~/.hyper63")) { + sh.mkdir("~/.hyper63"); } const config = `version: "3.8" @@ -50,12 +50,12 @@ services: - "./data:/usr/share/elasticsearch/data" environment: discovery.type: single-node -` +`; - fs.writeFileSync(`${process.env.HOME}/.hyper63/docker-compose.yml`, config) + fs.writeFileSync(`${process.env.HOME}/.hyper63/docker-compose.yml`, config); - sh.cd('~/.hyper63') - sh.exec('docker-compose up -d') + sh.cd("~/.hyper63"); + sh.exec("docker-compose up -d"); setTimeout(() => { sh.exec( @@ -63,11 +63,11 @@ services: { silent: true }, (code) => { if (code === 0) { - console.log('Successfully setup database') + console.log("Successfully setup database"); } else { - console.log('ERROR! Could not setup database, try to re-run script') + console.log("ERROR! Could not setup database, try to re-run script"); } - } - ) - }, 5000) -} + }, + ); + }, 5000); +}; diff --git a/launcher/stop.js b/launcher/stop.js index 888ac620..ffbb5f7c 100644 --- a/launcher/stop.js +++ b/launcher/stop.js @@ -1,6 +1,6 @@ -const sh = require('shelljs') +const sh = require("shelljs"); module.exports = () => { - sh.cd('~/.hyper63') - sh.exec('docker-compose down') -} + sh.cd("~/.hyper63"); + sh.exec("docker-compose down"); +}; diff --git a/lint-staged.config.js b/lint-staged.config.js index 9878134a..fc7ceba4 100644 --- a/lint-staged.config.js +++ b/lint-staged.config.js @@ -1,5 +1,4 @@ - module.exports = { - '*.{js,ts,jsx,tsx}': ['eslint --cache --fix'], - 'package.json': ['sort-package-json'] -} + "packages/**/*.{js,ts,jsx,tsx}": ["deno fmt", "deno lint"], + "package.json": ["sort-package-json"], +}; diff --git a/meetings/2020-09-23.md b/meetings/2020-09-23.md index ba10a2d3..95c8e5ad 100644 --- a/meetings/2020-09-23.md +++ b/meetings/2020-09-23.md @@ -8,18 +8,26 @@ scribe: Tom Wilson ## Welcome - ## Compliance Statement -It is important to recognize that this will be an inclusive project and community, welcoming all contributors to the community regardless of differentiation. Please read the code of conduct carefully. +It is important to recognize that this will be an inclusive project and +community, welcoming all contributors to the community regardless of +differentiation. Please read the code of conduct carefully. -All members of this group agree to the abide by the content in the code of conduct and full understand the consequences of not following such code of conduct. 
Failure to follow the code of conduct will be grounds for dismissal from the project. +All members of this group agree to abide by the content in the code of +conduct and fully understand the consequences of not following such code of +conduct. Failure to follow the code of conduct will be grounds for dismissal +from the project. -All members should understand the license of the project and carefully include third party dependencies that work with the license and bring to the TSC's attention as soon as discovery of a dependency that is in conflict with the projects license. +All members should understand the license of the project and carefully include +third party dependencies that work with the license, and bring to the TSC's +attention, as soon as it is discovered, any dependency that is in conflict with +the project's license. ## Roll Call -Please state your name, technical strengths and why you are considering becoming a contributor to this project. +Please state your name, technical strengths, and why you are considering becoming +a contributor to this project. ## Approve minutes from last meeting N/A ## Status Update -* Project Name change -* Inception Document -* Visualization -* RFC (Request for Change) process -* Community Chat (https://discord.gg/HySmhJ) -* Meetings (cadence/template) +- Project Name change +- Inception Document +- Visualization +- RFC (Request for Change) process +- Community Chat (https://discord.gg/HySmhJ) +- Meetings (cadence/template) -## Outstanding Items +## Outstanding Items -* What is a good cadence for technical meetings? -* What is a good time to have them? -* What is missing from the agenda? +- What is a good cadence for technical meetings? +- What is a good time to have them? +- What is missing from the agenda? ## Actions/Next Steps -* Define workflow process -* Create CI/CD automated flow - -## Adjourn - -* Motion to adjourn? - +- Define workflow process +- Create CI/CD automated flow +## Adjourn +- Motion to adjourn? diff --git a/meetings/2020-10-06.md index 5674b864..52722a5f 100644 --- a/meetings/2020-10-06.md +++ b/meetings/2020-10-06.md @@ -8,19 +8,25 @@ scribe: Tom Wilson ## Compliance Statement -All participants of atlas project are expected to abide by our Code of Conduct, both online and during in-person events that are hosted and/or associated with atlas project. +All participants of atlas project are expected to abide by our Code of Conduct, +both online and during in-person events that are hosted and/or associated with +atlas project. ## The Pledge -In the interest of fostering an open and welcoming environment, we pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. +In the interest of fostering an open and welcoming environment, we pledge to +make participation in our project and our community a harassment-free experience +for everyone, regardless of age, body size, disability, ethnicity, gender +identity and expression, level of experience, nationality, personal appearance, +race, religion, or sexual identity and orientation.
## Roll Call -* Will -* Chris -* Ian -* Scott -* Bryan +- Will +- Chris +- Ian +- Scott +- Bryan ## Approve minutes from last meeting @@ -32,32 +38,28 @@ Current status: We have the project setup into three areas: -* api -* core -* services +- api +- core +- services -Currently, working on the cache module, this module will allow apps to create a cache store, and add, update, get, and remove json documents. +Currently working on the cache module; this module will allow apps to create a +cache store and add, update, get, and remove JSON documents. Let's walk through the implementation. -* discuss crocks and Async, Reader, and Either -* discuss the flow from API to Core to Services -* review cache design document +- discuss crocks and Async, Reader, and Either +- discuss the flow from API to Core to Services +- review cache design document ## Outstanding Items -* create an issue to validate store name -* create an issue to validate key name -* create an issue to validate result +- create an issue to validate store name +- create an issue to validate key name +- create an issue to validate result ## Actions/Next Steps -* complete first iteration of cache module -* deploy to docker hub - - -## Adjourn - - - +- complete first iteration of cache module +- deploy to docker hub +## Adjourn diff --git a/meetings/2020-11-17.md index dd576ad7..82c98d7d 100644 --- a/meetings/2020-11-17.md +++ b/meetings/2020-11-17.md @@ -1,40 +1,45 @@ # hyper63 Contributor Meeting Agenda -Mission is to dramatically improve the application development process to create future-proof applications. - -* What is hyper63? A service gateway? -* What is hyper63 to a hosted customer? A Data Management Service? -* Current Status -* As a software dev team what do I need from a Data Management Service to effectively build and maintain applications with high flow? - - * A RestClient in my language of choice - * A hosted service - * Examples - * Access to logs/stats - -* Migration Services -* Team Accounts, Multi-tenancy -* Comparison Information against other like things, heroku, vercel, etc - -Handing off projects from vendor to client in agencies (address migration strategies, between accounts) - -Think about projects and teams? Could separate with authentication, and a private express app for the service. This express app would handle JWT Secrets and get the account name and team name, then call a host service to get the +Mission is to dramatically improve the application development process to create +future-proof applications. + +- What is hyper63? A service gateway? +- What is hyper63 to a hosted customer? A Data Management Service? +- Current Status +- As a software dev team what do I need from a Data Management Service to + effectively build and maintain applications with high flow? + + - A RestClient in my language of choice + - A hosted service + - Examples + - Access to logs/stats + +- Migration Services +- Team Accounts, Multi-tenancy +- Comparison Information against other like things, heroku, vercel, etc + +Handing off projects from vendor to client in agencies (address migration +strategies, between accounts) + +Think about projects and teams? Could separate with authentication, and a +private express app for the service. This express app would handle JWT Secrets +and get the account name and team name, then call a host service to get the config information for the adapters. (have to think through this???)
--- Free Tier -* 1000 data documents -* 1000 search documents -* 100 MB of storage -* ?? Cache - +- 1000 data documents +- 1000 search documents +- 100 MB of storage +- ?? Cache ## hyper63 product idea - Data Modeling Tool -Data modeling application, allows product managers, architects, developers to create data models for an -application and generate validation schemas and other artifacts from those models. +Data modeling application that allows product managers, architects, and +developers to create data models for an application and generate validation +schemas and other artifacts from those models. > It would be an electron application using svelte and tailwindcss @@ -46,7 +51,5 @@ application and generate validation schemas and other artifacts from those model Questions for students -What was the hardest part of the project? -What did technology layer did you enjoy the most? Frontend, Backend, Data? - - +What was the hardest part of the project? Which technology layer did you +enjoy the most? Frontend, Backend, Data? diff --git a/meetings/2021-1-19.md index 765292c6..77cc299b 100644 --- a/meetings/2021-1-19.md +++ b/meetings/2021-1-19.md @@ -6,20 +6,24 @@ Agenda 1. Welcome to 2021 2. Introductions - * Your Name - * Current Status - * Why are you interested in hyper63? + +- Your Name +- Current Status +- Why are you interested in hyper63? + 3. Review/Comment on new Website 4. Review/Comment on API Docs 5. Proposal/Proof of Concept for Deno build Pros: -* Deno compiles into single binary -* Tooling built in -* Take on less dependencies -* + +- Deno compiles into single binary +- Tooling built in +- Take on fewer dependencies +- + Cons: -* Less mature eco-system -* Less drivers for services -* ??? +- Less mature ecosystem +- Fewer drivers for services +- ??? diff --git a/meetings/README.md index 7554f72e..ce7ba7ee 100644 --- a/meetings/README.md +++ b/meetings/README.md @@ -1,7 +1,10 @@ # Project Meetings -All project meeting agendas and notes will be documented here. Each agenda will be published a few days before the meeting and encourage all members to provide status updates in the agenda so that we can minimize the need for complex note taking. - -A template of an agenda is found in the `meetings` folder and each meeting document should be named in the following format. `yyyy-mm-dd.md` and placed in the meeting folder of the repository. - - +All project meeting agendas and notes will be documented here. Each agenda will +be published a few days before the meeting, and we encourage all members to provide +status updates in the agenda so that we can minimize the need for complex note +taking. +A template of an agenda is found in the `meetings` folder and each meeting +document should be named in the following format, `yyyy-mm-dd.md`, and placed in +the meeting folder of the repository.
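The multi-tenancy idea in the 2020-11-17 notes builds on the JWT flow the images already implement: `express-jwt` verifies an HS256 token against a shared `SECRET` (see `images/couchdb/middleware/jwt.js` earlier in this diff). A hedged sketch of the signing side, assuming the `jsonwebtoken` package as the issuer (it is not a dependency anywhere in this diff):

```js
// sketch only: mint a token the express-jwt middleware shown earlier would accept;
// `jsonwebtoken` is an assumed dependency — this repo only shows the verify side
const jwt = require("jsonwebtoken");

const token = jwt.sign(
  { account: "my-team" }, // hypothetical claims a hosting layer might attach
  process.env.SECRET, // the same shared secret express-jwt verifies against
  { algorithm: "HS256", expiresIn: "1h" },
);

// clients then send it on every request to the secured routes:
//   Authorization: Bearer <token>
```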
diff --git a/meetings/agenda-template.md b/meetings/agenda-template.md index 154d63a3..567a5a9d 100644 --- a/meetings/agenda-template.md +++ b/meetings/agenda-template.md @@ -18,8 +18,4 @@ scribe: Author of notes ## Actions/Next Steps -## Adjourn - - - - +## Adjourn diff --git a/package.json b/package.json index 168ce7d0..4cbd8187 100644 --- a/package.json +++ b/package.json @@ -40,20 +40,13 @@ "test:core": "tape packages/core/**/**/*_test.js", "test:integration": "tape -r esm test/**/*_test.js", "test": "run-p test:core", - "lint": "eslint \"**/*.{ts,tsx,js,jsx}\"", + "lint": "deno lint --ignore=\"node_modules/*\"", "prepare": "husky install" }, "devDependencies": { "@commitlint/cli": "^12.1.4", "@commitlint/config-conventional": "^12.1.4", - "@typescript-eslint/eslint-plugin": "^4.26.0", - "@typescript-eslint/parser": "^4.26.0", "adio": "^1.2.1", - "eslint": "^7.10.0", - "eslint-config-standard": "^16.0.3", - "eslint-plugin-import": "^2.23.4", - "eslint-plugin-node": "^11.1.0", - "eslint-plugin-promise": "^5.1.0", "esm": "^3.2.25", "husky": "^6.0.0", "lint-staged": "^11.0.0", @@ -61,4 +54,4 @@ "sort-package-json": "^1.50.0", "tape": "^5.0.1" } -} \ No newline at end of file +} diff --git a/packages/adapter-couchdb/README.md b/packages/adapter-couchdb/README.md index 0aa9ebe9..daa4480e 100644 --- a/packages/adapter-couchdb/README.md +++ b/packages/adapter-couchdb/README.md @@ -1,6 +1,10 @@ # hyper63 couchdb-adapter -This adapter connects the hyper63 service framework `data` port to the couchdb database. When using this adapter, you will need to configure three environment variables, one for the `server-admin` credentials, so that the adapter can create/delete databases, and one for the `db-admin` user so a search index can be created. And finally one for the `db-user` user to manage documents. +This adapter connects the hyper63 service framework `data` port to the couchdb +database. When using this adapter, you will need to configure three environment +variables, one for the `server-admin` credentials, so that the adapter can +create/delete databases, and one for the `db-admin` user so a search index can +be created. And finally one for the `db-user` user to manage documents. .env @@ -12,14 +16,16 @@ DATA_DB_USER=XXX_URL The value of the connection url should be in the following format: -[protocol]://[key]:[secret]@[host]:[port] +> `[protocol]://[key]:[secret]@[host]:[port]` -When a new database is created, the following roles will be added to the security document: +When a new database is created, the following roles will be added to the +security document: - db-admin - db-user -Using this adapter, you will not have any access to the \_users table or the \_replicator table +Using this adapter, you will not have any access to the \_users table or the +_replicator table ## Setup a standalone couchdb server using docker @@ -32,7 +38,7 @@ RUN echo '[couchdb]' > /opt/couchdb/etc/local.d/10-single-node.ini RUN echo 'single_node=true' >> /opt/couchdb/etc/local.d/10-single-node.ini ``` -``` sh +```sh docker build -t single-couchdb:1 . 
docker run -d -p 5984:5984 -e COUCHDB_USER=admin -e COUCHDB_PASSWORD=password --name couch single-couchdb:1 ``` diff --git a/packages/adapter-couchdb/adapter.js b/packages/adapter-couchdb/adapter.js index b75a7c93..0a9332c0 100644 --- a/packages/adapter-couchdb/adapter.js +++ b/packages/adapter-couchdb/adapter.js @@ -1,20 +1,26 @@ -const { Async } = require('crocks') -const { asyncFetch, createHeaders, handleResponse } = require('./async-fetch') +import { crocks, R } from "./deps.js"; +import { bulk } from "./bulk.js"; +const { Async } = crocks; const { - compose, omit, map, lens, prop, assoc, over, identity, merge, - pluck -} = require('ramda') -const xId = lens(prop('_id'), assoc('id')) -const bulk = require('./bulk') - -module.exports = ({ config }) => { - const headers = createHeaders(config.username, config.password) + compose, + omit, + map, + lens, + prop, + assoc, + over, + identity, + merge, + pluck, +} = R; +const xId = lens(prop("_id"), assoc("id")); +export function adapter({ config, asyncFetch, headers, handleResponse }) { const retrieveDocument = ({ db, id }) => asyncFetch(`${config.origin}/${db}/${id}`, { - headers - }).chain(handleResponse(200)) + headers, + }).chain(handleResponse(200)); return ({ // create database needs to @@ -22,85 +28,94 @@ module.exports = ({ config }) => { // and create the security document // adding the db-admin and db-user // to the database - createDatabase: (name) => asyncFetch(`${config.origin}/${name}`, { - method: 'PUT', - headers - }) - .chain(handleResponse(201)) - // create security document - .chain(() => asyncFetch(`${config.origin}/${name}/_security`, { - method: 'PUT', + createDatabase: (name) => + asyncFetch(`${config.origin}/${name}`, { + method: "PUT", headers, - body: JSON.stringify({ - admins: { - names: [], - roles: ['db-admins'] - }, - members: { - names: [], - roles: ['db-users'] - } - }) - })) - .chain(handleResponse(200)) - .toPromise(), - - removeDatabase: (name) => asyncFetch(`${config.origin}/${name}`, { - method: 'DELETE', - headers - }).chain(handleResponse(200)).toPromise(), + }) + .chain(handleResponse(201)) + // create security document + .chain(() => + asyncFetch(`${config.origin}/${name}/_security`, { + method: "PUT", + headers, + body: JSON.stringify({ + admins: { + names: [], + roles: ["db-admins"], + }, + members: { + names: [], + roles: ["db-users"], + }, + }), + }) + ) + .chain(handleResponse(200)) + .toPromise(), + removeDatabase: (name) => + asyncFetch(`${config.origin}/${name}`, { + method: "DELETE", + headers, + }).chain(handleResponse(200)).toPromise(), createDocument: ({ db, id, doc }) => Async.of({ ...doc, _id: id }) - .chain(doc => /^_design/.test(doc._id) - ? Async.Rejected({ ok: false, msg: 'user can not create design docs' }) - : Async.Resolved(doc)) + .chain((doc) => + /^_design/.test(doc._id) + ? 
Async.Rejected({ + ok: false, + msg: "user can not create design docs", + }) + : Async.Resolved(doc) + ) .chain((doc) => asyncFetch(`${config.origin}/${db}`, { - method: 'POST', + method: "POST", headers, - body: JSON.stringify(doc) + body: JSON.stringify(doc), }) ) .chain(handleResponse(201)) .toPromise(), - retrieveDocument: ({ db, id }) => retrieveDocument({ db, id }) - .map(omit(['_id', '_rev'])) - .map(assoc('id', id)) - .toPromise(), + retrieveDocument: ({ db, id }) => + retrieveDocument({ db, id }) + .map(omit(["_id", "_rev"])) + .map(assoc("id", id)) + .toPromise(), updateDocument: ({ db, id, doc }) => { // need to retrieve the document if exists // then upsert if possible return asyncFetch(`${config.origin}/${db}/${id}`, { - headers + headers, }) - .chain(res => Async.fromPromise(res.json.bind(res))()) - .map(doc => { - return doc.error ? null : doc + .chain((res) => Async.fromPromise(res.json.bind(res))()) + .map((doc) => { + return doc.error ? null : doc; }) .chain((old) => old ? asyncFetch(`${config.origin}/${db}/${id}?rev=${old._rev}`, { - method: 'PUT', + method: "PUT", headers, - body: JSON.stringify(doc) + body: JSON.stringify(doc), }) : asyncFetch(`${config.origin}/${db}/${id}`, { - method: 'PUT', + method: "PUT", headers, - body: JSON.stringify(doc) + body: JSON.stringify(doc), }) ) .chain(handleResponse(201)) - .map(omit(['rev'])) - .toPromise() + .map(omit(["rev"])) + .toPromise(); }, removeDocument: ({ db, id }) => retrieveDocument({ db, id }) .chain((old) => asyncFetch(`${config.origin}/${db}/${id}?rev=${old._rev}`, { - method: 'DELETE', - headers + method: "DELETE", + headers, }) ) .chain(handleResponse(200)).toPromise(), @@ -110,60 +125,64 @@ module.exports = ({ config }) => { // or it may be easier to just make the unique id _id? // return asyncFetch(`${config.origin}/${db}/_find`, { - method: 'POST', + method: "POST", headers, - body: JSON.stringify(query) + body: JSON.stringify(query), }) .chain(handleResponse(200)) .map(({ docs }) => ({ ok: true, docs: map( compose( - omit(['_id']), - over(xId, identity) - ), docs) + omit(["_id"]), + over(xId, identity), + ), + docs, + ), })) - .toPromise() + .toPromise(); }, indexDocuments: ({ db, name, fields }) => asyncFetch(`${config.origin}/${db}/_index`, { - method: 'POST', + method: "POST", headers, body: JSON.stringify({ index: { - fields + fields, }, - ddoc: name - }) + ddoc: name, + }), }) .chain(handleResponse(200)) .map(() => ({ ok: true })) .toPromise(), listDocuments: ({ db, limit, startkey, endkey, keys, descending }) => { - let options = { include_docs: true } - options = limit ? merge({ limit: Number(limit) }, options) : options - options = startkey ? merge({ startkey }, options) : options - options = endkey ? merge({ endkey }, options) : options - options = keys ? merge({ keys }, options) : options - options = descending ? merge({ descending }, options) : options + // deno-lint-ignore camelcase + let options = { include_docs: true }; + options = limit ? merge({ limit: Number(limit) }, options) : options; + options = startkey ? merge({ startkey }, options) : options; + options = endkey ? merge({ endkey }, options) : options; + options = keys ? merge({ keys }, options) : options; + options = descending ? 
merge({ descending }, options) : options; return asyncFetch(`${config.origin}/${db}/_all_docs`, { - method: 'POST', + method: "POST", headers, - body: JSON.stringify(options) + body: JSON.stringify(options), }) .chain(handleResponse(200)) - .map(result => ({ + .map((result) => ({ ok: true, docs: map( compose( - omit(['_rev', '_id']), - over(xId, identity) - ), pluck('doc', result.rows) - ) + omit(["_rev", "_id"]), + over(xId, identity), + ), + pluck("doc", result.rows), + ), })) - .toPromise() + .toPromise(); }, - bulkDocuments: bulk(config.origin, headers) - }) + bulkDocuments: bulk(config.origin, asyncFetch, headers, handleResponse), + }); } diff --git a/packages/adapter-couchdb/adapter_test.js b/packages/adapter-couchdb/adapter_test.js index e7500aa7..add9e098 100644 --- a/packages/adapter-couchdb/adapter_test.js +++ b/packages/adapter-couchdb/adapter_test.js @@ -1,174 +1,213 @@ -const test = require('tape') - -const fetchMock = require('fetch-mock') - -const COUCH = 'http://localhost:5984' - -globalThis.fetch = fetchMock.sandbox() - .get(`${COUCH}/hello`, { status: 200, body: { db_name: 'hello' } }) - .put(`${COUCH}/hello`, { status: 201, body: { ok: true } }) - .put(`${COUCH}/hello/_security`, { status: 200, body: { ok: true } }) - .delete(`${COUCH}/hello`, { - status: 200, - body: { ok: true } - }) - .post(`${COUCH}/hello`, { - status: 201, - body: { ok: true } - }) - .get(`${COUCH}/hello/1`, { - status: 200, - body: { _id: '1', hello: 'world' } - }) - .post(`${COUCH}/hello/_find`, { - status: 200, - body: { - docs: [{ - _id: '1', - hello: 'world' - }] - } - }) - .post(`${COUCH}/hello/_index`, { - status: 200, - body: { - result: 'created', - id: '_design/foo', - name: 'foo' - } - }) - .post(`${COUCH}/hello/_all_docs`, { - status: 200, - body: { +import { assertEquals, assertObjectMatch } from "./deps_dev.js"; +import { asyncFetch, createHeaders, handleResponse } from "./async-fetch.js"; +import { adapter } from "./adapter.js"; + +const test = Deno.test; +const COUCH = "http://localhost:5984"; + +const testFetch = (url, options) => { + options.method = options.method || "GET"; + + if (url === "http://localhost:5984/hello" && options.method === "PUT") { + return Promise.resolve({ + status: 201, + ok: true, + json: () => Promise.resolve({ ok: true }), + }); + } + if ( + url === "http://localhost:5984/hello/_security" && options.method === "PUT" + ) { + return Promise.resolve({ + status: 200, + ok: true, + json: () => Promise.resolve({ ok: true }), + }); + } + + if (url === "http://localhost:5984/hello" && options.method === "DELETE") { + return Promise.resolve({ + status: 200, + ok: true, + json: () => Promise.resolve({ ok: true }), + }); + } + + if (url === "http://localhost:5984/hello" && options.method === "POST") { + return Promise.resolve({ + status: 201, + ok: true, + json: () => Promise.resolve({ ok: true, id: "1" }), + }); + } + if (url === "http://localhost:5984/hello/1" && options.method === "GET") { + return Promise.resolve({ + status: 200, + ok: true, + json: () => Promise.resolve({ _id: "1", _rev: "1", hello: "world" }), + }); + } + + if ( + url === "http://localhost:5984/hello/_find" && options.method === "POST" + ) { + return Promise.resolve({ + status: 200, + ok: true, + json: () => + Promise.resolve({ + ok: true, + docs: [{ _id: "1", _rev: "1", hello: "world" }], + }), + }); + } + + if ( + url === "http://localhost:5984/hello/_index" && options.method === "POST" + ) { + return Promise.resolve({ + status: 200, ok: true, - rows: [{ - key: '1', - value: { _id: '1', _rev: 
'1' }, - doc: { - _id: '1', - _rev: '1', - hello: 'world' - } - }] - } - }) - .get(`${COUCH}/hello/_all_docs?keys=1,2`, { - status: 200, - body: { + json: () => Promise.resolve({ ok: true }), + }); + } + + if ( + url === "http://localhost:5984/hello/_all_docs" && options.method === "POST" + ) { + return Promise.resolve({ + status: 200, ok: true, - rows: [{ - key: '1', - id: '1', - value: { rev: '1' } - }, { - key: '2', - id: '2', - value: { rev: '1' } - }] - } - }) - .post(`${COUCH}/hello/_bulk_docs`, { - status: 201, - body: [{ + json: () => + Promise.resolve({ + ok: true, + rows: [{ + key: "1", + id: "1", + value: { rev: "1" }, + doc: { _id: "1", _rev: "1", hello: "world" }, + }], + }), + }); + } + + if (url === "http://localhost:5984/hello" && options.method === "GET") { + return Promise.resolve({ + status: 200, ok: true, - id: '1', - rev: '1' - }, { + json: () => Promise.resolve({ db_name: "hello" }), + }); + } + + if ( + url === "http://localhost:5984/hello/_bulk_docs" && + options.method === "POST" + ) { + return Promise.resolve({ + status: 201, ok: true, - id: '2', - rev: '2' - }] - }) - -const createAdapter = require('./adapter') - -const adapter = createAdapter({ - config: { origin: COUCH } -}) - -test('bulk documents', async t => { - const result = await adapter.bulkDocuments({ - db: 'hello', - docs: [{ id: '1' }, { id: '2' }] - }).catch(err => ({ ok: false, err })) - t.ok(result.ok) - t.equal(result.results.length, 2) - t.end() -}) - -test('create database', async t => { - const result = await adapter.createDatabase('hello') - t.ok(result.ok) - t.end() -}) - -test('remove database', async t => { - const result = await adapter.removeDatabase('hello') - t.ok(result.ok) - t.end() -}) - -test('create document', async t => { - const result = await adapter.createDocument({ - db: 'hello', id: '1', doc: { hello: 'world' } - }) - t.ok(result.ok) - t.end() -}) - -test('can not create design document', async t => { + json: () => + Promise.resolve([{ id: "1", ok: true }, { id: "2", ok: true }]), + }); + } + console.log("URL not resolving: ", options.method, url); + + return Promise.resolve({ + status: 500, + ok: false, + json: () => Promise.resolve({ ok: true }), + }); +}; + +const a = adapter({ + config: { origin: COUCH }, + asyncFetch: asyncFetch(testFetch), + headers: createHeaders("admin", "password"), + handleResponse, +}); + +test("bulk documents", async () => { + const result = await a.bulkDocuments({ + db: "hello", + docs: [{ id: "1" }, { id: "2" }], + }).catch((err) => ({ ok: false, err })); + console.log("results", result); + assertEquals(result.ok, true); + assertEquals(result.results.length, 2); +}); + +test("create database", async () => { + const result = await a.createDatabase("hello"); + assertEquals(result.ok, true); +}); + +test("remove database", async () => { + const result = await a.removeDatabase("hello"); + assertEquals(result.ok, true); +}); + +test("create document", async () => { + const result = await a.createDocument({ + db: "hello", + id: "1", + doc: { hello: "world" }, + }); + assertEquals(result.ok, true); +}); + +test("can not create design document", async () => { try { - await adapter.createDocument({ - db: 'hello', id: '_design/1', doc: { hello: 'world' } - }) + await a.createDocument({ + db: "hello", + id: "_design/1", + doc: { hello: "world" }, + }); } catch (e) { - t.ok(!e.ok) - t.end() + assertEquals(e.ok, false); } -}) - -test('retrieve document', async t => { - const result = await adapter.retrieveDocument({ - db: 'hello', - id: '1' - }) - 
t.equal(result.hello, 'world') - t.end() -}) - -test('find documents', async t => { - const results = await adapter.queryDocuments({ - db: 'hello', +}); + +test("retrieve document", async () => { + const result = await a.retrieveDocument({ + db: "hello", + id: "1", + }); + assertEquals(result.hello, "world"); +}); + +test("find documents", async () => { + const results = await a.queryDocuments({ + db: "hello", query: { selector: { - id: '1' - } - } - }) - t.deepEqual(results.docs[0], { - id: '1', - hello: 'world' - }) - t.end() -}) - -test('create query index', async t => { - const results = await adapter.indexDocuments({ - db: 'hello', - name: 'foo', - fields: ['foo'] - }) - t.ok(results.ok) - t.end() -}) - -test('list documents', async t => { - const results = await adapter.listDocuments({ - db: 'hello', - limit: 1 - }) - t.deepEqual(results.docs[0], { - id: '1', - hello: 'world' - }) -}) + id: "1", + }, + }, + }); + + assertObjectMatch(results.docs[0], { + id: "1", + hello: "world", + }); +}); + +test("create query index", async () => { + const results = await a.indexDocuments({ + db: "hello", + name: "foo", + fields: ["foo"], + }); + console.log("results", results); + assertEquals(results.ok, true); +}); + +test("list documents", async () => { + const results = await a.listDocuments({ + db: "hello", + limit: 1, + }); + assertObjectMatch(results.docs[0], { + id: "1", + hello: "world", + }); +}); diff --git a/packages/adapter-couchdb/async-fetch.js b/packages/adapter-couchdb/async-fetch.js index edff176c..a2ddfc54 100644 --- a/packages/adapter-couchdb/async-fetch.js +++ b/packages/adapter-couchdb/async-fetch.js @@ -1,16 +1,21 @@ -const { Async } = require('crocks') -const { ifElse, propEq } = require('ramda') -const { composeK } = require('crocks/helpers') +import { crocks, R } from "./deps.js"; -// fetch is pulled from environment -// eslint-disable-next-line no-undef -exports.asyncFetch = Async.fromPromise(fetch) -exports.createHeaders = (username, password) => ({ - 'Content-Type': 'application/json', - authorization: `Basic ${Buffer.from(username + ':' + password).toString('base64')}` -}) -const toJSON = (result) => Async.fromPromise(result.json.bind(result))() -const toJSONReject = composeK(Async.Rejected, toJSON) +const { Async, composeK } = crocks; +const { ifElse, propEq } = R; -exports.handleResponse = (code) => - ifElse(propEq('status', code), toJSON, toJSONReject) +export const asyncFetch = (fetch) => Async.fromPromise(fetch); +export const createHeaders = (username, password) => { + const headers = { + "Content-Type": "application/json", + }; + if (username) { + headers.authorization = `Basic ${btoa(username + ":" + password)}`; + } + return headers; +}; + +const toJSON = (result) => Async.fromPromise(result.json.bind(result))(); +const toJSONReject = composeK(Async.Rejected, toJSON); + +export const handleResponse = (code) => + ifElse(propEq("status", code), toJSON, toJSONReject); diff --git a/packages/adapter-couchdb/bulk.js b/packages/adapter-couchdb/bulk.js index 6608c37a..098584e8 100644 --- a/packages/adapter-couchdb/bulk.js +++ b/packages/adapter-couchdb/bulk.js @@ -1,62 +1,91 @@ -const { Async } = require('crocks') +import { crocks, R } from "./deps.js"; + +const { Async } = crocks; const { - assoc, compose, identity, has, head, find, filter, - is, lens, map, omit, over, path, prop, propEq, pluck -} = require('ramda') -const { asyncFetch, handleResponse } = require('./async-fetch') -const lensRev = lens(path(['value', 'rev']), assoc('rev')) -const lensId = 
lens(prop('id'), assoc('_id')) + assoc, + compose, + identity, + has, + head, + find, + filter, + is, + lens, + map, + omit, + over, + path, + prop, + propEq, + pluck, +} = R; +const lensRev = lens(path(["value", "rev"]), assoc("rev")); +const lensId = lens(prop("id"), assoc("_id")); const xRevs = map( compose( - omit(['key', 'value']), - over(lensRev, identity) - ) -) -const mergeWithRevs = docs => revs => - map(doc => { - const rev = find(rev => doc.id === rev.id, revs) - return rev ? { _rev: rev.rev, ...doc } : doc - }, docs) - -const switchIds = map(compose(omit(['id']), over(lensId, identity))) - -const pluckIds = pluck('id') -const getDocsThatExist = (url, db, headers) => ids => - asyncFetch(`${url}/${db}/_all_docs`, { method: 'POST', body: JSON.stringify({ keys: ids }), headers }) - .chain(handleResponse(200)) - .map(prop('rows')) - .map(filter(has('value'))) - .map(xRevs) - -const applyBulkDocs = (url, db, headers) => docs => - asyncFetch(`${url}/${db}/_bulk_docs`, { - method: 'POST', - headers, - body: JSON.stringify({ docs }) - }) - .chain(handleResponse(201)) - -const checkDbExists = (url, db, headers) => docs => - asyncFetch(`${url}/${db}`, { headers }) - .chain(handleResponse(200)) - .chain(res => propEq('db_name', db, res) - ? Async.Resolved(docs) - : Async.Rejected({ ok: false, msg: 'db not found' }) - ) - -const checkDocs = docs => - is(Object, head(docs)) ? Async.Resolved(docs) : Async.Rejected({ ok: false, msg: 'docs must be objects' }) - -module.exports = (couchUrl, headers) => ({ db, docs }) => - Async.of(docs) - .chain(checkDbExists(couchUrl, db, headers)) - .chain(checkDocs) - .map(pluckIds) - .chain(getDocsThatExist(couchUrl, db, headers)) - .map(mergeWithRevs(docs)) - .map(switchIds) - .chain(applyBulkDocs(couchUrl, db, headers)) - .map(map(omit(['rev']))) - .map(results => ({ ok: true, results })) - .toPromise() + omit(["key", "value"]), + over(lensRev, identity), + ), +); +const mergeWithRevs = (docs) => + (revs) => + map((doc) => { + const rev = find((rev) => doc.id === rev.id, revs); + return rev ? { _rev: rev.rev, ...doc } : doc; + }, docs); + +const switchIds = map(compose(omit(["id"]), over(lensId, identity))); + +const pluckIds = pluck("id"); + +const checkDocs = (docs) => + is(Object, head(docs)) + ? Async.Resolved(docs) + : Async.Rejected({ ok: false, msg: "docs must be objects" }); + +export const bulk = (couchUrl, asyncFetch, headers, handleResponse) => { + const getDocsThatExist = (url, db, headers) => + (ids) => + asyncFetch(`${url}/${db}/_all_docs`, { + method: "POST", + body: JSON.stringify({ keys: ids }), + headers, + }) + .chain(handleResponse(200)) + .map(prop("rows")) + .map(filter(has("value"))) + .map(xRevs); + + const applyBulkDocs = (url, db, headers) => + (docs) => + asyncFetch(`${url}/${db}/_bulk_docs`, { + method: "POST", + headers, + body: JSON.stringify({ docs }), + }) + .chain(handleResponse(201)); + + const checkDbExists = (url, db, headers) => + (docs) => + asyncFetch(`${url}/${db}`, { headers }) + .chain(handleResponse(200)) + .chain((res) => + propEq("db_name", db, res) + ? 
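// (GET /{db} returns the database's metadata; CouchDB includes db_name
// in that body, so anything else means the database does not exist)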
Async.Resolved(docs)
+          : Async.Rejected({ ok: false, msg: "db not found" })
+      );
+
+  return ({ db, docs }) =>
+    Async.of(docs)
+      .chain(checkDbExists(couchUrl, db, headers))
+      .chain(checkDocs)
+      .map(pluckIds)
+      .chain(getDocsThatExist(couchUrl, db, headers))
+      .map(mergeWithRevs(docs))
+      .map(switchIds)
+      .chain(applyBulkDocs(couchUrl, db, headers))
+      .map(map(omit(["rev"])))
+      .map((results) => ({ ok: true, results }))
+      .toPromise();
+};
diff --git a/packages/adapter-couchdb/deps.js b/packages/adapter-couchdb/deps.js
new file mode 100644
index 00000000..b27aa679
--- /dev/null
+++ b/packages/adapter-couchdb/deps.js
@@ -0,0 +1,2 @@
+export { default as crocks } from "https://cdn.skypack.dev/crocks@0.12.4";
+export * as R from "https://cdn.skypack.dev/ramda@^0.27.1";
diff --git a/packages/adapter-couchdb/deps_dev.js b/packages/adapter-couchdb/deps_dev.js
new file mode 100644
index 00000000..6a43158e
--- /dev/null
+++ b/packages/adapter-couchdb/deps_dev.js
@@ -0,0 +1,4 @@
+export {
+  assertEquals,
+  assertObjectMatch,
+} from "https://deno.land/std@0.98.0/testing/asserts.ts";
diff --git a/packages/adapter-couchdb/index.js b/packages/adapter-couchdb/index.js
deleted file mode 100644
index d8d98fb4..00000000
--- a/packages/adapter-couchdb/index.js
+++ /dev/null
@@ -1,39 +0,0 @@
-globalThis.fetch = require('@vercel/fetch-retry')(require('node-fetch'))
-const createAdapter = require('./adapter')
-
-/**
- * @param {object} config
- * @returns {object}
- */
-module.exports = function CouchDataAdapter (config) {
-  /**
-   * @param {object} env
-   */
-  function load () {
-    return config
-  }
-
-  /**
-   * @param {object} env
-   * @returns {function}
-   */
-  function link (env = { url: 'http://localhost:5984' }) {
-    /**
-     * @param {object} adapter
-     * @returns {object}
-     */
-    return function () {
-      // parse url
-      const config = new URL(env.url)
-
-      return createAdapter({ config })
-    }
-  }
-
-  return Object.freeze({
-    id: 'couchdb-data-adapter',
-    port: 'data',
-    load,
-    link
-  })
-}
diff --git a/packages/adapter-couchdb/index_test.js b/packages/adapter-couchdb/index_test.js
deleted file mode 100644
index e69de29b..00000000
diff --git a/packages/adapter-couchdb/mod.js b/packages/adapter-couchdb/mod.js
new file mode 100644
index 00000000..b510ff29
--- /dev/null
+++ b/packages/adapter-couchdb/mod.js
@@ -0,0 +1,44 @@
+import { adapter } from "./adapter.js";
+import { asyncFetch, createHeaders, handleResponse } from "./async-fetch.js";
+
+/**
+ * @param {object} config
+ * @returns {object}
+ */
+export default function CouchDataAdapter(config) {
+  /**
+   * @param {object} env
+   */
+  function load() {
+    return config;
+  }
+
+  /**
+   * @param {object} env
+   * @returns {function}
+   */
+  function link(env = { url: "http://localhost:5984" }) {
+    /**
+     * @param {object} adapter
+     * @returns {object}
+     */
+    return function () {
+      // parse url
+      const config = new URL(env.url);
+
+      return adapter({
+        config,
+        asyncFetch: asyncFetch(fetch),
+        headers: createHeaders(config.username, config.password),
+        handleResponse,
+      });
+    };
+  }
+
+  return Object.freeze({
+    id: "couchdb-data-adapter",
+    port: "data",
+    load,
+    link,
+  });
+}
diff --git a/packages/adapter-couchdb/scripts/test.sh b/packages/adapter-couchdb/scripts/test.sh
new file mode 100755
index 00000000..d2d5bf1f
--- /dev/null
+++ b/packages/adapter-couchdb/scripts/test.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+deno fmt --check
+deno test --allow-net
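For reference, injecting `asyncFetch`, `headers`, and `handleResponse` (rather than importing them directly, as the deleted index.js did) is what lets adapter_test.js above swap in a stubbed fetch. A minimal sketch of wiring the adapter by hand, assuming a local CouchDB with the admin/password credentials used elsewhere in this repo:

```js
import { adapter } from "./adapter.js";
import { asyncFetch, createHeaders, handleResponse } from "./async-fetch.js";

const couch = adapter({
  config: { origin: "http://localhost:5984" },
  asyncFetch: asyncFetch(fetch), // any fetch-compatible function works here
  headers: createHeaders("admin", "password"),
  handleResponse,
});

await couch.createDatabase("demo");
await couch.createDocument({ db: "demo", id: "1", doc: { hello: "world" } });
```

diff --git a/packages/adapter-dndb/README.md b/packages/adapter-dndb/README.md
new file mode 100644
index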
00000000..a9cf0c1f --- /dev/null +++ b/packages/adapter-dndb/README.md @@ -0,0 +1,5 @@ +# README + +dndb is a NoSQL embeddable database built for deno + +https://dndb.crawford.ml/docs diff --git a/packages/adapter-dndb/adapter.js b/packages/adapter-dndb/adapter.js new file mode 100644 index 00000000..3350f18d --- /dev/null +++ b/packages/adapter-dndb/adapter.js @@ -0,0 +1,79 @@ +// deno-lint-ignore-file no-unused-vars + +import { cuid, R } from "./deps.js"; +import { bulk } from "./bulk.js"; + +const { assoc, compose, equals, omit } = R; +const toInternalId = compose(omit(["id"]), (doc) => assoc("_id", doc.id, doc)); + +let db = null; +export function adapter(_env, Datastore) { + // create _system json file to hold all db names + + return Object.freeze({ + createDatabase: (name) => { + try { + db = new Datastore({ filename: `./${name}.db`, autoload: true }); + } catch (e) { + return Promise.resolve({ ok: false, message: e.message }); + } + return Promise.resolve({ ok: true }); + }, + removeDatabase: async (name) => { + // todo delete file if exists + try { + await Deno.remove(`./${name}.db`); + } catch (e) { + console.log(e.message); + } + return Promise.resolve({ ok: true }); + }, + createDocument: async ({ db, id, doc }) => { + db = new Datastore({ filename: `./${db}.db` }); + doc._id = id || cuid(); + const result = await db.insert(doc); + return Promise.resolve({ ok: equals(result, doc), id: result._id }); + }, + retrieveDocument: async ({ db, id }) => { + db = new Datastore({ filename: `./${db}.db` }); + const doc = await db.findOne({ _id: id }); + // swap ids + return Promise.resolve(compose(omit(["_id"]), assoc("id", doc._id))(doc)); + }, + updateDocument: async ({ db, id, doc }) => { + db = new Datastore({ filename: `./${db}.db` }); + // swap ids + doc = toInternalId(doc); + const result = await db.updateOne({ _id: id }, { $set: doc }); + return Promise.resolve({ ok: equals(doc, result) }); + }, + removeDocument: async ({ db, id }) => { + db = new Datastore({ filename: `./${db}.db` }); + const result = await db.removeOne({ _id: id }); + if (!result) return Promise.resolve({ ok: false, message: "not found" }); + return Promise.resolve({ ok: equals(result._id, id) }); + }, + listDocuments: async ({ db }) => { + db = new Datastore({ filename: `./${db}.db` }); + const results = await db.find(); + return Promise.resolve({ ok: true, docs: results }); + }, + queryDocuments: async ({ db, query }) => { + db = new Datastore({ filename: `./${db}.db` }); + const results = await db.find(query.selector); + return Promise.resolve({ ok: true, docs: results }); + }, + indexDocuments: ({ db, name, fields }) => { + // noop - db is not built for + // optimizability yet! 
will add this when + // supported + return Promise.resolve({ ok: true }); + }, + bulkDocuments: ({ db, docs }) => { + db = new Datastore({ filename: `./${db}.db` }); + return bulk({ db, docs }) + .map((results) => ({ ok: true, results })) + .toPromise(); + }, + }); +} diff --git a/packages/adapter-dndb/adapter_test.js b/packages/adapter-dndb/adapter_test.js new file mode 100644 index 00000000..b6d9fa0e --- /dev/null +++ b/packages/adapter-dndb/adapter_test.js @@ -0,0 +1,109 @@ +// deno-lint-ignore-file no-unused-vars + +import { adapter } from "./adapter.js"; +import { assertEquals } from "./dev_deps.js"; + +function Datastore(config) { + return Object.freeze({ + insert: (doc) => Promise.resolve(doc), + findOne: (o) => Promise.resolve({ _id: "1", hello: "world" }), + updateOne: (criteria, action) => + Promise.resolve({ _id: "1", hello: "moon" }), + removeOne: (o) => Promise.resolve(o), + find: () => Promise.resolve([]), + update: (criteria, action) => Promise.resolve(action.$set), + }); +} + +const test = Deno.test; +const a = adapter({ filename: "./test.db" }, Datastore); + +test("create database", async () => { + const result = await a.createDatabase("foo"); + assertEquals(result.ok, true); +}); + +test("remove database", async () => { + const result = await a.removeDatabase("foo"); + assertEquals(result.ok, true); +}); + +test("create document", async () => { + const result = await a.createDocument({ + db: "foo", + id: "1", + doc: { hello: "world" }, + }); + assertEquals(result.ok, true); +}); + +test("retrieve document", async () => { + const result = await a.retrieveDocument({ + db: "foo", + id: "1", + }); + + assertEquals(result.id, "1"); +}); + +test("update document", async () => { + const result = await a.updateDocument({ + db: "foo", + id: "1", + doc: { id: "1", hello: "moon" }, + }); + assertEquals(result.ok, true); +}); + +test("remove document", async () => { + const result = await a.removeDocument({ + db: "foo", + id: "1", + }); + assertEquals(result.ok, true); +}); + +test("list documents", async () => { + const result = await a.listDocuments({ db: "foo" }); + assertEquals(result.ok, true); +}); + +test("query documents", async () => { + await a.createDocument({ + db: "foo", + id: "movid-1", + doc: { id: "movie-1", type: "movie", title: "Great Outdoors" }, + }); + + const result = await a.queryDocuments({ + db: "foo", + query: { + selector: { type: "movie" }, + }, + }); + + assertEquals(result.ok, true); +}); + +test("index documents", async () => { + const result = await a.indexDocuments({ + db: "foo", + name: "fooIndex", + fields: ["type"], + }); + assertEquals(result.ok, true); +}); + +test("bulk update/insert/remove documents", async () => { + const result = await a.bulkDocuments({ + db: "foo", + docs: [ + { id: "movie-1", type: "movie", name: "ghostbusters", _deleted: true }, + { id: "movie-2", type: "movie", name: "great outdoors" }, + { id: "movie-3", type: "movie", name: "groundhog day" }, + { id: "movie-4", type: "movie", name: "what about bob?" 
}, + { id: "movie-5", type: "movie", name: "spaceballs" }, + ], + }); + assertEquals(result.ok, true); +}); diff --git a/packages/adapter-dndb/bulk.js b/packages/adapter-dndb/bulk.js new file mode 100644 index 00000000..e2810613 --- /dev/null +++ b/packages/adapter-dndb/bulk.js @@ -0,0 +1,64 @@ +// deno-lint-ignore-file no-unused-vars + +import { crocks, R } from "./deps.js"; + +const { + assoc, + compose, + identity, + ifElse, + isNil, + map, + omit, + pick, + prop, + propEq, +} = R; +const { Async } = crocks; + +const toInternalId = compose(omit(["id"]), (doc) => assoc("_id", doc.id, doc)); + +export function bulk({ db, docs }) { + const remove = (doc) => + compose( + map((r) => ({ ok: true, id: doc._id, deleted: true })), + Async.fromPromise(db.remove.bind(db)), + pick(["_id"]), + )(doc); + + const isDeleted = propEq("_deleted", true); + const isNew = propEq("_new", true); + + const insert = compose( + map((r) => ({ ok: true, id: r._id })), + Async.fromPromise(db.insert.bind(db)), + omit(["_new"]), + ); + const update = (doc) => + Async.fromPromise(db.update.bind(db))(prop("_id", doc), { $set: doc }) + .map((r) => ({ ok: true, id: doc._id })); + + const findOne = Async.fromPromise(db.findOne.bind(db)); + const flagNew = (doc) => + ifElse(isNil, () => assoc("_new", true, doc), identity); + + return Async.of(docs) + .map(map(toInternalId)) + // findAll updates + .chain(compose( + Async.all, + map((doc) => + compose( + map(flagNew(doc)), + findOne, + pick(["_id"]), + )(doc) + ), + )) + .chain(compose( + Async.all, + map( + ifElse(isDeleted, remove, ifElse(isNew, insert, update)), + ), + )); +} diff --git a/packages/adapter-dndb/deps.js b/packages/adapter-dndb/deps.js new file mode 100644 index 00000000..41dc6892 --- /dev/null +++ b/packages/adapter-dndb/deps.js @@ -0,0 +1,4 @@ +export { default as Datastore } from "https://deno.land/x/dndb@0.3.3/mod.ts"; +export { cuid } from "https://deno.land/x/cuid@v1.0.0/index.js"; +export * as R from "https://cdn.skypack.dev/ramda@^0.27.1"; +export { default as crocks } from "https://cdn.skypack.dev/crocks@0.12.4"; diff --git a/packages/adapter-dndb/deps_lock.json b/packages/adapter-dndb/deps_lock.json new file mode 100644 index 00000000..84c1bd54 --- /dev/null +++ b/packages/adapter-dndb/deps_lock.json @@ -0,0 +1,81 @@ +{ + "https://cdn.skypack.dev/-/crocks@v0.12.4-Mje8nEhNx2rmIpwz3ROp/dist=es2020,mode=imports/optimized/crocks.js": "93d587d18dc5f124f30e5b38de37a6471eb65309c94ef2ffc7a36dc40ab394da", + "https://cdn.skypack.dev/-/mongobj@v1.0.9-VYPOdIIfPZvZAWJWUZ9S/dist=es2020,mode=imports/optimized/mongobj.js": "55e587a5e3f1b299c65c1970db161c81ab9265e118e041de36809c93c8625e8c", + "https://cdn.skypack.dev/-/ramda@v0.27.1-3ePaNsppsnXYRcaNcaWn/dist=es2020,mode=imports/optimized/ramda.js": "0e51cd76039fc9669f9179d935b261bcbb19ecbb11d49e7e60cfbc14360a85d2", + "https://cdn.skypack.dev/crocks@0.12.4": "d48852ce36c500f2770a2bc240cb6df9ffb2219d184b32b9be542e8560a6ff1d", + "https://cdn.skypack.dev/mongobj": "24015909b4b01851e9edefdfdeed626e18898ba9a2dfb94d1c05cecde16cd1f8", + "https://cdn.skypack.dev/ramda@^0.27.1": "fb06d7de4305dcdb997f9adc903a463afcdac75d148d638295c420a8922048fd", + "https://deno.land/std@0.99.0/_util/assert.ts": "2f868145a042a11d5ad0a3c748dcf580add8a0dbc0e876eaa0026303a5488f58", + "https://deno.land/std@0.99.0/_util/os.ts": "e282950a0eaa96760c0cf11e7463e66babd15ec9157d4c9ed49cc0925686f6a7", + "https://deno.land/std@0.99.0/async/deferred.ts": "624bef4b755b71394620508a0c234a93cb8020cbd1b04bfcdad41c174392cef6", + 
"https://deno.land/std@0.99.0/async/delay.ts": "9de1d8d07d1927767ab7f82434b883f3d8294fb19cad819691a2ad81a728cf3d", + "https://deno.land/std@0.99.0/async/mod.ts": "f24e4a94f9fb7de78e8345e9590e1bf23da28a212541970413a023094448031b", + "https://deno.land/std@0.99.0/async/mux_async_iterator.ts": "62abff3af9ff619e8f2adc96fc70d4ca020fa48a50c23c13f12d02ed2b760dbe", + "https://deno.land/std@0.99.0/async/pool.ts": "353ce4f91865da203a097aa6f33de8966340c91b6f4a055611c8c5d534afd12f", + "https://deno.land/std@0.99.0/async/tee.ts": "6b8f1322b6dd2396202cfbe9cde9cab158d1e962cfd9197b0a97c6657bee79ce", + "https://deno.land/std@0.99.0/bytes/bytes_list.ts": "a13287edb03f19d27ba4927dec6d6de3e5bd46254cd4aee6f7e5815810122673", + "https://deno.land/std@0.99.0/bytes/mod.ts": "1ae1ccfe98c4b979f12b015982c7444f81fcb921bea7aa215bf37d84f46e1e13", + "https://deno.land/std@0.99.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.99.0/fs/_util.ts": "f2ce811350236ea8c28450ed822a5f42a0892316515b1cd61321dec13569c56b", + "https://deno.land/std@0.99.0/fs/copy.ts": "631bbafbfe6cba282158abc8aeb7e8251cc69a7ec28ce12878ea1b75fec2add4", + "https://deno.land/std@0.99.0/fs/empty_dir.ts": "5f08b263dd064dc7917c4bbeb13de0f5505a664b9cdfe312fa86e7518cfaeb84", + "https://deno.land/std@0.99.0/fs/ensure_dir.ts": "b7c103dc41a3d1dbbb522bf183c519c37065fdc234831a4a0f7d671b1ed5fea7", + "https://deno.land/std@0.99.0/fs/ensure_file.ts": "c06031af24368e80c330897e4b8e9109efc8602ffabc8f3e2306be07529e1d13", + "https://deno.land/std@0.99.0/fs/ensure_link.ts": "26e54363508b822afd87a3f6e873bbbcd6b5993dd638f8170758c16262a75065", + "https://deno.land/std@0.99.0/fs/ensure_symlink.ts": "c07b6d19ef58b6f5c671ffa942e7f9be50315f4f78e2f9f511626fd2e13beccc", + "https://deno.land/std@0.99.0/fs/eol.ts": "afaebaaac36f48c423b920c836551997715672b80a0fee9aa7667c181a94f2df", + "https://deno.land/std@0.99.0/fs/exists.ts": "b0d2e31654819cc2a8d37df45d6b14686c0cc1d802e9ff09e902a63e98b85a00", + "https://deno.land/std@0.99.0/fs/expand_glob.ts": "73e7b13f01097b04ed782b3d63863379b718417417758ba622e282b1e5300b91", + "https://deno.land/std@0.99.0/fs/mod.ts": "26eee4b52a8c516e37d464094b080ff6822883e7f01ff0ba0a72b8dcd54b9927", + "https://deno.land/std@0.99.0/fs/move.ts": "4623058e39bbbeb3ad30aeff9c974c55d2d574ad7c480295c12b04c244686a99", + "https://deno.land/std@0.99.0/fs/walk.ts": "b91c655c60d048035f9cae0e6177991ab3245e786e3ab7d20a5b60012edf2126", + "https://deno.land/std@0.99.0/hash/sha1.ts": "1cca324b4b253885a47f121adafcfac55b4cc96113e22b338e1db26f37a730b8", + "https://deno.land/std@0.99.0/io/buffer.ts": "3ead6bb11276ebcf093c403f74f67fd2205a515dbbb9061862c468ca56f37cd8", + "https://deno.land/std@0.99.0/io/bufio.ts": "82fe6a499cacf4604844472ccf328cb0a1c0571c0f83b5ee67e475018342b4ae", + "https://deno.land/std@0.99.0/io/types.d.ts": "89a27569399d380246ca7cdd9e14d5e68459f11fb6110790cc5ecbd4ee7f3215", + "https://deno.land/std@0.99.0/io/util.ts": "318be78b7954da25f0faffe123fef0d9423ea61af98467e860c06b60265eff6d", + "https://deno.land/std@0.99.0/node/_errors.ts": "02285efd044fe3d35e7ede4fc9578404363ae9d4ad4d817c965ebd04dc6e3e8c", + "https://deno.land/std@0.99.0/node/_util/_util_callbackify.ts": "f2ac50a47572dde37612a52c7b337afeefb6faafdb849184487e06436334a5ab", + "https://deno.land/std@0.99.0/node/_util/_util_promisify.ts": "2ad6efe685f73443d5ed6ae009999789a8de4a0f01e6d2afdf242b4515477ee2", + "https://deno.land/std@0.99.0/node/_util/_util_types.ts": "ae3d21e07c975f06590ab80bbde8173670d70ff40546267c0c1df869fc2ff00c", + 
"https://deno.land/std@0.99.0/node/_utils.ts": "559f7c47dec95961f508273b4e12284b221d6948f13527b3ca5db10f8fa9d93a", + "https://deno.land/std@0.99.0/node/events.ts": "830b49d3b24f76c880769c488ceaa9c02ee6cf3c9c90bc07d2fa6164fecc6c11", + "https://deno.land/std@0.99.0/node/util.ts": "0b6792797342bc0e43404fad849d358873b2b3e28939fce26face4db729d26ba", + "https://deno.land/std@0.99.0/path/_constants.ts": "1247fee4a79b70c89f23499691ef169b41b6ccf01887a0abd131009c5581b853", + "https://deno.land/std@0.99.0/path/_interface.ts": "1fa73b02aaa24867e481a48492b44f2598cd9dfa513c7b34001437007d3642e4", + "https://deno.land/std@0.99.0/path/_util.ts": "2e06a3b9e79beaf62687196bd4b60a4c391d862cfa007a20fc3a39f778ba073b", + "https://deno.land/std@0.99.0/path/common.ts": "eaf03d08b569e8a87e674e4e265e099f237472b6fd135b3cbeae5827035ea14a", + "https://deno.land/std@0.99.0/path/glob.ts": "314ad9ff263b895795208cdd4d5e35a44618ca3c6dd155e226fb15d065008652", + "https://deno.land/std@0.99.0/path/mod.ts": "4465dc494f271b02569edbb4a18d727063b5dbd6ed84283ff906260970a15d12", + "https://deno.land/std@0.99.0/path/posix.ts": "f56c3c99feb47f30a40ce9d252ef6f00296fa7c0fcb6dd81211bdb3b8b99ca3b", + "https://deno.land/std@0.99.0/path/separator.ts": "8fdcf289b1b76fd726a508f57d3370ca029ae6976fcde5044007f062e643ff1c", + "https://deno.land/std@0.99.0/path/win32.ts": "77f7b3604e0de40f3a7c698e8a79e7f601dc187035a1c21cb1e596666ce112f8", + "https://deno.land/std@0.99.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.99.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c", + "https://deno.land/std@0.99.0/uuid/_common.ts": "e985d221890ce301e8dfef4e7cbd01ff45b64886f6ca65ff3f03e64d8a5ff2b5", + "https://deno.land/std@0.99.0/uuid/mod.ts": "20e2a8209ec811fd8bf7218e83cf2d666556cdc947435b393f7f8edb9dcf14a4", + "https://deno.land/std@0.99.0/uuid/v1.ts": "3b242479ab2da5a6f5cdbbfeaa5db7a31966e688237f372ff50e489b93a9be07", + "https://deno.land/std@0.99.0/uuid/v4.ts": "949cadb3df3695770b41e08d5feb59401b10be73938c85542d27b8e6d14045f4", + "https://deno.land/std@0.99.0/uuid/v5.ts": "6b17fb670d608cfb545282c4940800ff4eb603ee16b0f92c576f483fd4112ed6", + "https://deno.land/x/cuid@v1.0.0/fingerprint.js": "34d45895441ad08a1ce2c76da4b72da24c00b68346e9bb3c8dfd10eb5774d74e", + "https://deno.land/x/cuid@v1.0.0/getRandomValue.js": "52545634d1e62836ed44aeea057caa2d054ea979090049ea8facb2ececa23385", + "https://deno.land/x/cuid@v1.0.0/index.js": "7313248002e361977e2801d0cbfd6b7d20926dd59c880f1698672a4fdefe999e", + "https://deno.land/x/cuid@v1.0.0/pad.js": "11c84744f110744659e8a61f991a1223b917786aaddc928970f85027e1fe1a12", + "https://deno.land/x/dndb@0.3.3/deps.ts": "9496854c8dd2998882817168e0d76bc126c24f0e94553baf76c8cacf2a84067c", + "https://deno.land/x/dndb@0.3.3/mod.ts": "3890f580d22e97bacf44050661b74a2c4a3dcbf75e4d8e0f56fd2cbce9305f2a", + "https://deno.land/x/dndb@0.3.3/src/executor.ts": "6f0bb3539f0ffa77a065f44c1bc25d4aacc7579b5074686ff8c9fc870dce2764", + "https://deno.land/x/dndb@0.3.3/src/methods/find.js": "a279effef8ba77b7094043a26e0e2c7d3b8b93cff6351199cc5bea17912960fe", + "https://deno.land/x/dndb@0.3.3/src/methods/findOne.js": "876ad703384f2e65c0c27610cc3907a6c00b67b88886456fdb5af1cd697ceb39", + "https://deno.land/x/dndb@0.3.3/src/methods/insert.js": "4720bd796111d6a315a1c9ad5d47a0bdf659f133cb52d755dc545432d646899e", + "https://deno.land/x/dndb@0.3.3/src/methods/mod.js": "41ae6ee3efb0b03f2bd833b252e09e17514cc3572bfd816d99aa5a402532d186", + 
"https://deno.land/x/dndb@0.3.3/src/methods/remove.js": "d3916b5dbe2477f81972738484eae3c49f0fc47b9c74a9aaa8cff2c9d68b4c7d", + "https://deno.land/x/dndb@0.3.3/src/methods/removeOne.js": "e7eb6142267af46d28388caeb66503647077a5ed191bf1bd9f5116bd175be1aa", + "https://deno.land/x/dndb@0.3.3/src/methods/update.js": "4252483c8f36ba8240624889a303b5a496860a4bc82100e8c4c440bf781191d9", + "https://deno.land/x/dndb@0.3.3/src/methods/updateOne.js": "94ab78ad16d816da18915da7bc24404f87f610303aeb80941f1e0aeefd338357", + "https://deno.land/x/dndb@0.3.3/src/mod.ts": "1937bc7e5d3a4ba28b4c88a70a435f6f8c3a28c1e42163d346b9b4e884b94235", + "https://deno.land/x/dndb@0.3.3/src/storage.ts": "d02c656cf0f0f815edddc09c73e9711f731c62f820c41278264d4c0b5f6c6580", + "https://deno.land/x/dndb@0.3.3/src/types.ts": "01028be953f7690acb7fb2de822e6cbfacb046e50e5f26ac0aeb8a7755d0eebd", + "https://deno.land/x/dndb@0.3.3/src/types/ds.options.ts": "163fe4b18ae8b7980b17f15d59d2b1528567f724365aea6e4fecbffa2e210fb0", + "https://raw.githubusercontent.com/denyncrawford/mongo-project.node/master/dist/bundle.js": "c2df2f6fdb05d90d88bcc2ae7da3a667ece4fcee793749187bcf70ad2046ed2a", + "https://raw.githubusercontent.com/denyncrawford/safe-filter/master/dist/index.js": "5edbe8a3296b4e0f152fdd62293e923b1a142ad5d4f6dc903c745a42bcaa8fb2" +} \ No newline at end of file diff --git a/packages/adapter-dndb/dev_deps.js b/packages/adapter-dndb/dev_deps.js new file mode 100644 index 00000000..6a43158e --- /dev/null +++ b/packages/adapter-dndb/dev_deps.js @@ -0,0 +1,4 @@ +export { + assertEquals, + assertObjectMatch, +} from "https://deno.land/std@0.98.0/testing/asserts.ts"; diff --git a/packages/adapter-dndb/dev_deps_lock.json b/packages/adapter-dndb/dev_deps_lock.json new file mode 100644 index 00000000..013cbf34 --- /dev/null +++ b/packages/adapter-dndb/dev_deps_lock.json @@ -0,0 +1,5 @@ +{ + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c" +} \ No newline at end of file diff --git a/packages/adapter-dndb/mod.js b/packages/adapter-dndb/mod.js new file mode 100644 index 00000000..af59f796 --- /dev/null +++ b/packages/adapter-dndb/mod.js @@ -0,0 +1,11 @@ +import { adapter } from "./adapter.js"; +import { Datastore } from "./deps.js"; + +export default function dndbAdapter(config) { + return Object.freeze({ + id: "dndb", + port: "data", + load: () => config, + link: (env) => () => adapter(env, Datastore), + }); +} diff --git a/packages/adapter-dndb/scripts/test.sh b/packages/adapter-dndb/scripts/test.sh new file mode 100755 index 00000000..2ca5d498 --- /dev/null +++ b/packages/adapter-dndb/scripts/test.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +deno fmt --check +deno test --allow-env --allow-read --allow-write --unstable diff --git a/packages/adapter-elasticsearch/adapter.js b/packages/adapter-elasticsearch/adapter.js index eec57b15..5dace76f 100644 --- a/packages/adapter-elasticsearch/adapter.js +++ b/packages/adapter-elasticsearch/adapter.js @@ -1,9 +1,29 @@ +import { R } from "./deps.js"; + +import { + bulkPath, + createIndexPath, + deleteIndexPath, + getDocPath, + indexDocPath, + queryPath, + removeDocPath, + updateDocPath, +} from "./paths.js"; -const { set, lensProp, pluck, reduce, always, pipe, map, join, concat, flip, toPairs } = require('ramda') 
const { - createIndexPath, deleteIndexPath, indexDocPath, getDocPath, - updateDocPath, removeDocPath, bulkPath, queryPath -} = require('./paths') + set, + lensProp, + pluck, + reduce, + always, + pipe, + map, + join, + concat, + flip, + toPairs, +} = R; /** * @@ -58,146 +78,149 @@ const { * - How to support different versions of Elasticsearch? * - ? Should we expose Elasticsearch response in result as res? */ -module.exports = function ({ config, asyncFetch, headers, handleResponse }) { +export default function ({ config, asyncFetch, headers, handleResponse }) { /** * @param {IndexInfo} * @returns {Promise} * */ - function createIndex ({ index, mappings }) { - const properties = mappings.fields.reduce((a, f) => set(lensProp(f), { type: 'text' }, a), {}) - console.log('adapter-elasticsearch', properties) + function createIndex({ index, mappings }) { + const properties = mappings.fields.reduce( + (a, f) => set(lensProp(f), { type: "text" }, a), + {}, + ); + console.log("adapter-elasticsearch", properties); return asyncFetch( createIndexPath(config.origin, index), { headers, - method: 'PUT', + method: "PUT", body: JSON.stringify({ - mappings: { properties } - }) - } + mappings: { properties }, + }), + }, ) .chain( - handleResponse(res => res.status < 400) + handleResponse((res) => res.status < 400), ) .bimap( - res => ({ ok: false, msg: JSON.stringify(res) }), - always({ ok: true }) + (res) => ({ ok: false, msg: JSON.stringify(res) }), + always({ ok: true }), ) - .toPromise() + .toPromise(); } /** * @param {string} index * @returns {Promise} */ - function deleteIndex (index) { + function deleteIndex(index) { return asyncFetch( deleteIndexPath(config.origin, index), { headers, - method: 'DELETE' - } + method: "DELETE", + }, ) .chain( - handleResponse(res => res.status === 200) + handleResponse((res) => res.status === 200), ) .bimap( always({ ok: false }), - always({ ok: true }) + always({ ok: true }), ) - .toPromise() + .toPromise(); } /** * @param {SearchDoc} * @returns {Promise} */ - function indexDoc ({ index, key, doc }) { + function indexDoc({ index, key, doc }) { return asyncFetch( indexDocPath(config.origin, index, key), { headers, - method: 'PUT', - body: JSON.stringify(doc) - } + method: "PUT", + body: JSON.stringify(doc), + }, ) .chain( - handleResponse(res => res.status < 400) + handleResponse((res) => res.status < 400), ) .bimap( always({ ok: false }), - always({ ok: true }) + always({ ok: true }), ) - .toPromise() + .toPromise(); } /** * @param {SearchInfo} * @returns {Promise} */ - function getDoc ({ index, key }) { + function getDoc({ index, key }) { return asyncFetch( getDocPath(config.origin, index, key), { headers, - method: 'GET' - } + method: "GET", + }, ) .chain( - handleResponse(res => res.status < 400) + handleResponse((res) => res.status < 400), ) .bimap( always({ ok: false }), - res => ({ ok: true, doc: res }) + (res) => ({ ok: true, doc: res }), ) - .toPromise() + .toPromise(); } /** * @param {SearchDoc} * @returns {Promise} */ - function updateDoc ({ index, key, doc }) { + function updateDoc({ index, key, doc }) { return asyncFetch( updateDocPath(config.origin, index, key), { headers, - method: 'PUT', - body: JSON.stringify(doc) - } + method: "PUT", + body: JSON.stringify(doc), + }, ) .chain( - handleResponse(res => res.status < 400) + handleResponse((res) => res.status < 400), ) .bimap( always({ ok: false }), - always({ ok: true }) + always({ ok: true }), ) - .toPromise() + .toPromise(); } /** * @param {SearchInfo} * @returns {Promise} */ - function removeDoc ({ 
index, key }) { + function removeDoc({ index, key }) { return asyncFetch( removeDocPath(config.origin, index, key), { headers, - method: 'DELETE' - } + method: "DELETE", + }, ) .chain( - handleResponse(res => res.status < 400) + handleResponse((res) => res.status < 400), ) .bimap( always({ ok: false }), - always({ ok: true }) + always({ ok: true }), ) - .toPromise() + .toPromise(); } /** @@ -206,35 +229,37 @@ module.exports = function ({ config, asyncFetch, headers, handleResponse }) { * * TODO: maybe we could just Promise.all a map to indexDoc()? */ - function bulk ({ index, docs }) { + function bulk({ index, docs }) { return asyncFetch( bulkPath(config.origin), { headers, - method: 'POST', + method: "POST", // See https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-bulk.html#docs-bulk-api-example body: pipe( reduce( - (arr, doc) => - [...arr, { index: { _index: index, _id: doc.id } }, doc], - [] + ( + arr, + doc, + ) => [...arr, { index: { _index: index, _id: doc.id } }, doc], + [], ), // stringify each object in arr map(JSON.stringify.bind(JSON)), - join('\n'), + join("\n"), // Bulk payload must end with a newline - flip(concat)('\n') - )(docs) - } + flip(concat)("\n"), + )(docs), + }, ) .chain( - handleResponse(res => res.status < 400) + handleResponse((res) => res.status < 400), ) .bimap( always({ ok: false }), - res => ({ ok: true, results: res.items }) + (res) => ({ ok: true, results: res.items }), ) - .toPromise() + .toPromise(); } /** @@ -242,12 +267,12 @@ module.exports = function ({ config, asyncFetch, headers, handleResponse }) { * @param {SearchQuery} * @returns {Promise} */ - function query ({ index, q: { query, fields, filter } }) { + function query({ index, q: { query, fields, filter } }) { return asyncFetch( queryPath(config.origin, index), { headers, - method: 'POST', + method: "POST", // anything undefined will not be stringified, so this shorthand works body: JSON.stringify({ query: { @@ -255,27 +280,27 @@ module.exports = function ({ config, asyncFetch, headers, handleResponse }) { must: { multi_match: { query, - fields - } + fields, + }, }, filter: toPairs(filter).map( - ([key, value]) => ({ term: { [key]: value } }) - ) - } - } - }) - } + ([key, value]) => ({ term: { [key]: value } }), + ), + }, + }, + }), + }, ) - .chain(handleResponse(res => res.status < 400)) + .chain(handleResponse((res) => res.status < 400)) .bimap( // TODO: what should message be for a failed query? 
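// one option (a sketch, not wired in here): Elasticsearch error responses
// usually carry `error.type` and `error.reason`, which would make a
// friendlier msg than stringifying the whole body, e.g.
// (res) => ({ ok: false, msg: res.error ? res.error.reason : JSON.stringify(res) }),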
- res => ({ ok: false, msg: JSON.stringify(res) }), - res => ({ + (res) => ({ ok: false, msg: JSON.stringify(res) }), + (res) => ({ ok: true, - matches: pluck('_source', res.hits.hits) - }) + matches: pluck("_source", res.hits.hits), + }), ) - .toPromise() + .toPromise(); } return Object.freeze({ @@ -286,6 +311,6 @@ module.exports = function ({ config, asyncFetch, headers, handleResponse }) { updateDoc, removeDoc, bulk, - query - }) + query, + }); } diff --git a/packages/adapter-elasticsearch/adapter_test.js b/packages/adapter-elasticsearch/adapter_test.js index 4c3b710e..60807e2b 100644 --- a/packages/adapter-elasticsearch/adapter_test.js +++ b/packages/adapter-elasticsearch/adapter_test.js @@ -1,204 +1,241 @@ - -const test = require('tape') -const { Async } = require('crocks') -const fetchMock = require('fetch-mock') - -const createAdapter = require('./adapter') -const { createHeaders, handleResponse } = require('./async-fetch') -const { - deleteIndexPath, createIndexPath, indexDocPath, getDocPath, - updateDocPath, removeDocPath, bulkPath, queryPath -} = require('./paths') - -const headers = createHeaders('admin', 'password') - -const ES = 'http://localhost:9200' -const INDEX = 'movies' +import { + assert, + assertEquals, + assertObjectMatch, + resolves, + spy, +} from "./dev_deps.js"; + +import createAdapter from "./adapter.js"; +import { asyncFetch, createHeaders, handleResponse } from "./async-fetch.js"; +import { + bulkPath, + createIndexPath, + deleteIndexPath, + getDocPath, + indexDocPath, + queryPath, + removeDocPath, + updateDocPath, +} from "./paths.js"; + +const headers = createHeaders("admin", "password"); + +const ES = "http://localhost:9200"; +const INDEX = "movies"; const DOC1 = { - title: 'The Great Gatsby', - id: 'tgg', + title: "The Great Gatsby", + id: "tgg", year: 2012, - rating: 4 -} + rating: 4, +}; const DOC2 = { - title: 'The Foo Gatsby', - id: 'tfg', + title: "The Foo Gatsby", + id: "tfg", year: 2012, - rating: 6 -} + rating: 6, +}; + +const response = { json: () => Promise.resolve(), status: 200 }; + +const stubResponse = (status, body) => { + response.json = resolves(body); + response.status = status; +}; -const fetch = fetchMock.sandbox() +const fetch = spy(() => Promise.resolve(response)); const adapter = createAdapter({ config: { origin: ES }, - asyncFetch: Async.fromPromise(fetch), + asyncFetch: asyncFetch(fetch), headers, - handleResponse -}) + handleResponse, +}); -test('remove index', async t => { +Deno.test("remove index", async () => { // remove index - fetch.deleteOnce(deleteIndexPath(ES, INDEX), { - status: 200, - body: { ok: true }, - headers - }) + stubResponse(200, { ok: true }); - const result = await adapter.deleteIndex(INDEX) + const result = await adapter.deleteIndex(INDEX); - t.equals(result.ok, true) - t.end() -}) + assertObjectMatch(fetch.calls.shift(), { + args: [deleteIndexPath(ES, INDEX), { + method: "DELETE", + headers, + }], + }); -test('create index', async t => { + assertEquals(result.ok, true); +}); + +Deno.test("create index", async () => { // create index - fetch.putOnce(createIndexPath(ES, INDEX), - { - status: 201, - body: { ok: true }, - headers - } - ) + stubResponse(201, { ok: true }); const result = await adapter.createIndex({ index: INDEX, - mappings: { fields: ['title'] } - }) + mappings: { fields: ["title"] }, + }); + + assertObjectMatch(fetch.calls.shift(), { + args: [createIndexPath(ES, INDEX), { + method: "PUT", + headers, + body: '{"mappings":{"properties":{"title":{"type":"text"}}}}', + }], + }); - t.equals(result.ok, 
true) - t.end() -}) + assertEquals(result.ok, true); +}); -test('index document', async t => { +Deno.test("index document", async () => { // index doc - fetch.putOnce(indexDocPath(ES, INDEX, DOC1.id), { - status: 200, - body: { ok: true }, - headers - }, { - overwriteRoutes: true - }) + stubResponse(200, { ok: true }); const result = await adapter.indexDoc({ index: INDEX, key: DOC1.id, - doc: DOC1 - }) + doc: DOC1, + }); + + assertObjectMatch(fetch.calls.shift(), { + args: [indexDocPath(ES, INDEX, DOC1.id), { + method: "PUT", + headers, + body: JSON.stringify(DOC1), + }], + }); - t.equals(result.ok, true) - t.end() -}) + assertEquals(result.ok, true); +}); -test('get document', async t => { +Deno.test("get document", async () => { // get doc - fetch.getOnce(getDocPath(ES, INDEX, DOC1.id), { - status: 200, - body: DOC1, - headers - }) + stubResponse(200, DOC1); const result = await adapter.getDoc({ index: INDEX, - key: DOC1.id - }) + key: DOC1.id, + }); + + assertObjectMatch(fetch.calls.shift(), { + args: [getDocPath(ES, INDEX, DOC1.id), { + method: "GET", + headers, + }], + }); - t.equals(result.doc.title, DOC1.title) - t.equals(result.ok, true) - t.end() -}) + assertEquals(result.doc.title, DOC1.title); + assertEquals(result.ok, true); +}); -test('update document', async t => { +Deno.test("update document", async () => { // update doc - fetch.putOnce(updateDocPath(ES, INDEX, DOC1.id), { - status: 201, - body: { ok: true }, - headers - }, { - overwriteRoutes: true - }) + stubResponse(201, { ok: true }); const result = await adapter.updateDoc({ index: INDEX, key: DOC1.id, doc: { ...DOC1, - rating: 6 - } - }) - - t.equals(result.ok, true) - t.end() -}) - -test('delete document', async t => { + rating: 6, + }, + }); + + assertObjectMatch(fetch.calls.shift(), { + args: [updateDocPath(ES, INDEX, DOC1.id), { + method: "PUT", + headers, + body: JSON.stringify({ + ...DOC1, + rating: 6, + }), + }], + }); + + assertEquals(result.ok, true); +}); + +Deno.test("delete document", async () => { // remove doc - fetch.deleteOnce(removeDocPath(ES, INDEX, DOC1.id), { - status: 201, - body: { ok: true }, - headers - }) + stubResponse(201, { ok: true }); const result = await adapter.removeDoc({ index: INDEX, - key: DOC1.id - }) + key: DOC1.id, + }); - t.equals(result.ok, true) - t.end() -}) + assertObjectMatch(fetch.calls.shift(), { + args: [removeDocPath(ES, INDEX, DOC1.id), { + method: "DELETE", + headers, + }], + }); -test('bulk', async t => { + assertEquals(result.ok, true); +}); + +Deno.test("bulk", async () => { // bulk operation - fetch.postOnce(bulkPath(ES), { - status: 200, - body: { - items: [ - DOC1, - DOC2 - ] - }, - headers - }) + stubResponse(200, { + items: [ + DOC1, + DOC2, + ], + }); const result = await adapter.bulk({ index: INDEX, docs: [ DOC1, - DOC2 - ] - }) - - t.equals(result.ok, true) - t.ok(result.results) - t.end() -}) - -test('query', async t => { + DOC2, + ], + }); + + assertObjectMatch(fetch.calls.shift(), { + args: [bulkPath(ES), { + method: "POST", + headers, + // TODO: Tyler. 
Assert body here eventually + }], + }); + + assertEquals(result.ok, true); + assert(result.results); +}); + +Deno.test("query", async () => { // query docs - fetch.postOnce(queryPath(ES, INDEX), { - status: 200, + stubResponse(200, { hits: { hits: [ - DOC1 - ] - } - }) + DOC1, + ], + }, + }); const result = await adapter.query({ - index: 'movies', + index: "movies", q: { - query: 'gatsby', - fields: ['title'], + query: "gatsby", + fields: ["title"], filter: { - rating: 4 - } - } - }) - - t.equals(result.ok, true) - t.ok(result.matches) - t.equals(result.matches.length, 1) - t.end() -}) + rating: 4, + }, + }, + }); + + assertObjectMatch(fetch.calls.shift(), { + args: [queryPath(ES, INDEX), { + method: "POST", + headers, + // TODO: Tyler. Assert body here eventually + }], + }); + + assertEquals(result.ok, true); + assert(result.matches); + assertEquals(result.matches.length, 1); +}); diff --git a/packages/adapter-elasticsearch/async-fetch.js b/packages/adapter-elasticsearch/async-fetch.js index f405b6e5..20a97523 100644 --- a/packages/adapter-elasticsearch/async-fetch.js +++ b/packages/adapter-elasticsearch/async-fetch.js @@ -1,27 +1,25 @@ -const createFetch = require('@vercel/fetch-retry') -const nodeFetch = require('node-fetch') -const { Async } = require('crocks') -const { ifElse } = require('ramda') +import { base64Encode, crocks, R } from "./deps.js"; -const fetch = createFetch(nodeFetch) +const { Async } = crocks; +const { ifElse } = R; -const asyncFetch = Async.fromPromise(fetch) +// TODO: Tyler. wrap with opionated approach like before with https://github.com/vercel/fetch +const asyncFetch = (fetch) => Async.fromPromise(fetch); const createHeaders = (username, password) => ({ - 'Content-Type': 'application/json', - authorization: `Basic ${Buffer.from(username + ':' + password).toString('base64')}` -}) + "Content-Type": "application/json", + authorization: `Basic ${ + base64Encode(new TextEncoder().encode(username + ":" + password)) + }`, +}); -const handleResponse = pred => +const handleResponse = (pred) => ifElse( - res => pred(res), - res => Async.fromPromise(() => res.json())(), - res => Async.fromPromise(() => res.json())() - .chain(Async.Rejected) - ) + (res) => pred(res), + (res) => Async.fromPromise(() => res.json())(), + (res) => + Async.fromPromise(() => res.json())() + .chain(Async.Rejected), + ); -module.exports = { - asyncFetch, - createHeaders, - handleResponse -} +export { asyncFetch, createHeaders, handleResponse }; diff --git a/packages/adapter-elasticsearch/deps.js b/packages/adapter-elasticsearch/deps.js new file mode 100644 index 00000000..b59e06ef --- /dev/null +++ b/packages/adapter-elasticsearch/deps.js @@ -0,0 +1,4 @@ +export * as R from "https://cdn.skypack.dev/ramda@^0.27.1"; +export { default as crocks } from "https://cdn.skypack.dev/crocks@^0.12.4"; + +export { encode as base64Encode } from "https://deno.land/std@0.98.0/encoding/base64.ts"; diff --git a/packages/adapter-elasticsearch/deps_lock.json b/packages/adapter-elasticsearch/deps_lock.json new file mode 100644 index 00000000..7741be09 --- /dev/null +++ b/packages/adapter-elasticsearch/deps_lock.json @@ -0,0 +1,7 @@ +{ + "https://cdn.skypack.dev/-/crocks@v0.12.4-Mje8nEhNx2rmIpwz3ROp/dist=es2020,mode=imports/optimized/crocks.js": "93d587d18dc5f124f30e5b38de37a6471eb65309c94ef2ffc7a36dc40ab394da", + "https://cdn.skypack.dev/-/ramda@v0.27.1-3ePaNsppsnXYRcaNcaWn/dist=es2020,mode=imports/optimized/ramda.js": "0e51cd76039fc9669f9179d935b261bcbb19ecbb11d49e7e60cfbc14360a85d2", + 
"https://cdn.skypack.dev/crocks@^0.12.4": "d48852ce36c500f2770a2bc240cb6df9ffb2219d184b32b9be542e8560a6ff1d", + "https://cdn.skypack.dev/ramda@^0.27.1": "fb06d7de4305dcdb997f9adc903a463afcdac75d148d638295c420a8922048fd", + "https://deno.land/std@0.98.0/encoding/base64.ts": "eecae390f1f1d1cae6f6c6d732ede5276bf4b9cd29b1d281678c054dc5cc009e" +} diff --git a/packages/adapter-elasticsearch/dev_deps.js b/packages/adapter-elasticsearch/dev_deps.js new file mode 100644 index 00000000..5fcb8b63 --- /dev/null +++ b/packages/adapter-elasticsearch/dev_deps.js @@ -0,0 +1,7 @@ +export { + assert, + assertEquals, + assertObjectMatch, + assertThrows, +} from "https://deno.land/std@0.98.0/testing/asserts.ts"; +export { resolves, spy } from "https://deno.land/x/mock@v0.9.5/mod.ts"; diff --git a/packages/adapter-elasticsearch/dev_deps_lock.json b/packages/adapter-elasticsearch/dev_deps_lock.json new file mode 100644 index 00000000..39557cec --- /dev/null +++ b/packages/adapter-elasticsearch/dev_deps_lock.json @@ -0,0 +1,24 @@ +{ + "https://deno.land/std@0.93.0/async/delay.ts": "9de1d8d07d1927767ab7f82434b883f3d8294fb19cad819691a2ad81a728cf3d", + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c", + "https://deno.land/x/collections@v0.11.0/common.ts": "34e8367e3696c3f872ae417d7c421fa908a5a2125a1c4cb259f7dee9561a7096", + "https://deno.land/x/collections@v0.11.0/comparators.ts": "08563340dbb0051f032bacdcf854bcabd13d607d2e8cb1889826417419df89d0", + "https://deno.land/x/collections@v0.11.0/trees/bs_node.ts": "854d39f6d60cdcb47e1183f0fa67091e6bad59dd2b13252a8b38b1b37269fa67", + "https://deno.land/x/collections@v0.11.0/trees/bs_tree.ts": "694605e10a9f56caf8468c5eea06d60a8d81521dee75d4524a5f790b1ee713b5", + "https://deno.land/x/collections@v0.11.0/trees/rb_node.ts": "e5eecc211933140e0535fc371a4990bd5ac889b00136c96a4a610dbbf78d9ecb", + "https://deno.land/x/collections@v0.11.0/trees/rb_tree.ts": "e1e5f4e26bc255ebf41d72b498a2af903af69f0391276a4a0eac6d46fc43f942", + "https://deno.land/x/collections@v0.11.0/vector.ts": "23cb91087cc89ce9a1e10954336484b537a44bd786e21babc310ae85cb7ad52d", + "https://deno.land/x/mixins@v0.7.2/apply.ts": "dad7095324f5ce23693a0bc0eb3238f230c0ed2160ea8c285f3773ff7c76dcb0", + "https://deno.land/x/mock@v0.9.5/callbacks.ts": "610924901137b6a8ee2f5801d06a48af18da84c9486dd4775ff301de8a8d2b29", + "https://deno.land/x/mock@v0.9.5/deps/std/async/delay.ts": "b0855e5e208bcc08a6f7cb4debfc3ed408a4a3f1bc00ce36235481b94613f018", + "https://deno.land/x/mock@v0.9.5/deps/udibo/collections/comparators.ts": "c780b51a4fcdac4d506bf254d54702fdd46051bde9367fc819bd72cea041ac29", + "https://deno.land/x/mock@v0.9.5/deps/udibo/collections/trees/rb_tree.ts": "24839f7b1f66291d10d6640cb0051143d9b7effbd1e74271c3b564bb31a5b977", + "https://deno.land/x/mock@v0.9.5/deps/udibo/collections/vector.ts": "93285b7af952652e9733795c5677666e978e279ef0b7dae6013e9e017c022b30", + "https://deno.land/x/mock@v0.9.5/deps/udibo/mixins/apply.ts": "d2446714efc056d2c6a373dbf7a1a1db236991cae60b673db6e17d5c4d1bb8b7", + "https://deno.land/x/mock@v0.9.5/mod.ts": "9b44e3cbe9955ecf9f05b06cb9e8a47294bd84f2d33e0b85a8f5b4a0d740e0aa", + "https://deno.land/x/mock@v0.9.5/spy.ts": "c6c66b124f1c0b86f479e66e1daf687ef03aff068ec776ff61a3bf25ff3da5d0", + 
"https://deno.land/x/mock@v0.9.5/stub.ts": "762cb86de92038e17dfd1af25429036fa7aadecbafdb39fa98edfa01e20951eb", + "https://deno.land/x/mock@v0.9.5/time.ts": "7c6d42af7577c4da461f9581bf171f6e5cb7a22a243ba87ee4ed3c4f1922c2e4" +} diff --git a/packages/adapter-elasticsearch/index.js b/packages/adapter-elasticsearch/index.js deleted file mode 100644 index 529a0ffb..00000000 --- a/packages/adapter-elasticsearch/index.js +++ /dev/null @@ -1,20 +0,0 @@ -const { mergeDeepRight, defaultTo, pipe } = require('ramda') -const adapter = require('./adapter') -const { asyncFetch, createHeaders, handleResponse } = require('./async-fetch') - -module.exports = function ElasticsearchAdapter (config) { - return Object.freeze({ - id: 'elasticsearch', - port: 'search', - load: pipe( - defaultTo({}), - mergeDeepRight(config) - ), - link: env => () => { - if (!env.url) { throw new Error('Config URL is required elastic search') } - const headers = createHeaders(config.username, config.password) - // TODO: probably shouldn't use origin, so to support mounting elasticsearch on path - return adapter({ config: new URL(env.url), asyncFetch, headers, handleResponse }) - } - }) -} diff --git a/packages/adapter-elasticsearch/index_test.js b/packages/adapter-elasticsearch/index_test.js deleted file mode 100644 index d43620aa..00000000 --- a/packages/adapter-elasticsearch/index_test.js +++ /dev/null @@ -1,43 +0,0 @@ - -const test = require('tape') -const elasticsearchAdapterFactory = require('./index') - -test('validate adapter', t => { - const adapter = elasticsearchAdapterFactory({}) - t.ok(adapter) - t.end() -}) - -test('validate load()', t => { - const config = { foo: 'bar' } - const adapter = elasticsearchAdapterFactory(config) - - const loadedConfig = adapter.load({ fizz: 'buzz' }) - t.equals(loadedConfig.foo, config.foo) - t.equals(loadedConfig.fizz, 'buzz') - t.end() -}) - -test('validate link()', t => { - const config = { - username: 'foo', - password: 'bar', - url: 'http://localhost:9200' - } - - const adapter = elasticsearchAdapterFactory(config) - t.ok(adapter.link(config)()) - t.end() -}) - -test('validate link() - no url', t => { - const config = { - username: 'foo', - password: 'bar', - no_url: 'http://localhost:9200' - } - - const adapter = elasticsearchAdapterFactory(config) - t.throws(() => adapter.link(config)(), 'Config URL is required elastic search') - t.end() -}) diff --git a/packages/adapter-elasticsearch/mod.js b/packages/adapter-elasticsearch/mod.js new file mode 100644 index 00000000..de104af9 --- /dev/null +++ b/packages/adapter-elasticsearch/mod.js @@ -0,0 +1,29 @@ +import { R } from "./deps.js"; + +import { asyncFetch, createHeaders, handleResponse } from "./async-fetch.js"; +import adapter from "./adapter.js"; + +const { mergeDeepRight, defaultTo, pipe } = R; + +export default function ElasticsearchAdapter(config) { + return Object.freeze({ + id: "elasticsearch", + port: "search", + load: pipe( + defaultTo({}), + mergeDeepRight(config), + ), + link: (env) => + () => { + if (!env.url) throw new Error("Config URL is required elastic search"); + const headers = createHeaders(config.username, config.password); + // TODO: probably shouldn't use origin, so to support mounting elasticsearch on path + return adapter({ + config: new URL(env.url), + asyncFetch: asyncFetch(fetch), + headers, + handleResponse, + }); + }, + }); +} diff --git a/packages/adapter-elasticsearch/mod_test.js b/packages/adapter-elasticsearch/mod_test.js new file mode 100644 index 00000000..02a7324b --- /dev/null +++ 
b/packages/adapter-elasticsearch/mod_test.js @@ -0,0 +1,44 @@ +import { assert, assertEquals, assertThrows } from "./dev_deps.js"; + +import elasticsearchAdapterFactory from "./mod.js"; + +Deno.test("validate adapter", () => { + const adapter = elasticsearchAdapterFactory({}); + assert(adapter); +}); + +Deno.test("validate load()", () => { + const config = { foo: "bar" }; + const adapter = elasticsearchAdapterFactory(config); + + const loadedConfig = adapter.load({ fizz: "buzz" }); + assertEquals(loadedConfig.foo, config.foo); + assertEquals(loadedConfig.fizz, "buzz"); +}); + +Deno.test("validate link()", () => { + const config = { + username: "foo", + password: "bar", + url: "http://localhost:9200", + }; + + const adapter = elasticsearchAdapterFactory(config); + assert(adapter.link(config)()); +}); + +Deno.test("validate link() - no url", () => { + const config = { + username: "foo", + password: "bar", + // deno-lint-ignore camelcase + no_url: "http://localhost:9200", + }; + + const adapter = elasticsearchAdapterFactory(config); + assertThrows( + () => adapter.link(config)(), + Error, + "Config URL is required elastic search", + ); +}); diff --git a/packages/adapter-elasticsearch/package.json b/packages/adapter-elasticsearch/package.json index be932cbb..37a4e660 100644 --- a/packages/adapter-elasticsearch/package.json +++ b/packages/adapter-elasticsearch/package.json @@ -1,19 +1,5 @@ { "name": "@hyper63/adapter-elasticsearch", "version": "0.1.1", - "license": "MIT", - "main": "index.js", - "scripts": { - "test": "tape *_test.js" - }, - "dependencies": { - "@vercel/fetch-retry": "^5.0.3", - "crocks": "^0.12.4", - "node-fetch": "^2.6.1", - "ramda": "^0.27.1" - }, - "devDependencies": { - "fetch-mock": "^9.10.7", - "tape": "^5.0.1" - } + "license": "MIT" } diff --git a/packages/adapter-elasticsearch/paths.js b/packages/adapter-elasticsearch/paths.js index e51fa9d4..ebc491d7 100644 --- a/packages/adapter-elasticsearch/paths.js +++ b/packages/adapter-elasticsearch/paths.js @@ -1,43 +1,42 @@ - -function createIndexPath (root, index) { - return `${root}/${index}` +function createIndexPath(root, index) { + return `${root}/${index}`; } -function deleteIndexPath (root, index) { - return `${root}/${index}` +function deleteIndexPath(root, index) { + return `${root}/${index}`; } -function indexDocPath (root, index, key) { - return `${root}/${index}/_doc/${key}` +function indexDocPath(root, index, key) { + return `${root}/${index}/_doc/${key}`; } -function updateDocPath (root, index, key) { - return `${root}/${index}/_doc/${key}` +function updateDocPath(root, index, key) { + return `${root}/${index}/_doc/${key}`; } -function removeDocPath (root, index, key) { - return `${root}/${index}/_doc/${key}` +function removeDocPath(root, index, key) { + return `${root}/${index}/_doc/${key}`; } -function getDocPath (root, index, key) { - return `${root}/${index}/_doc/${key}/_source` +function getDocPath(root, index, key) { + return `${root}/${index}/_doc/${key}/_source`; } -function bulkPath (root) { - return `${root}/_bulk` +function bulkPath(root) { + return `${root}/_bulk`; } -function queryPath (root, index) { - return `${root}/${index}/_search` +function queryPath(root, index) { + return `${root}/${index}/_search`; } -module.exports = { +export { + bulkPath, createIndexPath, deleteIndexPath, + getDocPath, indexDocPath, - updateDocPath, + queryPath, removeDocPath, - getDocPath, - bulkPath, - queryPath -} + updateDocPath, +}; diff --git a/packages/adapter-elasticsearch/scripts/test.sh 
b/packages/adapter-elasticsearch/scripts/test.sh new file mode 100755 index 00000000..09bdb23f --- /dev/null +++ b/packages/adapter-elasticsearch/scripts/test.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +deno fmt --check +deno test diff --git a/packages/adapter-fs/README.md b/packages/adapter-fs/README.md index 29a6a0af..31ddde83 100644 --- a/packages/adapter-fs/README.md +++ b/packages/adapter-fs/README.md @@ -1,26 +1,25 @@ # hyper63 file storage adapter -This adapter uses the file system to store unstructured objects -in the hyper63 service framework. +This adapter uses the file system to store unstructured objects in the hyper63 +service framework. ## How to configure -``` sh +```sh npm install @hyper63/adapter-fs ``` In config -``` js -import fs from '@hyper63/adapter-fs' +```js +import fs from "@hyper63/adapter-fs"; export default { app: express, adapters: [ - ... - { port: 'storage', plugins: [fs({dir: './data'})]} - ] -} + ...{ port: "storage", plugins: [fs({ dir: "./data" })] }, + ], +}; ``` ## How to use diff --git a/packages/adapter-fs/adapter.js b/packages/adapter-fs/adapter.js index 38354686..f8b308f9 100644 --- a/packages/adapter-fs/adapter.js +++ b/packages/adapter-fs/adapter.js @@ -1,7 +1,7 @@ -const fs = require('fs') -const path = require('path') -const { Async } = require('crocks') -const { always } = require('ramda') +import { crocks, path, R } from "./deps.js"; + +const { Async } = crocks; +const { always, identity } = R; /** * hyper63 adapter for the storage port @@ -28,107 +28,154 @@ const { always } = require('ramda') * @param {string} path * @returns {Object} */ -module.exports = function (root) { - if (!root) { throw new Error('STORAGE: FS_Adapter: root directory required for this service!') } +export default function (root) { + if (!root) { + throw new Error( + "STORAGE: FS_Adapter: root directory required for this service!", + ); + } /** * @param {string} name * @returns {Promise} */ - function makeBucket (name) { - if (!name) { return Promise.reject({ ok: false, msg: 'name for bucket is required!' }) } - const mkdir = Async.fromNode(fs.mkdir) - return mkdir(path.resolve(root + '/' + name)) - .map(always({ ok: true })) - .toPromise() + function makeBucket(name) { + if (!name) { + return Promise.reject({ ok: false, msg: "name for bucket is required!" }); + } + + const mkdir = Async.fromPromise(Deno.mkdir.bind(Deno)); + + return mkdir(path.resolve(path.join(root, name))) + .bimap( + (err) => ({ ok: false, error: err.message }), + always({ ok: true }), + ) + .toPromise(); } /** * @param {string} name * @returns {Promise} */ - function removeBucket (name) { - if (!name) { return Promise.reject({ ok: false, msg: 'name for bucket is required!' }) } - const rmdir = Async(function (reject, resolve) { - fs.rmdir(path.resolve(root + '/' + name), (err) => { - if (err) { return reject({ ok: false, error: err.message }) } - resolve({ ok: true }) - }) - }) - - return rmdir - .map(always({ ok: true })) - .toPromise() + function removeBucket(name) { + if (!name) { + return Promise.reject({ ok: false, msg: "name for bucket is required!" }); + } + + const rmdir = Async.fromPromise(Deno.remove.bind(Deno)); + + // TODO: Tyler. Do we want to do a recursive remove here? 
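+ // Note: Deno.remove is shallow by default and rejects when the directory is + // not empty, mirroring the old fs.rmdir behavior. To support removing + // non-empty buckets, the call below could pass { recursive: true } as a + // second argument, e.g. rmdir(path.resolve(path.join(root, name)), { recursive: true }).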
+ return rmdir(path.resolve(path.join(root, name))) + .bimap( + (err) => ({ ok: false, error: err.message }), + always({ ok: true }), + ) + .toPromise(); } /** * @param {StorageObject} * @returns {Promise} */ - function putObject ({ bucket, object, stream }) { - if (!bucket) { return Promise.reject({ ok: false, msg: 'bucket name required' }) } - if (!object) { return Promise.reject({ ok: false, msg: 'object name required' }) } - if (!stream) { return Promise.reject({ ok: false, msg: 'stream is required' }) } - - return new Promise(function (resolve, reject) { - const s = fs.createWriteStream( - path.resolve(`${root}/${bucket}`) + `/${object}` - ) + async function putObject({ bucket, object, stream }) { + if (!bucket) { + return Promise.reject({ ok: false, msg: "bucket name required" }); + } + if (!object) { + return Promise.reject({ ok: false, msg: "object name required" }); + } + if (!stream) { + return Promise.reject({ ok: false, msg: "stream is required" }); + } - stream.on('end', () => { - resolve({ ok: true }) - }) + let file; + try { + // Create Writer + file = await Deno.create( + path.join( + path.resolve(path.join(root, bucket)), + object, + ), + ); - stream.on('error', (e) => { - reject({ ok: false, msg: e.message }) - }) + // Copy Reader into Writer + await Deno.copy(stream, file); - stream.pipe(s) - }) + return { ok: true }; + } catch (err) { + return { ok: false, msg: err.message }; + } finally { + file && await file.close(); + } } /** * @param {StorageInfo} * @returns {Promise} */ - function removeObject ({ - bucket, - object - }) { - if (!bucket) { return Promise.reject({ ok: false, msg: 'bucket name required' }) } - if (!object) { return Promise.reject({ ok: false, msg: 'object name required' }) } - - const rm = Async(function (reject, resolve) { - fs.unlink(path.resolve(`${root}/${bucket}/${object}`), (err) => { - if (err) { return reject({ ok: false, msg: err.message }) } - resolve({ ok: true }) - }) - }) - rm.toPromise() + function removeObject({ bucket, object }) { + if (!bucket) { + return Promise.reject({ ok: false, msg: "bucket name required" }); + } + if (!object) { + return Promise.reject({ ok: false, msg: "object name required" }); + } + + const rm = Async.fromPromise(Deno.remove.bind(Deno)); + + return rm( + path.resolve(path.join(root, bucket, object)), + ).bimap( + (err) => ({ ok: false, error: err.message }), + always({ ok: true }), + ).toPromise(); } /** * @param {StorageInfo} * @returns {Promise} */ - function getObject ({ - bucket, - object - }) { - if (!bucket) { return Promise.reject({ ok: false, msg: 'bucket name required' }) } - if (!object) { return Promise.reject({ ok: false, msg: 'object name required' }) } - return Async(function (reject, resolve) { - try { - const s = fs.createReadStream(path.resolve(`${root}/${bucket}/${object}`)) - resolve(s) - } catch (e) { - reject({ ok: false, msg: e.message }) - } - }).toPromise() + function getObject({ bucket, object }) { + if (!bucket) { + return Promise.reject({ ok: false, msg: "bucket name required" }); + } + if (!object) { + return Promise.reject({ ok: false, msg: "object name required" }); + } + + const open = Async.fromPromise(Deno.open.bind(Deno)); + + return open( + path.resolve(path.join(root, bucket, object)), + { + read: true, + write: false, + }, + ).bimap( + (err) => ({ ok: false, msg: err.message }), + identity, + ).toPromise(); } - function listObjects ({ bucket, prefix = '' }) { - if (!bucket) { return Promise.reject({ ok: false, msg: 'bucket name required' }) } - return 
fs.promises.readdir(path.resolve(`${root}/${bucket}/${prefix}`)) + async function listObjects({ bucket, prefix = "" }) { + if (!bucket) { + return Promise.reject({ ok: false, msg: "bucket name required" }); + } + + const files = []; + try { + for await ( + const dirEntry of Deno.readDir( + path.resolve(path.join(root, bucket, prefix)), + ) + ) { + files.push(dirEntry.name); + } + + return files; + } catch (err) { + return { ok: false, error: err.message }; + } } return Object.freeze({ @@ -138,7 +185,6 @@ module.exports = function (root) { putObject, removeObject, getObject, - listObjects - - }) + listObjects, + }); } diff --git a/packages/adapter-fs/adapter_test.js b/packages/adapter-fs/adapter_test.js index cbafba41..aaeaa11c 100644 --- a/packages/adapter-fs/adapter_test.js +++ b/packages/adapter-fs/adapter_test.js @@ -1,96 +1,165 @@ -const test = require('tape') -const { v4 } = require('uuid') -const adapter = require('./adapter')('./') -const values = require('pull-stream/sources/values') -const pull = require('pull-stream/pull') -const concat = require('pull-stream/sinks/concat') - -const toStream = require('pull-stream-to-stream') -const toPull = require('stream-to-pull-stream') - -test('fs adapter make bucket', async t => { - t.plan(1) - const bucket = v4() - const result = await adapter.makeBucket(bucket) - t.ok(result.ok) - await adapter.removeBucket(bucket) -}) - -test('fs adapter put object', async t => { - t.plan(1) +import { + assert, + assertEquals, + readAll, + v4 as v4Generator, +} from "./dev_deps.js"; + +import createAdapter from "./adapter.js"; + +const v4 = v4Generator.generate.bind(v4Generator); + +const adapter = createAdapter("./"); + +function emptyReader() { + return { + read(_) { + return Promise.resolve(null); + }, + }; +} + +/** + * Given a string, return a Reader that reads the encoded string + * into the provided buffer + * + * @param {string} text - the string to stream + * @returns {Deno.Reader} - a Reader implementation + */ +function textReader(text = "") { + const encoded = new TextEncoder().encode(text); + let totalRead = 0; + let finished = false; + + async function read(buf) { + if (finished) { + return null; + } + + let result; + const remaining = encoded.length - totalRead; + + // copy the next unread chunk into the buffer (at most buf.byteLength bytes) + buf.set(encoded.subarray(totalRead, totalRead + buf.byteLength), 0); + + if (remaining >= buf.byteLength) { + result = buf.byteLength; + } else { + result = remaining; + } + + if (result) { + totalRead += result; + } + finished = totalRead === encoded.length; + + return result; + } + + return { read }; +} + +Deno.test("fs adapter make bucket", async () => { + const bucket = v4(); + const result = await adapter.makeBucket(bucket); + assert(result.ok); + await adapter.removeBucket(bucket); +}); + +Deno.test("fs adapter put object", async () => { // setup - const bucket = v4() - const object = v4() + '.tmp' - await adapter.makeBucket(bucket) + const bucket = v4(); + const object = v4() + ".txt"; + await adapter.makeBucket(bucket); // test - const stream = toStream.source(values(['hello', 'world'])) + const stream = textReader("woop woop"); const result = await adapter.putObject({ bucket, object, - stream - }) - t.ok(result.ok) + stream, + }); + assert(result.ok); // clean up // remove file await adapter.removeObject({ bucket, - object - }) + object, + }); // remove Bucket - await adapter.removeBucket(bucket).catch(err => { - console.log(JSON.stringify(err)) - return { ok: false } - }) -}) - -test('fs adapter get object', async t => { - const bucket = v4() -
const object = v4() + '.tmp' - await adapter.makeBucket(bucket) - - const stream = toStream.source(values(['hello', 'world'])) + await adapter.removeBucket(bucket).catch((err) => { + console.log(JSON.stringify(err)); + return { ok: false }; + }); +}); + +Deno.test("fs adapter get object", async () => { + const bucket = v4(); + const object = v4() + ".txt"; + await adapter.makeBucket(bucket); + + const stream = textReader("hello world"); + await adapter.putObject({ bucket, object, - stream - }) + stream, + }); // test const s = await adapter.getObject({ bucket, - object - }) - await new Promise((resolve) => { - pull( - toPull.source(s), - concat(async (_err, data) => { - t.equal(data, 'helloworld') - // cleanup - // remove file - await adapter.removeObject({ - bucket, - object - }) - // remove Bucket - await adapter.removeBucket(bucket).catch(() => { - return { ok: false } - }) - resolve() - }) - ) - }) - t.end() -}) - -test('list files', async t => { + object, + }); + + const encodedResult = await readAll(s); + // close the Reader + s.close(); + + assertEquals(new TextDecoder().decode(encodedResult), "hello world"); + + // cleanup + // remove file + await adapter.removeObject({ + bucket, + object, + }); + // remove Bucket + await adapter.removeBucket(bucket).catch(() => { + return { ok: false }; + }); +}); + +Deno.test("list files", async () => { + const bucket = v4(); + const object = v4() + ".tmp"; + + // setup + await adapter.makeBucket(bucket); + + const stream = emptyReader(); + + await adapter.putObject({ + bucket, + object, + stream, + }); + + // test const list = await adapter.listObjects({ - bucket: 'node_modules' - }) - t.ok( - list.find(file => file === 'tape') - ) - t.end() -}) + bucket, + }); + + assert( + list.find((file) => file === object), + ); + + // clean up + await adapter.removeObject({ + bucket, + object, + }); + await adapter.removeBucket(bucket); +}); diff --git a/packages/adapter-fs/deps.js b/packages/adapter-fs/deps.js new file mode 100644 index 00000000..bc7d5346 --- /dev/null +++ b/packages/adapter-fs/deps.js @@ -0,0 +1,4 @@ +export * as R from "https://cdn.skypack.dev/ramda@^0.27.1"; +export { default as crocks } from "https://cdn.skypack.dev/crocks@^0.12.4"; + +export * as path from "https://deno.land/std@0.98.0/path/mod.ts"; diff --git a/packages/adapter-fs/deps_lock.json b/packages/adapter-fs/deps_lock.json new file mode 100644 index 00000000..e58d4541 --- /dev/null +++ b/packages/adapter-fs/deps_lock.json @@ -0,0 +1,17 @@ +{ + "https://cdn.skypack.dev/-/crocks@v0.12.4-Mje8nEhNx2rmIpwz3ROp/dist=es2020,mode=imports/optimized/crocks.js": "93d587d18dc5f124f30e5b38de37a6471eb65309c94ef2ffc7a36dc40ab394da", + "https://cdn.skypack.dev/-/ramda@v0.27.1-3ePaNsppsnXYRcaNcaWn/dist=es2020,mode=imports/optimized/ramda.js": "0e51cd76039fc9669f9179d935b261bcbb19ecbb11d49e7e60cfbc14360a85d2", + "https://cdn.skypack.dev/crocks@^0.12.4": "d48852ce36c500f2770a2bc240cb6df9ffb2219d184b32b9be542e8560a6ff1d", + "https://cdn.skypack.dev/ramda@^0.27.1": "fb06d7de4305dcdb997f9adc903a463afcdac75d148d638295c420a8922048fd", + "https://deno.land/std@0.98.0/_util/assert.ts": "2f868145a042a11d5ad0a3c748dcf580add8a0dbc0e876eaa0026303a5488f58", + "https://deno.land/std@0.98.0/_util/os.ts": "e282950a0eaa96760c0cf11e7463e66babd15ec9157d4c9ed49cc0925686f6a7", + "https://deno.land/std@0.98.0/path/_constants.ts": "1247fee4a79b70c89f23499691ef169b41b6ccf01887a0abd131009c5581b853", + "https://deno.land/std@0.98.0/path/_interface.ts": 
"1fa73b02aaa24867e481a48492b44f2598cd9dfa513c7b34001437007d3642e4", + "https://deno.land/std@0.98.0/path/_util.ts": "2e06a3b9e79beaf62687196bd4b60a4c391d862cfa007a20fc3a39f778ba073b", + "https://deno.land/std@0.98.0/path/common.ts": "eaf03d08b569e8a87e674e4e265e099f237472b6fd135b3cbeae5827035ea14a", + "https://deno.land/std@0.98.0/path/glob.ts": "314ad9ff263b895795208cdd4d5e35a44618ca3c6dd155e226fb15d065008652", + "https://deno.land/std@0.98.0/path/mod.ts": "4465dc494f271b02569edbb4a18d727063b5dbd6ed84283ff906260970a15d12", + "https://deno.land/std@0.98.0/path/posix.ts": "f56c3c99feb47f30a40ce9d252ef6f00296fa7c0fcb6dd81211bdb3b8b99ca3b", + "https://deno.land/std@0.98.0/path/separator.ts": "8fdcf289b1b76fd726a508f57d3370ca029ae6976fcde5044007f062e643ff1c", + "https://deno.land/std@0.98.0/path/win32.ts": "77f7b3604e0de40f3a7c698e8a79e7f601dc187035a1c21cb1e596666ce112f8" +} diff --git a/packages/adapter-fs/dev_deps.js b/packages/adapter-fs/dev_deps.js new file mode 100644 index 00000000..aa99e9ca --- /dev/null +++ b/packages/adapter-fs/dev_deps.js @@ -0,0 +1,7 @@ +export { + assert, + assertEquals, + assertObjectMatch, +} from "https://deno.land/std@0.98.0/testing/asserts.ts"; +export { v4 } from "https://deno.land/std@0.98.0/uuid/mod.ts"; +export { readAll } from "https://deno.land/std@0.98.0/io/util.ts"; diff --git a/packages/adapter-fs/dev_deps_lock.json b/packages/adapter-fs/dev_deps_lock.json new file mode 100644 index 00000000..e6b58a63 --- /dev/null +++ b/packages/adapter-fs/dev_deps_lock.json @@ -0,0 +1,16 @@ +{ + "https://deno.land/std@0.98.0/_util/assert.ts": "2f868145a042a11d5ad0a3c748dcf580add8a0dbc0e876eaa0026303a5488f58", + "https://deno.land/std@0.98.0/bytes/mod.ts": "1ae1ccfe98c4b979f12b015982c7444f81fcb921bea7aa215bf37d84f46e1e13", + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/hash/sha1.ts": "1cca324b4b253885a47f121adafcfac55b4cc96113e22b338e1db26f37a730b8", + "https://deno.land/std@0.98.0/io/buffer.ts": "3ead6bb11276ebcf093c403f74f67fd2205a515dbbb9061862c468ca56f37cd8", + "https://deno.land/std@0.98.0/io/types.d.ts": "89a27569399d380246ca7cdd9e14d5e68459f11fb6110790cc5ecbd4ee7f3215", + "https://deno.land/std@0.98.0/io/util.ts": "318be78b7954da25f0faffe123fef0d9423ea61af98467e860c06b60265eff6d", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c", + "https://deno.land/std@0.98.0/uuid/_common.ts": "e985d221890ce301e8dfef4e7cbd01ff45b64886f6ca65ff3f03e64d8a5ff2b5", + "https://deno.land/std@0.98.0/uuid/mod.ts": "20e2a8209ec811fd8bf7218e83cf2d666556cdc947435b393f7f8edb9dcf14a4", + "https://deno.land/std@0.98.0/uuid/v1.ts": "3b242479ab2da5a6f5cdbbfeaa5db7a31966e688237f372ff50e489b93a9be07", + "https://deno.land/std@0.98.0/uuid/v4.ts": "949cadb3df3695770b41e08d5feb59401b10be73938c85542d27b8e6d14045f4", + "https://deno.land/std@0.98.0/uuid/v5.ts": "6b17fb670d608cfb545282c4940800ff4eb603ee16b0f92c576f483fd4112ed6" +} diff --git a/packages/adapter-fs/index.js b/packages/adapter-fs/mod.js similarity index 71% rename from packages/adapter-fs/index.js rename to packages/adapter-fs/mod.js index 4661da03..d9e44d16 100644 --- a/packages/adapter-fs/index.js +++ b/packages/adapter-fs/mod.js @@ -1,5 +1,8 @@ -const { merge } = require('ramda') -const adapter = require('./adapter') +import { R } from 
"./deps.js"; + +import adapter from "./adapter.js"; + +const { merge } = R; /** * hyper63 fs plugin for the storage port. This plugin is an adapter that @@ -28,11 +31,11 @@ const adapter = require('./adapter') * * storage section */ -module.exports = function (config) { +export default function (config) { return ({ - id: 'fs', - port: 'storage', + id: "fs", + port: "storage", load: merge(config), - link: ({ dir }) => () => adapter(dir) - }) + link: ({ dir }) => () => adapter(dir), + }); } diff --git a/packages/adapter-fs/package.json b/packages/adapter-fs/package.json index 8d2aee0f..ba6df634 100644 --- a/packages/adapter-fs/package.json +++ b/packages/adapter-fs/package.json @@ -4,20 +4,5 @@ "description": "hyper63 storage adapter using file system", "homepage": "https://github.com/hyper63/hyper63#readme", "repository": "https://github.com/hyper63/hyper63", - "license": "Apache-2.0", - "main": "index.js", - "scripts": { - "test": "tape *_test.js" - }, - "dependencies": { - "crocks": "^0.12.4", - "ramda": "^0.27.1" - }, - "devDependencies": { - "pull-stream": "^3.6.14", - "pull-stream-to-stream": "^2.0.0", - "stream-to-pull-stream": "^1.7.3", - "tape": "^5.0.1", - "uuid": "^8.3.1" - } + "license": "Apache-2.0" } diff --git a/packages/adapter-fs/scripts/test.sh b/packages/adapter-fs/scripts/test.sh new file mode 100755 index 00000000..e05493bd --- /dev/null +++ b/packages/adapter-fs/scripts/test.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +deno fmt --check +deno test --allow-read --allow-write diff --git a/packages/adapter-hooks/README.md b/packages/adapter-hooks/README.md index ed4f4cc9..6bdd0199 100644 --- a/packages/adapter-hooks/README.md +++ b/packages/adapter-hooks/README.md @@ -1,5 +1,3 @@ # hyper63 hooks adapter -The hooks adapter manages -all of the events from -the hyper63 service framework +The hooks adapter manages all of the events from the hyper63 service framework diff --git a/packages/adapter-hooks/adapter.js b/packages/adapter-hooks/adapter.js index 54f63b59..8200a16c 100644 --- a/packages/adapter-hooks/adapter.js +++ b/packages/adapter-hooks/adapter.js @@ -1,58 +1,64 @@ -const { Async } = require('crocks') -const { map } = require('ramda') +import { crocks, R } from "./deps.js"; -module.exports = function ({ asyncFetch, hooks }) { - const doNotify = action => hooks => Async.all( - map(notify(action), hooks) - ) +const { Async } = crocks; +const { map } = R; - return ({ - call: (action) => Async - .of(hooks) - - .map(matcher(action.type)) - .map(v => { console.log(`${action.type}: ${JSON.stringify(action.payload)}`); return v }) - .chain(doNotify(action)) +export default function ({ asyncFetch, hooks }) { + const doNotify = (action) => + (hooks) => + Async.all( + map(notify(action), hooks), + ); - .toPromise() - }) + return ({ + call: (action) => + Async + .of(hooks) + .map(matcher(action.type)) + .map((v) => { + console.log(`${action.type}: ${JSON.stringify(action.payload)}`); + return v; + }) + .chain(doNotify(action)) + .toPromise(), + }); - function notify (action) { + function notify(action) { return function (hook) { return asyncFetch(`${hook.target}`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify(action) + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(action), }) - .chain(resp => Async.fromPromise(resp.json.bind(resp))()) - } + .chain((resp) => Async.fromPromise(resp.json.bind(resp))()); + }; } } -function matcher (actionType) { +function matcher(actionType) { return 
function (hooks) { // simple brittle implementation // TODO: Refine and make safe before release... - return hooks.filter(hook => { - if (hook.matcher === '*') { - return true + return hooks.filter((hook) => { + if (hook.matcher === "*") { + return true; } if ( - hook.matcher.split(':')[0] === actionType.split(':')[0] && - hook.matcher.split(':')[1] === '*' + hook.matcher.split(":")[0] === actionType.split(":")[0] && + hook.matcher.split(":")[1] === "*" ) { - return true + return true; } if ( - hook.matcher.split(':')[1] === actionType.split(':')[1] && - hook.matcher.split(':')[0] === '*' + hook.matcher.split(":")[1] === actionType.split(":")[1] && + hook.matcher.split(":")[0] === "*" ) { - return true + return true; } if (hook.matcher === actionType) { - return true + return true; } - return false - }) - } + return false; + }); + }; } diff --git a/packages/adapter-hooks/adapter_test.js b/packages/adapter-hooks/adapter_test.js index e8f62364..ee55e615 100644 --- a/packages/adapter-hooks/adapter_test.js +++ b/packages/adapter-hooks/adapter_test.js @@ -1,42 +1,55 @@ -const test = require('tape') -const { Async } = require('crocks') -const createAdapter = require('./adapter.js') -const fetchMock = require('fetch-mock') +import { crocks } from "./deps.js"; +import { + assert, + assertEquals, + assertObjectMatch, + resolves, + spy, +} from "./dev_deps.js"; -const logDb = 'http://127.0.0.1:9200/log/_doc' +import createAdapter from "./adapter.js"; + +const { Async } = crocks; + +const logDb = "http://127.0.0.1:9200/log/_doc"; const hooks = [{ - matcher: '*', - target: logDb + matcher: "*", + target: logDb, }, { - matcher: 'TEST:*', - target: logDb + matcher: "TEST:*", + target: logDb, }, { - matcher: '*:METHOD', - target: logDb + matcher: "*:METHOD", + target: logDb, }, { - matcher: 'FOO:BAR', - target: logDb -}] - -const fetch = fetchMock.sandbox() - .post(`${logDb}`, - { - status: 201, - body: { ok: true }, - headers: { 'content-type': 'application/json' } - } - ) - -const asyncFetch = Async.fromPromise(fetch) - -test('using hooks log event', async t => { - const adapter = createAdapter({ asyncFetch, hooks }) - const result = await adapter.call({ - type: 'TEST:METHOD', - payload: { date: new Date().toISOString() } - }) - t.ok(result[0].ok) - t.equal(result.length, 3) - t.end() -}) + matcher: "FOO:BAR", + target: logDb, +}]; + +const fetch = spy(() => Promise.resolve(({ json: resolves({ ok: true }) }))); + +const asyncFetch = Async.fromPromise(fetch); + +Deno.test("using hooks log event", async () => { + const adapter = createAdapter({ asyncFetch, hooks }); + + const action = { + type: "TEST:METHOD", + payload: { date: new Date().toISOString() }, + }; + + const result = await adapter.call(action); + assert(result[0].ok); + assertEquals(result.length, 3); + assertObjectMatch(fetch.calls[0], { + args: [ + logDb, + { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(action), + }, + ], + }); +}); diff --git a/packages/adapter-hooks/deps.js b/packages/adapter-hooks/deps.js new file mode 100644 index 00000000..c943a792 --- /dev/null +++ b/packages/adapter-hooks/deps.js @@ -0,0 +1,2 @@ +export * as R from "https://cdn.skypack.dev/ramda@^0.27.1"; +export { default as crocks } from "https://cdn.skypack.dev/crocks@^0.12.4"; diff --git a/packages/adapter-hooks/deps_lock.json b/packages/adapter-hooks/deps_lock.json new file mode 100644 index 00000000..4437f1b4 --- /dev/null +++ b/packages/adapter-hooks/deps_lock.json @@ -0,0 +1,6 @@ +{ + 
"https://cdn.skypack.dev/-/crocks@v0.12.4-Mje8nEhNx2rmIpwz3ROp/dist=es2020,mode=imports/optimized/crocks.js": "93d587d18dc5f124f30e5b38de37a6471eb65309c94ef2ffc7a36dc40ab394da", + "https://cdn.skypack.dev/-/ramda@v0.27.1-3ePaNsppsnXYRcaNcaWn/dist=es2020,mode=imports/optimized/ramda.js": "0e51cd76039fc9669f9179d935b261bcbb19ecbb11d49e7e60cfbc14360a85d2", + "https://cdn.skypack.dev/crocks@^0.12.4": "d48852ce36c500f2770a2bc240cb6df9ffb2219d184b32b9be542e8560a6ff1d", + "https://cdn.skypack.dev/ramda@^0.27.1": "fb06d7de4305dcdb997f9adc903a463afcdac75d148d638295c420a8922048fd" +} diff --git a/packages/adapter-hooks/dev_deps.js b/packages/adapter-hooks/dev_deps.js new file mode 100644 index 00000000..e6b8e9a3 --- /dev/null +++ b/packages/adapter-hooks/dev_deps.js @@ -0,0 +1,6 @@ +export { + assert, + assertEquals, + assertObjectMatch, +} from "https://deno.land/std@0.98.0/testing/asserts.ts"; +export { resolves, spy } from "https://deno.land/x/mock@v0.9.5/mod.ts"; diff --git a/packages/adapter-hooks/dev_deps_lock.json b/packages/adapter-hooks/dev_deps_lock.json new file mode 100644 index 00000000..39557cec --- /dev/null +++ b/packages/adapter-hooks/dev_deps_lock.json @@ -0,0 +1,24 @@ +{ + "https://deno.land/std@0.93.0/async/delay.ts": "9de1d8d07d1927767ab7f82434b883f3d8294fb19cad819691a2ad81a728cf3d", + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c", + "https://deno.land/x/collections@v0.11.0/common.ts": "34e8367e3696c3f872ae417d7c421fa908a5a2125a1c4cb259f7dee9561a7096", + "https://deno.land/x/collections@v0.11.0/comparators.ts": "08563340dbb0051f032bacdcf854bcabd13d607d2e8cb1889826417419df89d0", + "https://deno.land/x/collections@v0.11.0/trees/bs_node.ts": "854d39f6d60cdcb47e1183f0fa67091e6bad59dd2b13252a8b38b1b37269fa67", + "https://deno.land/x/collections@v0.11.0/trees/bs_tree.ts": "694605e10a9f56caf8468c5eea06d60a8d81521dee75d4524a5f790b1ee713b5", + "https://deno.land/x/collections@v0.11.0/trees/rb_node.ts": "e5eecc211933140e0535fc371a4990bd5ac889b00136c96a4a610dbbf78d9ecb", + "https://deno.land/x/collections@v0.11.0/trees/rb_tree.ts": "e1e5f4e26bc255ebf41d72b498a2af903af69f0391276a4a0eac6d46fc43f942", + "https://deno.land/x/collections@v0.11.0/vector.ts": "23cb91087cc89ce9a1e10954336484b537a44bd786e21babc310ae85cb7ad52d", + "https://deno.land/x/mixins@v0.7.2/apply.ts": "dad7095324f5ce23693a0bc0eb3238f230c0ed2160ea8c285f3773ff7c76dcb0", + "https://deno.land/x/mock@v0.9.5/callbacks.ts": "610924901137b6a8ee2f5801d06a48af18da84c9486dd4775ff301de8a8d2b29", + "https://deno.land/x/mock@v0.9.5/deps/std/async/delay.ts": "b0855e5e208bcc08a6f7cb4debfc3ed408a4a3f1bc00ce36235481b94613f018", + "https://deno.land/x/mock@v0.9.5/deps/udibo/collections/comparators.ts": "c780b51a4fcdac4d506bf254d54702fdd46051bde9367fc819bd72cea041ac29", + "https://deno.land/x/mock@v0.9.5/deps/udibo/collections/trees/rb_tree.ts": "24839f7b1f66291d10d6640cb0051143d9b7effbd1e74271c3b564bb31a5b977", + "https://deno.land/x/mock@v0.9.5/deps/udibo/collections/vector.ts": "93285b7af952652e9733795c5677666e978e279ef0b7dae6013e9e017c022b30", + "https://deno.land/x/mock@v0.9.5/deps/udibo/mixins/apply.ts": "d2446714efc056d2c6a373dbf7a1a1db236991cae60b673db6e17d5c4d1bb8b7", + "https://deno.land/x/mock@v0.9.5/mod.ts": 
"9b44e3cbe9955ecf9f05b06cb9e8a47294bd84f2d33e0b85a8f5b4a0d740e0aa", + "https://deno.land/x/mock@v0.9.5/spy.ts": "c6c66b124f1c0b86f479e66e1daf687ef03aff068ec776ff61a3bf25ff3da5d0", + "https://deno.land/x/mock@v0.9.5/stub.ts": "762cb86de92038e17dfd1af25429036fa7aadecbafdb39fa98edfa01e20951eb", + "https://deno.land/x/mock@v0.9.5/time.ts": "7c6d42af7577c4da461f9581bf171f6e5cb7a22a243ba87ee4ed3c4f1922c2e4" +} diff --git a/packages/adapter-hooks/index.js b/packages/adapter-hooks/index.js deleted file mode 100644 index ee4cdf81..00000000 --- a/packages/adapter-hooks/index.js +++ /dev/null @@ -1,17 +0,0 @@ -const { merge } = require('ramda') -const { Async } = require('crocks') -const createFetch = require('@vercel/fetch') -const nodeFetch = require('node-fetch') -const createAdapter = require('./adapter.js') - -const fetch = createFetch(nodeFetch) -const asyncFetch = Async.fromPromise(fetch) - -module.exports = function (hooks) { - return Object.freeze({ - id: 'hooks', - port: 'hooks', - load: merge({ hooks }), - link: () => () => createAdapter({ asyncFetch, hooks }) - }) -} diff --git a/packages/adapter-hooks/index_test.js b/packages/adapter-hooks/index_test.js deleted file mode 100644 index 8a92daf2..00000000 --- a/packages/adapter-hooks/index_test.js +++ /dev/null @@ -1,8 +0,0 @@ -const test = require('tape') -const hooksAdapter = require('./index.js') - -test('call hooks', async t => { - const adapter = hooksAdapter().link()() - t.ok(adapter.call) - t.end() -}) diff --git a/packages/adapter-hooks/mod.js b/packages/adapter-hooks/mod.js new file mode 100644 index 00000000..59d57ef6 --- /dev/null +++ b/packages/adapter-hooks/mod.js @@ -0,0 +1,18 @@ +import { crocks, R } from "./deps.js"; + +import createAdapter from "./adapter.js"; + +const { merge } = R; +const { Async } = crocks; + +// TODO: Tyler. wrap with opionated approach like before with https://github.com/vercel/fetch +const asyncFetch = Async.fromPromise(fetch); + +export default function (hooks) { + return Object.freeze({ + id: "hooks", + port: "hooks", + load: merge({ hooks }), + link: () => () => createAdapter({ asyncFetch, hooks }), + }); +} diff --git a/packages/adapter-hooks/mod_test.js b/packages/adapter-hooks/mod_test.js new file mode 100644 index 00000000..b9cfe029 --- /dev/null +++ b/packages/adapter-hooks/mod_test.js @@ -0,0 +1,9 @@ +import { assert } from "./dev_deps.js"; + +import hooksAdapter from "./mod.js"; + +Deno.test("call hooks", () => { + const adapter = hooksAdapter().link()(); + + assert(adapter.call); +}); diff --git a/packages/adapter-hooks/package.json b/packages/adapter-hooks/package.json index 88050abb..307e7e9d 100644 --- a/packages/adapter-hooks/package.json +++ b/packages/adapter-hooks/package.json @@ -3,19 +3,5 @@ "version": "1.0.4", "homepage": "https://github.com/hyper63/hyper63#readme", "repository": "https://github.com/hyper63/hyper63", - "license": "Apache-2.0", - "main": "index.js", - "scripts": { - "test": "tape *_test.js" - }, - "dependencies": { - "@vercel/fetch": "^6.1.0", - "crocks": "^0.12.4", - "node-fetch": "^2.6.1", - "ramda": "^0.27.1" - }, - "devDependencies": { - "fetch-mock": "^9.10.7", - "tape": "^5.0.1" - } + "license": "Apache-2.0" } diff --git a/packages/adapter-memory/README.md b/packages/adapter-memory/README.md index 4c384632..47cd22ef 100644 --- a/packages/adapter-memory/README.md +++ b/packages/adapter-memory/README.md @@ -1,7 +1,6 @@ # hyper63 memory adapter -The memory adapter is an adapter for the `CACHE` port in the -hyper63 gateway. 
+The memory adapter is an adapter for the `CACHE` port in the hyper63 gateway. ## How to use @@ -35,7 +34,6 @@ export default { yarn test ``` - ## More information https://github.com/hyper63/hyper63 diff --git a/packages/adapter-memory/adapter.js b/packages/adapter-memory/adapter.js index 1c7bae0b..326b8bd4 100644 --- a/packages/adapter-memory/adapter.js +++ b/packages/adapter-memory/adapter.js @@ -1,4 +1,6 @@ -const { keys, merge, omit } = require('ramda') +import { R } from "./deps.js"; + +const { keys, merge, omit } = R; /** * hyper63 memory adapter @@ -27,102 +29,116 @@ const { keys, merge, omit } = require('ramda') * @property {Array} [docs] * @property {string} [msg] */ -module.exports = function adapter () { - let stores = {} +export default function adapter() { + let stores = {}; /** * @returns {Promise} */ - function index () { - return Promise.resolve(keys(stores)) + function index() { + return Promise.resolve(keys(stores)); } /** * @param {string} name * @returns {Promise} */ - function createStore (name) { + function createStore(name) { if (!name) { - return Promise.reject({ ok: false, msg: 'name must be a string value' }) + return Promise.reject({ ok: false, msg: "name must be a string value" }); } - const store = new Map() - stores = merge({ [name]: store }, stores) - return Promise.resolve({ ok: true }) + const store = new Map(); + stores = merge({ [name]: store }, stores); + return Promise.resolve({ ok: true }); } /** * @param {string} name * @returns {Promise} */ - function destroyStore (name) { - stores = omit([name], stores) - return Promise.resolve({ ok: true }) + function destroyStore(name) { + stores = omit([name], stores); + return Promise.resolve({ ok: true }); } /** * @param {CacheDoc} * @returns {Promise} */ - function createDoc ({ store, key, value }) { - if (!stores[store]) { return Promise.reject({ ok: false, msg: 'store is not found!' }) } + function createDoc({ store, key, value }) { + if (!stores[store]) { + return Promise.reject({ ok: false, msg: "store is not found!" }); + } - stores[store].set(key, value) - return Promise.resolve({ ok: true }) + stores[store].set(key, value); + return Promise.resolve({ ok: true }); } /** * @param {CacheInfo} * @returns {Promise} */ - function getDoc ({ store, key }) { - if (!stores[store]) { return Promise.reject({ ok: false, msg: 'store is not found!' }) } - const doc = stores[store].get(key) - return doc ? Promise.resolve(doc) : Promise.reject({ ok: false, status: 404, msg: 'doc not found' }) + function getDoc({ store, key }) { + if (!stores[store]) { + return Promise.reject({ ok: false, msg: "store is not found!" }); + } + const doc = stores[store].get(key); + return doc + ? Promise.resolve(doc) + : Promise.reject({ ok: false, status: 404, msg: "doc not found" }); } /** * @param {CacheDoc} * @returns {Promise} */ - function updateDoc ({ store, key, value }) { - if (!stores[store]) { return Promise.reject({ ok: false, msg: 'store is not found!' }) } + function updateDoc({ store, key, value }) { + if (!stores[store]) { + return Promise.reject({ ok: false, msg: "store is not found!" }); + } - stores[store].set(key, value) - return Promise.resolve({ ok: true }) + stores[store].set(key, value); + return Promise.resolve({ ok: true }); } /** * @param {CacheInfo} * @returns {Promise} */ - function deleteDoc ({ store, key }) { - if (!stores[store]) { return Promise.reject({ ok: false, msg: 'store is not found!' 
}) } + function deleteDoc({ store, key }) { + if (!stores[store]) { + return Promise.reject({ ok: false, msg: "store is not found!" }); + } - stores[store].delete(key) - return Promise.resolve({ ok: true }) + stores[store].delete(key); + return Promise.resolve({ ok: true }); } /** * @param {CacheQuery} * @returns {Promise} */ - function listDocs ({ store, pattern }) { - if (!stores[store]) { return Promise.reject({ ok: false, msg: 'store is not found!' }) } + function listDocs({ store, pattern }) { + if (!stores[store]) { + return Promise.reject({ ok: false, msg: "store is not found!" }); + } // https://stackoverflow.com/questions/26246601/wildcard-string-comparison-in-javascript - const docs = [] - function match (str, rule) { + const docs = []; + function match(str, rule) { // eslint-disable-next-line no-useless-escape - const escapeRegex = (str) => str.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, '\\$1') - return new RegExp('^' + rule.split('*').map(escapeRegex).join('.*') + '$').test(str) + const escapeRegex = (str) => + str.replace(/([.*+?^=!:${}()|\[\]\/\\])/g, "\\$1"); + return new RegExp("^" + rule.split("*").map(escapeRegex).join(".*") + "$") + .test(str); } stores[store].forEach((value, key) => { if (match(key, pattern)) { - docs.push({ key, value }) + docs.push({ key, value }); } - }) - return Promise.resolve({ ok: true, docs }) + }); + return Promise.resolve({ ok: true, docs }); } return Object.freeze({ @@ -133,6 +149,6 @@ module.exports = function adapter () { getDoc, updateDoc, deleteDoc, - listDocs - }) + listDocs, + }); } diff --git a/packages/adapter-memory/adapter_test.js b/packages/adapter-memory/adapter_test.js index 3ce67276..55f78dae 100644 --- a/packages/adapter-memory/adapter_test.js +++ b/packages/adapter-memory/adapter_test.js @@ -1,143 +1,153 @@ -const { v4 } = require('uuid') -const test = require('tape') -const memory = require('./adapter')() +import { + assert, + assertEquals, + assertObjectMatch, + v4 as v4Generator, +} from "./dev_deps.js"; -test('try to create cache store with no name', async t => { - const result = await memory.createStore(null).catch(e => e) +import createAdapter from "./adapter.js"; - t.ok(!result.ok, 'should be false') - t.equal(result.msg, 'name must be a string value', 'error msg is correct') +const v4 = v4Generator.generate.bind(v4Generator); - const result2 = await memory.createStore(undefined).catch(e => e) +const memory = createAdapter(); - t.ok(!result2.ok, 'should be false') - t.equal(result2.msg, 'name must be a string value', 'error msg is correct') +Deno.test("try to create cache store with no name", async () => { + const result = await memory.createStore(null).catch((e) => e); - t.end() -}) + assert(!result.ok, "should be false"); + assertEquals( + result.msg, + "name must be a string value", + "error msg is correct", + ); -test('find documents', async t => { - await memory.createStore('demo') + const result2 = await memory.createStore(undefined).catch((e) => e); + + assert(!result2.ok, "should be false"); + assertEquals( + result2.msg, + "name must be a string value", + "error msg is correct", + ); +}); + +Deno.test("find documents", async () => { + await memory.createStore("demo"); await memory.createDoc({ - store: 'demo', - key: 'marvel-spiderman', + store: "demo", + key: "marvel-spiderman", value: { - hero: 'spiderman', - name: 'Peter Parker', - universe: 'marvel' - } - }) + hero: "spiderman", + name: "Peter Parker", + universe: "marvel", + }, + }); await memory.createDoc({ - store: 'demo', - key: 'marvel-ironman', + store: 
"demo", + key: "marvel-ironman", value: { - hero: 'ironman', - name: 'Tony Stark', - universe: 'marvel' - } - }) + hero: "ironman", + name: "Tony Stark", + universe: "marvel", + }, + }); await memory.createDoc({ - store: 'demo', - key: 'dc-superman', + store: "demo", + key: "dc-superman", value: { - hero: 'superman', - name: 'Clark Kent', - universe: 'dc' - } - }) + hero: "superman", + name: "Clark Kent", + universe: "dc", + }, + }); const results = await memory.listDocs({ - store: 'demo', - pattern: 'dc-*' - }) - t.ok(results.ok, 'list docs was successful') - t.equal(results.docs[0].value.hero, 'superman', 'found match') - await memory.destroyStore('demo') - t.end() -}) - -test('create store', async t => { - t.plan(1) + store: "demo", + pattern: "dc-*", + }); + assert(results.ok, "list docs was successful"); + assertEquals(results.docs[0].value.hero, "superman", "found match"); + await memory.destroyStore("demo"); +}); - const result = await memory.createStore('default') - t.ok(result.ok) -}) +Deno.test("create store", async () => { + const result = await memory.createStore("default"); + assert(result.ok); +}); -test('delete store', async t => { - t.plan(1) - const result = await memory.destroyStore('default') - t.ok(result.ok) -}) +Deno.test("delete store", async () => { + const result = await memory.destroyStore("default"); + assert(result.ok); +}); -test('create doc', async t => { - t.plan(1) - const store = v4() - await memory.createStore(store) +Deno.test("create doc", async () => { + const store = v4(); + await memory.createStore(store); await memory.createDoc({ store: store, - key: '1', - value: { hello: 'world' } - }) + key: "1", + value: { hello: "world" }, + }); const result = await memory.getDoc({ store: store, - key: '1' - }) - t.deepEqual(result, { hello: 'world' }) - await memory.destroyStore(store) -}) + key: "1", + }); + assertObjectMatch(result, { hello: "world" }); + await memory.destroyStore(store); +}); -test('get doc', async t => { - t.plan(1) - const store = v4() - await memory.createStore(store) +Deno.test("get doc", async () => { + const store = v4(); + await memory.createStore(store); await memory.createDoc({ store, - key: '2', - value: { foo: 'bar' } - }) + key: "2", + value: { foo: "bar" }, + }); const result = await memory.getDoc({ - store, key: '2' - }) - t.deepEqual(result, { foo: 'bar' }) - await memory.destroyStore(store) -}) + store, + key: "2", + }); + assertObjectMatch(result, { foo: "bar" }); + await memory.destroyStore(store); +}); -test('update doc', async t => { - t.plan(1) - const store = v4() - await memory.createStore(store) +Deno.test("update doc", async () => { + const store = v4(); + await memory.createStore(store); await memory.createDoc({ store, - key: '2', - value: { foo: 'bar' } - }) + key: "2", + value: { foo: "bar" }, + }); await memory.updateDoc({ store, - key: '2', - value: { beep: 'boop' } - }) + key: "2", + value: { beep: "boop" }, + }); const result = await memory.getDoc({ - store, key: '2' - }) - t.deepEqual(result, { beep: 'boop' }) - await memory.destroyStore(store) -}) + store, + key: "2", + }); + assertObjectMatch(result, { beep: "boop" }); + await memory.destroyStore(store); +}); -test('delete doc', async t => { - t.plan(1) - const store = v4() - await memory.createStore(store) +Deno.test("delete doc", async () => { + const store = v4(); + await memory.createStore(store); await memory.createDoc({ store, - key: '2', - value: { foo: 'bar' } - }) + key: "2", + value: { foo: "bar" }, + }); await memory.deleteDoc({ store, - key: '2' 
- }) + key: "2", + }); const result = await memory.getDoc({ - store, key: '2' - }).catch(e => e) - t.notOk(result.ok) - await memory.destroyStore(store) -}) + store, + key: "2", + }).catch((e) => e); + assertEquals(result.ok, false); + await memory.destroyStore(store); +}); diff --git a/packages/adapter-memory/deps.js b/packages/adapter-memory/deps.js new file mode 100644 index 00000000..75a43989 --- /dev/null +++ b/packages/adapter-memory/deps.js @@ -0,0 +1 @@ +export * as R from "https://cdn.skypack.dev/ramda@^0.27.1"; diff --git a/packages/adapter-memory/deps_lock.json b/packages/adapter-memory/deps_lock.json new file mode 100644 index 00000000..c956c18b --- /dev/null +++ b/packages/adapter-memory/deps_lock.json @@ -0,0 +1,4 @@ +{ + "https://cdn.skypack.dev/-/ramda@v0.27.1-3ePaNsppsnXYRcaNcaWn/dist=es2020,mode=imports/optimized/ramda.js": "0e51cd76039fc9669f9179d935b261bcbb19ecbb11d49e7e60cfbc14360a85d2", + "https://cdn.skypack.dev/ramda@^0.27.1": "fb06d7de4305dcdb997f9adc903a463afcdac75d148d638295c420a8922048fd" +} diff --git a/packages/adapter-memory/dev_deps.js b/packages/adapter-memory/dev_deps.js new file mode 100644 index 00000000..1fa2d981 --- /dev/null +++ b/packages/adapter-memory/dev_deps.js @@ -0,0 +1,6 @@ +export { + assert, + assertEquals, + assertObjectMatch, +} from "https://deno.land/std@0.98.0/testing/asserts.ts"; +export { v4 } from "https://deno.land/std@0.98.0/uuid/mod.ts"; diff --git a/packages/adapter-memory/dev_deps_lock.json b/packages/adapter-memory/dev_deps_lock.json new file mode 100644 index 00000000..f557aa80 --- /dev/null +++ b/packages/adapter-memory/dev_deps_lock.json @@ -0,0 +1,12 @@ +{ + "https://deno.land/std@0.98.0/_util/assert.ts": "2f868145a042a11d5ad0a3c748dcf580add8a0dbc0e876eaa0026303a5488f58", + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/hash/sha1.ts": "1cca324b4b253885a47f121adafcfac55b4cc96113e22b338e1db26f37a730b8", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c", + "https://deno.land/std@0.98.0/uuid/_common.ts": "e985d221890ce301e8dfef4e7cbd01ff45b64886f6ca65ff3f03e64d8a5ff2b5", + "https://deno.land/std@0.98.0/uuid/mod.ts": "20e2a8209ec811fd8bf7218e83cf2d666556cdc947435b393f7f8edb9dcf14a4", + "https://deno.land/std@0.98.0/uuid/v1.ts": "3b242479ab2da5a6f5cdbbfeaa5db7a31966e688237f372ff50e489b93a9be07", + "https://deno.land/std@0.98.0/uuid/v4.ts": "949cadb3df3695770b41e08d5feb59401b10be73938c85542d27b8e6d14045f4", + "https://deno.land/std@0.98.0/uuid/v5.ts": "6b17fb670d608cfb545282c4940800ff4eb603ee16b0f92c576f483fd4112ed6" +} diff --git a/packages/adapter-memory/index.js b/packages/adapter-memory/index.js deleted file mode 100644 index d3c8f7e6..00000000 --- a/packages/adapter-memory/index.js +++ /dev/null @@ -1,17 +0,0 @@ -const { identity } = require('ramda') -const adapter = require('./adapter') - -/** - * hyper63 memory plugin adapter - * - * This memory plugin for the cache root is an adapter - * that just uses a JS Map to store documents in memory. 
- */ -module.exports = function memory () { - return ({ - id: 'memory', - port: 'cache', - load: identity, - link: () => () => adapter() - }) -} diff --git a/packages/adapter-memory/mod.js b/packages/adapter-memory/mod.js new file mode 100644 index 00000000..656278a4 --- /dev/null +++ b/packages/adapter-memory/mod.js @@ -0,0 +1,20 @@ +import { R } from "./deps.js"; + +import adapter from "./adapter.js"; + +const { identity } = R; + +/** + * hyper63 memory plugin adapter + * + * This memory plugin for the cache root is an adapter + * that just uses a JS Map to store documents in memory. + */ +export default function memory() { + return ({ + id: "memory", + port: "cache", + load: identity, + link: () => () => adapter(), + }); +} diff --git a/packages/adapter-memory/package.json b/packages/adapter-memory/package.json index cceb5b28..8358d560 100644 --- a/packages/adapter-memory/package.json +++ b/packages/adapter-memory/package.json @@ -4,16 +4,5 @@ "description": "hyper63 cache adapter", "homepage": "https://github.com/hyper63/hyper63#readme", "repository": "https://github.com/hyper63/hyper63", - "license": "Apache-2.0", - "main": "index.js", - "scripts": { - "test": "tape *_test.js" - }, - "dependencies": { - "ramda": "^0.27.1" - }, - "devDependencies": { - "tape": "^5.0.1", - "uuid": "^8.3.1" - } + "license": "Apache-2.0" } diff --git a/packages/adapter-memory/scripts/test.sh b/packages/adapter-memory/scripts/test.sh new file mode 100755 index 00000000..09bdb23f --- /dev/null +++ b/packages/adapter-memory/scripts/test.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +deno fmt --check +deno test diff --git a/packages/adapter-minio/README.md b/packages/adapter-minio/README.md index 12ede700..59aba88a 100644 --- a/packages/adapter-minio/README.md +++ b/packages/adapter-minio/README.md @@ -1,8 +1,7 @@ # hyper63 MINIO Adapter -MinIO is a storage service that allows you to create -buckets and upload objects, like files, and media to -a storage solution. +MinIO is a storage service that allows you to create buckets and upload objects, +like files and media, to a storage solution.
## Install diff --git a/packages/adapter-minio/adapter.js b/packages/adapter-minio/adapter.js index 5ecd5d97..ba648a3b 100644 --- a/packages/adapter-minio/adapter.js +++ b/packages/adapter-minio/adapter.js @@ -1,69 +1,69 @@ -const { Async } = require('crocks') +const { Async } = require("crocks"); -const prop = (key) => (obj) => obj[key] +const prop = (key) => (obj) => obj[key]; const asyncify = (client, method) => - Async.fromPromise(client[method].bind(client)) + Async.fromPromise(client[method].bind(client)); module.exports = (client) => ({ makeBucket: (name) => asyncify( client, - 'makeBucket' + "makeBucket", )(name).map(() => ({ - ok: true + ok: true, })).toPromise() - .catch(err => { + .catch((err) => { return { ok: false, - msg: err.code - } + msg: err.code, + }; }), removeBucket: (name) => asyncify( client, - 'removeBucket' + "removeBucket", )(name).map(() => ({ - ok: true + ok: true, })).toPromise() - .catch(err => { + .catch((err) => { return { ok: false, - msg: err.code - } + msg: err.code, + }; }), listBuckets: () => - asyncify(client, 'listBuckets')().map((buckets) => ({ + asyncify(client, "listBuckets")().map((buckets) => ({ ok: true, - buckets: buckets.map(prop('name')) + buckets: buckets.map(prop("name")), })).toPromise(), putObject: ({ bucket, object, stream }) => - asyncify(client, 'putObject')(bucket, object, stream).map(() => ({ - ok: true + asyncify(client, "putObject")(bucket, object, stream).map(() => ({ + ok: true, })).toPromise(), removeObject: ({ bucket, object }) => - asyncify(client, 'removeObject')(bucket, object).map(() => ({ - ok: true + asyncify(client, "removeObject")(bucket, object).map(() => ({ + ok: true, })).toPromise(), getObject: ({ bucket, object }) => - asyncify(client, 'getObject')(bucket, object).toPromise(), - listObjects: ({ bucket, prefix = '' }) => + asyncify(client, "getObject")(bucket, object).toPromise(), + listObjects: ({ bucket, prefix = "" }) => Async.of(client.listObjects(bucket, prefix)) .chain((s) => { return Async.fromPromise( () => new Promise((resolve, reject) => { - let objects = [] - s.on('data', (obj) => (objects = [...objects, obj.name])) - s.on('error', reject) - s.on('end', () => resolve(objects)) - }) - )() + let objects = []; + s.on("data", (obj) => (objects = [...objects, obj.name])); + s.on("error", reject); + s.on("end", () => resolve(objects)); + }), + )(); }) .map((result) => { return { ok: true, - objects: result - } - }).toPromise() -}) + objects: result, + }; + }).toPromise(), +}); diff --git a/packages/adapter-minio/adapter_test.js b/packages/adapter-minio/adapter_test.js index cc9da022..70d7fc07 100644 --- a/packages/adapter-minio/adapter_test.js +++ b/packages/adapter-minio/adapter_test.js @@ -1,27 +1,27 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ +// deno-lint-ignore-file no-unused-vars -const test = require('tape') -const createAdapter = require('./adapter') +const test = require("tape"); +const createAdapter = require("./adapter"); const client = ({ - makeBucket (name) { - return Promise.resolve() + makeBucket(name) { + return Promise.resolve(); }, - removeBucket (name) { - return Promise.resolve() - } -}) + removeBucket(name) { + return Promise.resolve(); + }, +}); -const adapter = createAdapter(client) +const adapter = createAdapter(client); -test('make bucket', async t => { - const result = await adapter.makeBucket('hello') - t.ok(result.ok) - t.end() -}) +test("make bucket", async (t) => { + const result = await adapter.makeBucket("hello"); + t.ok(result.ok); + t.end(); +}); -test('remove 
bucket', async t => { - const result = await adapter.removeBucket('hello') - t.ok(result.ok) - t.end() -}) +test("remove bucket", async (t) => { + const result = await adapter.removeBucket("hello"); + t.ok(result.ok); + t.end(); +}); diff --git a/packages/adapter-minio/index.js b/packages/adapter-minio/index.js index c675d07d..90f5b0a3 100644 --- a/packages/adapter-minio/index.js +++ b/packages/adapter-minio/index.js @@ -1,45 +1,45 @@ -const createAdapter = require('./adapter') -const Minio = require('minio') +const createAdapter = require("./adapter"); +const Minio = require("minio"); /** * @param {object} config * @returns {object} */ -module.exports = function MinioStorageAdapter (config) { +module.exports = function MinioStorageAdapter(config) { /** * @param {object} env */ - function load () { - return config + function load() { + return config; } /** * @param {object} env * @returns {function} */ - function link (env) { + function link(env) { /** * @param {object} adapter * @returns {object} */ return function () { // parse url - const config = new URL(env.url) + const config = new URL(env.url); const client = new Minio.Client({ endPoint: config.hostname, accessKey: config.username, secretKey: config.password, - useSSL: config.protocol === 'https:', - port: Number(config.port) - }) - return createAdapter(client) - } + useSSL: config.protocol === "https:", + port: Number(config.port), + }); + return createAdapter(client); + }; } return Object.freeze({ - id: 'minio-storage-adapter', - port: 'storage', + id: "minio-storage-adapter", + port: "storage", load, - link - }) -} + link, + }); +}; diff --git a/packages/adapter-minisearch/README.md b/packages/adapter-minisearch/README.md index afebc7b9..a87c2630 100644 --- a/packages/adapter-minisearch/README.md +++ b/packages/adapter-minisearch/README.md @@ -1,7 +1,6 @@ # MiniSearch Adapter -This adapter is for the search port, and it -implements an embedded search called +This adapter is for the search port, and it implements an embedded search called minisearch. ## How to use diff --git a/packages/adapter-minisearch/adapter.js b/packages/adapter-minisearch/adapter.js index 802457f5..b1ec02ce 100644 --- a/packages/adapter-minisearch/adapter.js +++ b/packages/adapter-minisearch/adapter.js @@ -1,5 +1,7 @@ -const MiniSearch = require('minisearch') -const { allPass, keys, reduce } = require('ramda') +import { MiniSearch, R } from "./deps.js"; + +const { allPass, keys, reduce } = R; + // types /** @@ -41,139 +43,167 @@ const { allPass, keys, reduce } = require('ramda') * @property {string} [msg] */ -module.exports = function () { - const indexes = new Map() - const datastores = new Map() +export default function () { + const indexes = new Map(); + const datastores = new Map(); /** * @param {IndexInfo} * @returns {Promise} */ - function createIndex ({ index, mappings }) { - if (!index) { return Promise.reject({ ok: false, msg: 'name is required to create index' }) } - if (!mappings) { return Promise.reject({ ok: false, msg: 'mappings object required, it should have fields property and storedFields property.' 
}) } - const sindex = new MiniSearch(mappings) - const store = new Map() - indexes.set(index, sindex) - datastores.set(index, store) - return Promise.resolve({ ok: true }) + function createIndex({ index, mappings }) { + if (!index) { + return Promise.reject({ + ok: false, + msg: "name is required to create index", + }); + } + if (!mappings) { + return Promise.reject({ + ok: false, + msg: + "mappings object required, it should have fields property and storedFields property.", + }); + } + const sindex = new MiniSearch(mappings); + const store = new Map(); + indexes.set(index, sindex); + datastores.set(index, store); + return Promise.resolve({ ok: true }); } /** * @param {string} name * @returns {Promise} */ - function deleteIndex (name) { - if (!name) { return Promise.reject({ ok: false, msg: 'name is required to create index' }) } - indexes.delete(name) - datastores.delete(name) - return Promise.resolve({ ok: true }) + function deleteIndex(name) { + if (!name) { + return Promise.reject({ + ok: false, + msg: "name is required to create index", + }); + } + indexes.delete(name); + datastores.delete(name); + return Promise.resolve({ ok: true }); } /** * @param {SearchDoc} * @returns {Promise} */ - function indexDoc ({ index, key, doc }) { - if (!index) { return Promise.reject({ ok: false, msg: 'index name is required!' }) } - if (!key) { return Promise.reject({ ok: false, msg: 'key is required!' }) } - if (!doc) { return Promise.reject({ ok: false, msg: 'doc is required!' }) } - - const search = indexes.get(index) - const store = datastores.get(index) - search.add(doc) - store.set(key, doc) - return Promise.resolve({ ok: true }) + function indexDoc({ index, key, doc }) { + if (!index) { + return Promise.reject({ ok: false, msg: "index name is required!" }); + } + if (!key) return Promise.reject({ ok: false, msg: "key is required!" }); + if (!doc) return Promise.reject({ ok: false, msg: "doc is required!" }); + + const search = indexes.get(index); + const store = datastores.get(index); + search.add(doc); + store.set(key, doc); + return Promise.resolve({ ok: true }); } /** * @param {SearchInfo} * @returns {Promise} */ - function getDoc ({ index, key }) { - if (!index) { return Promise.reject({ ok: false, msg: 'index name is required!' }) } - if (!key) { return Promise.reject({ ok: false, msg: 'key is required!' }) } + function getDoc({ index, key }) { + if (!index) { + return Promise.reject({ ok: false, msg: "index name is required!" }); + } + if (!key) return Promise.reject({ ok: false, msg: "key is required!" }); - const store = datastores.get(index) - const doc = store.get(key) - return Promise.resolve(doc === undefined ? null : doc) + const store = datastores.get(index); + const doc = store.get(key); + return Promise.resolve(doc === undefined ? null : doc); } /** * @param {SearchDoc} * @returns {Promise} */ - function updateDoc ({ index, key, doc }) { - if (!index) { return Promise.reject({ ok: false, msg: 'index name is required!' }) } - if (!key) { return Promise.reject({ ok: false, msg: 'key is required!' }) } - if (!doc) { return Promise.reject({ ok: false, msg: 'doc is required!' }) } - - const search = indexes.get(index) - const store = datastores.get(index) - const oldDoc = store.get(key) - search.remove(oldDoc) - search.add(doc) - store.set(key, doc) - return Promise.resolve({ ok: true }) + function updateDoc({ index, key, doc }) { + if (!index) { + return Promise.reject({ ok: false, msg: "index name is required!" 
}); + } + if (!key) return Promise.reject({ ok: false, msg: "key is required!" }); + if (!doc) return Promise.reject({ ok: false, msg: "doc is required!" }); + + const search = indexes.get(index); + const store = datastores.get(index); + const oldDoc = store.get(key); + search.remove(oldDoc); + search.add(doc); + store.set(key, doc); + return Promise.resolve({ ok: true }); } /** * @param {SearchInfo} * @returns {Promise} */ - function removeDoc ({ index, key }) { - if (!index) { return Promise.reject({ ok: false, msg: 'index name is required!' }) } - if (!key) { return Promise.reject({ ok: false, msg: 'key is required!' }) } - - const search = indexes.get(index) - const store = datastores.get(index) - const oldDoc = store.get(key) - search.remove(oldDoc) - store.delete(key) - return Promise.resolve({ ok: true }) + function removeDoc({ index, key }) { + if (!index) { + return Promise.reject({ ok: false, msg: "index name is required!" }); + } + if (!key) return Promise.reject({ ok: false, msg: "key is required!" }); + + const search = indexes.get(index); + const store = datastores.get(index); + const oldDoc = store.get(key); + search.remove(oldDoc); + store.delete(key); + return Promise.resolve({ ok: true }); } /** * @param {BulkIndex} * @returns {Promise} */ - function bulk ({ index, docs }) { - if (!index) { return Promise.reject({ ok: false, msg: 'index name is required!' }) } - if (!docs) { return Promise.reject({ ok: false, msg: 'docs is required!' }) } + function bulk({ index, docs }) { + if (!index) { + return Promise.reject({ ok: false, msg: "index name is required!" }); + } + if (!docs) return Promise.reject({ ok: false, msg: "docs is required!" }); - const search = indexes.get(index) - search.addAll(docs) - return Promise.resolve({ ok: true, results: [] }) + const search = indexes.get(index); + search.addAll(docs); + return Promise.resolve({ ok: true, results: [] }); } - function createFilterFn (object) { + function createFilterFn(object) { return allPass(reduce( (acc, k) => { - return acc.concat(result => result[k] === object[k]) + return acc.concat((result) => result[k] === object[k]); }, [], - keys(object) - )) + keys(object), + )); } /** * * @param {SearchQuery} * @returns {Promise} */ - function query ({ index, q: { query, fields, filter } }) { - if (!index) { return Promise.reject({ ok: false, msg: 'index name is required!' }) } - if (!query) { return Promise.reject({ ok: false, msg: 'query is required!' }) } + function query({ index, q: { query, fields, filter } }) { + if (!index) { + return Promise.reject({ ok: false, msg: "index name is required!" }); + } + if (!query) return Promise.reject({ ok: false, msg: "query is required!" }); - const search = indexes.get(index) - let options = {} + const search = indexes.get(index); + let options = {}; // if fields - options = fields ? { ...options, fields } : options + options = fields ? 
{ ...options, fields } : options; if (filter) { - options = { ...options, filter: createFilterFn(filter) } + options = { ...options, filter: createFilterFn(filter) }; } - const results = search.search(query, options) - return Promise.resolve({ ok: true, matches: results }) + const results = search.search(query, options); + return Promise.resolve({ ok: true, matches: results }); } return Object.freeze({ @@ -184,6 +214,6 @@ module.exports = function () { updateDoc, removeDoc, bulk, - query - }) + query, + }); }
diff --git a/packages/adapter-minisearch/adapter_test.js b/packages/adapter-minisearch/adapter_test.js index 5c5ecba2..f2ae8ffa 100644 --- a/packages/adapter-minisearch/adapter_test.js +++ b/packages/adapter-minisearch/adapter_test.js @@ -1,86 +1,103 @@ -const test = require('tape') -const adapter = require('./adapter')() +import { assert, assertEquals } from "./dev_deps.js"; -test('minisearch tests', async t => { - t.plan(10) +import createAdapter from "./adapter.js"; + +const adapter = createAdapter(); + +// TODO: Tyler. Make tests independent of each other + +Deno.test("create index", async () => { const result = await adapter.createIndex({ - index: 'default', + index: "default", mappings: { - fields: ['title', 'body'], - storeFields: ['title', 'body', 'category'] - } - }) - t.ok(result.ok, 'create index') - + fields: ["title", "body"], + storeFields: ["title", "body", "category"], + }, + }); + assert(result.ok); +}); + +Deno.test("index doc", async () => { const result2 = await adapter.indexDoc({ - index: 'default', - key: '1', + index: "default", + key: "1", doc: { - id: '1', - title: 'Search is fun', - body: 'This is a search post about cool and exciting stuff', - category: 'search' - } - }) + id: "1", + title: "Search is fun", + body: "This is a search post about cool and exciting stuff", + category: "search", + }, + }); - t.ok(result2.ok, 'index doc') + assert(result2.ok); +}); +Deno.test("get document", async () => { const result3 = await adapter.getDoc({ - index: 'default', - key: '1' - }) - t.equal(result3.id, '1', 'get document') + index: "default", + key: "1", + }); + assertEquals(result3.id, "1"); +}); + +Deno.test("update document", async () => { const result4 = await adapter.updateDoc({ - index: 'default', - key: '1', + index: "default", + key: "1", doc: { - id: '1', - title: 'Search is cool', - body: 'This is a search post and it is fun', - category: 'search' - } - }) - - t.ok(result4.ok) + id: "1", + title: "Search is cool", + body: "This is a search post and it is fun", + category: "search", + }, + }); const newDoc = await adapter.getDoc({ - index: 'default', - key: '1' - }) + index: "default", + key: "1", + }); - t.equal(newDoc.title, 'Search is cool') + assertEquals(newDoc.title, "Search is cool"); + assert(result4.ok); +}); +Deno.test("query doc", async () => { const searchResults = await adapter.query({ - index: 'default', - q: { query: 'Search is cool' } - }) + index: "default", + q: { query: "Search is cool" }, + }); - t.equal(searchResults.matches[0].id, '1', 'found doc') + assertEquals(searchResults.matches[0].id, "1"); const searchResults2 = await adapter.query({ - index: 'default', + index: "default", q: { - query: 'Search is cool', - filter: { category: 'search' } - } - }) + query: "Search is cool", + filter: { category: "search" }, + }, + }); - t.equal(searchResults2.matches[0].id, '1', 'found doc') + assertEquals(searchResults2.matches[0].id, "1", "found doc"); +});
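The `filter` option handed to MiniSearch in `query` above is built by `createFilterFn`, which folds an object of field/value pairs into a list of equality predicates and combines them with Ramda's `allPass`, so a result only matches when every listed field equals its expected value. A standalone sketch of that predicate builder, using the same `deps.js` import the adapter uses:

```js
// Build one predicate from { field: expectedValue } criteria: a search
// result passes only if every listed field matches exactly.
import { R } from "./deps.js";

const { allPass, keys, reduce } = R;

const createFilterFn = (criteria) =>
  allPass(reduce(
    (acc, k) => acc.concat((result) => result[k] === criteria[k]),
    [],
    keys(criteria),
  ));

const isSearchPost = createFilterFn({ category: "search" });
isSearchPost({ id: "1", category: "search" }); // => true
isSearchPost({ id: "2", category: "blog" }); // => false
```

+Deno.test("remove doc", async () => { const docDeleteResult = await adapter.removeDoc({ 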
- index: 'default', - key: '1' - }) - t.ok(docDeleteResult.ok, 'deleted doc') + index: "default", + key: "1", + }); + + assert(docDeleteResult.ok); const deletedDoc = await adapter.getDoc({ - index: 'default', - key: '1' - }) + index: "default", + key: "1", + }); - t.equal(deletedDoc, null, 'could not find doc') + assertEquals(deletedDoc, null); +}); - const deleteResult = await adapter.deleteIndex('default') - t.ok(deleteResult.ok, 'delete index') -}) +Deno.test("delete index", async () => { + const deleteResult = await adapter.deleteIndex("default"); + assert(deleteResult.ok); +}); diff --git a/packages/adapter-minisearch/deps.js b/packages/adapter-minisearch/deps.js new file mode 100644 index 00000000..e19ca57c --- /dev/null +++ b/packages/adapter-minisearch/deps.js @@ -0,0 +1,2 @@ +export * as R from "https://cdn.skypack.dev/ramda@^0.27.1"; +export { default as MiniSearch } from "https://cdn.skypack.dev/minisearch"; diff --git a/packages/adapter-minisearch/deps_lock.json b/packages/adapter-minisearch/deps_lock.json new file mode 100644 index 00000000..dde086f6 --- /dev/null +++ b/packages/adapter-minisearch/deps_lock.json @@ -0,0 +1,6 @@ +{ + "https://cdn.skypack.dev/-/minisearch@v3.0.2-jqmvazcIQ6riIWVKlgnX/dist=es2020,mode=imports/optimized/minisearch.js": "eb73afc17ca41033d1322446c2137953a7d7864b857a788037ed9e5ab7954353", + "https://cdn.skypack.dev/-/ramda@v0.27.1-3ePaNsppsnXYRcaNcaWn/dist=es2020,mode=imports/optimized/ramda.js": "0e51cd76039fc9669f9179d935b261bcbb19ecbb11d49e7e60cfbc14360a85d2", + "https://cdn.skypack.dev/minisearch": "b5f61eda24249e6afbdc9f4800ac9ec101678a35ab075192ff0e1b4c7ad0cb7d", + "https://cdn.skypack.dev/ramda@^0.27.1": "fb06d7de4305dcdb997f9adc903a463afcdac75d148d638295c420a8922048fd" +} diff --git a/packages/adapter-minisearch/dev_deps.js b/packages/adapter-minisearch/dev_deps.js new file mode 100644 index 00000000..4d0288dc --- /dev/null +++ b/packages/adapter-minisearch/dev_deps.js @@ -0,0 +1,4 @@ +export { + assert, + assertEquals, +} from "https://deno.land/std@0.98.0/testing/asserts.ts"; diff --git a/packages/adapter-minisearch/dev_deps_lock.json b/packages/adapter-minisearch/dev_deps_lock.json new file mode 100644 index 00000000..7cf4ec42 --- /dev/null +++ b/packages/adapter-minisearch/dev_deps_lock.json @@ -0,0 +1,5 @@ +{ + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c" +} diff --git a/packages/adapter-minisearch/index.js b/packages/adapter-minisearch/mod.js similarity index 71% rename from packages/adapter-minisearch/index.js rename to packages/adapter-minisearch/mod.js index 2de9407d..74990fd7 100644 --- a/packages/adapter-minisearch/index.js +++ b/packages/adapter-minisearch/mod.js @@ -1,5 +1,8 @@ -const { identity } = require('ramda') -const adapter = require('./adapter') +import { R } from "./deps.js"; + +import adapter from "./adapter.js"; + +const { identity } = R; /** * hyper63 search plugin for the search port. 
This plugin is an adapter that @@ -28,11 +31,11 @@ const adapter = require('./adapter') * * search section */ -module.exports = function memory () { +export default function memory() { return ({ - id: 'minisearch', - port: 'search', + id: "minisearch", + port: "search", load: identity, - link: () => () => adapter() - }) + link: () => () => adapter(), + }); }
diff --git a/packages/adapter-minisearch/package.json b/packages/adapter-minisearch/package.json index 9b101d01..ba3b01c4 100644 --- a/packages/adapter-minisearch/package.json +++ b/packages/adapter-minisearch/package.json @@ -3,16 +3,5 @@ "version": "1.0.9", "homepage": "https://github.com/hyper63/hyper63#readme", "repository": "https://github.com/hyper63/hyper63", - "license": "Apache-2.0", - "main": "index.js", - "scripts": { - "test": "tape *_test.js" - }, - "dependencies": { - "minisearch": "^3.0.2", - "ramda": "^0.27.1" - }, - "devDependencies": { - "tape": "^5.0.1" - } + "license": "Apache-2.0" }
diff --git a/packages/adapter-minisearch/scripts/test.sh b/packages/adapter-minisearch/scripts/test.sh new file mode 100755 index 00000000..09bdb23f --- /dev/null +++ b/packages/adapter-minisearch/scripts/test.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +deno fmt --check +deno test
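The `mod.js` rename above also shows the contract every adapter package in this repo returns: a frozen object carrying an `id`, the `port` it implements, a `load` step that refines configuration, and a `link` step that produces the concrete adapter. A hypothetical skeleton of that shape (`my-plugin` and `createAdapter` are illustrative placeholders, not names from this diff):

```js
// Hypothetical plugin skeleton showing the id/port/load/link contract.
// createAdapter stands in for a module like adapter.js above.
const createAdapter = (env) => ({ /* port methods built from env */ });

export default function myPlugin(config) {
  return Object.freeze({
    id: "my-plugin", // unique plugin id
    port: "search", // which hyper63 port this implements
    load: (env) => ({ ...env, ...config }), // refine the port's config
    link: (env) => () => createAdapter(env), // build the adapter instance
  });
}
```

diff --git a/packages/adapter-pouchdb/README.md b/packages/adapter-pouchdb/README.md index c6156614..ac86218f 100644 --- a/packages/adapter-pouchdb/README.md +++ b/packages/adapter-pouchdb/README.md @@ -1,30 +1,31 @@ # hyper63 PouchDB Adapter -hyper63 is a service framework that provides a common set of service commands for applications. +hyper63 is a service framework that provides a common set of service commands +for applications. -* data -* cache -* storage -* search +- data +- cache +- storage +- search -This adapter is for the data service, it uses pouchdb as the data service stores. +This adapter is for the data service; it uses PouchDB as the data service +store. ## How to configure -``` sh +```sh npm install @hyper63/adapter-pouchdb ``` -``` js -import pouchdb from '@hyper63/adapter-pouchdb' +```js +import pouchdb from "@hyper63/adapter-pouchdb"; export default { app: express, adapters: [ - ... 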
- { port: 'data', plugins: [pouchdb({dir: './data'})]} - ] -} + ...{ port: "data", plugins: [pouchdb({ dir: "./data" })] }, + ], +}; ``` ## How to use @@ -33,9 +34,6 @@ see https://purple-elephants.surge.sh ## Testing -``` sh +```sh yarn test ``` - - - diff --git a/packages/adapter-pouchdb/adapter.js b/packages/adapter-pouchdb/adapter.js index 13e88fc8..46a446fb 100644 --- a/packages/adapter-pouchdb/adapter.js +++ b/packages/adapter-pouchdb/adapter.js @@ -1,23 +1,37 @@ -const pouchdb = require('pouchdb') -const memory = require('pouchdb-adapter-memory') -const pouchdbFind = require('pouchdb-find') -const mkdirp = require('mkdirp') -const rimraf = require('rimraf') -const path = require('path') -const { Async } = require('crocks') -const { bichain } = require('crocks/pointfree') -const allow409 = require('./handle409') -const R = require('ramda') -const { assoc, compose, filter, identity, lens, map, merge, omit, over, pick, pluck, prop, propEq } = R -const bulk = require('./lib/bulk') +const pouchdb = require("pouchdb"); +const memory = require("pouchdb-adapter-memory"); +const pouchdbFind = require("pouchdb-find"); +const mkdirp = require("mkdirp"); +const rimraf = require("rimraf"); +const path = require("path"); +const { Async } = require("crocks"); +const { bichain } = require("crocks/pointfree"); +const allow409 = require("./handle409"); +const R = require("ramda"); +const { + assoc, + compose, + filter, + identity, + lens, + map, + merge, + omit, + over, + pick, + pluck, + prop, + propEq, +} = R; +const bulk = require("./lib/bulk"); -const makedir = Async.fromPromise(mkdirp) +const makedir = Async.fromPromise(mkdirp); // const rmdir = Async.fromNode(fs.rmdir) -const rmrf = Async.fromNode(rimraf) +const rmrf = Async.fromNode(rimraf); // add plugins -pouchdb.plugin(pouchdbFind) -pouchdb.plugin(memory) +pouchdb.plugin(pouchdbFind); +pouchdb.plugin(memory); /** * @typedef {Object} DataObject @@ -35,7 +49,12 @@ pouchdb.plugin(memory) * */ -const getDbNames = compose(map(prop('name')), filter(propEq('type', 'db')), pluck('doc'), prop('rows')) +const getDbNames = compose( + map(prop("name")), + filter(propEq("type", "db")), + pluck("doc"), + prop("rows"), +); /** * @param {string} root - databases location @@ -43,101 +62,124 @@ const getDbNames = compose(map(prop('name')), filter(propEq('type', 'db')), pluc module.exports = function (root) { // if no root then set pouchdb engine to be memory // if (!root) { throw new Error('root storage location required!') } - if (root) { makedir(path.resolve(root)) } + if (root) makedir(path.resolve(root)); // create master db to hold docs to databases - const sys = !root ? pouchdb('_system', { adapter: 'memory' }) : pouchdb(`${root}/_system`) - const databases = new Map() + const sys = !root + ? pouchdb("_system", { adapter: "memory" }) + : pouchdb(`${root}/_system`); + const databases = new Map(); sys.allDocs({ include_docs: true }) .then(getDbNames) // load databases .then( - map(n => databases.set(n, pouchdb(`${root}/${n}`))) + map((n) => databases.set(n, pouchdb(`${root}/${n}`))), ) - .catch(() => console.log('ERROR: Could not get databases!')) + .catch(() => console.log("ERROR: Could not get databases!")); /** * @param {string} name * @returns {Promise} */ - function createDatabase (name) { - if (!name) { return Promise.reject({ ok: false, msg: 'name is required!' }) } - return Async.of(root - ? 
pouchdb(path.resolve(`${root}/${name}`)) - : pouchdb(name, { adapter: 'memory' }) + function createDatabase(name) { + if (!name) return Promise.reject({ ok: false, msg: "name is required!" }); + return Async.of( + root + ? pouchdb(path.resolve(`${root}/${name}`)) + : pouchdb(name, { adapter: "memory" }), ) // add to system database - .chain(db => - // want to capture Reject and return Resolve if error is 409 + .chain((db) => + // want to capture Reject and return Resolve if error is 409 bichain( allow409, Async.Resolved, - Async.fromPromise(sys.put.bind(sys))({ _id: name, type: 'db', name: name }) + Async.fromPromise(sys.put.bind(sys))({ + _id: name, + type: "db", + name: name, + }), ) .map(() => db) ) // set in Map - .map(db => { - databases.set(name, db) - return { ok: true } + .map((db) => { + databases.set(name, db); + return { ok: true }; }) - .toPromise() + .toPromise(); } /** * @param {string} name * @returns {Promise} */ - function removeDatabase (name) { - if (!name) { return Promise.reject({ ok: false, msg: 'name is required!' }) } - databases.delete(name) + function removeDatabase(name) { + if (!name) return Promise.reject({ ok: false, msg: "name is required!" }); + databases.delete(name); return rmrf( path.resolve(`${root}/${name}*`), - { recursive: true } + { recursive: true }, ) .chain(() => { - const get = Async.fromPromise(sys.get) - const remove = Async.fromPromise(sys.remove) - return get(name).chain(remove) + const get = Async.fromPromise(sys.get); + const remove = Async.fromPromise(sys.remove); + return get(name).chain(remove); }) .map(() => ({ ok: true })) - .toPromise() + .toPromise(); } /** * @param {DataObject} * @returns {Promise} */ - function createDocument ({ db, id, doc }) { - if (!db) { return Promise.reject({ ok: false, msg: 'dbname is required!' }) } - if (!id) { return Promise.reject({ ok: false, msg: 'unique identifier is required!' }) } - if (!doc) { return Promise.reject({ ok: false, msg: 'data document is required!' }) } + function createDocument({ db, id, doc }) { + if (!db) return Promise.reject({ ok: false, msg: "dbname is required!" }); + if (!id) { + return Promise.reject({ + ok: false, + msg: "unique identifier is required!", + }); + } + if (!doc) { + return Promise.reject({ ok: false, msg: "data document is required!" }); + } - const pouch = databases.get(db) - if (!pouch) { return Promise.reject({ ok: false, msg: 'database not initalized!' }) } + const pouch = databases.get(db); + if (!pouch) { + return Promise.reject({ ok: false, msg: "database not initalized!" }); + } return pouch.put({ _id: id, - ...doc - }).then(omit(['rev'])) + ...doc, + }).then(omit(["rev"])); } /** * @param {DataInfo} * @returns {Promise} */ - function retrieveDocument ({ db, id }) { - if (!db) { return Promise.reject({ ok: false, msg: 'dbname is required!' }) } - if (!id) { return Promise.reject({ ok: false, msg: 'unique identifier is required!' }) } + function retrieveDocument({ db, id }) { + if (!db) return Promise.reject({ ok: false, msg: "dbname is required!" }); + if (!id) { + return Promise.reject({ + ok: false, + msg: "unique identifier is required!", + }); + } - const pouch = databases.get(db) - if (!pouch) { return Promise.reject({ ok: false, msg: 'database not initalized!' }) } + const pouch = databases.get(db); + if (!pouch) { + return Promise.reject({ ok: false, msg: "database not initalized!" 
}); + } return pouch.get(id) .then(compose( - omit(['_id', '_rev']), - assoc('id', id) - )) + omit(["_id", "_rev"]), + assoc("id", id), + )); // .then(doc => ({ok: true, doc})) } @@ -145,33 +187,49 @@ module.exports = function (root) { * @param {DataObject} * @returns {Promise} */ - function updateDocument ({ db, id, doc }) { - if (!db) { return Promise.reject({ ok: false, msg: 'dbname is required!' }) } - if (!id) { return Promise.reject({ ok: false, msg: 'unique identifier is required!' }) } - if (!doc) { return Promise.reject({ ok: false, msg: 'data document is required!' }) } + function updateDocument({ db, id, doc }) { + if (!db) return Promise.reject({ ok: false, msg: "dbname is required!" }); + if (!id) { + return Promise.reject({ + ok: false, + msg: "unique identifier is required!", + }); + } + if (!doc) { + return Promise.reject({ ok: false, msg: "data document is required!" }); + } - const pouch = databases.get(db) - if (!pouch) { return Promise.reject({ ok: false, msg: 'database not initalized!' }) } + const pouch = databases.get(db); + if (!pouch) { + return Promise.reject({ ok: false, msg: "database not initalized!" }); + } return pouch.get(id) - .then(pick(['_id', '_rev'])) + .then(pick(["_id", "_rev"])) .then(merge(doc)) .then(pouch.put.bind(pouch)) - .then(omit(['rev'])) + .then(omit(["rev"])); } /** * @param {DataInfo} * @returns {Promise} */ - function removeDocument ({ db, id }) { - if (!db) { return Promise.reject({ ok: false, msg: 'dbname is required!' }) } - if (!id) { return Promise.reject({ ok: false, msg: 'unique identifier is required!' }) } + function removeDocument({ db, id }) { + if (!db) return Promise.reject({ ok: false, msg: "dbname is required!" }); + if (!id) { + return Promise.reject({ + ok: false, + msg: "unique identifier is required!", + }); + } - const pouch = databases.get(db) - if (!pouch) { return Promise.reject({ ok: false, msg: 'database not initalized!' }) } + const pouch = databases.get(db); + if (!pouch) { + return Promise.reject({ ok: false, msg: "database not initalized!" }); + } return pouch.get(id) - .then(pouch.remove.bind(pouch)) + .then(pouch.remove.bind(pouch)); } /** @@ -194,13 +252,15 @@ module.exports = function (root) { * @param {DataQuery} * @returns {Promise} */ - function queryDocuments ({ db, query }) { - if (!db) { return Promise.reject({ ok: false, msg: 'dbname is required!' }) } - if (!query) { return Promise.reject({ ok: false, msg: 'query is required!' }) } - const xId = lens(prop('_id'), assoc('id')) + function queryDocuments({ db, query }) { + if (!db) return Promise.reject({ ok: false, msg: "dbname is required!" }); + if (!query) return Promise.reject({ ok: false, msg: "query is required!" }); + const xId = lens(prop("_id"), assoc("id")); - const pouch = databases.get(db) - if (!pouch) { return Promise.reject({ ok: false, msg: 'database not initalized!' }) } + const pouch = databases.get(db); + if (!pouch) { + return Promise.reject({ ok: false, msg: "database not initalized!" }); + } return pouch.find(query) .then(({ docs }) => { @@ -208,13 +268,13 @@ module.exports = function (root) { ok: true, docs: map( compose( - omit(['_id', '_rev']), - over(xId, identity) + omit(["_id", "_rev"]), + over(xId, identity), ), - docs - ) - }) - }) + docs, + ), + }); + }); } /** @@ -228,15 +288,24 @@ module.exports = function (root) { * @param {IndexInfo} * @returns {Promise} */ - function indexDocuments ({ db, name, fields }) { - if (!db) { return Promise.reject({ ok: false, msg: 'dbname is required!' 
}) } - if (!name) { return Promise.reject({ ok: false, msg: 'index name is required!' }) } - if (!fields) { return Promise.reject({ ok: false, msg: 'fields for index is required!' }) } + function indexDocuments({ db, name, fields }) { + if (!db) return Promise.reject({ ok: false, msg: "dbname is required!" }); + if (!name) { + return Promise.reject({ ok: false, msg: "index name is required!" }); + } + if (!fields) { + return Promise.reject({ + ok: false, + msg: "fields for index is required!", + }); + } - const pouch = databases.get(db) - if (!pouch) { return Promise.reject({ ok: false, msg: 'database not initalized!' }) } + const pouch = databases.get(db); + if (!pouch) { + return Promise.reject({ ok: false, msg: "database not initalized!" }); + } return pouch.createIndex({ index: { fields }, ddoc: name }) - .then(result => ({ ok: true, msg: result.result })) + .then((result) => ({ ok: true, msg: result.result })); } /** @@ -252,27 +321,30 @@ module.exports = function (root) { * @param {DataList} * @returns {Promise} */ - function listDocuments ({ db, limit, startkey, endkey, keys, descending }) { - const pouch = databases.get(db) - let options = { include_docs: true } - const xid = lens(prop('_id'), assoc('id')) + function listDocuments({ db, limit, startkey, endkey, keys, descending }) { + const pouch = databases.get(db); + // deno-lint-ignore camelcase + let options = { include_docs: true }; + const xid = lens(prop("_id"), assoc("id")); - options = limit ? merge({ limit }, options) : options - options = startkey ? merge({ startkey }, options) : options - options = endkey ? merge({ endkey }, options) : options - options = keys ? merge({ keys }, options) : options - options = descending ? merge({ descending }, options) : options - console.log({ options }) - return pouch.allDocs(options).then(results => { + options = limit ? merge({ limit }, options) : options; + options = startkey ? merge({ startkey }, options) : options; + options = endkey ? merge({ endkey }, options) : options; + options = keys ? merge({ keys }, options) : options; + options = descending ? 
merge({ descending }, options) : options; + console.log({ options }); + return pouch.allDocs(options).then((results) => { return ({ ok: true, docs: map( compose( - omit(['_rev', '_id']), - over(xid, identity) - ), pluck('doc', results.rows)) - }) - }) + omit(["_rev", "_id"]), + over(xid, identity), + ), + pluck("doc", results.rows), + ), + }); + }); } return Object.freeze({ @@ -285,6 +357,6 @@ module.exports = function (root) { queryDocuments, indexDocuments, listDocuments, - bulkDocuments: bulk(databases) - }) -} + bulkDocuments: bulk(databases), + }); +}; diff --git a/packages/adapter-pouchdb/adapter_test.js b/packages/adapter-pouchdb/adapter_test.js index 69097164..ccefa033 100644 --- a/packages/adapter-pouchdb/adapter_test.js +++ b/packages/adapter-pouchdb/adapter_test.js @@ -1,187 +1,188 @@ -const test = require('tape') -const createAdapter = require('./adapter') -const { v4 } = require('uuid') -const faker = require('faker') -const { times } = require('ramda') +const test = require("tape"); +const createAdapter = require("./adapter"); +const { v4 } = require("uuid"); +const faker = require("faker"); +const { times } = require("ramda"); -test('pouchdb add bulk docs non objects', async t => { - const adapter = createAdapter('/tmp') - const dbName = v4() - await adapter.createDatabase(dbName) +test("pouchdb add bulk docs non objects", async (t) => { + const adapter = createAdapter("/tmp"); + const dbName = v4(); + await adapter.createDatabase(dbName); const result = await adapter.bulkDocuments({ db: dbName, - docs: [1, 2, 3] - }).catch(e => e) - t.notOk(result.ok) - t.equal(result.msg, 'documents must be objects') - const r = await adapter.listDocuments({ db: dbName }) - console.log(r) - t.end() -}) - -test('pouchdb add bulk docs db not found', async t => { - const adapter = createAdapter('/tmp') + docs: [1, 2, 3], + }).catch((e) => e); + t.notOk(result.ok); + t.equal(result.msg, "documents must be objects"); + const r = await adapter.listDocuments({ db: dbName }); + console.log(r); + t.end(); +}); + +test("pouchdb add bulk docs db not found", async (t) => { + const adapter = createAdapter("/tmp"); const result = await adapter.bulkDocuments({ - db: 'foo', + db: "foo", docs: [ - { id: '1', type: 'movie', title: 'Ghostbusters' }, - { id: '2', type: 'movie', title: 'Groundhog Day' }, - { id: '3', _deleted: true } - ] - }).catch(e => e) - t.notOk(result.ok) - - t.equal(result.msg, 'db not found') - - t.end() -}) -test('pouchdb add bulk docs no db', async t => { - const adapter = createAdapter('/tmp') + { id: "1", type: "movie", title: "Ghostbusters" }, + { id: "2", type: "movie", title: "Groundhog Day" }, + { id: "3", _deleted: true }, + ], + }).catch((e) => e); + t.notOk(result.ok); + + t.equal(result.msg, "db not found"); + + t.end(); +}); +test("pouchdb add bulk docs no db", async (t) => { + const adapter = createAdapter("/tmp"); const result = await adapter.bulkDocuments({ db: null, docs: [ - { id: '1', type: 'movie', title: 'Ghostbusters' }, - { id: '2', type: 'movie', title: 'Groundhog Day' }, - { id: '3', _deleted: true } - ] - }).catch(e => e) - t.notOk(result.ok) - - t.equal(result.msg, 'db not defined') - - t.end() -}) - -test('pouchdb add bulk docs', async t => { - const adapter = createAdapter('/tmp') - const dbName = v4() - await adapter.createDatabase(dbName) + { id: "1", type: "movie", title: "Ghostbusters" }, + { id: "2", type: "movie", title: "Groundhog Day" }, + { id: "3", _deleted: true }, + ], + }).catch((e) => e); + t.notOk(result.ok); + + t.equal(result.msg, "db not 
defined"); + + t.end(); +}); + +test("pouchdb add bulk docs", async (t) => { + const adapter = createAdapter("/tmp"); + const dbName = v4(); + await adapter.createDatabase(dbName); await adapter.createDocument({ db: dbName, - id: '2', - doc: { hello: 'world' } - }) + id: "2", + doc: { hello: "world" }, + }); await adapter.createDocument({ db: dbName, - id: '3', - doc: { hello: 'world' } - }) + id: "3", + doc: { hello: "world" }, + }); const result = await adapter.bulkDocuments({ db: dbName, docs: [ - { id: '1', type: 'movie', title: 'Ghostbusters' }, - { id: '2', type: 'movie', title: 'Groundhog Day' }, - { id: '3', _deleted: true } - ] - }) - t.ok(result.ok) - t.equal(result.results.length, 3) + { id: "1", type: "movie", title: "Ghostbusters" }, + { id: "2", type: "movie", title: "Groundhog Day" }, + { id: "3", _deleted: true }, + ], + }); + t.ok(result.ok); + t.equal(result.results.length, 3); await adapter.removeDocument({ db: dbName, - id: '2' - }) - - t.end() -}) - -test('pouchdb create same db', async t => { - const adapter = createAdapter('/tmp') - const dbName = v4() - await adapter.createDatabase(dbName) - const result = await adapter.createDatabase(dbName).catch(e => e) - console.log(result) - t.ok(true) - t.end() -}) -test('pouchdb find', async t => { - const adapter = createAdapter('/tmp') - const dbName = v4() - await adapter.createDatabase(dbName) + id: "2", + }); + + t.end(); +}); + +test("pouchdb create same db", async (t) => { + const adapter = createAdapter("/tmp"); + const dbName = v4(); + await adapter.createDatabase(dbName); + const result = await adapter.createDatabase(dbName).catch((e) => e); + console.log(result); + t.ok(true); + t.end(); +}); +test("pouchdb find", async (t) => { + const adapter = createAdapter("/tmp"); + const dbName = v4(); + await adapter.createDatabase(dbName); await adapter.createDocument({ db: dbName, id: v4(), doc: { - username: 'twilson63', - name: 'Tom Wilson' - } - }) + username: "twilson63", + name: "Tom Wilson", + }, + }); await Promise.all( - times(() => adapter.createDocument({ - db: dbName, - id: v4(), - doc: faker.helpers.createCard() - }), 10) - ) + times(() => + adapter.createDocument({ + db: dbName, + id: v4(), + doc: faker.helpers.createCard(), + }), 10), + ); const results = await adapter.listDocuments({ db: dbName, - limit: 5 - }) - t.equal(results.docs.length, 5) + limit: 5, + }); + t.equal(results.docs.length, 5); await adapter.indexDocuments({ db: dbName, - name: 'username', - fields: ['username'] - }) + name: "username", + fields: ["username"], + }); const searchResults = await adapter.queryDocuments({ db: dbName, query: { selector: { - username: 'twilson63' + username: "twilson63", }, - use_index: 'username' - } - }) + use_index: "username", + }, + }); - await adapter.removeDatabase(dbName) - t.ok(searchResults.ok) - t.end() -}) + await adapter.removeDatabase(dbName); + t.ok(searchResults.ok); + t.end(); +}); -test('pouchdb adapter tests', async t => { - t.plan(5) - const adapter = createAdapter('/tmp') - const dbName = v4() +test("pouchdb adapter tests", async (t) => { + t.plan(5); + const adapter = createAdapter("/tmp"); + const dbName = v4(); - await adapter.createDatabase(dbName) + await adapter.createDatabase(dbName); const result = await adapter.createDocument({ db: dbName, - id: '1234', - doc: { hello: 'world' } - }) + id: "1234", + doc: { hello: "world" }, + }); - t.ok(result.ok, 'create doc success') + t.ok(result.ok, "create doc success"); const doc = await adapter.retrieveDocument({ db: dbName, - id: '1234' - }) 
+ id: "1234", + }); - t.deepEqual(doc, { hello: 'world', id: '1234' }, 'verify get doc') + t.deepEqual(doc, { hello: "world", id: "1234" }, "verify get doc"); const updateResult = await adapter.updateDocument({ db: dbName, - id: '1234', - doc: { foo: 'bar' } - }) - t.ok(updateResult.ok, 'update doc success') + id: "1234", + doc: { foo: "bar" }, + }); + t.ok(updateResult.ok, "update doc success"); const newDoc = await adapter.retrieveDocument({ db: dbName, - id: '1234' - }) - t.deepEqual(newDoc, { foo: 'bar', id: '1234' }, 'verify updated doc') + id: "1234", + }); + t.deepEqual(newDoc, { foo: "bar", id: "1234" }, "verify updated doc"); const deleteResult = await adapter.removeDocument({ db: dbName, - id: '1234' - }) + id: "1234", + }); - t.ok(deleteResult.ok, 'delete document') - await adapter.removeDatabase(dbName) -}) + t.ok(deleteResult.ok, "delete document"); + await adapter.removeDatabase(dbName); +}); diff --git a/packages/adapter-pouchdb/handle409.js b/packages/adapter-pouchdb/handle409.js index 3dd00ea4..01f9adcf 100644 --- a/packages/adapter-pouchdb/handle409.js +++ b/packages/adapter-pouchdb/handle409.js @@ -1,12 +1,12 @@ -const equals = require('crocks/pointfree/equals') -const maybeToAsync = require('crocks/Async/maybeToAsync') -const propSatisfies = require('crocks/predicates/propSatisfies') -const safe = require('crocks/Maybe/safe') -const substitution = require('crocks/combinators/substitution') +const equals = require("crocks/pointfree/equals"); +const maybeToAsync = require("crocks/Async/maybeToAsync"); +const propSatisfies = require("crocks/predicates/propSatisfies"); +const safe = require("crocks/Maybe/safe"); +const substitution = require("crocks/combinators/substitution"); const allow409 = substitution( maybeToAsync, - safe(propSatisfies('status', equals(409))) -) + safe(propSatisfies("status", equals(409))), +); -module.exports = allow409 +module.exports = allow409; diff --git a/packages/adapter-pouchdb/index.js b/packages/adapter-pouchdb/index.js index 6564739d..c2f10162 100644 --- a/packages/adapter-pouchdb/index.js +++ b/packages/adapter-pouchdb/index.js @@ -1,5 +1,5 @@ -const { merge } = require('ramda') -const adapter = require('./adapter') +const { merge } = require("ramda"); +const adapter = require("./adapter"); /** * hyper63 data plugin for the data port. 
This plugin is an adapter that @@ -29,11 +29,11 @@ const adapter = require('./adapter') * data section */ -module.exports = function pouchdb (config) { +module.exports = function pouchdb(config) { return ({ - id: 'pouchdb', - port: 'data', + id: "pouchdb", + port: "data", load: merge(config), - link: ({ dir }) => () => adapter(dir) - }) -} + link: ({ dir }) => () => adapter(dir), + }); +}; diff --git a/packages/adapter-pouchdb/lib/bulk.js b/packages/adapter-pouchdb/lib/bulk.js index 776c74aa..50ec464e 100644 --- a/packages/adapter-pouchdb/lib/bulk.js +++ b/packages/adapter-pouchdb/lib/bulk.js @@ -1,5 +1,21 @@ -const { assoc, compose, find, filter, identity, has, is, isNil, lens, map, omit, over, path, pluck, prop } = require('ramda') -const { Async } = require('crocks') +const { + assoc, + compose, + find, + filter, + identity, + has, + is, + isNil, + lens, + map, + omit, + over, + path, + pluck, + prop, +} = require("ramda"); +const { Async } = require("crocks"); /** * @@ -8,69 +24,71 @@ const { Async } = require('crocks') * @property {Array} docs * */ -const lensId = lens(prop('id'), assoc('_id')) -const lensRev = lens(path(['value', 'rev']), assoc('rev')) +const lensId = lens(prop("id"), assoc("_id")); +const lensRev = lens(path(["value", "rev"]), assoc("rev")); const xRevs = map( compose( - omit(['key', 'value']), - over(lensRev, identity) - ) -) + omit(["key", "value"]), + over(lensRev, identity), + ), +); const switchIds = map( compose( - omit(['id']), - over(lensId, identity) - ) -) + omit(["id"]), + over(lensId, identity), + ), +); -const pluckIds = pluck('id') -const getDocsThatExist = pouch => ids => - Async.fromPromise(pouch.allDocs.bind(pouch))({ - keys: ids - }) - .map(prop('rows')) - .map(filter(has('value'))) - .map(xRevs) +const pluckIds = pluck("id"); +const getDocsThatExist = (pouch) => + (ids) => + Async.fromPromise(pouch.allDocs.bind(pouch))({ + keys: ids, + }) + .map(prop("rows")) + .map(filter(has("value"))) + .map(xRevs); -const mergeWithRevs = docs => revs => - map(doc => { - const rev = find(rev => doc.id === rev.id, revs) - return rev ? { _rev: rev.rev, ...doc } : doc - }, docs) +const mergeWithRevs = (docs) => + (revs) => + map((doc) => { + const rev = find((rev) => doc.id === rev.id, revs); + return rev ? { _rev: rev.rev, ...doc } : doc; + }, docs); -const applyBulkDocs = pouch => - Async.fromPromise(pouch.bulkDocs.bind(pouch)) +const applyBulkDocs = (pouch) => Async.fromPromise(pouch.bulkDocs.bind(pouch)); /** * @param {BulkInput} * @returns {Promise} */ // NEED to handle bulk PUTS which require revs -const bulkDocuments = databases => ({ db, docs }) => { - if (isNil(db)) { - return Promise.reject({ ok: false, msg: 'db not defined' }) - } - const pouch = databases.get(db) +const bulkDocuments = (databases) => + ({ db, docs }) => { + if (isNil(db)) { + return Promise.reject({ ok: false, msg: "db not defined" }); + } + const pouch = databases.get(db); - if (isNil(pouch)) { - return Promise.reject({ ok: false, msg: 'db not found' }) - } + if (isNil(pouch)) { + return Promise.reject({ ok: false, msg: "db not found" }); + } - if (docs && !is(Object, docs[0])) { - return Promise.reject({ ok: false, msg: 'documents must be objects' }) - } - return Async.of(docs) - // validate that the docs have an id - // maybe reject if they don't? 
- .map(pluckIds) - .chain(getDocsThatExist(pouch)) - .map(mergeWithRevs(docs)) - .map(switchIds) - .chain(applyBulkDocs(pouch)) - .map(map(omit(['rev']))) - .map(docResults => ({ ok: true, results: docResults })) - .toPromise() -} + if (docs && !is(Object, docs[0])) { + return Promise.reject({ ok: false, msg: "documents must be objects" }); + } + return Async.of(docs) + // validate that the docs have an id + // maybe reject if they don't? + .map(pluckIds) + .chain(getDocsThatExist(pouch)) + .map(mergeWithRevs(docs)) + .map(switchIds) + .chain(applyBulkDocs(pouch)) + .map(map(omit(["rev"]))) + .map((docResults) => ({ ok: true, results: docResults })) + .toPromise(); + }; -module.exports = bulkDocuments +module.exports = bulkDocuments; diff --git a/packages/adapter-redis/adapter.js b/packages/adapter-redis/adapter.js index a2bab1be..f07f16a3 100644 --- a/packages/adapter-redis/adapter.js +++ b/packages/adapter-redis/adapter.js @@ -1,21 +1,28 @@ -const { Async } = require('crocks') -const { always, append, compose, identity, ifElse, isNil, map, not, remove } = require('ramda') +import { crocks, R } from "./deps.js"; -const createKey = (store, key) => `${store}_${key}` +const { Async } = crocks; +const { always, append, identity, ifElse, isNil, map, not, remove } = R; -module.exports = function (client) { - let stores = [] +const createKey = (store, key) => `${store}_${key}`; + +export default function (client) { + let stores = []; // redis commands - const get = Async.fromNode(client.get.bind(client)) - const set = Async.fromNode(client.set.bind(client)) - const del = Async.fromNode(client.del.bind(client)) - const keys = Async.fromNode(client.keys.bind(client)) - const scan = Async.fromNode(client.scan.bind(client)) + // key: Promise + const get = Async.fromPromise(client.get.bind(client)); + // key, value, { px, ex }: Promise + const set = Async.fromPromise(client.set.bind(client)); + // key, key, key: Promise + const del = Async.fromPromise(client.del.bind(client)); + // key: Promise + const keys = Async.fromPromise(client.keys.bind(client)); + // cursor, { type, pattern }: Promise<[string, string[]]> + const scan = Async.fromPromise(client.scan.bind(client)); const index = () => { - console.log('stores', stores) - return Promise.resolve(stores) - } + console.log("stores", stores); + return Promise.resolve(stores); + }; /** * @param {string} name @@ -23,36 +30,36 @@ module.exports = function (client) { */ const createStore = (name) => Async.of([]) - .map(append(createKey('store', name))) - .map(append('active')) - .chain(set) - .map(v => { - stores = append(name, stores) - return v + .map(append(createKey("store", name))) + .map(append("active")) + .chain((args) => set(...args)) + .map((v) => { + stores = append(name, stores); + return v; }) .map(always({ ok: true })) - .toPromise() + .toPromise(); /** * @param {string} name * @returns {Promise} */ const destroyStore = (name) => - del(createKey('store', name)) - .chain(() => keys(name + '_*')) + del(createKey("store", name)) + .chain(() => keys(name + "_*")) .chain( ifElse( (keys) => keys.length > 0, - del, - (keys) => Async.of(keys) - ) + (args) => del(...args), + (keys) => Async.of(keys), + ), ) - .map(v => { - stores = remove([name], stores) - return v + .map((v) => { + stores = remove([name], stores); + return v; }) .map(always({ ok: true })) - .toPromise() + .toPromise(); /** * @param {CacheDoc} @@ -65,16 +72,16 @@ module.exports = function (client) { .map( ifElse( () => not(isNil(ttl)), - compose(append(ttl), append('PX')), - 
identity - ) + append(({ px: ttl })), + identity, + ), ) - .chain(set) + .chain((args) => set(...args)) .map(() => ({ ok: true, - doc: value + doc: value, })) - .toPromise() + .toPromise(); /** * @param {CacheDoc} @@ -83,11 +90,15 @@ module.exports = function (client) { const getDoc = ({ store, key }) => get(createKey(store, key)).chain((v) => { if (!v) { - return Async.Rejected({ ok: false, status: 404, msg: 'document not found' }) + return Async.Rejected({ + ok: false, + status: 404, + msg: "document not found", + }); } - return Async.Resolved(JSON.parse(v)) + return Async.Resolved(JSON.parse(v)); }) - .toPromise() + .toPromise(); /** * @param {CacheDoc} @@ -100,35 +111,35 @@ module.exports = function (client) { .map( ifElse( () => not(isNil(ttl)), - compose(append(ttl), append('PX')), - identity - ) + append({ px: ttl }), + identity, + ), ) .chain((args) => set(...args)) .map(() => ({ - ok: true + ok: true, })) - .toPromise() + .toPromise(); /** * @param {CacheDoc} * @returns {Promise} */ const deleteDoc = ({ store, key }) => - del(createKey(store, key)).map(always({ ok: true })).toPromise() + del(createKey(store, key)).map(always({ ok: true })).toPromise(); /** * @param {CacheQuery} * @returns {Promise} */ - const listDocs = async ({ store, pattern = '*' }) => { - const matcher = `${store}_${pattern}` - return scan(0, 'MATCH', matcher) + const listDocs = async ({ store, pattern = "*" }) => { + const matcher = `${store}_${pattern}`; + return await scan(0, { pattern: matcher }) .chain(getKeys(scan, matcher)) .chain(getValues(get, store)) .map(formatResponse) - .toPromise() - } + .toPromise(); + }; return Object.freeze({ index, @@ -138,34 +149,32 @@ module.exports = function (client) { getDoc, updateDoc, deleteDoc, - listDocs - }) + listDocs, + }); } -function formatResponse (docs) { - return { ok: true, docs } +function formatResponse(docs) { + return { ok: true, docs }; } -function getKeys (scan, matcher) { - return function repeat ([cursor, keys]) { - return cursor === '0' +function getKeys(scan, matcher) { + return function repeat([cursor, keys]) { + return cursor === "0" ? 
Async.Resolved(keys) - : scan(cursor, 'MATCH', matcher) + : scan(cursor, { pattern: matcher }) .chain(repeat) - .map(v => keys.concat(v)) - } + .map((v) => keys.concat(v)); + }; } -function getValues (get, store) { +function getValues(get, store) { return function (keys) { return Async.all( - map(key => + map((key) => get(key).map((v) => ({ - key: key.replace(`${store}_`, ''), - value: JSON.parse(v) - }) - ) - , keys) - ) - } + key: key.replace(`${store}_`, ""), + value: JSON.parse(v), + })), keys), + ); + }; }
diff --git a/packages/adapter-redis/adapter_test.js b/packages/adapter-redis/adapter_test.js index 12efbe5f..ac26f32a 100644 --- a/packages/adapter-redis/adapter_test.js +++ b/packages/adapter-redis/adapter_test.js @@ -1,93 +1,159 @@ -const test = require('tape') -const redis = require('redis-mock') -// const redis = require('redis') -const createAdapter = require('./adapter') +import { + assert, + assertEquals, + assertObjectMatch, + resolves, + spy, +} from "./dev_deps.js"; -const client = redis.createClient() -const adapter = createAdapter(client) +import createAdapter from "./adapter.js"; -test('test scan', async t => { - let result = await adapter.createStore('word') +const baseStubClient = { + get: resolves(), + set: resolves(), + del: resolves(), + keys: resolves(), + scan: resolves(), +}; + +Deno.test("test scan", async () => { + let results = []; for (let i = 0; i < 100; i++) { - result = await adapter.createDoc({ - store: 'word', - key: 'bar' + i, - value: { bam: 'baz' } - }) + results.push(`key${i}`); } - result = await adapter.listDocs({ - store: 'word', - pattern: '*' - }) - t.ok(result.docs.length === 100) - t.end() -}) - -test('create redis store', async t => { - const result = await adapter.createStore('foo') - t.ok(result.ok) - t.end() -}) - -test('remove redis store', async t => { - const result = await adapter.destroyStore('foo') - t.ok(result.ok) - t.end() -}) - -test('create redis doc', async t => { + const adapter = createAdapter({ + ...baseStubClient, + get: resolves(JSON.stringify({ bam: "baz" })), + scan: resolves(["0", results]), + }); + + results = await adapter.listDocs({ + store: "word", + pattern: "*", + }); + + assert(results.docs.length === 100); +}); + +Deno.test("create redis store", async () => { + const adapter = createAdapter(baseStubClient); + + const result = await adapter.createStore("foo"); + assert(result.ok); +}); + +Deno.test("remove redis store - no keys", async () => { + const adapter = createAdapter({ + ...baseStubClient, + keys: resolves([]), + }); + + const result = await adapter.destroyStore("foo"); + assert(result.ok); +}); + +Deno.test("remove redis store - keys", async () => { + const del = spy(() => Promise.resolve(2)); + const adapter = createAdapter({ + ...baseStubClient, + del, + keys: resolves(["baz", "bar"]), + }); + + const result = await adapter.destroyStore("foo"); + + assert(result.ok); + assertObjectMatch(del.calls[0], { args: ["store_foo"] }); + assertObjectMatch(del.calls[1], { args: ["baz", "bar"] }); +}); + +Deno.test("create redis doc", async () => { + const adapter = createAdapter(baseStubClient); + const result = await adapter.createDoc({ - store: 'foo', - key: 'bar', - value: { bam: 'baz' } - }) - t.ok(result.ok) - t.end() -}) - -test('get redis doc', async t => { + store: "foo", + key: "bar", + value: { bam: "baz" }, + ttl: 5000, + }); + + assert(result.ok); + assertEquals(result.doc, { bam: "baz" }); +}); +
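A detail worth flagging in `createDoc`/`updateDoc` above: the `{ px: ttl }` options object is only appended to the `set` arguments when a `ttl` was actually supplied (the `ifElse`/`append` branch), so keys written without a ttl never expire. The same branch as a plain-Promise sketch, assuming a client whose `set(key, value, opts)` accepts a `{ px: milliseconds }` expiry option, as the deno redis client used here does:

```js
// Plain-Promise sketch of createDoc's conditional TTL handling.
const createKey = (store, key) => `${store}_${key}`;

async function createDoc(client, { store, key, value, ttl }) {
  const args = [createKey(store, key), JSON.stringify(value)];
  if (ttl != null) args.push({ px: ttl }); // px: expiry in milliseconds
  await client.set(...args); // e.g. set("store_key", "{...}", { px: 5000 })
  return { ok: true, doc: value };
}
```

+Deno.test("get redis doc", async () => { + const value = { bam: "baz" }; + const adapter = 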
createAdapter({ + ...baseStubClient, + get: resolves(JSON.stringify(value)), + }); + const result = await adapter.getDoc({ - store: 'foo', - key: 'bar' - }) - t.deepEqual(result, { bam: 'baz' }) - t.end() -}) - -test('update redis doc', async t => { + store: "foo", + key: "bar", + }); + + assertObjectMatch(result, value); +}); + +Deno.test("get redis doc - not found", async () => { + const adapter = createAdapter({ + ...baseStubClient, + get: resolves(undefined), + }); + + // Wanted to use assertThrowsAsync, but it requires throwing an Error type + try { + await adapter.getDoc({ + store: "foo", + key: "bar", + }); + + assert(false); + } catch (err) { + assertObjectMatch(err, { ok: false, status: 404 }); + } +}); + +Deno.test("update redis doc", async () => { + const adapter = createAdapter(baseStubClient); + const result = await adapter.updateDoc({ - store: 'foo', - key: 'bar', - value: { hello: 'world' } - }) - t.ok(result.ok) - t.end() -}) - -test('delete redis doc', async t => { + store: "foo", + key: "bar", + value: { hello: "world" }, + }); + + assert(result.ok); +}); + +Deno.test("delete redis doc", async () => { + const adapter = createAdapter(baseStubClient); + const result = await adapter.deleteDoc({ - store: 'foo', - key: 'bar' - }) - t.ok(result.ok) - t.end() -}) - -test('list redis docs', async t => { - const doc = { hello: 'world' } - await adapter.createDoc({ store: 'foo', key: 'beep', value: doc }) + store: "foo", + key: "bar", + }); + + assert(result.ok); +}); + +Deno.test("list redis docs", async () => { + const doc = { bam: "baz" }; + + const adapter = createAdapter({ + ...baseStubClient, + get: resolves(JSON.stringify(doc)), + scan: resolves(["0", ["key"]]), + }); + const result = await adapter.listDocs({ - store: 'foo', - pattern: '*' - }) - await adapter.deleteDoc({ - store: 'foo', - key: 'beep' - }) - - t.ok(result.ok) - t.equal(result.docs.length, 1) - t.deepEqual(result.docs[0].value, doc) - t.end() -}) + store: "foo", + pattern: "*", + }); + + assert(result.ok); + assertEquals(result.docs.length, 1); + assertObjectMatch(result.docs[0].value, doc); +}); diff --git a/packages/adapter-redis/deps.js b/packages/adapter-redis/deps.js new file mode 100644 index 00000000..19c9735b --- /dev/null +++ b/packages/adapter-redis/deps.js @@ -0,0 +1,4 @@ +export * as R from "https://cdn.skypack.dev/ramda@^0.27.1"; +export { default as crocks } from "https://cdn.skypack.dev/crocks@^0.12.4"; +export * as z from "https://cdn.skypack.dev/zod@^3"; +export * as redis from "https://deno.land/x/redis@v0.22.2/mod.ts"; diff --git a/packages/adapter-redis/deps_lock.json b/packages/adapter-redis/deps_lock.json new file mode 100644 index 00000000..71bc8d7a --- /dev/null +++ b/packages/adapter-redis/deps_lock.json @@ -0,0 +1,36 @@ +{ + "https://cdn.skypack.dev/-/crocks@v0.12.4-Mje8nEhNx2rmIpwz3ROp/dist=es2020,mode=imports/optimized/crocks.js": "93d587d18dc5f124f30e5b38de37a6471eb65309c94ef2ffc7a36dc40ab394da", + "https://cdn.skypack.dev/-/ramda@v0.27.1-3ePaNsppsnXYRcaNcaWn/dist=es2020,mode=imports/optimized/ramda.js": "0e51cd76039fc9669f9179d935b261bcbb19ecbb11d49e7e60cfbc14360a85d2", + "https://cdn.skypack.dev/-/zod@v3.1.0-gI7iXzzTM1WELSyd5Wzr/dist=es2020,mode=imports/optimized/zod.js": "2dffcb4bf919f89e183c2ab0c8595822c1454af1676e504ec36bde1ac72b2bc1", + "https://cdn.skypack.dev/crocks@^0.12.4": "d48852ce36c500f2770a2bc240cb6df9ffb2219d184b32b9be542e8560a6ff1d", + "https://cdn.skypack.dev/ramda@^0.27.1": "fb06d7de4305dcdb997f9adc903a463afcdac75d148d638295c420a8922048fd", + 
"https://cdn.skypack.dev/zod@^3": "db17f3b774c7bdbb70181223fb10cdd480d1761f2e22340bd11cbcda4ccfbafe", + "https://deno.land/std@0.98.0/_util/assert.ts": "2f868145a042a11d5ad0a3c748dcf580add8a0dbc0e876eaa0026303a5488f58", + "https://deno.land/std@0.98.0/async/deferred.ts": "624bef4b755b71394620508a0c234a93cb8020cbd1b04bfcdad41c174392cef6", + "https://deno.land/std@0.98.0/bytes/bytes_list.ts": "a13287edb03f19d27ba4927dec6d6de3e5bd46254cd4aee6f7e5815810122673", + "https://deno.land/std@0.98.0/bytes/mod.ts": "1ae1ccfe98c4b979f12b015982c7444f81fcb921bea7aa215bf37d84f46e1e13", + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/io/buffer.ts": "3ead6bb11276ebcf093c403f74f67fd2205a515dbbb9061862c468ca56f37cd8", + "https://deno.land/std@0.98.0/io/bufio.ts": "82fe6a499cacf4604844472ccf328cb0a1c0571c0f83b5ee67e475018342b4ae", + "https://deno.land/std@0.98.0/io/types.d.ts": "89a27569399d380246ca7cdd9e14d5e68459f11fb6110790cc5ecbd4ee7f3215", + "https://deno.land/std@0.98.0/io/util.ts": "318be78b7954da25f0faffe123fef0d9423ea61af98467e860c06b60265eff6d", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c", + "https://deno.land/x/redis@v0.22.2/command.ts": "b1a9176e58da84090939242a4cd08bf97b7b80c2a2bae058bab356f184b123ff", + "https://deno.land/x/redis@v0.22.2/connection.ts": "ff112405ca31a49dd3371a4d8474ee95a855dd441f4192d8c564efe70ed2b3a1", + "https://deno.land/x/redis@v0.22.2/errors.ts": "b4d05747d2002190593a8aa436442a9a62beae0a92351c49aeb1f8220c8401cf", + "https://deno.land/x/redis@v0.22.2/executor.ts": "b56bf48f20d36a3b6ec9abb0deb6e7d63b9ff3a74c123a60ad6848a063075964", + "https://deno.land/x/redis@v0.22.2/mod.ts": "f3134a42e31cd622f7631d4911dfdee3be4dcf7beb0d091558e5943b644caf4b", + "https://deno.land/x/redis@v0.22.2/pipeline.ts": "5fa0c63b4409efb4c730410b8a78ced37be54d9d5cbcb78be15aa290c3f54bf5", + "https://deno.land/x/redis@v0.22.2/protocol/_util.ts": "0525f7f444a96b92cd36423abdfe221f8d8de4a018dc5cb6750a428a5fc897c2", + "https://deno.land/x/redis@v0.22.2/protocol/command.ts": "654e57528b11ed8065b44de99c22a621d762f1c3c09b49f9b71b983f664ad2bf", + "https://deno.land/x/redis@v0.22.2/protocol/mod.ts": "e71efc7e605fc71e2816129e7a40a19a8053dc008adc17b1f56ae6f47e6506c4", + "https://deno.land/x/redis@v0.22.2/protocol/reply.ts": "a7b9413330b78f72cb8186107884cceab4383f302903435df4107947c4f5066f", + "https://deno.land/x/redis@v0.22.2/protocol/types.ts": "30bfe5bd1aad4fb4c877f214ab63de2a1910f2f99dddd2e18d9bf7f0b9d5bfd4", + "https://deno.land/x/redis@v0.22.2/pubsub.ts": "8bad9c41a6935b40cd5ca292179d276cd6687d1700975a18024db6ec61e1bc96", + "https://deno.land/x/redis@v0.22.2/redis.ts": "b2551eef3476ab0f6ba5ae8dc8555260f5dab62b9fdb1f4a4e351ee643b79b2e", + "https://deno.land/x/redis@v0.22.2/stream.ts": "2efe59b7c9f2ac4975a6a1aedac2622f133d0e80c893122973e1336c8771e83f", + "https://deno.land/x/redis@v0.22.2/vendor/https/deno.land/std/async/deferred.ts": "033b0290e640cc0520dcd5378878f5b1bc76e863a1f370b309c1eac38fa7a2c9", + "https://deno.land/x/redis@v0.22.2/vendor/https/deno.land/std/io/buffer.ts": "4e43c31128e1723df7866b511a548a152ee004cbb17f70a987b924a6754559ec", + "https://deno.land/x/redis@v0.22.2/vendor/https/deno.land/std/io/bufio.ts": "0766c407c27861c04d3b53e756b9305bb863cdd12fef260b11e35565dbd6b313" +} diff --git 
a/packages/adapter-redis/dev_deps.js b/packages/adapter-redis/dev_deps.js new file mode 100644 index 00000000..e6b8e9a3 --- /dev/null +++ b/packages/adapter-redis/dev_deps.js @@ -0,0 +1,6 @@ +export { + assert, + assertEquals, + assertObjectMatch, +} from "https://deno.land/std@0.98.0/testing/asserts.ts"; +export { resolves, spy } from "https://deno.land/x/mock@v0.9.5/mod.ts"; diff --git a/packages/adapter-redis/dev_deps_lock.json b/packages/adapter-redis/dev_deps_lock.json new file mode 100644 index 00000000..39557cec --- /dev/null +++ b/packages/adapter-redis/dev_deps_lock.json @@ -0,0 +1,24 @@ +{ + "https://deno.land/std@0.93.0/async/delay.ts": "9de1d8d07d1927767ab7f82434b883f3d8294fb19cad819691a2ad81a728cf3d", + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c", + "https://deno.land/x/collections@v0.11.0/common.ts": "34e8367e3696c3f872ae417d7c421fa908a5a2125a1c4cb259f7dee9561a7096", + "https://deno.land/x/collections@v0.11.0/comparators.ts": "08563340dbb0051f032bacdcf854bcabd13d607d2e8cb1889826417419df89d0", + "https://deno.land/x/collections@v0.11.0/trees/bs_node.ts": "854d39f6d60cdcb47e1183f0fa67091e6bad59dd2b13252a8b38b1b37269fa67", + "https://deno.land/x/collections@v0.11.0/trees/bs_tree.ts": "694605e10a9f56caf8468c5eea06d60a8d81521dee75d4524a5f790b1ee713b5", + "https://deno.land/x/collections@v0.11.0/trees/rb_node.ts": "e5eecc211933140e0535fc371a4990bd5ac889b00136c96a4a610dbbf78d9ecb", + "https://deno.land/x/collections@v0.11.0/trees/rb_tree.ts": "e1e5f4e26bc255ebf41d72b498a2af903af69f0391276a4a0eac6d46fc43f942", + "https://deno.land/x/collections@v0.11.0/vector.ts": "23cb91087cc89ce9a1e10954336484b537a44bd786e21babc310ae85cb7ad52d", + "https://deno.land/x/mixins@v0.7.2/apply.ts": "dad7095324f5ce23693a0bc0eb3238f230c0ed2160ea8c285f3773ff7c76dcb0", + "https://deno.land/x/mock@v0.9.5/callbacks.ts": "610924901137b6a8ee2f5801d06a48af18da84c9486dd4775ff301de8a8d2b29", + "https://deno.land/x/mock@v0.9.5/deps/std/async/delay.ts": "b0855e5e208bcc08a6f7cb4debfc3ed408a4a3f1bc00ce36235481b94613f018", + "https://deno.land/x/mock@v0.9.5/deps/udibo/collections/comparators.ts": "c780b51a4fcdac4d506bf254d54702fdd46051bde9367fc819bd72cea041ac29", + "https://deno.land/x/mock@v0.9.5/deps/udibo/collections/trees/rb_tree.ts": "24839f7b1f66291d10d6640cb0051143d9b7effbd1e74271c3b564bb31a5b977", + "https://deno.land/x/mock@v0.9.5/deps/udibo/collections/vector.ts": "93285b7af952652e9733795c5677666e978e279ef0b7dae6013e9e017c022b30", + "https://deno.land/x/mock@v0.9.5/deps/udibo/mixins/apply.ts": "d2446714efc056d2c6a373dbf7a1a1db236991cae60b673db6e17d5c4d1bb8b7", + "https://deno.land/x/mock@v0.9.5/mod.ts": "9b44e3cbe9955ecf9f05b06cb9e8a47294bd84f2d33e0b85a8f5b4a0d740e0aa", + "https://deno.land/x/mock@v0.9.5/spy.ts": "c6c66b124f1c0b86f479e66e1daf687ef03aff068ec776ff61a3bf25ff3da5d0", + "https://deno.land/x/mock@v0.9.5/stub.ts": "762cb86de92038e17dfd1af25429036fa7aadecbafdb39fa98edfa01e20951eb", + "https://deno.land/x/mock@v0.9.5/time.ts": "7c6d42af7577c4da461f9581bf171f6e5cb7a22a243ba87ee4ed3c4f1922c2e4" +} diff --git a/packages/adapter-redis/index.js b/packages/adapter-redis/index.js deleted file mode 100644 index b2563fb3..00000000 --- a/packages/adapter-redis/index.js +++ /dev/null @@ -1,38 +0,0 @@ -const 
createAdapter = require('./adapter') -const redis = require('redis') - -/** - * @param {object} config - * @returns {object} - */ -module.exports = function RedisCacheAdapter (config) { - /** - * @param {object} env - */ - function load () { - return config - } - - /** - * @param {object} env - * @returns {function} - */ - function link (env) { - /** - * @param {object} adapter - * @returns {object} - */ - return function () { - // create client - const client = redis.createClient(env) - return createAdapter(client) - } - } - - return Object.freeze({ - id: 'redis-cache-adapter', - port: 'cache', - load, - link - }) -} diff --git a/packages/adapter-redis/index_test.js b/packages/adapter-redis/index_test.js deleted file mode 100644 index b9959f31..00000000 --- a/packages/adapter-redis/index_test.js +++ /dev/null @@ -1,22 +0,0 @@ -const test = require('tape') -const RedisCacheAdapter = require('./index') -const z = require('zod') - -const schema = z.object({ - id: z.string().optional(), - port: z.string().optional(), - load: z.function() - .args(z.any().optional()) - .returns(z.any()), - link: z.function() - .args(z.any()) - .returns(z.function() - .args(z.any()) - .returns(z.any()) - ) -}) - -test('validate schema', t => { - t.ok(schema.safeParse(RedisCacheAdapter()).success) - t.end() -}) diff --git a/packages/adapter-redis/mod.js b/packages/adapter-redis/mod.js new file mode 100644 index 00000000..02df8678 --- /dev/null +++ b/packages/adapter-redis/mod.js @@ -0,0 +1,40 @@ +import { redis } from "./deps.js"; + +import createAdapter from "./adapter.js"; + +/** + * @typedef RedisClientArgs + * @property {string} hostname + * @property {number?} port - defaults to 6379 + * + * @param {RedisClientArgs} config + * @returns {object} + */ +export default function RedisCacheAdapter(config) { + function load() { + return config; + } + + /** + * builds the adapter around a client connected with env + * @returns {object} + */ + function link(env) { + /** + * takes no args; env is closed over from link + * @returns {object} + */ + return function () { + // create client + const client = redis.connect(env); + return createAdapter(client); + }; + } + + return Object.freeze({ + id: "redis-cache-adapter", + port: "cache", + load, + link, + }); +} diff --git a/packages/adapter-redis/mod_test.js b/packages/adapter-redis/mod_test.js new file mode 100644 index 00000000..1704d430 --- /dev/null +++ b/packages/adapter-redis/mod_test.js @@ -0,0 +1,23 @@ +import { z } from "./deps.js"; +import { assert } from "./dev_deps.js"; + +import RedisCacheAdapter from "./mod.js"; + +const schema = z.object({ + id: z.string().optional(), + port: z.string().optional(), + load: z.function() + .args(z.any().optional()) + .returns(z.any()), + link: z.function() + .args(z.any()) + .returns( + z.function() + .args(z.any()) + .returns(z.any()), + ), +}); + +Deno.test("validate schema", () => { + assert(schema.safeParse(RedisCacheAdapter()).success); +}); diff --git a/packages/adapter-redis/package.json b/packages/adapter-redis/package.json index 5e6fd4b9..02af1dc3 100644 --- a/packages/adapter-redis/package.json +++ b/packages/adapter-redis/package.json @@ -6,19 +6,5 @@ "url": "https://github.com/hyper63/hyper63.git", "directory": "packages/adapter-redis" }, - "license": "MIT", - "main": "index.js", - "scripts": { - "test": "tape *_test.js" - }, - "dependencies": { - "crocks": "^0.12.4", - "ramda": "^0.27.1", - "redis": "^3.0.2", - "zod": "^3.0.0-alpha.33" - }, - "devDependencies": { - "redis-mock": "^0.54.0", - "tape": "^5.0.1" - } + "license": "MIT" }
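
For orientation: this adapter isn't used directly. It is listed in a hyper service config, which calls `load`, passes the merged result to `link` as `env`, and invokes the returned function to get the concrete cache adapter. A minimal sketch of that wiring, assuming hyper63's `app`/`adapters` config shape (the paths and connection values here are illustrative, not part of this diff):

```
// hyper63.config.js - illustrative only
import app from "./packages/app-opine/mod.js";
import redis from "./packages/adapter-redis/mod.js";

export default {
  app,
  adapters: [
    // the plugin declares port: "cache", so it backs the cache port
    { port: "cache", plugins: [redis({ hostname: "127.0.0.1", port: 6379 })] },
  ],
};
```

diff --git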
a/packages/adapter-redis/scripts/test.sh b/packages/adapter-redis/scripts/test.sh new file mode 100755 index 00000000..09bdb23f --- /dev/null +++ b/packages/adapter-redis/scripts/test.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +deno fmt --check +deno test diff --git a/packages/app-opine/api/cache.js b/packages/app-opine/api/cache.js new file mode 100644 index 00000000..37afabc1 --- /dev/null +++ b/packages/app-opine/api/cache.js @@ -0,0 +1,41 @@ +import { fork } from "../utils.js"; + +// GET /cache +export const index = ({ cache }, res) => + fork( + res, + 200, + cache.index().map((stores) => ({ + name: "cache", + version: "0.0.4", + stores, + })), + ); + +// PUT /cache/:name +export const createStore = ({ params, cache }, res) => + fork(res, 201, cache.createStore(params.name)); + +// DELETE /cache/:name +export const deleteStore = ({ params, cache }, res) => + fork(res, 200, cache.deleteStore(params.name)); + +// POST /cache/:name - key, value, and optional ttl in the body +export const createDocument = ({ params, body, cache }, res) => + fork(res, 201, cache.createDoc(params.name, body.key, body.value, body.ttl)); + +// GET /cache/:name/:key +export const getDocument = ({ params, cache }, res) => + fork(res, 200, cache.getDoc(params.name, params.key)); + +// PUT /cache/:name/:key +export const updateDocument = ({ cache, params, body, query }, res) => + fork(res, 200, cache.updateDoc(params.name, params.key, body, query.ttl)); + +// DELETE /cache/:name/:key +export const deleteDocument = ({ cache, params }, res) => + fork(res, 200, cache.deleteDoc(params.name, params.key)); + +// GET|POST /cache/:name/_query?pattern=* +export const queryStore = ({ cache, params, query }, res) => + fork(res, 200, cache.queryStore(params.name, query.pattern)); diff --git a/packages/app-opine/api/data.js b/packages/app-opine/api/data.js new file mode 100644 index 00000000..7cd72632 --- /dev/null +++ b/packages/app-opine/api/data.js @@ -0,0 +1,45 @@ +import { fork } from "../utils.js"; + +// GET /data +export const index = (_req, res) => + res.send({ name: "hyper63 Data", version: "1.0.1", status: "unstable" }); + +// PUT /data/:db +export const createDb = ({ params, data }, res) => + fork(res, 201, data.createDatabase(params.db)); + +// DELETE /data/:db +export const removeDb = ({ params, data }, res) => + fork(res, 200, data.destroyDatabase(params.db)); + +// GET /data/:db +export const listDocuments = ({ params, query, data }, res) => + fork(res, 200, data.listDocuments(params.db, query)); + +// POST /data/:db +export const createDocument = ({ params, body, data }, res) => + fork(res, 201, data.createDocument(params.db, body)); + +// GET /data/:db/:id +export const getDocument = ({ params, data }, res) => + fork(res, 200, data.getDocument(params.db, params.id)); + +// PUT /data/:db/:id +export const updateDocument = ({ data, params, body }, res) => + fork(res, 200, data.updateDocument(params.db, params.id, body)); + +// DELETE /data/:db/:id +export const deleteDocument = ({ data, params }, res) => + fork(res, 200, data.removeDocument(params.db, params.id)); + +// POST /data/:db/_query +export const queryDb = ({ data, params, body }, res) => + fork(res, 200, data.query(params.db, body)); + +// POST /data/:db/_index +export const indexDb = ({ data, params, body }, res) => + fork(res, 201, data.index(params.db, body.name, body.fields)); + +// POST /data/:db/_bulk +export const bulk = ({ data, params, body }, res) => + fork(res, 201, data.bulkDocuments(params.db, body));
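
Every handler in these api modules follows the same shape: call the bound core module (which returns a crocks Async) and hand it to `fork` from `../utils.js` (added later in this diff) to turn success or failure into an HTTP response. A minimal sketch of that pattern with a stubbed Async standing in for a real core call (the `ping` handler is illustrative, not part of this diff):

```
import { crocks } from "../deps.js";
import { fork } from "../utils.js";

const { Async } = crocks;

// a handler is just (req, res) => fork(res, successCode, someAsync)
export const ping = (_req, res) =>
  fork(res, 200, Async.Resolved({ ok: true, pong: true }));

// a Rejected Async is sent as error.status (or 500) with { ok: false, msg }
```

diff --git a/packages/app-opine/api/queue.js b/packages/app-opine/api/queue.js new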
file mode 100644 index 00000000..f2aa7c65 --- /dev/null +++ b/packages/app-opine/api/queue.js @@ -0,0 +1,40 @@ +import { fork } from "../utils.js"; + +export const index = ({ queue }, res) => + fork( + res, + 200, + queue.index().map((queues) => ({ + name: "queue", + version: "0.0.4", + queues, + })), + ); + +// PUT /queue/:name +export const create = ({ params, body, queue }, res) => + fork( + res, + 201, + queue.create({ + name: params.name, + target: body.target, + secret: body.secret, + }), + ); + +// DELETE /queue/:name +export const del = ({ params, queue }, res) => + fork(res, 201, queue.delete(params.name)); + +// POST /queue/:name +export const post = ({ params, body, queue }, res) => + fork(res, 201, queue.post({ name: params.name, job: body })); + +// GET /queue/:name?status=READY|ERROR +export const list = ({ params, query, queue }, res) => + fork(res, 200, queue.list({ name: params.name, status: query.status })); + +// POST /queue/:name/:id/_cancel +export const cancel = ({ params, queue }, res) => + fork(res, 201, queue.cancel({ name: params.name, id: params.id })); diff --git a/packages/app-opine/api/search.js b/packages/app-opine/api/search.js new file mode 100644 index 00000000..815ee387 --- /dev/null +++ b/packages/app-opine/api/search.js @@ -0,0 +1,37 @@ +import { fork } from "../utils.js"; + +// GET /search +export const index = (_req, res) => + res.send({ name: "hyper63 Search", version: "1.0", status: "unstable" }); + +// PUT /search/:index +export const createIndex = ({ params, search, body }, res) => + fork(res, 201, search.createIndex(params.index, body)); + +// DELETE /search/:index +export const deleteIndex = ({ params, search }, res) => + fork(res, 200, search.deleteIndex(params.index)); + +// POST /search/:index +export const indexDoc = ({ params, body, search }, res) => + fork(res, 201, search.indexDoc(params.index, body.key, body.doc)); + +// GET /search/:index/:key +export const getDoc = ({ params, search }, res) => + fork(res, 200, search.getDoc(params.index, params.key)); + +// PUT /search/:index/:key +export const updateDoc = ({ search, params, body }, res) => + fork(res, 200, search.updateDoc(params.index, params.key, body)); + +// DELETE /search/:index/:key +export const removeDoc = ({ search, params }, res) => + fork(res, 200, search.removeDoc(params.index, params.key)); + +// POST /search/:index/_query +export const query = ({ search, params, body }, res) => + fork(res, 200, search.query(params.index, body)); + +// POST /search/:index/_bulk +export const bulk = ({ search, params, body }, res) => + fork(res, 201, search.bulk(params.index, body)); diff --git a/packages/app-opine/api/storage.js b/packages/app-opine/api/storage.js new file mode 100644 index 00000000..b1e6a4b2 --- /dev/null +++ b/packages/app-opine/api/storage.js @@ -0,0 +1,80 @@ +import { Buffer, crocks, getMimeType } from "../deps.js"; +import { fork } from "../utils.js"; + +const { Async } = crocks; + +// GET /storage +export const index = (_req, res) => + res.send({ name: "hyper63 Storage", version: "1.0", status: "unstable" }); + +// PUT /storage/:name - make bucket +export const makeBucket = ({ params, storage }, res) => + fork(res, 201, storage.makeBucket(params.name)); + +// DELETE /storage/:name - remove bucket +export const removeBucket = ({ params, storage }, res) => + fork(res, 201, storage.removeBucket(params.name)); + +// POST /storage/:name - put object +/** + * requires a multipart form POST + * fields: file, [path] + */ +/** + * @param {object} req - multipart request: file, params, body, and the + * bound storage port + * @param {object} res - opine response + * @returns {Promise}
+ */ +export const putObject = async ({ file, params, body, storage }, res) => { + let object = file.filename; + if (body.path) { + object = `${body.path}/${file.filename}`; + } + + const reader = file.content + ? new Buffer(file.content.buffer) // from memory + : await Deno.open(file.tempfile, { read: true }); // from tempfile if too large for memory buffer + + /** + * Ensure reader is closed to prevent leaks + * in the case of a tempfile being created + */ + const cleanup = (_constructor) => + Async.fromPromise(async (res) => { + if (typeof reader.close === "function") { + await reader.close(); + } + + return _constructor(res); + }); + + return fork( + res, + 201, + storage.putObject(params.name, object, reader).bichain( + cleanup(Promise.reject.bind(Promise)), + cleanup(Promise.resolve.bind(Promise)), + ), + ); +}; + +export const getObject = ({ params, storage }, res) => { + storage.getObject(params.name, params[0]).fork( + (e) => res.setStatus(500).send({ ok: false, msg: e.message }), + async (fileReader) => { + // get mime type + const mimeType = getMimeType(params[0].split(".")[1]); + res.set({ + "Content-Type": mimeType, + "Transfer-Encoding": "chunked", + }); + res.setStatus(200); + + await res.send(fileReader); + }, + ); +}; + +export const removeObject = ({ params, storage }, res) => + fork(res, 201, storage.removeObject(params.name, params[0])); diff --git a/packages/app-opine/deps.js b/packages/app-opine/deps.js new file mode 100644 index 00000000..1b2dd440 --- /dev/null +++ b/packages/app-opine/deps.js @@ -0,0 +1,12 @@ +export { json, opine } from "https://deno.land/x/opine@1.4.0/mod.ts"; +export { opineCors as cors } from "https://deno.land/x/cors@v1.2.1/mod.ts"; +export { lookup as getMimeType } from "https://deno.land/x/media_types@v2.8.4/mod.ts"; + +export { MultipartReader } from "https://deno.land/std@0.98.0/mime/mod.ts"; +export { Buffer } from "https://deno.land/std@0.98.0/io/buffer.ts"; +export { exists } from "https://deno.land/std@0.98.0/fs/exists.ts"; + +export { default as helmet } from "https://cdn.skypack.dev/helmet@^4.6.0"; + +export * as R from "https://cdn.skypack.dev/ramda@^0.27.1"; +export { default as crocks } from "https://cdn.skypack.dev/crocks@^0.12.4"; diff --git a/packages/app-opine/deps_lock.json b/packages/app-opine/deps_lock.json new file mode 100644 index 00000000..fced7d43 --- /dev/null +++ b/packages/app-opine/deps_lock.json @@ -0,0 +1,263 @@ +{ + "https://cdn.esm.sh/v41/@types/ms@0.7.31/index.d.ts": "6a9c5127096b35264eb7cd21b2417bfc1d42cceca9ba4ce2bb0c3410b7816042", + "https://cdn.esm.sh/v41/@types/qs@6.9.6/index.d.ts": "98437d5a640b67c41534f0de2dcb64c75433dcdff54ff8f8432e613663619a2e", + "https://cdn.esm.sh/v41/@types/range-parser@1.2.3/index.d.ts": "4e88b833be14c7f384e0dcd57bb30acd799e8e34d212635d693e41a75a71164b", + "https://cdn.esm.sh/v41/ipaddr.js@2.0.0/deno/ipaddr.js.js": "a4e9ddabd21d4ae569cce3f8130b5665208e57b871264cf10d17212b3e7fd683", + "https://cdn.esm.sh/v41/ipaddr.js@2.0.0/lib/ipaddr.js.d.ts": "4ce9371828fa2852d92ae6d483f58d0b61332129c9ef254851d4742d0d5eeba7", + "https://cdn.esm.sh/v41/ms@2.1.2/deno/ms.js": "a2c1e63708c16f1aed3950ca1483a87ad3b11af06962c24bbddc4d5bd46ae671", + "https://cdn.esm.sh/v41/qs@6.9.4/deno/qs.js": "feb3f8eb20508c5e02813e9c19d14fc26285e5bb57bec00c125bf35f031be5e9", + "https://cdn.esm.sh/v41/range-parser@1.2.1/deno/range-parser.js": "6413694c7175edca7f166c6dc2b52cf061cc4af7b071fb95923cb0aca1915941", + "https://cdn.skypack.dev/-/crocks@v0.12.4-Mje8nEhNx2rmIpwz3ROp/dist=es2020,mode=imports/optimized/crocks.js": 
"93d587d18dc5f124f30e5b38de37a6471eb65309c94ef2ffc7a36dc40ab394da", + "https://cdn.skypack.dev/-/helmet@v4.6.0-xZavBngbShvx8qaVnHcS/dist=es2020,mode=imports/optimized/helmet.js": "a94162d1b088e94bde788533311f511c1050b39e2302d4c6f8d622d04ae45bc2", + "https://cdn.skypack.dev/-/ramda@v0.27.1-3ePaNsppsnXYRcaNcaWn/dist=es2020,mode=imports/optimized/ramda.js": "0e51cd76039fc9669f9179d935b261bcbb19ecbb11d49e7e60cfbc14360a85d2", + "https://cdn.skypack.dev/crocks@^0.12.4": "d48852ce36c500f2770a2bc240cb6df9ffb2219d184b32b9be542e8560a6ff1d", + "https://cdn.skypack.dev/helmet@^4.6.0": "946751a7ab75d0560058d0f56dd0a683626ded7983e576bf689a8c134430c172", + "https://cdn.skypack.dev/ramda@^0.27.1": "fb06d7de4305dcdb997f9adc903a463afcdac75d148d638295c420a8922048fd", + "https://deno.land/std@0.69.0/_util/assert.ts": "e1f76e77c5ccb5a8e0dbbbe6cce3a56d2556c8cb5a9a8802fc9565af72462149", + "https://deno.land/std@0.69.0/async/deferred.ts": "ac95025f46580cf5197928ba90995d87f26e202c19ad961bc4e3177310894cdc", + "https://deno.land/std@0.69.0/async/delay.ts": "35957d585a6e3dd87706858fb1d6b551cb278271b03f52c5a2cb70e65e00c26a", + "https://deno.land/std@0.69.0/async/mod.ts": "39f2602a005805dd1e6b9da4ee5391b14d15e8fec4fa5494a6165c599911d746", + "https://deno.land/std@0.69.0/async/mux_async_iterator.ts": "1352b10a5cee1821a963eb8f4f05603bd7e25f8886eaf8d1888f4b416ddccdf5", + "https://deno.land/std@0.69.0/async/pool.ts": "a499691231d8c249f044f69d204b479ad3af7f115e0b37342829eff076bc2450", + "https://deno.land/std@0.69.0/bytes/mod.ts": "b1a149ac741728db00bda9ce1a2d044f248edd5ac95e708a6cc501bfd3adb4a7", + "https://deno.land/std@0.69.0/datetime/formatter.ts": "a21986fb09d36164f1b1e867ea9e5bb816a907dabc768a8f0c8c44c1112f65e4", + "https://deno.land/std@0.69.0/datetime/mod.ts": "140c93dc33a4918230685d7cc4f098da661e4870f006f05a7853c765eff041ff", + "https://deno.land/std@0.69.0/datetime/tokenizer.ts": "175c06fcf866ada3cbf61b4c387b8f17dd9b22befbf299772dda1a4d53cfc22b", + "https://deno.land/std@0.69.0/encoding/utf8.ts": "8654fa820aa69a37ec5eb11908e20b39d056c9bf1c23ab294303ff467f3d50a1", + "https://deno.land/std@0.69.0/http/_io.ts": "a6108623563611e901b1128ca91aca02982af552aa7c4f39f2240cf6e6cd9ac7", + "https://deno.land/std@0.69.0/http/cookie.ts": "0d141ef4ff05a53602bfd06839f4a674a0dce170dc80619d67736984c8b57830", + "https://deno.land/std@0.69.0/http/http_status.ts": "0ecc0799a208f49452023b1b927106ba5a2c13cc6cf6666345db028239d554ab", + "https://deno.land/std@0.69.0/http/mod.ts": "7f2ddc904c7951757228709a2f72c5b3960dee3e81f552f21d382edfdd2a1165", + "https://deno.land/std@0.69.0/http/server.ts": "9f39d814950c741e0dcf0dfb9a046515f5fb342a62fbc28607c44ac5692e8a7e", + "https://deno.land/std@0.69.0/io/bufio.ts": "e76c5b7bf978a638aae6f62b87efde3ab7203b85902ce9b84ac8388c8c2bb104", + "https://deno.land/std@0.69.0/path/_constants.ts": "aba480c4a2c098b6374fdd5951fea13ecc8aaaf8b8aa4dae1871baa50243d676", + "https://deno.land/std@0.69.0/path/_interface.ts": "5876f91d35fd42624893a4aaddaee352144e1f46e719f1dde6511bab7c3c1029", + "https://deno.land/std@0.69.0/path/_util.ts": "f0fa012d40ae9b6acbef03908e534eb11e694de6470fb4d78ea4f38829e735ab", + "https://deno.land/std@0.69.0/path/common.ts": "e4ec66a7416d56f60331b66e27a8a4f08c7b1cf48e350271cb69754a01cf5c04", + "https://deno.land/std@0.69.0/path/glob.ts": "43cc45e8649a35a199c4106dfdf66206f46dfd8e2e626a746512c1a1376fde99", + "https://deno.land/std@0.69.0/path/mod.ts": "6de8885c2534757097818e302becd1cefcbc4c28ac022cc279e612ee04e8cfd1", + "https://deno.land/std@0.69.0/path/posix.ts": 
"40c387415fca91b3482214cf74880c415cda90b337bebd2c9d4b62d2097bc146", + "https://deno.land/std@0.69.0/path/separator.ts": "9dd15d46ff84a16e13554f56af7fee1f85f8d0f379efbbe60ac066a60561f036", + "https://deno.land/std@0.69.0/path/win32.ts": "9e200471f24fb560d22e74b238133cb75ebb57bead933de1cc5aefed4cda3346", + "https://deno.land/std@0.69.0/textproto/mod.ts": "ebd84342f62216af1850279d32581c51d0958e36d477c6c26c961987e6240356", + "https://deno.land/std@0.71.0/_util/assert.ts": "e1f76e77c5ccb5a8e0dbbbe6cce3a56d2556c8cb5a9a8802fc9565af72462149", + "https://deno.land/std@0.71.0/archive/tar.ts": "5a0a7465d57fec2684239b07da0f5ec884c6c71a768297f73c348f22e04acd92", + "https://deno.land/std@0.71.0/bytes/mod.ts": "b1a149ac741728db00bda9ce1a2d044f248edd5ac95e708a6cc501bfd3adb4a7", + "https://deno.land/std@0.71.0/encoding/utf8.ts": "1b7e77db9a12363c67872f8a208886ca1329f160c1ca9133b13d2ed399688b99", + "https://deno.land/std@0.71.0/fs/_util.ts": "9318f5253cb09177280bdce64b6af97012707cdb458c02864811c2bae1dd1dbd", + "https://deno.land/std@0.71.0/fs/ensure_dir.ts": "54cf0cfb16160857116d1bdff98214ad0189275fe2f089607fdc06c52ac79cc4", + "https://deno.land/std@0.71.0/fs/ensure_file.ts": "b70eccaee6f41ae226d399ad9c8ebc29beb5dd86fe179d30ab7e681976352baf", + "https://deno.land/std@0.71.0/io/bufio.ts": "9fdfcd7e408aa01cbd31e5d8c90cb8a648635db12e05ba78534296e696c635ee", + "https://deno.land/std@0.71.0/io/readers.ts": "4c2e98abf2a2a1d5e2adaf460479a1c69f742951c1bc141b26654d2f5248b663", + "https://deno.land/std@0.71.0/node/_utils.ts": "9fdd38e53754fc7e4fc38ab472a56be9ef3ce32c3503ac7b84ca1e0f563f69fa", + "https://deno.land/std@0.71.0/node/events.ts": "00fb524c3b10fc1a55f292c26dc6c949b8a31ac6d51c0db0c3a47ff13d2c46d4", + "https://deno.land/std@0.71.0/path/_constants.ts": "aba480c4a2c098b6374fdd5951fea13ecc8aaaf8b8aa4dae1871baa50243d676", + "https://deno.land/std@0.71.0/path/_interface.ts": "67b276380d297a7cedc3c17f7a0bf122edcfc96a3e1f69de06f379d85ba0e2c0", + "https://deno.land/std@0.71.0/path/_util.ts": "f0fa012d40ae9b6acbef03908e534eb11e694de6470fb4d78ea4f38829e735ab", + "https://deno.land/std@0.71.0/path/common.ts": "e4ec66a7416d56f60331b66e27a8a4f08c7b1cf48e350271cb69754a01cf5c04", + "https://deno.land/std@0.71.0/path/glob.ts": "43cc45e8649a35a199c4106dfdf66206f46dfd8e2e626a746512c1a1376fde99", + "https://deno.land/std@0.71.0/path/mod.ts": "6de8885c2534757097818e302becd1cefcbc4c28ac022cc279e612ee04e8cfd1", + "https://deno.land/std@0.71.0/path/posix.ts": "40c387415fca91b3482214cf74880c415cda90b337bebd2c9d4b62d2097bc146", + "https://deno.land/std@0.71.0/path/separator.ts": "9dd15d46ff84a16e13554f56af7fee1f85f8d0f379efbbe60ac066a60561f036", + "https://deno.land/std@0.71.0/path/win32.ts": "9e200471f24fb560d22e74b238133cb75ebb57bead933de1cc5aefed4cda3346", + "https://deno.land/std@0.94.0/_util/assert.ts": "2f868145a042a11d5ad0a3c748dcf580add8a0dbc0e876eaa0026303a5488f58", + "https://deno.land/std@0.94.0/_util/os.ts": "e282950a0eaa96760c0cf11e7463e66babd15ec9157d4c9ed49cc0925686f6a7", + "https://deno.land/std@0.94.0/path/_constants.ts": "1247fee4a79b70c89f23499691ef169b41b6ccf01887a0abd131009c5581b853", + "https://deno.land/std@0.94.0/path/_interface.ts": "1fa73b02aaa24867e481a48492b44f2598cd9dfa513c7b34001437007d3642e4", + "https://deno.land/std@0.94.0/path/_util.ts": "2e06a3b9e79beaf62687196bd4b60a4c391d862cfa007a20fc3a39f778ba073b", + "https://deno.land/std@0.94.0/path/common.ts": "eaf03d08b569e8a87e674e4e265e099f237472b6fd135b3cbeae5827035ea14a", + "https://deno.land/std@0.94.0/path/glob.ts": 
"a4a0d57b9674a275f0e70cdab14a5c93d65747be5ec360a00087c79688e6a2de", + "https://deno.land/std@0.94.0/path/mod.ts": "4465dc494f271b02569edbb4a18d727063b5dbd6ed84283ff906260970a15d12", + "https://deno.land/std@0.94.0/path/posix.ts": "f56c3c99feb47f30a40ce9d252ef6f00296fa7c0fcb6dd81211bdb3b8b99ca3b", + "https://deno.land/std@0.94.0/path/separator.ts": "8fdcf289b1b76fd726a508f57d3370ca029ae6976fcde5044007f062e643ff1c", + "https://deno.land/std@0.94.0/path/win32.ts": "77f7b3604e0de40f3a7c698e8a79e7f601dc187035a1c21cb1e596666ce112f8", + "https://deno.land/std@0.96.0/_util/assert.ts": "2f868145a042a11d5ad0a3c748dcf580add8a0dbc0e876eaa0026303a5488f58", + "https://deno.land/std@0.96.0/_util/os.ts": "e282950a0eaa96760c0cf11e7463e66babd15ec9157d4c9ed49cc0925686f6a7", + "https://deno.land/std@0.96.0/path/_constants.ts": "1247fee4a79b70c89f23499691ef169b41b6ccf01887a0abd131009c5581b853", + "https://deno.land/std@0.96.0/path/_interface.ts": "1fa73b02aaa24867e481a48492b44f2598cd9dfa513c7b34001437007d3642e4", + "https://deno.land/std@0.96.0/path/_util.ts": "2e06a3b9e79beaf62687196bd4b60a4c391d862cfa007a20fc3a39f778ba073b", + "https://deno.land/std@0.96.0/path/common.ts": "eaf03d08b569e8a87e674e4e265e099f237472b6fd135b3cbeae5827035ea14a", + "https://deno.land/std@0.96.0/path/glob.ts": "314ad9ff263b895795208cdd4d5e35a44618ca3c6dd155e226fb15d065008652", + "https://deno.land/std@0.96.0/path/mod.ts": "4465dc494f271b02569edbb4a18d727063b5dbd6ed84283ff906260970a15d12", + "https://deno.land/std@0.96.0/path/posix.ts": "f56c3c99feb47f30a40ce9d252ef6f00296fa7c0fcb6dd81211bdb3b8b99ca3b", + "https://deno.land/std@0.96.0/path/separator.ts": "8fdcf289b1b76fd726a508f57d3370ca029ae6976fcde5044007f062e643ff1c", + "https://deno.land/std@0.96.0/path/win32.ts": "77f7b3604e0de40f3a7c698e8a79e7f601dc187035a1c21cb1e596666ce112f8", + "https://deno.land/std@0.97.0/_util/assert.ts": "2f868145a042a11d5ad0a3c748dcf580add8a0dbc0e876eaa0026303a5488f58", + "https://deno.land/std@0.97.0/_util/os.ts": "e282950a0eaa96760c0cf11e7463e66babd15ec9157d4c9ed49cc0925686f6a7", + "https://deno.land/std@0.97.0/async/deferred.ts": "624bef4b755b71394620508a0c234a93cb8020cbd1b04bfcdad41c174392cef6", + "https://deno.land/std@0.97.0/async/delay.ts": "9de1d8d07d1927767ab7f82434b883f3d8294fb19cad819691a2ad81a728cf3d", + "https://deno.land/std@0.97.0/async/mod.ts": "253b41c658d768613eacfb11caa0a9ca7148442f932018a45576f7f27554c853", + "https://deno.land/std@0.97.0/async/mux_async_iterator.ts": "c405c4c1194f7600189aa81dfe243e165c27f36eaea88a301fa2a74ce974548b", + "https://deno.land/std@0.97.0/async/pool.ts": "353ce4f91865da203a097aa6f33de8966340c91b6f4a055611c8c5d534afd12f", + "https://deno.land/std@0.97.0/bytes/bytes_list.ts": "a13287edb03f19d27ba4927dec6d6de3e5bd46254cd4aee6f7e5815810122673", + "https://deno.land/std@0.97.0/bytes/mod.ts": "1ae1ccfe98c4b979f12b015982c7444f81fcb921bea7aa215bf37d84f46e1e13", + "https://deno.land/std@0.97.0/datetime/formatter.ts": "bf7befcd2c55c3060be199ebc10e40f9c33aef6141c20f7c781d03beef25a49e", + "https://deno.land/std@0.97.0/datetime/mod.ts": "c62a2c7e9d364f8d007f0f2496a4850a0f0790adb529de1988359d8d54f6103c", + "https://deno.land/std@0.97.0/datetime/tokenizer.ts": "492bb6251e75e0c03d5a89a66bd2b03e08e9cbc298d51e002cf59378aaa32c48", + "https://deno.land/std@0.97.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.97.0/hash/sha1.ts": "1cca324b4b253885a47f121adafcfac55b4cc96113e22b338e1db26f37a730b8", + "https://deno.land/std@0.97.0/http/_io.ts": 
"f4446e433d8d0009851c7de20d594746de228399c382cbee65da30eb87e70827", + "https://deno.land/std@0.97.0/http/cookie.ts": "5952a0a7973fcacb99fdcbe080b735a367194514217d2c6788f92a299c87136c", + "https://deno.land/std@0.97.0/http/http_status.ts": "ebaa9bebfb8adc3d7b20c49e11037e4eefd79629ad80d81383933f4cdc91b3eb", + "https://deno.land/std@0.97.0/http/server.ts": "39414681549353ebd0665e3df4145ee0a93a989cd0f55bed14fdb2c43d2faa3b", + "https://deno.land/std@0.97.0/io/buffer.ts": "ed3528e299fd1e0dc056c4b5005a07b28c3dabad2595f077a562ff7b06fe89a5", + "https://deno.land/std@0.97.0/io/bufio.ts": "61d11414e83d2b504eb8ea2fca3c2670470ee2fe2e5fe82bdd81eb55dc11ac4a", + "https://deno.land/std@0.97.0/io/types.d.ts": "89a27569399d380246ca7cdd9e14d5e68459f11fb6110790cc5ecbd4ee7f3215", + "https://deno.land/std@0.97.0/io/util.ts": "318be78b7954da25f0faffe123fef0d9423ea61af98467e860c06b60265eff6d", + "https://deno.land/std@0.97.0/node/_errors.ts": "02285efd044fe3d35e7ede4fc9578404363ae9d4ad4d817c965ebd04dc6e3e8c", + "https://deno.land/std@0.97.0/node/_util/_util_callbackify.ts": "f2ac50a47572dde37612a52c7b337afeefb6faafdb849184487e06436334a5ab", + "https://deno.land/std@0.97.0/node/_util/_util_promisify.ts": "2ad6efe685f73443d5ed6ae009999789a8de4a0f01e6d2afdf242b4515477ee2", + "https://deno.land/std@0.97.0/node/_util/_util_types.ts": "ae3d21e07c975f06590ab80bbde8173670d70ff40546267c0c1df869fc2ff00c", + "https://deno.land/std@0.97.0/node/_utils.ts": "559f7c47dec95961f508273b4e12284b221d6948f13527b3ca5db10f8fa9d93a", + "https://deno.land/std@0.97.0/node/events.ts": "06407e6c124fe1991837ec353eef62330ebb51377264a7c8e9e70b84648ea39d", + "https://deno.land/std@0.97.0/node/querystring.ts": "ba32a638962b56b120769d2d25e3d20fbd0dfed7597f8398c6f1e079f4106daa", + "https://deno.land/std@0.97.0/node/timers.ts": "96cb37b2d806028db02faa60013b6672a6ef2608e79139e092313640ed9df110", + "https://deno.land/std@0.97.0/node/util.ts": "468c080e0a0553f00fe881e8d97ff9e54c54186ad39a5073b329e1980cf31402", + "https://deno.land/std@0.97.0/path/_constants.ts": "1247fee4a79b70c89f23499691ef169b41b6ccf01887a0abd131009c5581b853", + "https://deno.land/std@0.97.0/path/_interface.ts": "1fa73b02aaa24867e481a48492b44f2598cd9dfa513c7b34001437007d3642e4", + "https://deno.land/std@0.97.0/path/_util.ts": "2e06a3b9e79beaf62687196bd4b60a4c391d862cfa007a20fc3a39f778ba073b", + "https://deno.land/std@0.97.0/path/common.ts": "eaf03d08b569e8a87e674e4e265e099f237472b6fd135b3cbeae5827035ea14a", + "https://deno.land/std@0.97.0/path/glob.ts": "314ad9ff263b895795208cdd4d5e35a44618ca3c6dd155e226fb15d065008652", + "https://deno.land/std@0.97.0/path/mod.ts": "4465dc494f271b02569edbb4a18d727063b5dbd6ed84283ff906260970a15d12", + "https://deno.land/std@0.97.0/path/posix.ts": "f56c3c99feb47f30a40ce9d252ef6f00296fa7c0fcb6dd81211bdb3b8b99ca3b", + "https://deno.land/std@0.97.0/path/separator.ts": "8fdcf289b1b76fd726a508f57d3370ca029ae6976fcde5044007f062e643ff1c", + "https://deno.land/std@0.97.0/path/win32.ts": "77f7b3604e0de40f3a7c698e8a79e7f601dc187035a1c21cb1e596666ce112f8", + "https://deno.land/std@0.97.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.97.0/testing/asserts.ts": "341292d12eebc44be4c3c2ca101ba8b6b5859cef2fa69d50c217f9d0bfbcfd1f", + "https://deno.land/std@0.97.0/textproto/mod.ts": "6e8430986393e3929720cec9c6668d75dee2ffd953886e842dc124c251cb86c8", + "https://deno.land/std@0.98.0/_util/assert.ts": "2f868145a042a11d5ad0a3c748dcf580add8a0dbc0e876eaa0026303a5488f58", + 
"https://deno.land/std@0.98.0/_util/has_own_property.ts": "f5edd94ed3f3c20c517d812045deb97977e18501c9b7105b5f5c11a31893d7a2", + "https://deno.land/std@0.98.0/_util/os.ts": "e282950a0eaa96760c0cf11e7463e66babd15ec9157d4c9ed49cc0925686f6a7", + "https://deno.land/std@0.98.0/bytes/bytes_list.ts": "a13287edb03f19d27ba4927dec6d6de3e5bd46254cd4aee6f7e5815810122673", + "https://deno.land/std@0.98.0/bytes/mod.ts": "1ae1ccfe98c4b979f12b015982c7444f81fcb921bea7aa215bf37d84f46e1e13", + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/fs/exists.ts": "b0d2e31654819cc2a8d37df45d6b14686c0cc1d802e9ff09e902a63e98b85a00", + "https://deno.land/std@0.98.0/io/buffer.ts": "3ead6bb11276ebcf093c403f74f67fd2205a515dbbb9061862c468ca56f37cd8", + "https://deno.land/std@0.98.0/io/bufio.ts": "82fe6a499cacf4604844472ccf328cb0a1c0571c0f83b5ee67e475018342b4ae", + "https://deno.land/std@0.98.0/io/ioutil.ts": "3c6b7c8be3b8cd19746de028c40063193578612244a935dcc27be9f3ff343b0c", + "https://deno.land/std@0.98.0/io/readers.ts": "17403919724fef2f343c88555606368868a5c752a1099ad801f6a381c170f62d", + "https://deno.land/std@0.98.0/io/types.d.ts": "89a27569399d380246ca7cdd9e14d5e68459f11fb6110790cc5ecbd4ee7f3215", + "https://deno.land/std@0.98.0/io/util.ts": "318be78b7954da25f0faffe123fef0d9423ea61af98467e860c06b60265eff6d", + "https://deno.land/std@0.98.0/mime/mod.ts": "38193fda66d8219b1a3cc19b318700b4dea6924bb28b3ffdd4d5bcf53056395f", + "https://deno.land/std@0.98.0/mime/multipart.ts": "c8dc6d7c1869045ea09daab2e6a1e4e86675519eb8062900086e2b5dff512129", + "https://deno.land/std@0.98.0/path/_constants.ts": "1247fee4a79b70c89f23499691ef169b41b6ccf01887a0abd131009c5581b853", + "https://deno.land/std@0.98.0/path/_interface.ts": "1fa73b02aaa24867e481a48492b44f2598cd9dfa513c7b34001437007d3642e4", + "https://deno.land/std@0.98.0/path/_util.ts": "2e06a3b9e79beaf62687196bd4b60a4c391d862cfa007a20fc3a39f778ba073b", + "https://deno.land/std@0.98.0/path/common.ts": "eaf03d08b569e8a87e674e4e265e099f237472b6fd135b3cbeae5827035ea14a", + "https://deno.land/std@0.98.0/path/glob.ts": "314ad9ff263b895795208cdd4d5e35a44618ca3c6dd155e226fb15d065008652", + "https://deno.land/std@0.98.0/path/mod.ts": "4465dc494f271b02569edbb4a18d727063b5dbd6ed84283ff906260970a15d12", + "https://deno.land/std@0.98.0/path/posix.ts": "f56c3c99feb47f30a40ce9d252ef6f00296fa7c0fcb6dd81211bdb3b8b99ca3b", + "https://deno.land/std@0.98.0/path/separator.ts": "8fdcf289b1b76fd726a508f57d3370ca029ae6976fcde5044007f062e643ff1c", + "https://deno.land/std@0.98.0/path/win32.ts": "77f7b3604e0de40f3a7c698e8a79e7f601dc187035a1c21cb1e596666ce112f8", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c", + "https://deno.land/std@0.98.0/textproto/mod.ts": "6e8430986393e3929720cec9c6668d75dee2ffd953886e842dc124c251cb86c8", + "https://deno.land/x/accepts@2.1.0/deps.ts": "15190dfb504b920508972e0319f652f045d44e0310efeda755b08c1f375b5847", + "https://deno.land/x/accepts@2.1.0/mod.ts": "ae058b80ba4e560ca9bd1eb4a488f5d6aeed9437fa192bc4903764fa653032cf", + "https://deno.land/x/compress@v0.3.6/deps.ts": "45958f0f1ac0ef3b5841c45f92ef5c12da77b5c8517f25bf98db8cc1d9aa4a6d", + "https://deno.land/x/compress@v0.3.6/gzip/gzip.ts": "e0132fb8af25fd4256125cc701dd4f4a590f93d6604d42ab1dfc4679efb5d950", + 
"https://deno.land/x/compress@v0.3.6/gzip/gzip_file.ts": "4d23399c1b17a6e65ce2e7dcb3fad0bc81b9920d010bd50b53c468de226c818c", + "https://deno.land/x/compress@v0.3.6/gzip/gzip_stream.ts": "9d4291c5ef3b7abce05b16450f7a0bcdd1c7d12ecd113417578d90e02eebb760", + "https://deno.land/x/compress@v0.3.6/gzip/mod.ts": "42a51f43eedbdc8522dec16c6ef58671f06b1ba270b86a14780af8a0bc017366", + "https://deno.land/x/compress@v0.3.6/gzip/writer_gunzip.ts": "0e08971118dc7f22f67312f0700c8a854bec747c28c183e4c1cf5d6d7c5ef19d", + "https://deno.land/x/compress@v0.3.6/gzip/writer_gzip.ts": "151d731e84dccdcaee2fde7b668a8ae0dc8f300f36b14a60202747113abeb151", + "https://deno.land/x/compress@v0.3.6/interface.ts": "169067c1fc3c6ac677afbffd2df59418a4927d851252fac67b881ada4dd6a1ba", + "https://deno.land/x/compress@v0.3.6/mod.ts": "19be50e35e5bf50bfbf7895ae953aa30314fdd10bff2f919a7417cca5ded55a6", + "https://deno.land/x/compress@v0.3.6/tar/mod.ts": "a7b709d7886f42e724516a7fa1522876b06b7f361c1fa908c486714be42a278b", + "https://deno.land/x/compress@v0.3.6/tgz/mod.ts": "cc2c230c3b97b7965aaa287acb546f8da0e65933cd7a3f4ba7f6e89f80b61853", + "https://deno.land/x/compress@v0.3.6/utils/uint8.ts": "7d15b88caaab8ac22330ce9660a78a778435d0ae7d52e3ab3762ef64d17ed66d", + "https://deno.land/x/compress@v0.3.6/zlib/deflate.ts": "0b961469e76f67adbfb52ca72ae110de123710647885816de69b7c1a697c681c", + "https://deno.land/x/compress@v0.3.6/zlib/inflate.ts": "450ff50e1d7d9716f3afcf0468d391775f87a17824ec203e02fc706404cb641f", + "https://deno.land/x/compress@v0.3.6/zlib/mod.ts": "4dca9c1e934b7ab27f31c318abd7bfd39b09be96fd76ba27bd46f3a4e73b4ad0", + "https://deno.land/x/compress@v0.3.6/zlib/zlib/adler32.ts": "e34c7596d63a655755c4b0a44a40d4f9c1d1c4d3b891e5c1f3f840f8939e1940", + "https://deno.land/x/compress@v0.3.6/zlib/zlib/crc32.ts": "b9bc4adaf327d32585205d1176bd52f6453c06dd1040544611d4c869e638119c", + "https://deno.land/x/compress@v0.3.6/zlib/zlib/deflate.ts": "1bfcd8a92cd2d7c247c26a2d35bfa08d05281f5be89ac95a4655bfb7b32d4912", + "https://deno.land/x/compress@v0.3.6/zlib/zlib/gzheader.ts": "11e6da7383447aae9791308dc2350a809fa341a876a2da396b03a2a31408c20c", + "https://deno.land/x/compress@v0.3.6/zlib/zlib/inffast.ts": "282daf5ea16bb876d26e342f3c24fe1a8ec84640e713a970b02232955a853f86", + "https://deno.land/x/compress@v0.3.6/zlib/zlib/inflate.ts": "880fb3fd1713b5904aed21e610699c0a0c8440f845daf9bf56b4830473f726c3", + "https://deno.land/x/compress@v0.3.6/zlib/zlib/inftrees.ts": "8a6d765a5c42bf3b6990060cabbe52e88493f8ce6d082e6e35d97756914cfb8e", + "https://deno.land/x/compress@v0.3.6/zlib/zlib/messages.ts": "c82229bd67ccc3b6162f3aca1c5e7f936e546aa91ac9a9ac4fcfefc3a9dc5ac8", + "https://deno.land/x/compress@v0.3.6/zlib/zlib/status.ts": "5987864d2d43d59bbbfa2e6ef4d5a07284c1d10489cc5843ddf41ac547957ac3", + "https://deno.land/x/compress@v0.3.6/zlib/zlib/trees.ts": "6575455707d62cd430f3fd1634cdb10ab6ac00212df11b6a7b31f1acc4ed0675", + "https://deno.land/x/compress@v0.3.6/zlib/zlib/zstream.ts": "c110fd5919235e317d64933852e24a1bba0126202be592e90e58f7b19315ad93", + "https://deno.land/x/content_type@1.0.1/mod.ts": "5dc435e84b665c2968d6845c573569ddeef65a41baa9a04c51e1c5e927806583", + "https://deno.land/x/cors@v1.2.1/abcCors.ts": "bacdcb2c9fbe2dbe71bf89a6fc1d87f77707aa475afb6440d1bdf667e21b9d58", + "https://deno.land/x/cors@v1.2.1/attainCors.ts": "3702f0ad58293c501594dffebce7185e0d93d9e01ec27a62009ad735851bdebd", + "https://deno.land/x/cors@v1.2.1/cors.ts": "60f6d07821fbd890f58cb78edc82f70a3f7f374a27a2c2152c7a7d77a58bef77", + "https://deno.land/x/cors@v1.2.1/mithCors.ts": 
"ae11b8b0a1cb04849c49ac6475251c64048054cbd52d0c68b4b07798d057a434", + "https://deno.land/x/cors@v1.2.1/mod.ts": "2b351913f56d77ad80cb3b8633d4539c9eeddb426dae79437ada0e6a9cb4f1a6", + "https://deno.land/x/cors@v1.2.1/oakCors.ts": "6188b4946c4b421372a77aaca61daa3b1c17720c6a09b967700258f28a06a6cc", + "https://deno.land/x/cors@v1.2.1/opineCors.ts": "944c56639abbb5698bb0769440d7cb9d4a900e8f44f1c7910904a61bf97c20ff", + "https://deno.land/x/cors@v1.2.1/types.ts": "97546633ccc7f0df7a29bacba5d91dc6f61decdd1b65258300244dba905d34b8", + "https://deno.land/x/crc32@v0.2.0/mod.ts": "de7a3fa2d4ef24b96fc21e1cc4d2d65d1d2b1dcea92f63960e3e11bfa82df0fa", + "https://deno.land/x/encodeurl@1.0.0/mod.ts": "b797af240fd3c4b3876b3f32b8c2404601e5503c4a10afa719e54815056685d8", + "https://deno.land/x/escape_html@1.0.0/mod.ts": "fd6671533b7f8bbf267047e3d1a20786837b3285c31552fa5f82f1e34f6e37c3", + "https://deno.land/x/http_errors@3.0.0/deps.ts": "7fcd181f9577e1803297e42cdb58ce219d9c7087391b1e9ccc12537e23b9d68c", + "https://deno.land/x/http_errors@3.0.0/mod.ts": "6549778cb6616cd16884dd1f46b28211fc8ac33f9d7141e92a8eaffb48391bc1", + "https://deno.land/x/isIP@1.0.0/mod.ts": "bd55c2180f7275930d9b1ef0b8f031108625ae3f9b87a1fa75cc8ee15d485ff8", + "https://deno.land/x/media_typer@1.0.1/mod.ts": "01aebf79f0ecf43897a663dca9730d2dbd2a698eac9d297a6ac26b4b647bb7cc", + "https://deno.land/x/media_types@v2.4.7/db.ts": "56a9deab286b6283e1df021d74ee3319353f27f7827716b6443427fff2fc6e24", + "https://deno.land/x/media_types@v2.4.7/deps.ts": "5eb8e569a0c1439f85f4a9888f8f3327b426c8b8f7ba55fdc1124d98d0e17390", + "https://deno.land/x/media_types@v2.4.7/mod.ts": "94141d7c415fcdad350fec9d36d77c18334efe25766db6f37d34709c896881ed", + "https://deno.land/x/media_types@v2.8.2/db.ts": "22db9bd6eb24934105098e9fce27d2746ef3861cd2668edef749c83c60acd60b", + "https://deno.land/x/media_types@v2.8.2/deps.ts": "c35f8deb890cc826b5e6797a920a6973623bedfa730ac94f469024028402b19b", + "https://deno.land/x/media_types@v2.8.2/mod.ts": "d63583b978d32eff8b76e1ae5d83cba2fb27baa90cc1bcb0ad15a06122ea8c19", + "https://deno.land/x/media_types@v2.8.4/db.ts": "22db9bd6eb24934105098e9fce27d2746ef3861cd2668edef749c83c60acd60b", + "https://deno.land/x/media_types@v2.8.4/deps.ts": "2dd71d14adbe5f3c837bd79e010d9706895a2cebdcd50d13ab1debe42b32ff7f", + "https://deno.land/x/media_types@v2.8.4/mod.ts": "d63583b978d32eff8b76e1ae5d83cba2fb27baa90cc1bcb0ad15a06122ea8c19", + "https://deno.land/x/negotiator@1.0.1/mod.ts": "d8b28a0a7b2d75c944cebef8f87a58eeb344974d432fe0dea85e2d98e03daf24", + "https://deno.land/x/negotiator@1.0.1/src/charset.ts": "ca9e78c774c59fed90e31eed9901b6ac78348cbba1adbba71364abdf56dce7e0", + "https://deno.land/x/negotiator@1.0.1/src/encoding.ts": "bd3e980c10de08798192cd56f1913e0c9f9be2b56391d7b4213f448cc94a0140", + "https://deno.land/x/negotiator@1.0.1/src/language.ts": "bd822581a7621bbe8c3157d38b4f1ec5a35d75eb58bf6be927d477d929e689a2", + "https://deno.land/x/negotiator@1.0.1/src/media_type.ts": "bde7a0f6a276fdb3b72569832ca6050ce5bcacad9d493029239af768bd06149f", + "https://deno.land/x/negotiator@1.0.1/src/types.ts": "3f2a779b3432bd5e2942965f326d82962e7e9b7a72b30fd81aeb0128f898303d", + "https://deno.land/x/opine@1.4.0/deps.ts": "ca464632f1987c84885757b3e38bbdab7b398cea8ef21835d370d60234173df2", + "https://deno.land/x/opine@1.4.0/mod.ts": "2e08e9573533a3a1467a9b2c61a477424f9b7c1b6361096e22a63ef80222ef74", + "https://deno.land/x/opine@1.4.0/src/application.ts": "1930ce661fb829dfd6b59643254b44ad3678efe6359edbf6e576ed07652d9d79", + 
"https://deno.land/x/opine@1.4.0/src/methods.ts": "0481daecc6068d24e9e5391818baddf555ab803d39a465dcd259161f8bd8ee49", + "https://deno.land/x/opine@1.4.0/src/middleware/bodyParser/getCharset.ts": "a1b1bcdcaed5e3d60e422fe81605c7982b0237037dad070d1f8c59699ff3fbed", + "https://deno.land/x/opine@1.4.0/src/middleware/bodyParser/json.ts": "0f54d3c45eaf891158f9b05f2ce4df7d1c9cd3c3f109e5c6ccd17cc674cb9e7b", + "https://deno.land/x/opine@1.4.0/src/middleware/bodyParser/raw.ts": "3910b63f2688090fa60398518a218cc9937f83b0fde4e0c1ad2b7cf3122596d7", + "https://deno.land/x/opine@1.4.0/src/middleware/bodyParser/read.ts": "985d4dea9decdd81206fc33bc8c9255c4f4ea7979167a059861a53a3af545522", + "https://deno.land/x/opine@1.4.0/src/middleware/bodyParser/text.ts": "a5dadd71f5181940a6ea6f25377c515c49708527602ff5de86d2b2765a4ac2ee", + "https://deno.land/x/opine@1.4.0/src/middleware/bodyParser/typeChecker.ts": "6faff00dd9413a5069537e1afdaf47a18b812ebaa29c18e63f908d9059b9d08b", + "https://deno.land/x/opine@1.4.0/src/middleware/bodyParser/urlencoded.ts": "6f0bfb39bdd5643ce28091402dab7504c8c48af85ab45167148804b75f1acf87", + "https://deno.land/x/opine@1.4.0/src/middleware/init.ts": "4bbb3e3bab0f6269f226e2f73f8a7ac74848477fb56e128082b75bebd329d3e4", + "https://deno.land/x/opine@1.4.0/src/middleware/query.ts": "e59ae85e26e31cbeab5fcbac258c9eaae2664015f98ea9854298050e90fcd6ae", + "https://deno.land/x/opine@1.4.0/src/middleware/serveStatic.ts": "25caf95641d11d51ea40a313cf51b78d0045abca8bc4d7a535a110c5a63df342", + "https://deno.land/x/opine@1.4.0/src/opine.ts": "c962e46bb64e7822675ba2ce011a43b2591aee54d3ce12737ea7f676a9c723b4", + "https://deno.land/x/opine@1.4.0/src/request.ts": "938e376a25987a9e3f1fb442a80ba56efcc484adbc82170e96da5c41b0892b21", + "https://deno.land/x/opine@1.4.0/src/response.ts": "aeee395cda665b7d534ce62270483f22c071a893405599866f305745aef153ab", + "https://deno.land/x/opine@1.4.0/src/router/index.ts": "4bfacaf3726ff774371f2600a8f670077323b77284a5a7ae45e35da830d824e5", + "https://deno.land/x/opine@1.4.0/src/router/layer.ts": "d3e6ee498a896c54a29fea9d1120079a6de665038ca1bb70e90b962ca86d902e", + "https://deno.land/x/opine@1.4.0/src/router/route.ts": "5a5567cdaa62799b8de7d40c9f201532928afe2313dc8c9836218a3f06aa8ce8", + "https://deno.land/x/opine@1.4.0/src/types.ts": "d0612f2df324c44a027030752d42866d86673bd09318f46f156cea0913675ee8", + "https://deno.land/x/opine@1.4.0/src/utils/compileETag.ts": "32edd6a8158a65389b99e6b24bba6c3933b509f21687f2ef39a838212cfb153d", + "https://deno.land/x/opine@1.4.0/src/utils/compileQueryParser.ts": "ce76345fed26ec94af17cd6a09b926cfa78fe5e0d4f7097552f34735ebce9ec4", + "https://deno.land/x/opine@1.4.0/src/utils/compileTrust.ts": "81ec638fc16f4a7b2bfee9283b477cec69b22898ee6e2f7e7aa13cd4a47ba3cf", + "https://deno.land/x/opine@1.4.0/src/utils/contentDisposition.ts": "2dfab1cea517a9378940f050f5d85a345458f71e4f9524487280e85b8b7e83b5", + "https://deno.land/x/opine@1.4.0/src/utils/cookies.ts": "241d5ae610d8a21748b8dfa5b2b94771b43906f8f4a1a2281d1f4698ce6c0a9f", + "https://deno.land/x/opine@1.4.0/src/utils/defineGetter.ts": "5f19a1789ea936963dd4733de480adfb0de0d1c8a9bf3e33f7126086dc090f29", + "https://deno.land/x/opine@1.4.0/src/utils/etag.ts": "297ed506bd4c80328facf6dff2d623aca9cc26ea540f5aef96a71815c8701869", + "https://deno.land/x/opine@1.4.0/src/utils/finalHandler.ts": "b6f9881250ca8c1d1e99dc8bc77c7a3aedbefbd0946fc7e70e2c4743bcab9181", + "https://deno.land/x/opine@1.4.0/src/utils/forwarded.ts": "9a6533f0fa749de26c4831b81a9c82aa05a47210b1d276f2df9df1d2eb4bc77c", + 
"https://deno.land/x/opine@1.4.0/src/utils/fresh.ts": "87c9adf18bcd9f1f7c90ffd7335d8fd60f902d92ed2e4adb0c876b5506e76728", + "https://deno.land/x/opine@1.4.0/src/utils/merge.ts": "c9cb64fff1d2691d74fd37a3649a74c87a7f6431dc5ea982f3ec49dd90372388", + "https://deno.land/x/opine@1.4.0/src/utils/mergeDescriptors.ts": "1fe498d4a1a8dcfd3570f9ca5e0647590d86d029b3c340bfcfdb57002851e41b", + "https://deno.land/x/opine@1.4.0/src/utils/normalizeType.ts": "16d6375e16de85a04ae9c661e51375542b389a6fe8f64669601828ad83d5af2e", + "https://deno.land/x/opine@1.4.0/src/utils/parseUrl.ts": "5a38b38dcaeb1b0debc7b252d654ab08cae586b05dd737accac2a3d218bc311d", + "https://deno.land/x/opine@1.4.0/src/utils/pathToRegex.ts": "d68398cff62053a3ea9f680d81ef7a7df4facc3781cb99aa1a14d4e7576c19d9", + "https://deno.land/x/opine@1.4.0/src/utils/proxyAddr.ts": "3deec89c0debc170d4bd1243d542bc05cf0d2a44ed3f0bba3af09398bdf08085", + "https://deno.land/x/opine@1.4.0/src/utils/requestProxy.ts": "a24b3cfa8668d3975d61e3408c13f500b309f5b571f91627a4460fc7d701a673", + "https://deno.land/x/opine@1.4.0/src/utils/send.ts": "68804a0941142c1c3d324158caacc1e88d33d34533fddbdcf769b4572bbe4a20", + "https://deno.land/x/opine@1.4.0/src/utils/stringify.ts": "4a935cd6c5a7d3a9fe1a786a71d2ec92c55120c8c551a3ef642a808ee5209b4c", + "https://deno.land/x/opine@1.4.0/src/view.ts": "02c5554551d55d06362b2fe3abbf8fe8a047b899d9b20e1548fa998e16a5aad0", + "https://deno.land/x/opine@1.4.0/version.ts": "0978c915b71daec33ab47989a8f95fbea6eb7daee55f3fb1d2dfe1344719fb32", + "https://deno.land/x/type_is@1.0.1/deps.ts": "e02cf93f70606ab11e8b601860625788958db686280fdfad5b6db1546bd879c4", + "https://deno.land/x/type_is@1.0.1/mod.ts": "3e471cf55f7ca9ec137ddb5feacb0d33b26e6b5b61bf7b2e2a4337b6939b8fd8", + "https://deno.land/x/vary@1.0.0/mod.ts": "e7c452694b21336419f16e0891f8ea503adaafc7bde956fb29e1a86f450a68e6", + "https://esm.sh/ipaddr.js@2.0.0": "4917afe3cce9ad078433493704c3bdba4d9d0f82e93f3b8e10636ddf85ef1b60", + "https://esm.sh/ms@2.1.2": "b7e88b2efb261a3b2cfb2b25b38da16c8921f0c11ea31b8b45224c21a2b67944", + "https://esm.sh/qs@6.9.4": "f217c83ab5b598c184b7d462af46b50bbb4695c60a54af48da371e3af3c7894a", + "https://esm.sh/range-parser@1.2.1": "f6714101f761e6724bdc83ed3b4f79c1f6678265d272c9d9d901e63e1f9c9e89" +} diff --git a/packages/app-opine/dev_deps.js b/packages/app-opine/dev_deps.js new file mode 100644 index 00000000..03921feb --- /dev/null +++ b/packages/app-opine/dev_deps.js @@ -0,0 +1,6 @@ +export { + assert, + assertEquals, + assertObjectMatch, +} from "https://deno.land/std@0.98.0/testing/asserts.ts"; +export { superdeno } from "https://deno.land/x/superdeno@4.2.1/mod.ts"; diff --git a/packages/app-opine/dev_deps_lock.json b/packages/app-opine/dev_deps_lock.json new file mode 100644 index 00000000..3a1ee6bb --- /dev/null +++ b/packages/app-opine/dev_deps_lock.json @@ -0,0 +1,66 @@ +{ + "https://deno.land/std@0.95.0/_util/assert.ts": "2f868145a042a11d5ad0a3c748dcf580add8a0dbc0e876eaa0026303a5488f58", + "https://deno.land/std@0.95.0/async/deferred.ts": "624bef4b755b71394620508a0c234a93cb8020cbd1b04bfcdad41c174392cef6", + "https://deno.land/std@0.95.0/async/delay.ts": "9de1d8d07d1927767ab7f82434b883f3d8294fb19cad819691a2ad81a728cf3d", + "https://deno.land/std@0.95.0/async/mod.ts": "253b41c658d768613eacfb11caa0a9ca7148442f932018a45576f7f27554c853", + "https://deno.land/std@0.95.0/async/mux_async_iterator.ts": "c405c4c1194f7600189aa81dfe243e165c27f36eaea88a301fa2a74ce974548b", + "https://deno.land/std@0.95.0/async/pool.ts": 
"353ce4f91865da203a097aa6f33de8966340c91b6f4a055611c8c5d534afd12f", + "https://deno.land/std@0.95.0/bytes/mod.ts": "1ae1ccfe98c4b979f12b015982c7444f81fcb921bea7aa215bf37d84f46e1e13", + "https://deno.land/std@0.95.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.95.0/http/_io.ts": "a04ecb9ffef7963d2ccb332b3d5ba1e2efd6ff4d029c3df62833a43058955439", + "https://deno.land/std@0.95.0/http/http_status.ts": "ebaa9bebfb8adc3d7b20c49e11037e4eefd79629ad80d81383933f4cdc91b3eb", + "https://deno.land/std@0.95.0/http/server.ts": "f71555bcf2f8e94bef9617f747d7f2adce883187e197396f83c0975378e84bf5", + "https://deno.land/std@0.95.0/io/buffer.ts": "c4717e49f742a1ddd0dd6f547cdbd9d15f5dac2c86df666579f50e5d090266ce", + "https://deno.land/std@0.95.0/io/bufio.ts": "729ea49afacd27ed0687451c694752dcaa68250871d1c957ca886ef5d82c461f", + "https://deno.land/std@0.95.0/io/util.ts": "843ccf667ff291ba2315c2419a54b4e76dbc7c0f6c3152f640eac4dc954e3753", + "https://deno.land/std@0.95.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.95.0/testing/asserts.ts": "06ca5f20da42af5960bab649010b6e3e3e476f695223bffe11ce7ae80365b262", + "https://deno.land/std@0.95.0/textproto/mod.ts": "1c89b39a079dd158893ab2e9ff79391c66049433d6ca82da7d64b32280570d51", + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c", + "https://deno.land/x/opine@1.3.3/src/methods.ts": "b0d10bca6360c29488a41c95dc2af475e1f96a5a44da1ffcf7e9edacc65a40a9", + "https://deno.land/x/opine@1.3.3/src/utils/mergeDescriptors.ts": "e61d541bb6eeeb651f051accbe935fbbdddcb4c1d382994099b4fddf29d10d02", + "https://deno.land/x/superdeno@4.2.1/deps.ts": "323b3a9c7d7440016489ca59316bd50a42feb4ef7e664f1de57b47b3fb440aee", + "https://deno.land/x/superdeno@4.2.1/mod.ts": "fa91c501867a4302a4bc92d63cbf934fe5475ebb7bf58335338e001147263c87", + "https://deno.land/x/superdeno@4.2.1/src/close.ts": "b662f230cbebf7270669f0de25f07dfb4fac6006041f6b9607fd7eb3a182b274", + "https://deno.land/x/superdeno@4.2.1/src/superagent.ts": "e11ae39779bec9b419ab8aa7aeda410aa16363161723eaed5da60fb64755f03c", + "https://deno.land/x/superdeno@4.2.1/src/superdeno.ts": "1aa855ec1e005d52d07d5df008b3253a5988170d2820da1c943dcd9ebe74458f", + "https://deno.land/x/superdeno@4.2.1/src/test.ts": "91cbf3c91ff4b4b302dc841fc10b2ab013ac1a0603c713e8c77a9596b58150dd", + "https://deno.land/x/superdeno@4.2.1/src/types.ts": "54d30059443f2b9cd6b08722e94b39d5ba98ef392ae0ccbca95bc9793d9867b1", + "https://deno.land/x/superdeno@4.2.1/src/utils.ts": "4cf7b6f88ebe82b5bb7f9e7c022c96fd41c6a99c7f735f55e34c6118441c972b", + "https://deno.land/x/superdeno@4.2.1/src/xhrSham.js": "66b15c3da7c87166cbbb982ba35631c9ab23129bdf9aa5f3c03a855b2d84ae1f", + "https://deno.land/x/superdeno@4.2.1/version.ts": "f6769c2b05ae7fe6e84a6e40daa4cc3c97417812eb824f8e18b2a17264087a33", + "https://jspm.dev/npm:call-bind@1.0.2!cjs": "24c188f9241fbc82c6ac2ae1b63bf3a0574885e3fa739ef09b3a476a3a571634", + "https://jspm.dev/npm:call-bind@1.0.2/callBound!cjs": "a1586c35033398887376229b84f861c77cb686b034ea8a29d0be41bf46623071", + "https://jspm.dev/npm:call-bind@1/callBound!cjs": "3ff4779595db6e915ae7d9926ba3338b8777ea7af06560bd254c84840faab169", + 
"https://jspm.dev/npm:component-emitter@1!cjs": "26c2994a5fcac1cd9156b00be96c5e2f006dd76338095a96006ac3a47c6c327d", + "https://jspm.dev/npm:component-emitter@1.3.0!cjs": "757cafefb0bf5639f3f90b2267a7d168e03631e731c2a79fca847b735695e196", + "https://jspm.dev/npm:fast-safe-stringify@2!cjs": "8a66c4aded6b83ba61f4b3dfcef6a141d57ae290568370791c1b8a18fadf89c3", + "https://jspm.dev/npm:fast-safe-stringify@2.0.7!cjs": "9b602a8b003511197c41c7b54bbb1de8f9662697fed250bdcca5bb1563d3bc1c", + "https://jspm.dev/npm:function-bind@1!cjs": "c06b352224fa5e98b331aef38c540a409ed7ef1e6515a38f1a737a8e4f42118a", + "https://jspm.dev/npm:function-bind@1.1.1!cjs": "4fb6903c972da5bcebe751a5b498ec4599987fc1e7157c15d08f388f2986c0f6", + "https://jspm.dev/npm:get-intrinsic@1!cjs": "abb5217ca73334e2dbe85ece2b5e28054f09fe68388940d1d17ff84929a7c55b", + "https://jspm.dev/npm:get-intrinsic@1.1.1!cjs": "c775042bc8637b6a65e9aeccfdb34f7cd09df75f30a4aff7c53e600e03df8ee2", + "https://jspm.dev/npm:has-symbols@1!cjs": "a7515aa43edd2be1cd65cadf92b8a400472a082dc844988cfd5e9421d191bfde", + "https://jspm.dev/npm:has-symbols@1.0.2!cjs": "61f8934789661c002e53a0bb7f4ea3d752979ca8240d6e0d782200b55873e1a0", + "https://jspm.dev/npm:has-symbols@1.0.2/shams!cjs": "939ccd9db414160fc19a1ee746d4b42dcb7a4c87034302b6589678e0a17170f4", + "https://jspm.dev/npm:has@1!cjs": "79286edf0358a403240ee04be7814df009af2ed8a0f76e7df54b614da4b0ac95", + "https://jspm.dev/npm:has@1.0.3!cjs": "ecbdcfeef29b4da5a5b3e7e388c08998e1b52e0cca905bd3a06ec457fe0a568a", + "https://jspm.dev/npm:object-inspect@1!cjs": "a1954bac46a49241758575566da5b2a86ce66d9bbf49f7703c156ed9ccafafc0", + "https://jspm.dev/npm:object-inspect@1.10.3!cjs": "c6f599db738b17381158d06ec3f78eda67fa2121d157bddc1ef8eb2a89dbab3f", + "https://jspm.dev/npm:qs@6!cjs": "0cb7206233584f81f332a131df966b0f2529daf731dd2f7c8a49da86adfc15ca", + "https://jspm.dev/npm:qs@6.10.1!cjs": "665d3ff0db409dbf7a88e0552598350821f40e192664ed5719b7566484759ebb", + "https://jspm.dev/npm:qs@6.10.1/_/9b9acba6.js": "990a8f41723853aae58d118ad70fe6e09718d44fb68ed5ee650efd681aa6f5cd", + "https://jspm.dev/npm:qs@6.10.1/lib/parse!cjs": "301878ada2ca7b5c61efd60207e38266594ea1053bb379200a2cc70874d3b188", + "https://jspm.dev/npm:qs@6.10.1/lib/stringify!cjs": "b46e649f2b1ad10993e9600261b3fd19b3b41027bd5af3788b3c72bba5d85e92", + "https://jspm.dev/npm:side-channel@1!cjs": "a07dfe7165af0d7f916d089490c38839397abcd8b218e4566b270858c9a0ea04", + "https://jspm.dev/npm:side-channel@1.0.4!cjs": "db65b31b6f9e67d57f04e26d71eb5b376306f5a89ab46fae1278c3ffefb19663", + "https://jspm.dev/npm:superagent@6.1.0!cjs": "fcf1c0b17cb3ff899b59ae178fc4ab74ad3b592d7fa8b44b16394001758e3176", + "https://jspm.dev/npm:superagent@6.1.0/lib/agent-base!cjs": "cfe465965a55d80114d835143717413945d0bbc46355d0f7f8200a89902ed006", + "https://jspm.dev/npm:superagent@6.1.0/lib/is-object!cjs": "95f67ff49b42fd5e82114b9d54a4b3fe1ac98813aed7ceaf53d314983f59820a", + "https://jspm.dev/npm:superagent@6.1.0/lib/request-base!cjs": "e361c341aa75d7417c918bc8fb697d0ccf96101e039dd2f00e5e45c01c534caa", + "https://jspm.dev/npm:superagent@6.1.0/lib/response-base!cjs": "00ac549f34d73c2753caa798aa7eb781051179013e3418ff0868a1e1904a8913", + "https://jspm.dev/npm:superagent@6.1.0/lib/utils!cjs": "ea706523553983c96ef4ab2f191c61c53fb8b78ad8ff2472b48f1385e896c030", + "https://jspm.dev/superagent@6.1.0": "4b3082d71252c42abd3930d85d1f3c4b2e937e0fab2b5f1c9d19eac20dea89a9" +} diff --git a/packages/app-opine/mod.js b/packages/app-opine/mod.js new file mode 100644 index 00000000..7f6c5bf6 --- /dev/null +++ 
b/packages/app-opine/mod.js @@ -0,0 +1,186 @@ +import { + cors, + exists, + helmet, + json, + MultipartReader, + opine, + R, +} from "./deps.js"; + +import * as cache from "./api/cache.js"; +import * as data from "./api/data.js"; +import * as storage from "./api/storage.js"; +import * as search from "./api/search.js"; +import * as queue from "./api/queue.js"; + +const { compose } = R; + +const TMP_DIR = "/tmp/hyper/uploads"; + +// opine app +export default function (services) { + // TODO: Maybe refine this and make this a lib? + // Upload middleware for handling multipart/form-data, i.e. files + const upload = (fieldName = "file") => + async (req, _res, next) => { + let boundary; + + const contentType = req.get("content-type"); + if (contentType.startsWith("multipart/form-data")) { + boundary = contentType.split(";")[1].split("=")[1]; + } + + // Ensure tmp dir exists. Otherwise MultipartReader throws an error when reading form data + if (!(await exists(TMP_DIR))) { + await Deno.mkdir(TMP_DIR, { recursive: true }); + } + + const form = await new MultipartReader(req.body, boundary).readForm({ + maxMemory: 10 << 20, + dir: TMP_DIR, + }); + + // emulate multer + req.file = form.file(fieldName); + + next(); + }; + + let app = opine(); + // enable extensibility: allow provided + // middleware to wrap the app + app = services.middleware.length > 0 + ? compose(...services.middleware)(app) + : app; + + const port = Deno.env.get("PORT") || 6363; + + // middleware to inject core modules into request object + const bindCore = (req, _res, next) => { + req.cache = services.cache; + req.data = services.data; + req.storage = services.storage; + req.search = services.search; + req.events = services.events; + req.hooks = services.hooks; + req.queue = services.queue; + next(); + }; + + /** + * Workaround for: + * https://github.com/asos-craigmorten/opine/issues/126 + * https://github.com/asos-craigmorten/opine/issues/128 + * + * waiting until released on Deno land + */ + app.use((_req, res, next) => { + if (typeof res.setHeader !== "function") { + res.setHeader = res.set.bind(res); + } + + if (typeof res.removeHeader !== "function") { + res.removeHeader = (name) => res.headers && res.headers.delete(name); + } + next(); + }); + + app.use(helmet()); + app.use(cors({ credentials: true })); + // data api + app.get("/data", data.index); + app.put("/data/:db", bindCore, data.createDb); + app.delete("/data/:db", bindCore, data.removeDb); + app.get("/data/:db", bindCore, data.listDocuments); + app.post("/data/:db", json({ limit: "8mb" }), bindCore, data.createDocument); + app.get("/data/:db/:id", bindCore, data.getDocument); + app.put( + "/data/:db/:id", + json({ limit: "8mb" }), + bindCore, + data.updateDocument, + ); + app.delete("/data/:db/:id", bindCore, data.deleteDocument); + app.post("/data/:db/_query", json(), bindCore, data.queryDb); + app.post("/data/:db/_index", json(), bindCore, data.indexDb); + app.post("/data/:db/_bulk", json(), bindCore, data.bulk); + + // cache api + app.get("/cache", bindCore, cache.index); + app.put("/cache/:name", bindCore, cache.createStore); + app.delete("/cache/:name", bindCore, cache.deleteStore); + app.get("/cache/:name/_query", bindCore, cache.queryStore); + app.post("/cache/:name/_query", bindCore, cache.queryStore); + app.post("/cache/:name", json(), bindCore, cache.createDocument); + app.get("/cache/:name/:key", bindCore, cache.getDocument); + app.put("/cache/:name/:key", json(), bindCore, cache.updateDocument); + app.delete("/cache/:name/:key", bindCore, cache.deleteDocument); +
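+ // Note: the storage POST route below runs upload("file") first; form parts
+ // up to maxMemory (10MB) are held in memory, while larger files spill to
+ // TMP_DIR as tempfiles that putObject streams from disk and then closes.
+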
+ // storage api + app.get("/storage", storage.index); + app.put("/storage/:name", bindCore, storage.makeBucket); + app.delete("/storage/:name", bindCore, storage.removeBucket); + app.post("/storage/:name", upload("file"), bindCore, storage.putObject); + app.get("/storage/:name/*", bindCore, storage.getObject); + app.delete("/storage/:name/*", bindCore, storage.removeObject); + + // search api + app.get("/search", search.index); + app.put("/search/:index", json(), bindCore, search.createIndex); + app.delete("/search/:index", bindCore, search.deleteIndex); + app.post("/search/:index", json(), bindCore, search.indexDoc); + app.get("/search/:index/:key", bindCore, search.getDoc); + app.put("/search/:index/:key", json(), bindCore, search.updateDoc); + app.delete("/search/:index/:key", bindCore, search.removeDoc); + app.post("/search/:index/_query", json(), bindCore, search.query); + app.post("/search/:index/_bulk", json(), bindCore, search.bulk); + + // queue api + app.get("/queue", bindCore, queue.index); + app.put("/queue/:name", json(), bindCore, queue.create); + app.delete("/queue/:name", bindCore, queue.del); + app.post("/queue/:name", json(), bindCore, queue.post); + app.get("/queue/:name", bindCore, queue.list); + app.post("/queue/:name/:id/_cancel", bindCore, queue.cancel); + + app.get("/error", (_req, _res, next) => { + console.log("oooooo"); + next(new Error("Error occurred")); + }); + + app.get("/", (_req, res) => { + res.send({ + name: "hyper63", + version: "unstable", + services: Object + .keys(services) + .filter((k) => k !== "events") + .filter((k) => k !== "middleware") + .filter((k) => k !== "hooks") + .filter((k) => services[k] !== null), + }); + }); + + // TODO: Tyler. Add a favicon? + app.get("/favicon.ico", (_req, res) => res.sendStatus(204)); + + // All of these args need to be specified, or it won't be invoked on error + app.use((err, _req, res, _next) => { + if (err) { + console.log(JSON.stringify({ + type: "ERROR", + date: new Date().toISOString(), + payload: err.message, + })); + res.setStatus(500).json({ ok: false, msg: err.message }); + } + }); + + if (Deno.env.get("DENO_ENV") !== "test") { + app.listen(port); + console.log("hyper63 service listening on port", port); + } + + return app; +}
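
The `services.middleware` hook above makes the app extensible: each entry is expected to be a function from app to app, composed over the opine app before routes are mounted. A minimal sketch of one such entry (the request logger is illustrative, not part of this diff):

```
// a middleware entry: app => app, suitable for services.middleware
const withRequestLog = (app) => {
  app.use((req, _res, next) => {
    console.log(JSON.stringify({ type: "REQUEST", method: req.method, url: req.url }));
    next();
  });
  return app;
};

// e.g. build the app with: build({ ...services, middleware: [withRequestLog] })
```

diff --git a/packages/app-opine/test/data-bulk_test.js b/packages/app-opine/test/data-bulk_test.js new file mode 100644 index 00000000..40a536eb --- /dev/null +++ b/packages/app-opine/test/data-bulk_test.js @@ -0,0 +1,23 @@ +// TODO: Tyler.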
Probably better way to do this +import { assertEquals, superdeno } from "../dev_deps.js"; + +import build from "../mod.js"; + +Deno.env.set("DENO_ENV", "test"); + +const app = build({ + middleware: [], +}); + +Deno.test("GET /error", async () => { + const res = await superdeno(app) + .get("/error"); + + assertEquals(res.body.ok, false); +}); diff --git a/packages/app-opine/test/mod_test.js b/packages/app-opine/test/mod_test.js new file mode 100644 index 00000000..ab4a27ff --- /dev/null +++ b/packages/app-opine/test/mod_test.js @@ -0,0 +1,18 @@ +// TODO: Tyler. Probably better way to do this +import { assertEquals, superdeno } from "../dev_deps.js"; + +import build from "../mod.js"; + +Deno.env.set("DENO_ENV", "test"); + +const app = build({ + middleware: [], +}); + +Deno.test("GET /", async () => { + const res = await superdeno(app) + .get("/") + .expect(200); + + assertEquals(res.body.name, "hyper63"); +}); diff --git a/packages/app-opine/utils.js b/packages/app-opine/utils.js new file mode 100644 index 00000000..38302ac4 --- /dev/null +++ b/packages/app-opine/utils.js @@ -0,0 +1,13 @@ +export const fork = (res, code, m) => + m.fork( + (error) => { + if (error.status) { + return res.setStatus(error.status).send({ + ok: false, + msg: error.message, + }); + } + res.setStatus(500).send(error); + }, + (result) => res.setStatus(code).send(result), + ); diff --git a/packages/core/README.md b/packages/core/README.md index 2ae647ef..1e281a20 100644 --- a/packages/core/README.md +++ b/packages/core/README.md @@ -1,7 +1,12 @@ -# hyper63 Core Package +# hyper core -The core package handles all of the business logic for the hyper63 -service framework. Every implementation of hyper63 should require -the core package. +The core package handles all of the business logic for the hyper63 service +framework. Every implementation of hyper63 should require the core package. 
The core package validates the config schema and plugin schemas. + +## Testing + +``` +./scripts/test.sh +``` diff --git a/packages/core/deps.js b/packages/core/deps.js new file mode 100644 index 00000000..07b1c41e --- /dev/null +++ b/packages/core/deps.js @@ -0,0 +1,14 @@ +export * as R from "https://cdn.skypack.dev/ramda@^0.27.1"; +export { default as crocks } from "https://cdn.skypack.dev/crocks@^0.12.4"; +export * as z from "https://cdn.skypack.dev/zod@3.1.0"; +export { default as ms } from "https://cdn.skypack.dev/ms@2.1.3"; +export { cuid } from "https://deno.land/x/cuid@v1.0.0/index.js"; + +/* +export cache from '../port-cache/mod.js' +export data from '../port-data/mod.js' +export storage from '../port-storage/mod.js' +export search from '../port-search/mod.js' +export hooks from '../port-hooks/mod.js' +//export queue from '../port-queue/mod.js' +*/ diff --git a/packages/core/deps_lock.json b/packages/core/deps_lock.json new file mode 100644 index 00000000..b9925d2d --- /dev/null +++ b/packages/core/deps_lock.json @@ -0,0 +1,14 @@ +{ + "https://cdn.skypack.dev/-/crocks@v0.12.4-Mje8nEhNx2rmIpwz3ROp/dist=es2020,mode=imports/optimized/crocks.js": "93d587d18dc5f124f30e5b38de37a6471eb65309c94ef2ffc7a36dc40ab394da", + "https://cdn.skypack.dev/-/ms@v2.1.3-dWyghJmVCt8Lp5D9h2ww/dist=es2020,mode=imports/optimized/ms.js": "fd88e2d51900437011f1ad232f3393ce97db1b87a7844b3c58dd6d65562c1276", + "https://cdn.skypack.dev/-/ramda@v0.27.1-3ePaNsppsnXYRcaNcaWn/dist=es2020,mode=imports/optimized/ramda.js": "0e51cd76039fc9669f9179d935b261bcbb19ecbb11d49e7e60cfbc14360a85d2", + "https://cdn.skypack.dev/-/zod@v3.1.0-Uo2Qq76nDKBkmcT9IlPZ/dist=es2020,mode=imports/optimized/zod.js": "2dffcb4bf919f89e183c2ab0c8595822c1454af1676e504ec36bde1ac72b2bc1", + "https://cdn.skypack.dev/crocks@^0.12.4": "d48852ce36c500f2770a2bc240cb6df9ffb2219d184b32b9be542e8560a6ff1d", + "https://cdn.skypack.dev/ms@2.1.3": "505c67d3120ebdefdcf30bba02e40322eb23bbf52641315611812cc98efeb7dc", + "https://cdn.skypack.dev/ramda@^0.27.1": "fb06d7de4305dcdb997f9adc903a463afcdac75d148d638295c420a8922048fd", + "https://cdn.skypack.dev/zod@3.1.0": "9062f35d0134abac378aef1e04fd4ce0fb44af185c8aaa29633c641f5802f725", + "https://deno.land/x/cuid@v1.0.0/fingerprint.js": "34d45895441ad08a1ce2c76da4b72da24c00b68346e9bb3c8dfd10eb5774d74e", + "https://deno.land/x/cuid@v1.0.0/getRandomValue.js": "52545634d1e62836ed44aeea057caa2d054ea979090049ea8facb2ececa23385", + "https://deno.land/x/cuid@v1.0.0/index.js": "7313248002e361977e2801d0cbfd6b7d20926dd59c880f1698672a4fdefe999e", + "https://deno.land/x/cuid@v1.0.0/pad.js": "11c84744f110744659e8a61f991a1223b917786aaddc928970f85027e1fe1a12" +} diff --git a/packages/core/dev_deps.js b/packages/core/dev_deps.js new file mode 100644 index 00000000..6a43158e --- /dev/null +++ b/packages/core/dev_deps.js @@ -0,0 +1,4 @@ +export { + assertEquals, + assertObjectMatch, +} from "https://deno.land/std@0.98.0/testing/asserts.ts"; diff --git a/packages/core/dev_deps_lock.json b/packages/core/dev_deps_lock.json new file mode 100644 index 00000000..7cf4ec42 --- /dev/null +++ b/packages/core/dev_deps_lock.json @@ -0,0 +1,5 @@ +{ + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c" +}
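Before the core internals below, it helps to see the overall shape that `utils/config-schema.js` (later in this diff) expects a hyper63 config to take. A minimal sketch; `opineApp` and `memoryCache` are hypothetical placeholders, not packages in this repo:

```
// hyper63.config.js (sketch)
export default {
  app: opineApp, // function that receives the wired core services
  adapters: [
    // each plugin is an object of { id?, port?, load, link }
    { port: "cache", plugins: [memoryCache()] },
  ],
  middleware: [], // optional array of middleware functions
};
```
diff --git a/packages/core/lib/cache/doc.js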
b/packages/core/lib/cache/doc.js index 40e61a8b..ff896ede 100644 --- a/packages/core/lib/cache/doc.js +++ b/packages/core/lib/cache/doc.js @@ -1,12 +1,16 @@ -const ms = require('ms') -const { compose, identity, ifElse, isNil, lensProp, prop, over, omit } = require('ramda') -const { is, of, apply, triggerEvent } = require('../utils') -const { hasProp } = require('crocks/predicates') +import { crocks, ms, R } from "../../deps.js"; +import { apply, is, of, triggerEvent } from "../utils/mod.js"; -const INVALID_KEY = 'key is not valid' -const INVALID_RESULT = 'result is not valid' -const convertTTL = over(lensProp('ttl'), (ttl) => (ttl ? String(ms(ttl)) : null)) -const removeTTL = ifElse(compose(isNil, prop('ttl')), omit(['ttl']), identity) +const { compose, identity, ifElse, isNil, lensProp, prop, over, omit } = R; +const { hasProp } = crocks; + +const INVALID_KEY = "key is not valid"; +const INVALID_RESULT = "result is not valid"; +const convertTTL = over( + lensProp("ttl"), + (ttl) => (ttl ? String(ms(ttl)) : null), +); +const removeTTL = ifElse(compose(isNil, prop("ttl")), omit(["ttl"]), identity); /** * @param {string} store @@ -15,26 +19,26 @@ const removeTTL = ifElse(compose(isNil, prop('ttl')), omit(['ttl']), identity) * @param {string} ttl * @returns {AsyncReader} */ -const create = (store, key, value, ttl) => +export const create = (store, key, value, ttl) => of({ store, key, value, ttl }) .map(convertTTL) .map(removeTTL) .chain(is(validKey, INVALID_KEY)) - .chain(apply('createDoc')) - .chain(triggerEvent('CACHE:CREATE')) - .chain(is(validResult, INVALID_RESULT)) + .chain(apply("createDoc")) + .chain(triggerEvent("CACHE:CREATE")) + .chain(is(validResult, INVALID_RESULT)); /** * @param {string} store * @param {string} key * @returns {AsyncReader} */ -const get = (store, key) => +export const get = (store, key) => of({ store, key }) .chain(is(validKey, INVALID_KEY)) - .chain(apply('getDoc')) - .chain(triggerEvent('CACHE:GET')) - // .chain(is(validResult, INVALID_RESULT)); + .chain(apply("getDoc")) + .chain(triggerEvent("CACHE:GET")); +// .chain(is(validResult, INVALID_RESULT)); /** * @param {string} store @@ -43,42 +47,36 @@ const get = (store, key) => * @param {string} ttl * @returns {AsyncReader} */ -const update = (store, key, value, ttl) => +export const update = (store, key, value, ttl) => of({ store, key, value, ttl }) .map(convertTTL) .map(removeTTL) .chain(is(validKey, INVALID_KEY)) - .chain(apply('updateDoc')) - .chain(triggerEvent('CACHE:UPDATE')) - .chain(is(validResult, INVALID_RESULT)) + .chain(apply("updateDoc")) + .chain(triggerEvent("CACHE:UPDATE")) + .chain(is(validResult, INVALID_RESULT)); /** * @param {string} store * @param {string} key * @returns {AsyncReader} */ -const del = (store, key) => +export const del = (store, key) => of({ store, key }) .chain(is(validKey, INVALID_KEY)) - .chain(apply('deleteDoc')) - .chain(triggerEvent('CACHE:DELETE')) - .chain(is(validResult, INVALID_RESULT)) + .chain(apply("deleteDoc")) + .chain(triggerEvent("CACHE:DELETE")) + .chain(is(validResult, INVALID_RESULT)); -module.exports = { - create, - get, - update, - del -} // validators predicate functions -function validKey (doc) { - return /^[a-z0-9-]+$/.test(doc.key) +function validKey(doc) { + return /^[a-z0-9-]+$/.test(doc.key); } -function validResult (result) { - if (result && hasProp('ok', result)) { - return true +function validResult(result) { + if (result && hasProp("ok", result)) { + return true; } - console.log({ result }) - return false + console.log({ result }); + return 
false; } diff --git a/packages/core/lib/cache/doc_test.js b/packages/core/lib/cache/doc_test.js index a59c6e0b..0c3625a6 100644 --- a/packages/core/lib/cache/doc_test.js +++ b/packages/core/lib/cache/doc_test.js @@ -1,56 +1,69 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ +// deno-lint-ignore-file no-unused-vars +import { assertEquals } from "../../dev_deps.js"; +import * as doc from "./doc.js"; -const test = require('tape') -const doc = require('./doc.js') +const test = Deno.test; const mockService = { createDoc: ({ store, key, doc, ttl }) => Promise.resolve({ ok: true }), - getDoc: ({ store, key }) => Promise.resolve({ hello: 'world' }), + getDoc: ({ store, key }) => Promise.resolve({ hello: "world" }), updateDoc: ({ store, key, doc }) => Promise.resolve({ ok: true }), - deleteDoc: ({ store, key }) => Promise.resolve({ ok: true }) -} - -const fork = (m) => (t) => { - t.plan(1) - m.fork( - e => { - console.log(e) - t.ok(false) - }, - () => t.ok(true) - ) -} + deleteDoc: ({ store, key }) => Promise.resolve({ ok: true }), +}; + +const fork = (m) => + () => { + m.fork( + (e) => { + console.log(e); + assertEquals(false, true); + }, + () => assertEquals(true, true), + ); + }; const events = { - dispatch: () => null -} + dispatch: () => null, +}; test( - 'create cache doc', - fork(doc.create('store', 'key', { hello: 'world' }).runWith({ svc: mockService, events })) -) + "create cache doc", + fork( + doc.create("store", "key", { hello: "world" }).runWith({ + svc: mockService, + events, + }), + ), +); test( - 'cannot create cache doc with invalid key', - t => { - t.plan(1) - doc.create('store', 'Not_Valid', { beep: 'boop' }) + "cannot create cache doc with invalid key", + () => { + doc.create("store", "Not_Valid", { beep: "boop" }) .runWith({ svc: mockService, events }) .fork( - () => t.ok(true), - () => t.ok(false) - ) - } -) + () => assertEquals(true, true), + () => assertEquals(false, true), + ); + }, +); -test('get cache doc', fork(doc.get('store', 'key-1234').runWith({ svc: mockService, events }))) +test( + "get cache doc", + fork(doc.get("store", "key-1234").runWith({ svc: mockService, events })), +); test( - 'update cache document', - fork(doc.update('store', 'key-1234', { foo: 'bar' }).runWith({ svc: mockService, events })) -) + "update cache document", + fork( + doc.update("store", "key-1234", { foo: "bar" }).runWith({ + svc: mockService, + events, + }), + ), +); test( - 'delete cache document', - fork(doc.update('store', 'key-1234').runWith({ svc: mockService, events })) -) + "delete cache document", + fork(doc.del("store", "key-1234").runWith({ svc: mockService, events })), +); diff --git a/packages/core/lib/cache/index.js b/packages/core/lib/cache/mod.js similarity index 70% rename from packages/core/lib/cache/index.js rename to packages/core/lib/cache/mod.js index 13bcf725..73cbf154 100644 --- a/packages/core/lib/cache/index.js +++ b/packages/core/lib/cache/mod.js @@ -1,21 +1,22 @@ -const store = require('./store.js') -const doc = require('./doc.js') +import * as store from "./store.js"; +import * as doc from "./doc.js"; -module.exports = ({ cache, events }) => { - const index = () => store.index().runWith({ svc: cache, events }) +export default function ({ cache, events }) { + const index = () => store.index().runWith({ svc: cache, events }); /** * @param {string} name * @returns {Async} */ - const createStore = (name) => store.create(name).runWith({ svc: cache, events }) + const createStore = (name) => + store.create(name).runWith({ svc: cache, events }); /** * *
@param {string} name * @returns {Async} */ - const deleteStore = (name) => store.del(name).runWith({ svc: cache, events }) + const deleteStore = (name) => store.del(name).runWith({ svc: cache, events }); /** * @param {string} store @@ -25,7 +26,7 @@ module.exports = ({ cache, events }) => { * @returns {Async} */ const createDoc = (store, key, value, ttl) => - doc.create(store, key, value, ttl).runWith({ svc: cache, events }) + doc.create(store, key, value, ttl).runWith({ svc: cache, events }); /** * @param {string} store @@ -34,21 +35,23 @@ module.exports = ({ cache, events }) => { * @returns {Async} */ const updateDoc = (store, key, value, ttl) => - doc.update(store, key, value, ttl).runWith({ svc: cache, events }) + doc.update(store, key, value, ttl).runWith({ svc: cache, events }); /** * @param {string} store * @param {string} key * @returns {Async} */ - const getDoc = (store, key) => doc.get(store, key).runWith({ svc: cache, events }) + const getDoc = (store, key) => + doc.get(store, key).runWith({ svc: cache, events }); /** * @param {string} name * @param {string} key * @returns {Async} */ - const deleteDoc = (store, key) => doc.del(store, key).runWith({ svc: cache, events }) + const deleteDoc = (store, key) => + doc.del(store, key).runWith({ svc: cache, events }); /** * @param {string} name @@ -56,7 +59,7 @@ module.exports = ({ cache, events }) => { * @returns {Async} */ const queryStore = (name, pattern) => - store.query(name, pattern).runWith({ svc: cache, events }) + store.query(name, pattern).runWith({ svc: cache, events }); return Object.freeze({ index, @@ -66,6 +69,6 @@ module.exports = ({ cache, events }) => { updateDoc, getDoc, deleteDoc, - queryStore - }) + queryStore, + }); } diff --git a/packages/core/lib/cache/store.js b/packages/core/lib/cache/store.js index 70e4a768..72372de1 100644 --- a/packages/core/lib/cache/store.js +++ b/packages/core/lib/cache/store.js @@ -1,47 +1,48 @@ -const { is, of, apply, triggerEvent } = require('../utils') -const { toLower } = require('ramda') +import { apply, is, of, triggerEvent } from "../utils/mod.js"; +import { R } from "../../deps.js"; -const INVALID_NAME_MSG = 'name is not valid' -const INVALID_RESULT_MSG = 'result is not valid' +const { toLower } = R; -exports.index = () => - apply('index')().chain(triggerEvent('CACHE:INDEX')) +const INVALID_NAME_MSG = "name is not valid"; +const INVALID_RESULT_MSG = "result is not valid"; + +export const index = () => apply("index")().chain(triggerEvent("CACHE:INDEX")); /** * @param {string} name * @returns {AsyncReader} */ -exports.create = (name) => +export const create = (name) => of(name) .map(toLower) .chain(is(validName, INVALID_NAME_MSG)) - .chain(apply('createStore')) - .chain(triggerEvent('CACHE:CREATE_STORE')) - .chain(is(validResult, INVALID_RESULT_MSG)) + .chain(apply("createStore")) + .chain(triggerEvent("CACHE:CREATE_STORE")) + .chain(is(validResult, INVALID_RESULT_MSG)); /** * @param {string} name * @returns {AsyncReader} */ -exports.del = (name) => +export const del = (name) => of(name) .chain(is(validName, INVALID_NAME_MSG)) - .chain(apply('destroyStore')) - .chain(triggerEvent('CACHE:DELETE_STORE')) - .chain(is(validResult, INVALID_RESULT_MSG)) + .chain(apply("destroyStore")) + .chain(triggerEvent("CACHE:DELETE_STORE")) + .chain(is(validResult, INVALID_RESULT_MSG)); /** * @param {string} name * @param {string} pattern * @returns {AsyncReader} */ -exports.query = (name, pattern) => +export const query = (name, pattern) => of(name) .chain(is(validName, INVALID_NAME_MSG)) 
.map((name) => ({ store: name, pattern })) - .chain(apply('listDocs')) - .chain(triggerEvent('CACHE:LIST')) - .chain(is(validResult, INVALID_RESULT_MSG)) + .chain(apply("listDocs")) + .chain(triggerEvent("CACHE:LIST")) + .chain(is(validResult, INVALID_RESULT_MSG)); // validators predicate functions @@ -49,19 +50,19 @@ exports.query = (name, pattern) => * @param {string} name * @returns {boolean} */ -function validName (name) { +function validName(name) { // verify that the name does not contains spaces // verify that the name does not contain slashes // verify that the name contains URI friendly characters // should return a true or false - return /^[a-z0-9-]+$/.test(name) + return /^[a-z0-9-]+$/.test(name); } /** * @param {object} result * @returns {boolean} */ -function validResult () { +function validResult() { // return Left({ ok: false, msg: 'result is invalid'}) - return true + return true; } diff --git a/packages/core/lib/cache/store_test.js b/packages/core/lib/cache/store_test.js index 9832424b..e1f6d14a 100644 --- a/packages/core/lib/cache/store_test.js +++ b/packages/core/lib/cache/store_test.js @@ -1,66 +1,69 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ +// deno-lint-ignore-file no-unused-vars +import { assertEquals } from "../../dev_deps.js"; -const test = require('tape') -const store = require('./store.js') +import * as store from "./store.js"; + +const test = Deno.test; const mockService = { createStore: (name) => Promise.resolve({ - ok: true + ok: true, }), destroyStore: (name) => Promise.resolve({ ok: true }), - listDocs: (name) => Promise.resolve({ ok: true }) -} + listDocs: (name) => Promise.resolve({ ok: true }), +}; const events = { - dispatch: () => null -} - -test('create cache store', (t) => { - t.plan(1) + dispatch: () => null, +}; - function handleError () { - t.ok(false) +test("create cache store", () => { + function handleError() { + assertEquals(false, true); } - function handleSuccess () { - t.ok(true) + function handleSuccess() { + assertEquals(true, true); } - store.create('hello').runWith({ svc: mockService, events }).fork(handleError, handleSuccess) -}) - -test('should not create store', t => { - t.plan(1) + store.create("hello").runWith({ svc: mockService, events }).fork( + handleError, + handleSuccess, + ); +}); - store.create('_foo').runWith({ svc: mockService, events }) +test("should not create store", () => { + store.create("_foo").runWith({ svc: mockService, events }) .fork( - () => t.ok(true), - () => t.ok(false) - ) -}) + () => assertEquals(true, true), + () => assertEquals(false, true), + ); +}); -test('destroy cache store', (t) => { - t.plan(1) - - function handleError () { - t.ok(false) +test("destroy cache store", () => { + function handleError() { + assertEquals(false, true); } - function handleSuccess () { - t.ok(true) + function handleSuccess() { + assertEquals(true, true); } - store.del('hello').runWith({ svc: mockService, events }).fork(handleError, handleSuccess) -}) - -test('query cache store', (t) => { - t.plan(1) + store.del("hello").runWith({ svc: mockService, events }).fork( + handleError, + handleSuccess, + ); +}); - function handleError () { - t.ok(false) +test("query cache store", () => { + function handleError() { + assertEquals(false, true); } - function handleSuccess () { - t.ok(true) + function handleSuccess() { + assertEquals(true, true); } - store.query('hello').runWith({ svc: mockService, events }).fork(handleError, handleSuccess) -}) + store.query("hello").runWith({ svc: mockService, events }).fork( + 
handleError, + handleSuccess, + ); +}); diff --git a/packages/core/lib/data/db.js b/packages/core/lib/data/db.js index 06f49901..8b76a026 100644 --- a/packages/core/lib/data/db.js +++ b/packages/core/lib/data/db.js @@ -1,45 +1,45 @@ -const { is, of, apply, triggerEvent } = require('../utils') +import { apply, is, of, triggerEvent } from "../utils/mod.js"; -const INVALID_DB_MSG = 'database name is not valid' -const INVALID_RESPONSE = 'response is not valid' +const INVALID_DB_MSG = "database name is not valid"; +const INVALID_RESPONSE = "response is not valid"; -exports.create = (name) => +export const create = (name) => of(name) .chain(is(validDbName, INVALID_DB_MSG)) - .chain(apply('createDatabase')) - .chain(triggerEvent('DATA:CREATE_DB')) - .chain(is(validResponse, INVALID_RESPONSE)) + .chain(apply("createDatabase")) + .chain(triggerEvent("DATA:CREATE_DB")) + .chain(is(validResponse, INVALID_RESPONSE)); -exports.remove = (name) => +export const remove = (name) => of(name) .chain(is(validDbName, INVALID_DB_MSG)) - .chain(triggerEvent('DATA:DELETE_DB')) - .chain(apply('removeDatabase')) + .chain(triggerEvent("DATA:DELETE_DB")) + .chain(apply("removeDatabase")); -exports.query = (db, query) => +export const query = (db, query) => of({ db, query }) - .chain(apply('queryDocuments')) - .chain(triggerEvent('DATA:QUERY')) + .chain(apply("queryDocuments")) + .chain(triggerEvent("DATA:QUERY")); -exports.index = (db, name, fields) => +export const index = (db, name, fields) => of({ db, name, fields }) - .chain(apply('indexDocuments')) - .chain(triggerEvent('DATA:INDEX')) + .chain(apply("indexDocuments")) + .chain(triggerEvent("DATA:INDEX")); -exports.list = (db, options) => +export const list = (db, options) => of({ db, ...options }) - .chain(apply('listDocuments')) - .chain(triggerEvent('DATA:LIST')) + .chain(apply("listDocuments")) + .chain(triggerEvent("DATA:LIST")); -exports.bulk = (db, docs) => +export const bulk = (db, docs) => of({ db, docs }) - .chain(apply('bulkDocuments')) - .chain(triggerEvent('DATA:BULK')) + .chain(apply("bulkDocuments")) + .chain(triggerEvent("DATA:BULK")); -function validDbName () { - return true +function validDbName() { + return true; } -function validResponse () { - return true +function validResponse() { + return true; } diff --git a/packages/core/lib/data/db_test.js b/packages/core/lib/data/db_test.js index b7de3730..fe418879 100644 --- a/packages/core/lib/data/db_test.js +++ b/packages/core/lib/data/db_test.js @@ -1,49 +1,63 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ - -const test = require('tape') -const db = require('./db.js') +// deno-lint-ignore-file no-unused-vars +import { assertEquals } from "../../dev_deps.js"; +import * as db from "./db.js"; +const test = Deno.test; const mockDb = { - createDatabase (name) { - return Promise.resolve({ ok: true }) + createDatabase(name) { + return Promise.resolve({ ok: true }); }, - removeDatabase (name) { - return Promise.resolve({ ok: true }) + removeDatabase(name) { + return Promise.resolve({ ok: true }); }, - bulkDocuments ({ db, docs }) { + bulkDocuments({ db, docs }) { if (docs.length === 2) { return Promise.resolve({ ok: true, - results: [{ ok: true, id: '1' }, { ok: true, id: '2' }] - }) + results: [{ ok: true, id: "1" }, { ok: true, id: "2" }], + }); } else { - return Promise.reject({ ok: false }) + return Promise.reject({ ok: false }); } - } -} + }, +}; -const fork = (m) => (t) => { - t.plan(1) - return m.fork( - () => t.ok(false), - () => t.ok(true) - ) -} -const handleFail = (m) => (t) => { - 
t.plan(1) - return m.fork( - () => t.ok(true), - () => t.ok(false) - ) -} +const fork = (m) => + () => { + return m.fork( + () => assertEquals(false, true), + () => assertEquals(true, true), + ); + }; +const handleFail = (m) => + () => { + return m.fork( + () => assertEquals(true, true), + () => assertEquals(false, true), + ); + }; const events = { - dispatch: () => null -} + dispatch: () => null, +}; -test('create database', fork(db.create('foo').runWith({ svc: mockDb, events }))) -test('remove database', fork(db.remove('foo').runWith({ svc: mockDb, events }))) -test('bulk documents', fork(db.bulk('foo', [{ id: '1' }, { id: '2' }]).runWith({ svc: mockDb, events }))) -test('bulk docs failure', handleFail(db.bulk('foo', []).runWith({ svc: mockDb, events }))) +test( + "create database", + fork(db.create("foo").runWith({ svc: mockDb, events })), +); +test( + "remove database", + fork(db.remove("foo").runWith({ svc: mockDb, events })), +); +test( + "bulk documents", + fork( + db.bulk("foo", [{ id: "1" }, { id: "2" }]).runWith({ svc: mockDb, events }), + ), +); +test( + "bulk docs failure", + handleFail(db.bulk("foo", []).runWith({ svc: mockDb, events })), +); // test("query database"); // test("index database"); diff --git a/packages/core/lib/data/doc.js b/packages/core/lib/data/doc.js index cdfcd83e..3257957c 100644 --- a/packages/core/lib/data/doc.js +++ b/packages/core/lib/data/doc.js @@ -1,32 +1,32 @@ -const { is, of, apply, triggerEvent } = require('../utils') -const cuid = require('cuid') +import { apply, is, of, triggerEvent } from "../utils/mod.js"; +import { cuid } from "../../deps.js"; // const INVALID_ID_MSG = 'doc id is not valid' -const INVALID_RESPONSE = 'response is not valid' +const INVALID_RESPONSE = "response is not valid"; -const createGUID = (id) => (id || cuid()) +const createGUID = (id) => (id || cuid()); -exports.create = (db, doc) => +export const create = (db, doc) => of({ db, id: createGUID(doc.id), doc }) - .chain(apply('createDocument')) - .chain(triggerEvent('DATA:CREATE')) - .chain(is(validResponse, INVALID_RESPONSE)) + .chain(apply("createDocument")) + .chain(triggerEvent("DATA:CREATE")) + .chain(is(validResponse, INVALID_RESPONSE)); -exports.get = (db, id) => +export const get = (db, id) => of({ db, id }) - .chain(apply('retrieveDocument')) - .chain(triggerEvent('DATA:GET')) + .chain(apply("retrieveDocument")) + .chain(triggerEvent("DATA:GET")); -exports.update = (db, id, doc) => +export const update = (db, id, doc) => of({ db, id, doc }) - .chain(apply('updateDocument')) - .chain(triggerEvent('DATA:UPDATE')) + .chain(apply("updateDocument")) + .chain(triggerEvent("DATA:UPDATE")); -exports.remove = (db, id) => +export const remove = (db, id) => of({ db, id }) - .chain(apply('removeDocument')) - .chain(triggerEvent('DATA:DELETE')) + .chain(apply("removeDocument")) + .chain(triggerEvent("DATA:DELETE")); -function validResponse () { - return true +function validResponse() { + return true; } diff --git a/packages/core/lib/data/doc_test.js b/packages/core/lib/data/doc_test.js index c5f7e76b..d02f1ce0 100644 --- a/packages/core/lib/data/doc_test.js +++ b/packages/core/lib/data/doc_test.js @@ -1,42 +1,50 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ - -const test = require('tape') -const doc = require('./doc') +// deno-lint-ignore-file no-unused-vars +import { assertEquals } from "../../dev_deps.js"; +import * as doc from "./doc.js"; +const test = Deno.test; const mock = { - createDocument ({ db, id, doc }) { - return Promise.resolve({ ok: true }) + 
createDocument({ db, id, doc }) { + return Promise.resolve({ ok: true }); + }, + retrieveDocument({ db, id }) { + return Promise.resolve({ ok: true }); }, - retrieveDocument ({ db, id }) { - return Promise.resolve({ ok: true }) + updateDocument({ db, id, doc }) { + return Promise.resolve({ ok: true }); }, - updateDocument ({ db, id, doc }) { - return Promise.resolve({ ok: true }) + removeDocument({ db, id }) { + return Promise.resolve({ ok: true }); }, - removeDocument ({ db, id }) { - return Promise.resolve({ ok: true }) - } -} +}; -const fork = (m) => (t) => { - t.plan(1) - return m.fork( - () => t.ok(false), - () => t.ok(true) - ) -} +const fork = (m) => + () => { + return m.fork( + () => assertEquals(false, true), + () => assertEquals(true, true), + ); + }; const events = { - dispatch: () => null -} + dispatch: () => null, +}; test( - 'create document', - fork(doc.create('foo', { hello: 'world' }).runWith({ svc: mock, events })) -) -test('get document', fork(doc.get('foo', '1').runWith({ svc: mock, events }))) + "create document", + fork(doc.create("foo", { hello: "world" }).runWith({ svc: mock, events })), +); +test("get document", fork(doc.get("foo", "1").runWith({ svc: mock, events }))); +test( + "update document", + fork( + doc.update("foo", "1", { id: "1", goodbye: "moon" }).runWith({ + svc: mock, + events, + }), + ), +); test( - 'update document', - fork(doc.update('foo', '1', { id: '1', goodbye: 'moon' }).runWith({ svc: mock, events })) -) -test('remove document', fork(doc.remove('foo', '1').runWith({ svc: mock, events }))) + "remove document", + fork(doc.remove("foo", "1").runWith({ svc: mock, events })), +); diff --git a/packages/core/lib/data/index.js b/packages/core/lib/data/mod.js similarity index 50% rename from packages/core/lib/data/index.js rename to packages/core/lib/data/mod.js index ce3512f9..53620459 100644 --- a/packages/core/lib/data/index.js +++ b/packages/core/lib/data/mod.js @@ -1,18 +1,20 @@ -const db = require('./db.js') -const doc = require('./doc.js') +import * as db from "./db.js"; +import * as doc from "./doc.js"; -module.exports = ({ data, events }) => { +export default function ({ data, events }) { /** * @param {string} name * @returns {Async} */ - const createDatabase = (name) => db.create(name).runWith({ svc: data, events }) + const createDatabase = (name) => + db.create(name).runWith({ svc: data, events }); /** * @param {string} name * @returns {Async} */ - const destroyDatabase = (name) => db.remove(name).runWith({ svc: data, events }) + const destroyDatabase = (name) => + db.remove(name).runWith({ svc: data, events }); /** * @param {string} db @@ -20,14 +22,15 @@ module.exports = ({ data, events }) => { * @returns {Async} */ const createDocument = (db, document) => - doc.create(db, document).runWith({ svc: data, events }) + doc.create(db, document).runWith({ svc: data, events }); /** * @param {string} db * @param {string} id * @returns {Async} */ - const getDocument = (db, id) => doc.get(db, id).runWith({ svc: data, events }) + const getDocument = (db, id) => + doc.get(db, id).runWith({ svc: data, events }); /** * @param {string} db @@ -36,37 +39,42 @@ module.exports = ({ data, events }) => { * @returns {Async} */ const updateDocument = (db, id, document) => - doc.update(db, id, document).runWith({ svc: data, events }) + doc.update(db, id, document).runWith({ svc: data, events }); /** * @param {string} db * @param {string} id * @returns {Async} */ - const removeDocument = (db, id) => doc.remove(db, id).runWith({ svc: data, events }) + const 
removeDocument = (db, id) => + doc.remove(db, id).runWith({ svc: data, events }); /** * @param {string} dbname * @param {object} query * @returns {Async} */ - const query = (dbname, query) => db.query(dbname, query).runWith({ svc: data, events }) + const query = (dbname, query) => + db.query(dbname, query).runWith({ svc: data, events }); /** * @param {string} dbname * @param {object} index * @returns {Async} */ - const index = (dbname, name, fields) => db.index(dbname, name, fields).runWith({ svc: data, events }) + const index = (dbname, name, fields) => + db.index(dbname, name, fields).runWith({ svc: data, events }); /** * @param {string} dbname, * @param {object} options * @returns {Async} */ - const listDocuments = (dbname, options) => db.list(dbname, options).runWith({ svc: data, events }) + const listDocuments = (dbname, options) => + db.list(dbname, options).runWith({ svc: data, events }); - const bulkDocuments = (dbname, docs) => db.bulk(dbname, docs).runWith({ svc: data, events }) + const bulkDocuments = (dbname, docs) => + db.bulk(dbname, docs).runWith({ svc: data, events }); return Object.freeze({ createDatabase, @@ -78,6 +86,6 @@ module.exports = ({ data, events }) => { query, index, listDocuments, - bulkDocuments - }) + bulkDocuments, + }); } diff --git a/packages/core/lib/hooks/index.js b/packages/core/lib/hooks/index.js deleted file mode 100644 index d6ad57c3..00000000 --- a/packages/core/lib/hooks/index.js +++ /dev/null @@ -1,18 +0,0 @@ -const { Async } = require('crocks') - -module.exports = function ({ events, hooks }) { - events.subscribe(action => { - if (hooks && hooks.call) { - Async.fromPromise(hooks.call)(action) - .fork( - err => console.log('ERROR', err.message), - () => null - ) - } else { - console.log(`${action.type}: ${JSON.stringify(action.payload)}`) - } - }) - return ({ - status: () => ({ ok: true, msg: 'listening for events ' }) - }) -} diff --git a/packages/core/lib/hooks/mod.js b/packages/core/lib/hooks/mod.js new file mode 100644 index 00000000..0f173569 --- /dev/null +++ b/packages/core/lib/hooks/mod.js @@ -0,0 +1,20 @@ +import { crocks } from "../../deps.js"; + +const { Async } = crocks; + +export default function ({ events, hooks }) { + events.subscribe((action) => { + if (hooks && hooks.call) { + Async.fromPromise(hooks.call)(action) + .fork( + (err) => console.log("ERROR", err.message), + () => null, + ); + } else { + console.log(`${action.type}: ${JSON.stringify(action.payload)}`); + } + }); + return ({ + status: () => ({ ok: true, msg: "listening for events " }), + }); +} diff --git a/packages/core/lib/index.js b/packages/core/lib/mod.js similarity index 60% rename from packages/core/lib/index.js rename to packages/core/lib/mod.js index 6691b5a9..e06714b3 100644 --- a/packages/core/lib/index.js +++ b/packages/core/lib/mod.js @@ -1,9 +1,10 @@ -const cacheCore = require('./cache') -const dataCore = require('./data') -const storageCore = require('./storage') -const searchCore = require('./search') -const hooksCore = require('./hooks') -const queueCore = require('./queue') +import cacheCore from "./cache/mod.js"; +import dataCore from "./data/mod.js"; +import storageCore from "./storage/mod.js"; +import searchCore from "./search/mod.js"; +import hooksCore from "./hooks/mod.js"; +import queueCore from "./queue/mod.js"; + /** * main core module * @@ -11,7 +12,7 @@ const queueCore = require('./queue') * and passes them to each core module * */ -module.exports = function (services) { +export default function (services) { return Object.freeze({ cache: 
services.cache ? cacheCore(services) : null, data: services.data ? dataCore(services) : null, @@ -20,6 +21,6 @@ module.exports = function (services) { queue: services.queue ? queueCore(services) : null, hooks: hooksCore(services), events: services.events, - middleware: services.middleware - }) + middleware: services.middleware, + }); } diff --git a/packages/core/lib/queue/index.js b/packages/core/lib/queue/mod.js similarity index 66% rename from packages/core/lib/queue/index.js rename to packages/core/lib/queue/mod.js index 789c3e53..eff6706b 100644 --- a/packages/core/lib/queue/index.js +++ b/packages/core/lib/queue/mod.js @@ -1,11 +1,12 @@ -const q = require('./queue') +import q from "./queue.js"; -module.exports = ({ queue, events }) => - ({ +export default function ({ queue, events }) { + return ({ index: () => q.index().runWith({ svc: queue, events }), create: (input) => q.create(input).runWith({ svc: queue, events }), - delete: (name) => q.delete(name).runWith({ svc: queue, events }), + delete: (name) => q.del(name).runWith({ svc: queue, events }), post: (input) => q.post(input).runWith({ svc: queue, events }), list: (input) => q.list(input).runWith({ svc: queue, events }), - cancel: (input) => q.cancel(input).runWith({ svc: queue, events }) - }) + cancel: (input) => q.cancel(input).runWith({ svc: queue, events }), + }); +} diff --git a/packages/core/lib/queue/queue.js b/packages/core/lib/queue/queue.js index ff29cfb0..69dcd538 100644 --- a/packages/core/lib/queue/queue.js +++ b/packages/core/lib/queue/queue.js @@ -1,39 +1,40 @@ -const { is, of, apply, triggerEvent } = require('../utils') -const { toLower, lensProp, over } = require('ramda') +import { apply, is, of, triggerEvent } from "../utils/mod.js"; +import { R } from "../../deps.js"; -const INVALID_NAME_MSG = 'queue name is not valid!' 
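Once wired to an adapter, the facade above is driven like this; the adapter, queue name, and job payload are hypothetical stand-ins, not part of this diff:

```
import queueCore from "./lib/queue/mod.js";

// myQueueAdapter is a hypothetical adapter satisfying the queue port
const queue = queueCore({ queue: myQueueAdapter, events });

queue.create({ name: "emails" }) // name is lowercased and validated by queue.js below
  .fork(console.error, console.log);

queue.post({ name: "emails", job: { hello: "world" } }) // job shape is an assumption
  .fork(console.error, console.log);
```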
diff --git a/packages/core/lib/queue/queue.js b/packages/core/lib/queue/queue.js index ff29cfb0..69dcd538 100644 --- a/packages/core/lib/queue/queue.js +++ b/packages/core/lib/queue/queue.js @@ -1,39 +1,40 @@ -const { is, of, apply, triggerEvent } = require('../utils') -const { toLower, lensProp, over } = require('ramda') +import { apply, is, of, triggerEvent } from "../utils/mod.js"; +import { R } from "../../deps.js"; -const INVALID_NAME_MSG = 'queue name is not valid!' +const { toLower, lensProp, over } = R; -exports.index = () => - apply('index')().chain(triggerEvent('QUEUE:INDEX')) - // apply('index')().chain(triggerEvent('QUEUE:INDEX')) +const INVALID_NAME_MSG = "queue name is not valid!"; -exports.create = (input) => +export const index = () => apply("index")().chain(triggerEvent("QUEUE:INDEX")); + +export const create = (input) => of(input) - .map(over(lensProp('name'), toLower)) + .map(over(lensProp("name"), toLower)) .chain(is(validName, INVALID_NAME_MSG)) - .chain(apply('create')) - .chain(triggerEvent('QUEUE:CREATE')) + .chain(apply("create")) + .chain(triggerEvent("QUEUE:CREATE")); -exports.delete = (name) => +export const del = (name) => of(name) - .chain(apply('delete')) - .chain(triggerEvent('QUEUE:DELETE')) + .chain(apply("delete")) + .chain(triggerEvent("QUEUE:DELETE")); -exports.post = (input) => +export const post = (input) => of(input) - .chain(apply('post')) - .chain(triggerEvent('QUEUE:POST')) + .chain(apply("post")) + .chain(triggerEvent("QUEUE:POST")); -exports.list = (input) => +export const list = (input) => of(input) - .chain(apply('get')) - .chain(triggerEvent('QUEUE:LIST')) + .chain(apply("get")) + .chain(triggerEvent("QUEUE:LIST")); -exports.cancel = (input) => +export const cancel = (input) => of(input) - .chain(apply('cancel')) - .chain(triggerEvent('QUEUE:CANCEL')) + .chain(apply("cancel")) + .chain(triggerEvent("QUEUE:CANCEL")); -function validName (input) { - return /^[a-z0-9-]+$/.test(input.name) +function validName(input) { + return /^[a-z0-9-]+$/.test(input.name); } diff --git a/packages/core/lib/search/index.js b/packages/core/lib/search/index.js deleted file mode 100644 index 5bc9ec72..00000000 --- a/packages/core/lib/search/index.js +++ /dev/null @@ -1,38 +0,0 @@ -const { of, apply, triggerEvent } = require('../utils') - -module.exports = ({ search, events }) => ({ - createIndex: (index, mappings = {}) => of({ index, mappings }) - .chain(apply('createIndex')) - .chain(triggerEvent('SEARCH:CREATE_INDEX')) - .runWith({ svc: search, events }), - deleteIndex: (index) => of(index) - .chain(apply('deleteIndex')) - .chain(triggerEvent('SEARCH:DELETE_INDEX')) - .runWith({ svc: search, events }), - indexDoc: (index, key, doc) => of({ index, key, doc }) - .chain(apply('indexDoc')) - .chain(triggerEvent('SEARCH:CREATE')) - .runWith({ svc: search, events }), - getDoc: (index, key) => of({ index, key }) - .chain(apply('getDoc')) - .chain(triggerEvent('SEARCH:GET')) - .runWith({ svc: search, events }), - updateDoc: (index, key, doc) => of({ index, key, doc }) - .chain(apply('updateDoc')) - .chain(triggerEvent('SEARCH:UPDATE')) - .runWith({ svc: search, events }), - removeDoc: (index, key) => of({ index, key }) - .chain(apply('removeDoc')) - .chain(triggerEvent('SEARCH:DELETE')) - .runWith({ svc: search, events }), - - bulk: (index, docs) => of({ index, docs }) - .chain(apply('bulk')) - .chain(triggerEvent('SEARCH:BULK')) - .runWith({ svc: search, events }), - query: (index, q = {}) => of({ index, q }) - .chain(apply('query')) - .chain(triggerEvent('SEARCH:QUERY')) - .runWith({ svc: search, events }) - // batch or bulk -}) diff --git a/packages/core/lib/search/mod.js b/packages/core/lib/search/mod.js new file mode 100644 index 00000000..74f17a41 --- /dev/null +++ b/packages/core/lib/search/mod.js @@ -0,0 +1,48 @@ +import { apply, of, triggerEvent } from "../utils/mod.js"; + +export default function ({ search, events }) { + return ({ + createIndex: (index, mappings = {}) => + of({
index, mappings }) + .chain(apply("createIndex")) + .chain(triggerEvent("SEARCH:CREATE_INDEX")) + .runWith({ svc: search, events }), + deleteIndex: (index) => + of(index) + .chain(apply("deleteIndex")) + .chain(triggerEvent("SEARCH:DELETE_INDEX")) + .runWith({ svc: search, events }), + indexDoc: (index, key, doc) => + of({ index, key, doc }) + .chain(apply("indexDoc")) + .chain(triggerEvent("SEARCH:CREATE")) + .runWith({ svc: search, events }), + getDoc: (index, key) => + of({ index, key }) + .chain(apply("getDoc")) + .chain(triggerEvent("SEARCH:GET")) + .runWith({ svc: search, events }), + updateDoc: (index, key, doc) => + of({ index, key, doc }) + .chain(apply("updateDoc")) + .chain(triggerEvent("SEARCH:UPDATE")) + .runWith({ svc: search, events }), + removeDoc: (index, key) => + of({ index, key }) + .chain(apply("removeDoc")) + .chain(triggerEvent("SEARCH:DELETE")) + .runWith({ svc: search, events }), + + bulk: (index, docs) => + of({ index, docs }) + .chain(apply("bulk")) + .chain(triggerEvent("SEARCH:BULK")) + .runWith({ svc: search, events }), + query: (index, q = {}) => + of({ index, q }) + .chain(apply("query")) + .chain(triggerEvent("SEARCH:QUERY")) + .runWith({ svc: search, events }), + // batch or bulk + }); +} diff --git a/packages/core/lib/storage/buckets.js b/packages/core/lib/storage/buckets.js index bd56e958..3b06bbe2 100644 --- a/packages/core/lib/storage/buckets.js +++ b/packages/core/lib/storage/buckets.js @@ -1,4 +1,4 @@ -const { of, apply, triggerEvent } = require('../utils') +import { apply, of, triggerEvent } from "../utils/mod.js"; // const INVALID_BUCKET_MSG = 'bucket name is not valid' // const INVALID_RESPONSE = 'response is not valid' @@ -7,23 +7,23 @@ const { of, apply, triggerEvent } = require('../utils') * @param {string} name * @returns {AsyncReader} */ -exports.make = (name) => +export const make = (name) => of(name) // .chain(is(validDbName, INVALID_DB_MSG)) - .chain(apply('makeBucket')) - .chain(triggerEvent('STORAGE:CREATE_BUCKET')) + .chain(apply("makeBucket")) + .chain(triggerEvent("STORAGE:CREATE_BUCKET")); // .chain(is(validResponse, INVALID_RESPONSE)); /** * @param {string} name * @returns {AsyncReader} */ -exports.remove = (name) => +export const remove = (name) => of(name) - .chain(apply('removeBucket')) - .chain(triggerEvent('STORAGE:DELTE_BUCKET')) + .chain(apply("removeBucket")) + .chain(triggerEvent("STORAGE:DELETE_BUCKET")); /** * @returns {AsyncReader} */ -exports.list = () => of().chain(apply('listBuckets')) +export const list = () => of().chain(apply("listBuckets")); diff --git a/packages/core/lib/storage/buckets_test.js b/packages/core/lib/storage/buckets_test.js index cd1a0bfd..75de488b 100644 --- a/packages/core/lib/storage/buckets_test.js +++ b/packages/core/lib/storage/buckets_test.js @@ -1,32 +1,36 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ +// deno-lint-ignore-file no-unused-vars +import * as buckets from "./buckets.js"; +import { assertEquals } from "../../dev_deps.js"; -const test = require('tape') -const buckets = require('./buckets') +const test = Deno.test; const mock = { - makeBucket (name) { - return Promise.resolve({ ok: true }) + makeBucket(name) { + return Promise.resolve({ ok: true }); }, - removeBucket (name) { - return Promise.resolve({ ok: true }) + removeBucket(name) { + return Promise.resolve({ ok: true }); }, - listBuckets () { - return Promise.resolve({ ok: true, buckets: ['one', 'two', 'three'] }) - } -} + listBuckets() { + return Promise.resolve({ ok: true, buckets: ["one", "two", "three"] }); + },
+}; -const fork = (m) => (t) => { - t.plan(1) - m.fork( - () => t.ok(false), - () => t.ok(true) - ) -} +const fork = (m) => + () => { + m.fork( + () => assertEquals(false, true), + () => assertEquals(true, true), + ); + }; const events = { - dispatch: () => null -} + dispatch: () => null, +}; -test('make bucket', fork(buckets.make('beep').runWith({ svc: mock, events }))) -test('remove bucket', fork(buckets.remove('beep').runWith({ svc: mock, events }))) -test('list buckets', fork(buckets.list().runWith({ svc: mock, events }))) +test("make bucket", fork(buckets.make("beep").runWith({ svc: mock, events }))); +test( + "remove bucket", + fork(buckets.remove("beep").runWith({ svc: mock, events })), +); +test("list buckets", fork(buckets.list().runWith({ svc: mock, events }))); diff --git a/packages/core/lib/storage/index.js b/packages/core/lib/storage/mod.js similarity index 71% rename from packages/core/lib/storage/index.js rename to packages/core/lib/storage/mod.js index fe28150c..f9279847 100644 --- a/packages/core/lib/storage/index.js +++ b/packages/core/lib/storage/mod.js @@ -1,24 +1,26 @@ -const buckets = require('./buckets') -const objects = require('./objects') +import * as buckets from "./buckets.js"; +import * as objects from "./objects.js"; -module.exports = ({ storage, events }) => { +export default function ({ storage, events }) { /** * @param {string} name * @returns {Async} */ - const makeBucket = (name) => buckets.make(name).runWith({ svc: storage, events }) + const makeBucket = (name) => + buckets.make(name).runWith({ svc: storage, events }); /** * @param {string} name * @returns {Async} */ - const removeBucket = (name) => buckets.remove(name).runWith({ svc: storage, events }) + const removeBucket = (name) => + buckets.remove(name).runWith({ svc: storage, events }); /** * @param {string} name * @returns {Async} */ - const listBuckets = () => buckets.list().runWith({ svc: storage, events }) + const listBuckets = () => buckets.list().runWith({ svc: storage, events }); /** * @param {string} bucketName @@ -27,7 +29,10 @@ module.exports = ({ storage, events }) => { * @returns {Async} */ const putObject = (bucketName, objectName, stream) => - objects.put(bucketName, objectName, stream).runWith({ svc: storage, events }) + objects.put(bucketName, objectName, stream).runWith({ + svc: storage, + events, + }); /** * @param {string} bucketName @@ -35,7 +40,7 @@ * @returns {Async} */ const getObject = (bucketName, objectName) => - objects.get(bucketName, objectName).runWith({ svc: storage, events }) + objects.get(bucketName, objectName).runWith({ svc: storage, events }); /** * @param {string} bucketName @@ -43,7 +48,7 @@ * @returns {Async} */ const removeObject = (bucketName, objectName) => - objects.remove(bucketName, objectName).runWith({ svc: storage, events }) + objects.remove(bucketName, objectName).runWith({ svc: storage, events }); /** * @param {string} bucketName @@ -51,7 +56,7 @@ * @returns {Async} */ const listObjects = (bucketName, prefix) => - objects.list(bucketName, prefix).runWith({ svc: storage, events }) + objects.list(bucketName, prefix).runWith({ svc: storage, events }); return Object.freeze({ makeBucket, removeBucket, listBuckets, putObject, getObject, listObjects, - removeObject - }) + removeObject, + }); }
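Tying this facade back to the app-opine routes earlier in the diff, a hypothetical client call that reaches `putObject` through the `upload("file")` multipart middleware; the port and bucket name are assumptions:

```
// assumes the service listens on the default port 6363 and the
// "avatars" bucket was already created via PUT /storage/avatars
const form = new FormData();
form.append("file", new Blob(["hello world"]), "hello.txt");

const res = await fetch("http://localhost:6363/storage/avatars", {
  method: "POST",
  body: form,
});
console.log(await res.json());
```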
diff --git a/packages/core/lib/storage/objects.js b/packages/core/lib/storage/objects.js index a321b4d1..27581d27 100644 --- a/packages/core/lib/storage/objects.js +++ b/packages/core/lib/storage/objects.js @@ -1,30 +1,30 @@ -const { of, apply, triggerEvent } = require('../utils') +import { apply, of, triggerEvent } from "../utils/mod.js"; -exports.put = (bucket, object, stream) => +export const put = (bucket, object, stream) => of({ bucket, object, - stream - }).chain(apply('putObject')) - .chain(triggerEvent('STORAGE:PUT')) + stream, + }).chain(apply("putObject")) + .chain(triggerEvent("STORAGE:PUT")); -exports.get = (bucket, object) => +export const get = (bucket, object) => of({ bucket, - object - }).chain(apply('getObject')) - .chain(triggerEvent('STORAGE:GET')) + object, + }).chain(apply("getObject")) + .chain(triggerEvent("STORAGE:GET")); -exports.remove = (bucket, object) => +export const remove = (bucket, object) => of({ bucket, - object - }).chain(apply('removeObject')) - .chain(triggerEvent('STORAGE:DELETE')) + object, + }).chain(apply("removeObject")) + .chain(triggerEvent("STORAGE:DELETE")); -exports.list = (bucket, prefix) => +export const list = (bucket, prefix) => of({ bucket, - prefix - }).chain(apply('listObjects')) - .chain(triggerEvent('STORAGE:LIST')) + prefix, + }).chain(apply("listObjects")) + .chain(triggerEvent("STORAGE:LIST")); diff --git a/packages/core/lib/storage/objects_test.js b/packages/core/lib/storage/objects_test.js index 9ad35be8..5a852e83 100644 --- a/packages/core/lib/storage/objects_test.js +++ b/packages/core/lib/storage/objects_test.js @@ -1,48 +1,53 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ - -const test = require('tape') -const objects = require('./objects') -const fs = require('fs') -const path = require('path') +// deno-lint-ignore-file no-unused-vars +import { assertEquals } from "../../dev_deps.js"; +import * as objects from "./objects.js"; +const test = Deno.test; const mock = { - putObject ({ bucket, object, stream }) { - return Promise.resolve({ ok: true }) + putObject({ bucket, object, stream }) { + return Promise.resolve({ ok: true }); + }, + getObject({ bucket, object }) { + return Promise.resolve({ ok: true }); }, - getObject ({ bucket, object }) { - return Promise.resolve({ ok: true }) + removeObject({ bucket, object }) { + return Promise.resolve({ ok: true }); }, - removeObject ({ bucket, object }) { - return Promise.resolve({ ok: true }) + listObjects({ bucket, prefix }) { + return Promise.resolve({ + ok: true, + objects: ["one.txt", "two.txt", "three.txt"], + }); }, - listObjects ({ bucket, prefix }) { - return Promise.resolve({ ok: true, objects: ['one.txt', 'two.txt', 'three.txt'] }) - } -} +}; -const fork = (m) => (t) => { - t.plan(1) - m.fork( - () => t.ok(false), - () => t.ok(true) - ) -} +const fork = (m) => + () => { + m.fork( + () => assertEquals(true, false), + () => assertEquals(true, true), + ); + }; const events = { - dispatch: () => null -} + dispatch: () => null, +}; test( - 'put object', + "put object", fork( objects .put( - 'test', - 'README.md', - fs.createReadStream(path.resolve('../../README.md')) + "test", + "README.md", + null, // fs.createReadStream(path.resolve('../../README.md')) ) - .runWith({ svc: mock, events }) - ) -) -test('remove bucket', fork(objects.remove('beep').runWith({ svc: mock, events }))) -test('list buckets', fork(objects.list().runWith({ svc: mock, events }))) + .runWith({ svc: mock, events }), + ), +); + +test( + "remove object", + fork(objects.remove("beep").runWith({ svc: mock, events })), +); +test("list objects", fork(objects.list().runWith({ svc: mock, events
}))); diff --git a/packages/core/lib/utils/index.js b/packages/core/lib/utils/index.js deleted file mode 100644 index afedbca3..00000000 --- a/packages/core/lib/utils/index.js +++ /dev/null @@ -1,66 +0,0 @@ -const Async = require('crocks/Async') -const ReaderT = require('crocks/Reader/ReaderT') -const compose = require('crocks/helpers/compose') -const Either = require('crocks/Either') -const eitherToAsync = require('crocks/Async/eitherToAsync') -const ReaderAsync = ReaderT(Async) -const { ask, lift } = ReaderAsync - -const { Left, Right } = Either - -const doValidate = (pred, msg) => (value) => - pred(value) ? Right(value) : Left({ ok: false, msg }) - -/** - * takes a predicate function and error message - * if the predicate function fails then returns an object with an error message - * if the predicate function passes then the value is passed down the chain - */ -exports.is = (fn, msg) => compose(lift, eitherToAsync, doValidate(fn, msg)) -/** - * uses the reader monad to get the environment, in this case a service - * module and invokes a method on that module passing the data from the - * pipeline as the arguments - */ -exports.apply = (method) => (data) => - ask(({ svc }) => { - // const async = Async.fromPromise(svc[method]) - return Async(function (reject, resolve) { - // NOTE: maybe consider using an Either here? - try { - const p = data ? svc[method](data) : svc[method]() - return p.then(resolve) - .catch(e => { - console.log(e) - return reject(e) - }) - } catch (e) { - let msg = '' - console.log(e) - if (e.errors) { - msg = e.errors.map(x => x.code).join(',') - } - return reject({ ok: false, msg }) - } - }) - // return async(data) - }).chain(lift) - -exports.triggerEvent = (event) => (data) => - ask(({ events }) => { - const payload = { date: new Date().toISOString() } - if (data.name) { payload.name = data.name } - if (data.id) { payload.id = data.id } - if (data.type) { payload.type = data.type } - - events.dispatch({ - type: event, - payload - }) - return Async.Resolved(data) - }).chain(lift) - -/** - * constructor for an AsyncReader monad - */ -exports.of = ReaderAsync.of diff --git a/packages/core/lib/utils/mod.js b/packages/core/lib/utils/mod.js new file mode 100644 index 00000000..43d6cfb0 --- /dev/null +++ b/packages/core/lib/utils/mod.js @@ -0,0 +1,68 @@ +import { crocks } from "../../deps.js"; + +const { Async, compose, ReaderT, Either, eitherToAsync } = crocks; + +const ReaderAsync = ReaderT(Async); +const { ask, lift } = ReaderAsync; + +const { Left, Right } = Either; + +const doValidate = (pred, msg) => + (value) => pred(value) ? Right(value) : Left({ ok: false, msg }); + +/** + * takes a predicate function and error message + * if the predicate function fails then returns an object with an error message + * if the predicate function passes then the value is passed down the chain + */ +export const is = (fn, msg) => + compose(lift, eitherToAsync, doValidate(fn, msg)); +/** + * uses the reader monad to get the environment, in this case a service + * module and invokes a method on that module passing the data from the + * pipeline as the arguments + */ +export const apply = (method) => + (data) => + ask(({ svc }) => { + // const async = Async.fromPromise(svc[method]) + return Async(function (reject, resolve) { + // NOTE: maybe consider using an Either here? + try { + const p = data ? 
svc[method](data) : svc[method](); + return p.then(resolve) + .catch((e) => { + console.log(e); + return reject(e); + }); + } catch (e) { + let msg = ""; + console.log(e); + if (e.errors) { + msg = e.errors.map((x) => x.code).join(","); + } + return reject({ ok: false, msg }); + } + }); + // return async(data) + }).chain(lift); + +export const triggerEvent = (event) => + (data) => + ask(({ events }) => { + const payload = { date: new Date().toISOString() }; + if (data.name) payload.name = data.name; + if (data.id) payload.id = data.id; + if (data.type) payload.type = data.type; + + events.dispatch({ + type: event, + payload, + }); + return Async.Resolved(data); + }).chain(lift); + +/** + * constructor for an AsyncReader monad + */ +export const of = ReaderAsync.of;
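These helpers are easiest to read from a call site. A sketch of how a core pipeline built from `of`/`is`/`apply`/`triggerEvent` actually executes; the cache adapter here is a hypothetical stand-in:

```
import * as store from "./lib/cache/store.js";

// Building the pipeline runs nothing: `of` lifts the name into a ReaderT(Async).
// `runWith` supplies the environment that `apply` and `triggerEvent` read via
// `ask`, and `fork` finally executes the underlying Async.
store.create("sessions")
  .runWith({ svc: myCacheAdapter, events }) // myCacheAdapter is hypothetical
  .fork(
    (err) => console.log("rejected", err),
    (result) => console.log("resolved", result),
  );
```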
diff --git a/packages/core/main.js b/packages/core/main.js deleted file mode 100644 index 0ff8b734..00000000 --- a/packages/core/main.js +++ /dev/null @@ -1,33 +0,0 @@ -const loadPorts = require('./ports') -const wrapCore = require('./lib') -const validateConfig = require('./utils/config-schema') -const initAdapters = require('./utils/plugins') -const eventMgr = require('./utils/event-mgr') -const { compose, prop, assoc, propOr } = require('ramda') - -/** - * @returns {function} - listen function - */ -function main (config) { - // const config = (await import(process.cwd() + '/hyper63.config')).default - config = !config ? require(process.cwd() + '/hyper63.config') : config - config = validateConfig(config) - - // TODO: validate config - const services = compose( - // add eventMgr to services - wrapCore, - assoc('middleware', propOr([], 'middleware', config)), - assoc('events', eventMgr()), - loadPorts, - initAdapters, - prop('adapters') - )(config) - - const app = config.app(services) - - // return app - return app -} - -module.exports = main diff --git a/packages/core/mod.js b/packages/core/mod.js new file mode 100644 index 00000000..65228016 --- /dev/null +++ b/packages/core/mod.js @@ -0,0 +1,33 @@ +import loadPorts from "./ports.js"; +import wrapCore from "./lib/mod.js"; +import validateConfig from "./utils/config-schema.js"; +import initAdapters from "./utils/plugins.js"; +import eventMgr from "./utils/event-mgr.js"; +import { R } from "./deps.js"; + +const { compose, prop, assoc, propOr } = R; + +/** + * @returns {Promise<function>} - listen function + */ +export default async function main(config) { + // Deno has no require(); dynamically import the default config module (needs a file URL) + config = !config ? (await import("file://" + Deno.cwd() + "/hyper63.config.js")).default : config; + config = validateConfig(config); + + // TODO: validate config + const services = compose( + // add eventMgr to services + wrapCore, + assoc("middleware", propOr([], "middleware", config)), + assoc("events", eventMgr()), + loadPorts, + initAdapters, + prop("adapters"), + )(config); + + const app = config.app(services); + + // return app + return app; +} diff --git a/packages/core/ports.js b/packages/core/ports.js index 06e0d451..803202ed 100644 --- a/packages/core/ports.js +++ b/packages/core/ports.js @@ -1,36 +1,18 @@ -const cache = require('@hyper63/port-cache') -const data = require('@hyper63/port-data') -const storage = require('@hyper63/port-storage') -const search = require('@hyper63/port-search') -const queue = require('@hyper63/port-queue').default +// deno-lint-ignore-file no-unused-vars +import { queue } from "../port-queue/mod.js"; +import { cache } from "../port-cache/mod.js"; +import { data } from "../port-data/mod.js"; +import { storage } from "../port-storage/mod.js"; +import { search } from "../port-search/mod.js"; +import { hooks } from "../port-hooks/mod.js"; -module.exports = (adapters) => ({ - cache: adapters.cache ? cache(adapters.cache) : null, - data: adapters.data ? data(adapters.data) : null, - storage: adapters.storage ? storage(adapters.storage) : null, - search: adapters.search ? search(adapters.search) : null, - queue: adapters.queue ? queue(adapters.queue) : null, - hooks: adapters.hooks -}) - -/* -module.exports = mapObjIndexed( - (adapter, k) => { - if (k === 'cache') { - return cache(adapter) - } else if (k === 'data' ) { - return data(adapter) - } else if (k === 'storage') { - return storage(adapter) - } else if (k === 'search') { - return search(adapter) - } else if (k === 'hooks') { - return v - } else { - // need to use the value to combine port and adapter - let port = require(k) - return port(adapter) - } - } -) -*/ +export default function (adapters) { + return ({ + cache: adapters.cache ? cache(adapters.cache) : null, + data: adapters.data ? data(adapters.data) : null, + storage: adapters.storage ? storage(adapters.storage) : null, + search: adapters.search ? search(adapters.search) : null, + queue: adapters.queue ?
queue(adapters.queue) : null, + hooks: adapters.hooks, + }); +} diff --git a/packages/core/scripts/test.sh b/packages/core/scripts/test.sh new file mode 100755 index 00000000..ca0a1f4f --- /dev/null +++ b/packages/core/scripts/test.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +deno fmt --check +deno test --unstable --allow-env lib/storage/*_test.js utils/*_test.js lib/cache/*_test.js lib/data/*_test.js diff --git a/packages/core/utils/config-schema.js b/packages/core/utils/config-schema.js index 664b2d4c..a3a26bae 100644 --- a/packages/core/utils/config-schema.js +++ b/packages/core/utils/config-schema.js @@ -1,6 +1,6 @@ -const z = require('zod') +import { z } from "../deps.js"; -const F = z.function().args(z.any()) +const F = z.function().args(z.any()); const plugin = z.object({ id: z.string().optional(), @@ -10,21 +10,22 @@ const plugin = z.object({ .returns(z.any()), link: z.function() .args(z.any()) - .returns(z.function() - .args(z.any()) - .returns(z.any()) - ) -}) + .returns( + z.function() + .args(z.any()) + .returns(z.any()), + ), +}); const Schema = z.object({ app: F, adapters: z.object({ - port: z.enum(['data', 'cache', 'search', 'storage', 'queue', 'hooks']), - plugins: plugin.array() + port: z.enum(["data", "cache", "search", "storage", "queue", "hooks"]), + plugins: plugin.array(), }).array(), - middleware: F.array().optional() -}) + middleware: F.array().optional(), +}); -module.exports = data => { - return Schema.parse(data) +export default function (data) { + return Schema.parse(data); } diff --git a/packages/core/utils/config-schema_test.js b/packages/core/utils/config-schema_test.js index 8b7b9326..3be89bf7 100644 --- a/packages/core/utils/config-schema_test.js +++ b/packages/core/utils/config-schema_test.js @@ -1,20 +1,22 @@ -const test = require('tape') -const validateConfig = require('./config-schema') +import validateConfig from "./config-schema.js"; +import { assertEquals } from "../dev_deps.js"; -const noop = () => null +const test = Deno.test; -test('validate schema', t => { - t.plan(1) +const noop = () => null; +const plugin = () => ({ id: "foo", load: noop, link: noop }); + +test("validate schema", () => { try { validateConfig({ app: noop, adapters: [ - { port: 'queue', plugins: [noop] } - ] - }) - t.ok(true) + { port: "queue", plugins: [plugin()] }, + ], + }); + assertEquals(true, true); } catch (e) { - console.log(JSON.stringify(e.issues)) - t.ok(false) + console.log(JSON.stringify(e.issues)); + assertEquals(true, false); } -}) +}); diff --git a/packages/core/utils/event-mgr.js b/packages/core/utils/event-mgr.js index b60062d9..1c379c3c 100644 --- a/packages/core/utils/event-mgr.js +++ b/packages/core/utils/event-mgr.js @@ -1,38 +1,39 @@ -const { Identity } = require('crocks') -const { is, append, map } = require('ramda') -const z = require('zod') +import { crocks, R, z } from "../deps.js"; + +const { Identity } = crocks; +const { is, append, map } = R; const fnSpec = z.function() .args(z.object({ type: z.string(), - payload: z.any() - })) + payload: z.any(), + })); /** * @typedef {Object} Action * @property {string} type * @property {Object} payload */ -module.exports = function () { - let fns = Identity([]) // maybe change to Either to handle tryCatch +export default function () { + let fns = Identity([]); // maybe change to Either to handle tryCatch return Object.freeze({ /** * @param {Function} fn */ - subscribe (fn) { + subscribe(fn) { if (is(Function, fn)) { // append function - fns = fns.map(append(fnSpec.validate(fn))) + fns = 
fns.map(append(fnSpec.validate(fn))); } }, /** * @param {Action} action */ - dispatch (action) { + dispatch(action) { map( - fn => fn(action), - fns.valueOf() - ) - } - }) + (fn) => fn(action), + fns.valueOf(), + ); + }, + }); } diff --git a/packages/core/utils/event-mgr_test.js b/packages/core/utils/event-mgr_test.js index 08ec7e1b..e30c44a2 100644 --- a/packages/core/utils/event-mgr_test.js +++ b/packages/core/utils/event-mgr_test.js @@ -1,27 +1,35 @@ -const test = require('tape') -const eventMgr = require('./event-mgr.js') +import eventMgr from "./event-mgr.js"; +import { assertEquals } from "../dev_deps.js"; -const events = eventMgr() +const test = Deno.test; -test('event mgr - happy path', t => { - t.plan(2) - const log = function (action) { - console.log(`${action.type} - ${JSON.stringify(action.payload)}`) - t.ok(true) - } - // maybe need to add unsubscribe? - events.subscribe(log) // x = em.subscribe(fn); x.unsubscribe() +const events = eventMgr(); - // - events.dispatch({ - type: 'SEARCH:CREATE_DOC', - payload: { date: new Date().toISOString(), app: 'foo', id: '1234' } - }) +test("event mgr - happy path", () => { + return new Promise(function (resolve) { + let count = 0; + const log = function (action) { + console.log(`${action.type} - ${JSON.stringify(action.payload)}`); + count++; + if (count === 2) { + assertEquals(true, true); + resolve(); + } + }; + // maybe need to add unsubscribe? + events.subscribe(log); // x = em.subscribe(fn); x.unsubscribe() - setTimeout(() => { + // events.dispatch({ - type: 'DATA:READ_DOC', - payload: { date: new Date().toISOString(), app: 'bar', id: '4321' } - }) - }, 500) -}) + type: "SEARCH:CREATE_DOC", + payload: { date: new Date().toISOString(), app: "foo", id: "1234" }, + }); + + setTimeout(() => { + events.dispatch({ + type: "DATA:READ_DOC", + payload: { date: new Date().toISOString(), app: "bar", id: "4321" }, + }); + }, 500); + }); +}); diff --git a/packages/core/utils/plugin-schema.js b/packages/core/utils/plugin-schema.js index 94d8975d..e95ea856 100644 --- a/packages/core/utils/plugin-schema.js +++ b/packages/core/utils/plugin-schema.js @@ -1,9 +1,10 @@ -const z = require('zod') +import { z } from "../deps.js"; + /** * @param {object} plugin * @returns {object} */ -module.exports = function (plugin) { +export default function (plugin) { const schema = z.object({ id: z.string().optional(), port: z.string().optional(), @@ -12,15 +13,16 @@ module.exports = function (plugin) { .returns(z.any()), link: z.function() .args(z.any()) - .returns(z.function() - .args(z.any()) - .returns(z.any()) - ) - }) + .returns( + z.function() + .args(z.any()) + .returns(z.any()), + ), + }); - const instance = schema.parse(plugin) - instance.load = schema.shape.load.validate(plugin.load) - instance.link = schema.shape.link.validate(plugin.link) + const instance = schema.parse(plugin); + instance.load = schema.shape.load.validate(plugin.load); + instance.link = schema.shape.link.validate(plugin.link); - return instance + return instance; } diff --git a/packages/core/utils/plugins.js b/packages/core/utils/plugins.js index f24c7fab..f9744a92 100644 --- a/packages/core/utils/plugins.js +++ b/packages/core/utils/plugins.js @@ -1,5 +1,16 @@ +import { R } from "../deps.js"; -const { applyTo, filter, compose, map, is, reduce, defaultTo, fromPairs, reverse } = require('ramda') +const { + applyTo, + filter, + compose, + map, + is, + reduce, + defaultTo, + fromPairs, + reverse, +} = R; /** * Given a list of plugins, compose the plugin.load() @@ -7,11 +18,11 @@ const { 
applyTo, filter, compose, map, is, reduce, defaultTo, fromPairs, reverse
  *
  * @param {[]} plugins - a list of plugins
  */
-function loadAdapterConfig (plugins = []) {
+function loadAdapterConfig(plugins = []) {
   return compose(
     reduce((acc, plugin) => defaultTo(acc, plugin.load(acc)), {}),
-    filter(plugin => is(Function, plugin.load))
-  )(plugins)
+    filter((plugin) => is(Function, plugin.load)),
+  )(plugins);
 }
 
 /**
@@ -24,33 +35,34 @@
-function linkPlugins (plugins, adapterConfig) {
+function linkPlugins(plugins, adapterConfig) {
   return compose(
-    links => links.reduce((a, b) => ({
-      /**
+    (links) =>
+      links.reduce((a, b) => ({
+        /**
        * We spread here, so that plugins may just partially implement
        * a port interface. This allows the use of multiple plugins
        * to produce the *complete* port interface, while also achieving the
        * "Onion" wrapping of each method
        */
-      ...a,
-      ...b(a)
-    }), {}),
+        ...a,
+        ...b(a),
+      }), {}),
     reverse,
     map(
-      applyTo(adapterConfig)
+      applyTo(adapterConfig),
     ),
-    map(plugin => plugin.link.bind(plugin)),
-    filter(plugin => is(Function, plugin.link))
-  )(plugins)
+    map((plugin) => plugin.link.bind(plugin)),
+    filter((plugin) => is(Function, plugin.link)),
+  )(plugins);
 }
 
-function initAdapter (portAdapter) {
-  const { plugins } = portAdapter
+function initAdapter(portAdapter) {
+  const { plugins } = portAdapter;
   return compose(
-    adapterConfig => linkPlugins(plugins, adapterConfig),
-    loadAdapterConfig
-  )(plugins || [])
+    (adapterConfig) => linkPlugins(plugins, adapterConfig),
+    loadAdapterConfig,
+  )(plugins || []);
 }
 
 /**
@@ -59,9 +71,9 @@
  *
  * @param {[]} adapters - a list of port nodes from a hyper63 config
  */
-module.exports = function initAdapters (adapters) {
+export default function initAdapters(adapters) {
   return compose(
     fromPairs,
-    map(adapterNode => [adapterNode.port, initAdapter(adapterNode)])
-  )(adapters)
+    map((adapterNode) => [adapterNode.port, initAdapter(adapterNode)]),
+  )(adapters);
 }
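The "Onion" comment above is easiest to see in isolation; here is a standalone sketch (illustration only, not from the repo) of the reverse-then-reduce composition linkPlugins performs:

```js
// Illustration of linkPlugins' composition: each link wrapper receives
// the port built so far and may override or extend its methods via spread.
const wrap1 = (inner) => ({ ...inner, get: (k) => `one(${inner.get(k)})` });
const wrap2 = (inner) => ({ ...inner, get: (k) => `two(${k})` });

// Plugins are listed [wrap1, wrap2]; linkPlugins reverses before reducing,
// so the first-listed plugin ends up outermost:
const port = [wrap1, wrap2]
  .reverse()
  .reduce((a, b) => ({ ...a, ...b(a) }), {});

console.log(port.get("x")); // "one(two(x))"
```

diff --git a/packages/core/utils/plugins_test.js b/packages/core/utils/plugins_test.js
index aab7d24d..e63cad3d 100644
--- a/packages/core/utils/plugins_test.js
+++ b/packages/core/utils/plugins_test.js
@@ -1,31 +1,32 @@
-const test = require('tape')
-const initAdapters = require('./plugins')
-const validate = require('./plugin-schema')
+import initAdapters from "./plugins.js";
+import validate from "./plugin-schema.js";
+import { assertEquals, assertObjectMatch } from "../dev_deps.js";
 
-test('sucessfully compose plugins', t => {
+const test = Deno.test;
+
+test("successfully compose plugins", () => {
   const plugin1 = validate({
-    id: 'plugin1',
-    port: 'default',
-    load: (env) => ({ ...env, hello: 'world' }),
-    link: env => () => ({ hello: () => env.hello })
-  })
+    id: "plugin1",
+    port: "default",
+    load: (env) => ({ ...env, hello: "world" }),
+    link: (env) => () => ({ hello: () => env.hello }),
+  });
 
-  const plugin2 = config => validate({
-    id: 'plugin2',
-    port: 'default',
-    load: (env) => ({ ...env, ...config }),
-    link: env => plugin => ({ ...plugin, beep: () => env })
-  })
+  const plugin2 = (config) =>
+    validate({
+      id: "plugin2",
+      port: "default",
+      load: (env) => ({ ...env, ...config }),
+      link: (env) => (plugin) => ({ ...plugin, beep: () => env }),
+    });
 
   const config = {
     adapters: [
-      { port: 'default', plugins: [plugin2({ foo: 'bar' }), plugin1] }
-    ]
-  }
-  const adapters =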
initAdapters(config.adapters) + { port: "default", plugins: [plugin2({ foo: "bar" }), plugin1] }, + ], + }; + const adapters = initAdapters(config.adapters); - t.equal(adapters.default.hello(), 'world') - t.deepEqual(adapters.default.beep(), { foo: 'bar', hello: 'world' }) - t.ok(true) - t.end() -}) + assertEquals(adapters.default.hello(), "world"); + assertObjectMatch(adapters.default.beep(), { foo: "bar", hello: "world" }); +}); diff --git a/packages/port-cache/README.md b/packages/port-cache/README.md index ece51c97..2dd27bab 100644 --- a/packages/port-cache/README.md +++ b/packages/port-cache/README.md @@ -1,6 +1,7 @@ # Cache Port -The cache port takes an adapter and environment, then parses the adapter and wraps function validations around each function. +The cache port takes an adapter and environment, then parses the adapter and +wraps function validations around each function. Your adapter or composed adapter should implement all specified functions: @@ -35,7 +36,7 @@ test("certify adapter", async (t) => { key: "hello", value: { foo: "bar" }, }) - ).ok + ).ok, ); t.deepEqual((await instance.getDoc({ store: "default", key: "hello" })).doc, { foo: "bar", diff --git a/packages/port-cache/deps.js b/packages/port-cache/deps.js new file mode 100644 index 00000000..da312e71 --- /dev/null +++ b/packages/port-cache/deps.js @@ -0,0 +1,2 @@ +// runtime dependencies here +export * as z from "https://cdn.skypack.dev/zod@^3"; diff --git a/packages/port-cache/deps_lock.json b/packages/port-cache/deps_lock.json new file mode 100644 index 00000000..82b6ab3f --- /dev/null +++ b/packages/port-cache/deps_lock.json @@ -0,0 +1,4 @@ +{ + "https://cdn.skypack.dev/-/zod@v3.1.0-gI7iXzzTM1WELSyd5Wzr/dist=es2020,mode=imports/optimized/zod.js": "2dffcb4bf919f89e183c2ab0c8595822c1454af1676e504ec36bde1ac72b2bc1", + "https://cdn.skypack.dev/zod@^3": "db17f3b774c7bdbb70181223fb10cdd480d1761f2e22340bd11cbcda4ccfbafe" +} diff --git a/packages/port-cache/dev_deps.js b/packages/port-cache/dev_deps.js new file mode 100644 index 00000000..3399088b --- /dev/null +++ b/packages/port-cache/dev_deps.js @@ -0,0 +1,2 @@ +// dev dependencies here +export { assert } from "https://deno.land/std@0.98.0/testing/asserts.ts"; diff --git a/packages/port-cache/dev_deps_lock.json b/packages/port-cache/dev_deps_lock.json new file mode 100644 index 00000000..7cf4ec42 --- /dev/null +++ b/packages/port-cache/dev_deps_lock.json @@ -0,0 +1,5 @@ +{ + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c" +} diff --git a/packages/port-cache/index_test.js b/packages/port-cache/index_test.js deleted file mode 100644 index 11c45318..00000000 --- a/packages/port-cache/index_test.js +++ /dev/null @@ -1,66 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ - -const test = require('tape') -const cachePort = require('./index.js') - -test('port cache ok', t => { - const goodCache = cachePort({ - createStore (name) { - return Promise.resolve({ ok: true }) - }, - destroyStore (name) { - return Promise.resolve({ ok: true }) - }, - createDoc ({ - store, - key, - value, - ttl - }) { - return Promise.resolve({ ok: true }) - }, - getDoc ({ store, key }) { - return Promise.resolve({ ok: true, doc: { beep: 'boop' } }) - }, - updateDoc ({ store, key, value, 
ttl }) { - return Promise.resolve({ ok: true }) - }, - deleteDoc ({ store, key }) { - return Promise.resolve({ ok: true }) - }, - listDocs ({ store, pattern }) { - return Promise.resolve({ ok: true, docs: [] }) - } - } - ) - Promise.all([ - goodCache.createStore('foo'), - goodCache.destroyStore('foo'), - goodCache.createDoc({ - store: 'foo', - key: 'hello', - value: { beep: 'world' }, - ttl: '2m' - }), - goodCache.getDoc({ store: 'foo', key: 'hello' }), - goodCache.updateDoc({ store: 'foo', key: 'hello', value: { baz: 'bam' } }), - goodCache.deleteDoc({ store: 'foo', key: 'hello' }), - goodCache.listDocs({ store: 'foo', pattern: 'w*' }) - ]) - .then(() => { - t.ok(true) - t.end() - }) - .catch(e => { - t.ok(false) - t.end() - }) -}) - -test('port cache shape not ok', t => { - t.end() -}) - -test('port cache methods not ok', t => { - t.end() -}) diff --git a/packages/port-cache/index.js b/packages/port-cache/mod.js similarity index 62% rename from packages/port-cache/index.js rename to packages/port-cache/mod.js index 34714e22..0b8c0c6a 100644 --- a/packages/port-cache/index.js +++ b/packages/port-cache/mod.js @@ -1,114 +1,123 @@ -const z = require('zod') +import { z } from "./deps.js"; + /** * @param {function} adapter - implementation detail for this port * @param {object} env - environment settings for the adapter */ -module.exports = function (adapter) { +export function cache(adapter) { const cachePort = z.object({ // list cache stores index: z.function() .args() .returns( - z.promise(z.string().array()) + z.promise(z.string().array()), ), createStore: z.function() .args(z.string()) .returns( z.promise( z.object({ - ok: z.boolean() - }) - ) + ok: z.boolean(), + }), + ), ), destroyStore: z.function() .args(z.string()) .returns( z.promise( z.object({ - ok: z.boolean() - }) - ) + ok: z.boolean(), + }), + ), ), createDoc: z.function() .args(z.object({ store: z.string(), key: z.string(), value: z.any(), - ttl: z.string().optional() + ttl: z.string().optional(), })) .returns( z.promise( z.object({ ok: z.boolean(), - error: z.string().optional() - }) - ) + error: z.string().optional(), + }), + ), ), getDoc: z.function() .args(z.object({ store: z.string(), - key: z.string() + key: z.string(), })) .returns( z.promise( z.union([ - z.object({ ok: z.boolean(), status: z.number().optional(), msg: z.string() }), - z.object({}).passthrough() - ]) - ) + z.object({ + ok: z.boolean(), + status: z.number().optional(), + msg: z.string(), + }), + z.object({}).passthrough(), + ]), + ), ), updateDoc: z.function() .args(z.object({ store: z.string(), key: z.string(), value: z.any(), - ttl: z.string().optional() + ttl: z.string().optional(), })) .returns( z.promise( z.object({ ok: z.boolean(), - error: z.string().optional() - }) - ) + error: z.string().optional(), + }), + ), ), deleteDoc: z.function() .args(z.object({ store: z.string(), - key: z.string() + key: z.string(), })) .returns( z.promise( z.object({ ok: z.boolean(), - error: z.string().optional() - }) - ) + error: z.string().optional(), + }), + ), ), listDocs: z.function() .args(z.object({ store: z.string(), - pattern: z.string().optional() + pattern: z.string().optional(), })) .returns( z.promise( z.object({ ok: z.boolean(), docs: z.array( - z.any() - ) - }) - ) - ) - }) - const instance = cachePort.parse(adapter) - instance.createStore = cachePort.shape.createStore.validate(instance.createStore) - instance.destroyStore = cachePort.shape.destroyStore.validate(instance.destroyStore) - instance.createDoc = 
cachePort.shape.createDoc.validate(instance.createDoc)
-  instance.getDoc = cachePort.shape.getDoc.validate(instance.getDoc)
-  instance.updateDoc = cachePort.shape.updateDoc.validate(instance.updateDoc)
-  instance.deleteDoc = cachePort.shape.deleteDoc.validate(instance.deleteDoc)
-  instance.listDocs = cachePort.shape.listDocs.validate(instance.listDocs)
+            z.any(),
+          ),
+        }),
+      ),
+    ),
+  });
+  const instance = cachePort.parse(adapter);
+  instance.createStore = cachePort.shape.createStore.validate(
+    instance.createStore,
+  );
+  instance.destroyStore = cachePort.shape.destroyStore.validate(
+    instance.destroyStore,
+  );
+  instance.createDoc = cachePort.shape.createDoc.validate(instance.createDoc);
+  instance.getDoc = cachePort.shape.getDoc.validate(instance.getDoc);
+  instance.updateDoc = cachePort.shape.updateDoc.validate(instance.updateDoc);
+  instance.deleteDoc = cachePort.shape.deleteDoc.validate(instance.deleteDoc);
+  instance.listDocs = cachePort.shape.listDocs.validate(instance.listDocs);
 
-  return instance
+  return instance;
 }
diff --git a/packages/port-cache/mod_test.js b/packages/port-cache/mod_test.js
new file mode 100644
index 00000000..6721ad7d
--- /dev/null
+++ b/packages/port-cache/mod_test.js
@@ -0,0 +1,67 @@
+// deno-lint-ignore-file no-unused-vars
+
+import { assert } from "./dev_deps.js";
+
+import { cache as cachePort } from "./mod.js";
+
+Deno.test("port cache ok", () => {
+  const goodCache = cachePort({
+    createStore(name) {
+      return Promise.resolve({ ok: true });
+    },
+    destroyStore(name) {
+      return Promise.resolve({ ok: true });
+    },
+    createDoc({
+      store,
+      key,
+      value,
+      ttl,
+    }) {
+      return Promise.resolve({ ok: true });
+    },
+    getDoc({ store, key }) {
+      return Promise.resolve({ ok: true, doc: { beep: "boop" } });
+    },
+    updateDoc({ store, key, value, ttl }) {
+      return Promise.resolve({ ok: true });
+    },
+    deleteDoc({ store, key }) {
+      return Promise.resolve({ ok: true });
+    },
+    listDocs({ store, pattern }) {
+      return Promise.resolve({ ok: true, docs: [] });
+    },
+    index() {
+      return Promise.resolve([]);
+    },
+  });
+  return Promise.all([
+    goodCache.createStore("foo"),
+    goodCache.destroyStore("foo"),
+    goodCache.createDoc({
+      store: "foo",
+      key: "hello",
+      value: { beep: "world" },
+      ttl: "2m",
+    }),
+    goodCache.getDoc({ store: "foo", key: "hello" }),
+    goodCache.updateDoc({ store: "foo", key: "hello", value: { baz: "bam" } }),
+    goodCache.deleteDoc({ store: "foo", key: "hello" }),
+    goodCache.listDocs({ store: "foo", pattern: "w*" }),
+  ])
+    .then(() => {
+      assert(true);
+    })
+    .catch((e) => {
+      assert(false);
+    });
+});
+
+Deno.test("port cache shape not ok", (t) => {
+  assert(true);
+});
+
+Deno.test("port cache methods not ok", (t) => {
+  assert(true);
+});
diff --git a/packages/port-cache/package.json b/packages/port-cache/package.json
index 842718bd..fa7e265c 100644
--- a/packages/port-cache/package.json
+++ b/packages/port-cache/package.json
@@ -4,15 +4,5 @@
   "description": "hyper63 service framework port for cache adapters",
   "homepage": "https://github.com/hyper63/hyper63#readme",
   "repository": "https://github.com/hyper63/hyper63",
-  "license": "Apache-2.0",
-  "main": "index.js",
-  "scripts": {
-    "test": "tape index_test.js"
-  },
-  "dependencies": {
-    "zod": "^3.0.0-alpha.33"
-  },
-  "devDependencies": {
-    "tape": "^5.0.1"
-  }
+  "license": "Apache-2.0"
 }
diff --git a/packages/port-cache/scripts/test.sh b/packages/port-cache/scripts/test.sh
new file mode 100755
index 00000000..09bdb23f
--- /dev/null
+++ b/packages/port-cache/scripts/test.sh
@@ -0,0 +1,4
@@ +#!/usr/bin/env bash + +deno fmt --check +deno test diff --git a/packages/port-data/deps.js b/packages/port-data/deps.js new file mode 100644 index 00000000..da312e71 --- /dev/null +++ b/packages/port-data/deps.js @@ -0,0 +1,2 @@ +// runtime dependencies here +export * as z from "https://cdn.skypack.dev/zod@^3"; diff --git a/packages/port-data/deps_lock.json b/packages/port-data/deps_lock.json new file mode 100644 index 00000000..82b6ab3f --- /dev/null +++ b/packages/port-data/deps_lock.json @@ -0,0 +1,4 @@ +{ + "https://cdn.skypack.dev/-/zod@v3.1.0-gI7iXzzTM1WELSyd5Wzr/dist=es2020,mode=imports/optimized/zod.js": "2dffcb4bf919f89e183c2ab0c8595822c1454af1676e504ec36bde1ac72b2bc1", + "https://cdn.skypack.dev/zod@^3": "db17f3b774c7bdbb70181223fb10cdd480d1761f2e22340bd11cbcda4ccfbafe" +} diff --git a/packages/port-data/dev_deps.js b/packages/port-data/dev_deps.js new file mode 100644 index 00000000..3399088b --- /dev/null +++ b/packages/port-data/dev_deps.js @@ -0,0 +1,2 @@ +// dev dependencies here +export { assert } from "https://deno.land/std@0.98.0/testing/asserts.ts"; diff --git a/packages/port-data/dev_deps_lock.json b/packages/port-data/dev_deps_lock.json new file mode 100644 index 00000000..7cf4ec42 --- /dev/null +++ b/packages/port-data/dev_deps_lock.json @@ -0,0 +1,5 @@ +{ + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c" +} diff --git a/packages/port-data/index_test.js b/packages/port-data/index_test.js deleted file mode 100644 index 11eaedc8..00000000 --- a/packages/port-data/index_test.js +++ /dev/null @@ -1,48 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ - -const test = require('tape') -const dataPort = require('./index.js') - -test('data port tests', async t => { - const adapter = dataPort({ - createDatabase: (name) => Promise.resolve({ ok: true }), - removeDatabase: (name) => Promise.resolve({ ok: true }), - createDocument: ({ db, id, doc }) => Promise.resolve({ ok: true, id }), - retrieveDocument: ({ db, id }) => Promise.resolve({ ok: true, id }), - updateDocument: ({ db, id, doc }) => Promise.resolve({ ok: true, id }), - removeDocument: ({ db, id }) => Promise.resolve({ ok: true, id }), - listDocuments: ({ db, limit, startkey, endkey, keys, descending }) => Promise.resolve({ ok: true, docs: [] }), - queryDocuments: ({ db, query }) => Promise.resolve({ ok: true, docs: [] }), - indexDocuments: ({ db, name, fields }) => Promise.resolve({ ok: true }), - bulkDocuments: ({ db, docs }) => Promise.resolve({ ok: true, results: [{ ok: true, id: '1' }] }) - }) - - const results = await Promise.all([ - adapter.createDatabase('foo'), - adapter.removeDatabase('foo'), - adapter.createDocument({ db: 'foo', id: 'bar', doc: { hello: 'world' } }), - adapter.retrieveDocument({ db: 'foo', id: 'bar' }), - adapter.updateDocument({ db: 'foo', id: 'bar', doc: { hello: 'mars' } }), - adapter.removeDocument({ db: 'foo', id: 'bar' }), - adapter.listDocuments({ db: 'foo' }), - adapter.queryDocuments({ - db: 'foo', - query: { - selector: { - id: 'bar' - } - } - }), - adapter.indexDocuments({ db: 'foo', name: 'id', fields: ['id'] }), - adapter.bulkDocuments({ db: 'foo', docs: [{ id: '1', type: 'movie' }] }) - ]) - .then(_ => ({ ok: true })) - .catch(_ => { - console.log(_) - return ({ 
ok: false }) - }) - - t.ok(results.ok) - - t.end() -}) diff --git a/packages/port-data/index.js b/packages/port-data/mod.js similarity index 53% rename from packages/port-data/index.js rename to packages/port-data/mod.js index 0f8b6b9d..5b6f046e 100644 --- a/packages/port-data/index.js +++ b/packages/port-data/mod.js @@ -1,39 +1,44 @@ -const z = require('zod') +import { z } from "./deps.js"; + /** * @param {function} adapter - implementation detail for this port * @param {object} env - environment settings for the adapter */ -module.exports = function (adapter) { +export function data(adapter) { const Port = z.object({ - createDatabase: z.function().args(z.string()).returns(z.promise(z.object({ ok: z.boolean() }))), - removeDatabase: z.function().args(z.string()).returns(z.promise(z.object({ ok: z.boolean() }))), + createDatabase: z.function().args(z.string()).returns( + z.promise(z.object({ ok: z.boolean() })), + ), + removeDatabase: z.function().args(z.string()).returns( + z.promise(z.object({ ok: z.boolean() })), + ), createDocument: z.function() .args(z.object({ db: z.string(), id: z.string(), - doc: z.any() + doc: z.any(), })) .returns(z.promise(z.object({ ok: z.boolean(), - id: z.string() + id: z.string(), }))), retrieveDocument: z.function().args(z.object({ db: z.string(), - id: z.string() + id: z.string(), })).returns(z.promise(z.any())), updateDocument: z.function() .args(z.object({ db: z.string(), id: z.string(), - doc: z.object() + doc: z.object(), })).returns(z.promise(z.any())), removeDocument: z.function() .args(z.object({ db: z.string(), - id: z.string() + id: z.string(), })).returns(z.promise(z.object({ ok: z.boolean(), - id: z.string() + id: z.string(), }))), listDocuments: z.function() .args(z.object({ @@ -42,11 +47,10 @@ module.exports = function (adapter) { startkey: z.string().optional(), endkey: z.string().optional(), keys: z.string().optional(), - descending: z.boolean().optional() - + descending: z.boolean().optional(), })).returns(z.promise(z.object({ ok: z.boolean(), - docs: z.array(z.any()) + docs: z.array(z.any()), }))), queryDocuments: z.function() .args(z.object({ @@ -55,48 +59,66 @@ module.exports = function (adapter) { selector: z.any(), sort: z.array(z.string()).optional(), limit: z.number().optional(), - use_index: z.string().optional() - }) + use_index: z.string().optional(), + }), })) .returns(z.promise(z.object({ ok: z.boolean(), - docs: z.array(z.any()) + docs: z.array(z.any()), }))), indexDocuments: z.function() .args(z.object({ db: z.string(), name: z.string(), - fields: z.array(z.string()) + fields: z.array(z.string()), })) .returns(z.promise(z.object({ ok: z.boolean(), - msg: z.string().optional() + msg: z.string().optional(), }))), bulkDocuments: z.function() .args(z.object({ db: z.string(), - docs: z.array(z.any()) + docs: z.array(z.any()), })) .returns(z.promise(z.object({ ok: z.boolean(), results: z.array(z.object({ ok: z.boolean(), - id: z.string() - })) - }))) - }) - const instance = Port.parse(adapter) - instance.createDatabase = Port.shape.createDatabase.validate(instance.createDatabase) - instance.removeDatabase = Port.shape.removeDatabase.validate(instance.removeDatabase) - instance.createDocument = Port.shape.createDocument.validate(instance.createDocument) - instance.retrieveDocument = Port.shape.retrieveDocument.validate(instance.retrieveDocument) + id: z.string(), + })), + }))), + }); + const instance = Port.parse(adapter); + instance.createDatabase = Port.shape.createDatabase.validate( + instance.createDatabase, + ); + 
instance.removeDatabase = Port.shape.removeDatabase.validate( + instance.removeDatabase, + ); + instance.createDocument = Port.shape.createDocument.validate( + instance.createDocument, + ); + instance.retrieveDocument = Port.shape.retrieveDocument.validate( + instance.retrieveDocument, + ); // instance.updateDocument = Port.shape.updateDocument.validate(instance.updateDocument) - instance.updateDocument = adapter.updateDocument - instance.removeDocument = Port.shape.removeDocument.validate(instance.removeDocument) - instance.listDocuments = Port.shape.listDocuments.validate(instance.listDocuments) - instance.queryDocuments = Port.shape.queryDocuments.validate(instance.queryDocuments) - instance.indexDocuments = Port.shape.indexDocuments.validate(instance.indexDocuments) - instance.bulkDocuments = Port.shape.bulkDocuments.validate(instance.bulkDocuments) + instance.updateDocument = adapter.updateDocument; + instance.removeDocument = Port.shape.removeDocument.validate( + instance.removeDocument, + ); + instance.listDocuments = Port.shape.listDocuments.validate( + instance.listDocuments, + ); + instance.queryDocuments = Port.shape.queryDocuments.validate( + instance.queryDocuments, + ); + instance.indexDocuments = Port.shape.indexDocuments.validate( + instance.indexDocuments, + ); + instance.bulkDocuments = Port.shape.bulkDocuments.validate( + instance.bulkDocuments, + ); - return instance + return instance; } diff --git a/packages/port-data/mod_test.js b/packages/port-data/mod_test.js new file mode 100644 index 00000000..615b0692 --- /dev/null +++ b/packages/port-data/mod_test.js @@ -0,0 +1,49 @@ +// deno-lint-ignore-file no-unused-vars + +import { assert } from "./dev_deps.js"; + +import { data as dataPort } from "./mod.js"; + +Deno.test("data port tests", async () => { + const adapter = dataPort({ + createDatabase: (name) => Promise.resolve({ ok: true }), + removeDatabase: (name) => Promise.resolve({ ok: true }), + createDocument: ({ db, id, doc }) => Promise.resolve({ ok: true, id }), + retrieveDocument: ({ db, id }) => Promise.resolve({ ok: true, id }), + updateDocument: ({ db, id, doc }) => Promise.resolve({ ok: true, id }), + removeDocument: ({ db, id }) => Promise.resolve({ ok: true, id }), + listDocuments: ({ db, limit, startkey, endkey, keys, descending }) => + Promise.resolve({ ok: true, docs: [] }), + queryDocuments: ({ db, query }) => Promise.resolve({ ok: true, docs: [] }), + indexDocuments: ({ db, name, fields }) => Promise.resolve({ ok: true }), + bulkDocuments: ({ db, docs }) => + Promise.resolve({ ok: true, results: [{ ok: true, id: "1" }] }), + }); + + const results = await Promise.all([ + adapter.createDatabase("foo"), + adapter.removeDatabase("foo"), + adapter.createDocument({ db: "foo", id: "bar", doc: { hello: "world" } }), + adapter.retrieveDocument({ db: "foo", id: "bar" }), + adapter.updateDocument({ db: "foo", id: "bar", doc: { hello: "mars" } }), + adapter.removeDocument({ db: "foo", id: "bar" }), + adapter.listDocuments({ db: "foo" }), + adapter.queryDocuments({ + db: "foo", + query: { + selector: { + id: "bar", + }, + }, + }), + adapter.indexDocuments({ db: "foo", name: "id", fields: ["id"] }), + adapter.bulkDocuments({ db: "foo", docs: [{ id: "1", type: "movie" }] }), + ]) + .then((_) => ({ ok: true })) + .catch((_) => { + console.log(_); + return ({ ok: false }); + }); + + assert(results.ok); +}); diff --git a/packages/port-data/package.json b/packages/port-data/package.json index 50411137..3abdc02f 100644 --- a/packages/port-data/package.json +++ 
b/packages/port-data/package.json
@@ -4,15 +4,5 @@
   "description": "hyper63 service framework port for data adapters",
   "homepage": "https://github.com/hyper63/hyper63#readme",
   "repository": "https://github.com/hyper63/hyper63",
-  "license": "Apache-2.0",
-  "main": "index.js",
-  "scripts": {
-    "test": "tape index_test.js"
-  },
-  "dependencies": {
-    "zod": "^3.0.0-alpha.33"
-  },
-  "devDependencies": {
-    "tape": "^5.0.1"
-  }
+  "license": "Apache-2.0"
 }
diff --git a/packages/port-data/scripts/test.sh b/packages/port-data/scripts/test.sh
new file mode 100755
index 00000000..09bdb23f
--- /dev/null
+++ b/packages/port-data/scripts/test.sh
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+deno fmt --check
+deno test
diff --git a/packages/port-hooks/README.md b/packages/port-hooks/README.md
index 70fb7b17..645e5703 100644
--- a/packages/port-hooks/README.md
+++ b/packages/port-hooks/README.md
@@ -1,6 +1,8 @@
 # hyper63 hooks port
 
-A hyper63 port is a schema validator for implementation details. These implementation details exist in the adapter. Using a port you can make sure that the adapter properly implements the port correctly.
+A hyper63 port is a schema validator for implementation details. These
+implementation details exist in the adapter. Using a port, you can make sure
+that the adapter implements the port correctly.
 
 ## Usage
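A usage sketch for this port, based on mod.js further below (the env value here is made up, and the Port schema is currently empty):

```js
import { hooks } from "./mod.js";

// Unlike the other ports, the hooks port receives a factory and an env,
// and validates the result of adapter(env). The Port schema is currently
// empty, so any returned object passes.
const instance = hooks(
  (env) => ({ /* hook methods would go here */ }),
  { url: "https://example.com/hooks" }, // hypothetical env
);
```

diff --git a/packages/port-hooks/deps.js b/packages/port-hooks/deps.js
new file mode 100644
index 00000000..da312e71
--- /dev/null
+++ b/packages/port-hooks/deps.js
@@ -0,0 +1,2 @@
+// runtime dependencies here
+export * as z from "https://cdn.skypack.dev/zod@^3";
diff --git a/packages/port-hooks/deps_lock.json b/packages/port-hooks/deps_lock.json
new file mode 100644
index 00000000..82b6ab3f
--- /dev/null
+++ b/packages/port-hooks/deps_lock.json
@@ -0,0 +1,4 @@
+{
+  "https://cdn.skypack.dev/-/zod@v3.1.0-gI7iXzzTM1WELSyd5Wzr/dist=es2020,mode=imports/optimized/zod.js": "2dffcb4bf919f89e183c2ab0c8595822c1454af1676e504ec36bde1ac72b2bc1",
+  "https://cdn.skypack.dev/zod@^3": "db17f3b774c7bdbb70181223fb10cdd480d1761f2e22340bd11cbcda4ccfbafe"
+}
diff --git a/packages/port-hooks/dev_deps.js b/packages/port-hooks/dev_deps.js
new file mode 100644
index 00000000..3399088b
--- /dev/null
+++ b/packages/port-hooks/dev_deps.js
@@ -0,0 +1,2 @@
+// dev dependencies here
+export { assert } from "https://deno.land/std@0.98.0/testing/asserts.ts";
diff --git a/packages/port-hooks/dev_deps_lock.json b/packages/port-hooks/dev_deps_lock.json
new file mode 100644
index 00000000..7cf4ec42
--- /dev/null
+++ b/packages/port-hooks/dev_deps_lock.json
@@ -0,0 +1,5 @@
+{
+  "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4",
+  "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3",
+  "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c"
+}
diff --git a/packages/port-hooks/index.js b/packages/port-hooks/index.js
deleted file mode 100644
index 7ae0ff34..00000000
--- a/packages/port-hooks/index.js
+++ /dev/null
@@ -1,11 +0,0 @@
-const z = require('zod')
-
-module.exports = function (adapter, env) {
-  const Port = z.object({
-    // add port methods
-  })
-
-  const instance = Port.parse(adapter(env))
-  // TODO: wrap all methods with validation methods
-  return instance
-}
diff --git a/packages/port-hooks/index_test.js b/packages/port-hooks/index_test.js
deleted file mode 100644
index 4b162de1..00000000
---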
a/packages/port-hooks/index_test.js +++ /dev/null @@ -1,6 +0,0 @@ -const test = require('tape') - -test('hooks port tests', t => { - t.ok(true) - t.end() -}) diff --git a/packages/port-hooks/mod.js b/packages/port-hooks/mod.js new file mode 100644 index 00000000..55a7567a --- /dev/null +++ b/packages/port-hooks/mod.js @@ -0,0 +1,11 @@ +import { z } from "./deps.js"; + +export function hooks(adapter, env) { + const Port = z.object({ + // add port methods + }); + + const instance = Port.parse(adapter(env)); + // TODO: wrap all methods with validation methods + return instance; +} diff --git a/packages/port-hooks/mod_test.js b/packages/port-hooks/mod_test.js new file mode 100644 index 00000000..77bea7c5 --- /dev/null +++ b/packages/port-hooks/mod_test.js @@ -0,0 +1,5 @@ +import { assert } from "./dev_deps.js"; + +Deno.test("hooks port tests", () => { + assert(true); +}); diff --git a/packages/port-hooks/package.json b/packages/port-hooks/package.json index 3f4fea74..18b19507 100644 --- a/packages/port-hooks/package.json +++ b/packages/port-hooks/package.json @@ -4,15 +4,5 @@ "description": "hyper63 service framework port for hooks adapters", "homepage": "https://github.com/hyper63/hyper63#readme", "repository": "https://github.com/hyper63/hyper63", - "license": "Apache-2.0", - "main": "index.js", - "scripts": { - "test": "tape index_test.js" - }, - "dependencies": { - "zod": "^3.0.0-alpha.33" - }, - "devDependencies": { - "tape": "^5.0.1" - } + "license": "Apache-2.0" } diff --git a/packages/port-queue/.adiorc.js b/packages/port-queue/.adiorc.js deleted file mode 100644 index dae1ffb4..00000000 --- a/packages/port-queue/.adiorc.js +++ /dev/null @@ -1,9 +0,0 @@ - -module.exports = { - ignore: { - devDependencies: [ - '@types/tape', - 'typescript' - ] - } -} diff --git a/packages/port-queue/README.md b/packages/port-queue/README.md index 3add996f..f895b5d5 100644 --- a/packages/port-queue/README.md +++ b/packages/port-queue/README.md @@ -1,13 +1,13 @@ # README -Queue Port +Queue Port -* Create a Queue -* Delete a Queue -* Post a JOB to a Queue -* Get Jobs from a queue -* Cancel a Job -* Retry a Job +- Create a Queue +- Delete a Queue +- Post a JOB to a Queue +- Get Jobs from a queue +- Cancel a Job +- Retry a Job ## Documentation @@ -26,4 +26,3 @@ yarn build ``` yarn test ``` - diff --git a/packages/port-queue/deps.js b/packages/port-queue/deps.js new file mode 100644 index 00000000..293c3d97 --- /dev/null +++ b/packages/port-queue/deps.js @@ -0,0 +1 @@ +export * as z from "https://cdn.skypack.dev/zod@^3"; diff --git a/packages/port-queue/deps_lock.json b/packages/port-queue/deps_lock.json new file mode 100644 index 00000000..82b6ab3f --- /dev/null +++ b/packages/port-queue/deps_lock.json @@ -0,0 +1,4 @@ +{ + "https://cdn.skypack.dev/-/zod@v3.1.0-gI7iXzzTM1WELSyd5Wzr/dist=es2020,mode=imports/optimized/zod.js": "2dffcb4bf919f89e183c2ab0c8595822c1454af1676e504ec36bde1ac72b2bc1", + "https://cdn.skypack.dev/zod@^3": "db17f3b774c7bdbb70181223fb10cdd480d1761f2e22340bd11cbcda4ccfbafe" +} diff --git a/packages/port-queue/dev_deps.js b/packages/port-queue/dev_deps.js new file mode 100644 index 00000000..d556eeb4 --- /dev/null +++ b/packages/port-queue/dev_deps.js @@ -0,0 +1 @@ +export { assertEquals } from "https://deno.land/std@0.98.0/testing/asserts.ts"; diff --git a/packages/port-queue/dev_deps_lock.json b/packages/port-queue/dev_deps_lock.json new file mode 100644 index 00000000..7cf4ec42 --- /dev/null +++ b/packages/port-queue/dev_deps_lock.json @@ -0,0 +1,5 @@ +{ + 
"https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c" +} diff --git a/packages/port-queue/lib/index.d.ts b/packages/port-queue/lib/index.d.ts deleted file mode 100644 index 32474df7..00000000 --- a/packages/port-queue/lib/index.d.ts +++ /dev/null @@ -1,306 +0,0 @@ -import { z } from 'zod'; -declare const QueueListResponse: z.ZodArray; -declare const QueueCreateInput: z.ZodObject<{ - name: z.ZodString; - target: z.ZodString; - secret: z.ZodOptional; -}, "strip", z.ZodTypeAny, { - secret?: string | undefined; - name: string; - target: string; -}, { - secret?: string | undefined; - name: string; - target: string; -}>; -declare const QueueResponse: z.ZodObject<{ - ok: z.ZodBoolean; - msg: z.ZodOptional; - status: z.ZodOptional; -}, "strip", z.ZodTypeAny, { - msg?: string | undefined; - status?: number | undefined; - ok: boolean; -}, { - msg?: string | undefined; - status?: number | undefined; - ok: boolean; -}>; -declare const QueuePostInput: z.ZodObject<{ - name: z.ZodString; - job: z.ZodObject<{}, "passthrough", z.ZodTypeAny, {}, {}>; -}, "strip", z.ZodTypeAny, { - name: string; - job: {}; -}, { - name: string; - job: {}; -}>; -declare const QueueGetInput: z.ZodObject<{ - name: z.ZodString; - status: z.ZodEnum<["READY", "ERROR"]>; -}, "strip", z.ZodTypeAny, { - name: string; - status: "READY" | "ERROR"; -}, { - name: string; - status: "READY" | "ERROR"; -}>; -declare const JobsResponse: z.ZodObject<{ - ok: z.ZodBoolean; - jobs: z.ZodOptional>, false>; - status: z.ZodOptional; -}, "strip", z.ZodTypeAny, { - status?: number | undefined; - jobs?: {}[] | undefined; - ok: boolean; -}, { - status?: number | undefined; - jobs?: {}[] | undefined; - ok: boolean; -}>; -declare const JobInput: z.ZodObject<{ - name: z.ZodString; - id: z.ZodString; -}, "strip", z.ZodTypeAny, { - name: string; - id: string; -}, { - name: string; - id: string; -}>; -declare const QueuePort: z.ZodObject<{ - index: z.ZodFunction, z.ZodPromise>>; - create: z.ZodFunction; - }, "strip", z.ZodTypeAny, { - secret?: string | undefined; - name: string; - target: string; - }, { - secret?: string | undefined; - name: string; - target: string; - }>]>, z.ZodPromise; - status: z.ZodOptional; - }, "strip", z.ZodTypeAny, { - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }, { - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>>>; - delete: z.ZodFunction, z.ZodPromise; - status: z.ZodOptional; - }, "strip", z.ZodTypeAny, { - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }, { - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>>>; - post: z.ZodFunction; - }, "strip", z.ZodTypeAny, { - name: string; - job: {}; - }, { - name: string; - job: {}; - }>]>, z.ZodPromise; - status: z.ZodOptional; - }, "strip", z.ZodTypeAny, { - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }, { - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>>>; - get: z.ZodFunction; - }, "strip", z.ZodTypeAny, { - name: string; - status: "READY" | "ERROR"; - }, { - name: string; - status: "READY" | "ERROR"; - }>]>, z.ZodPromise>, false>; - status: z.ZodOptional; - }, "strip", z.ZodTypeAny, { - status?: number | undefined; - 
jobs?: {}[] | undefined; - ok: boolean; - }, { - status?: number | undefined; - jobs?: {}[] | undefined; - ok: boolean; - }>>>; - retry: z.ZodFunction]>, z.ZodPromise; - status: z.ZodOptional; - }, "strip", z.ZodTypeAny, { - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }, { - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>>>; - cancel: z.ZodFunction]>, z.ZodPromise; - status: z.ZodOptional; - }, "strip", z.ZodTypeAny, { - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }, { - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>>>; -}, "strip", z.ZodTypeAny, { - index: () => Promise; - create: (args_0: { - secret?: string | undefined; - name: string; - target: string; - }) => Promise<{ - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>; - delete: (args_0: string) => Promise<{ - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>; - post: (args_0: { - name: string; - job: {}; - }) => Promise<{ - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>; - get: (args_0: { - name: string; - status: "READY" | "ERROR"; - }) => Promise<{ - status?: number | undefined; - jobs?: {}[] | undefined; - ok: boolean; - }>; - retry: (args_0: { - name: string; - id: string; - }) => Promise<{ - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>; - cancel: (args_0: { - name: string; - id: string; - }) => Promise<{ - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>; -}, { - index: () => Promise; - create: (args_0: { - secret?: string | undefined; - name: string; - target: string; - }) => Promise<{ - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>; - delete: (args_0: string) => Promise<{ - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>; - post: (args_0: { - name: string; - job: {}; - }) => Promise<{ - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>; - get: (args_0: { - name: string; - status: "READY" | "ERROR"; - }) => Promise<{ - status?: number | undefined; - jobs?: {}[] | undefined; - ok: boolean; - }>; - retry: (args_0: { - name: string; - id: string; - }) => Promise<{ - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>; - cancel: (args_0: { - name: string; - id: string; - }) => Promise<{ - msg?: string | undefined; - status?: number | undefined; - ok: boolean; - }>; -}>; -export declare type QueuePort = z.infer; -export declare type QueueListResponse = z.infer; -export declare type QueueCreateInput = z.infer; -export declare type QueueResponse = z.infer; -export declare type QueuePostInput = z.infer; -export declare type QueueGetInput = z.infer; -export declare type JobsResponse = z.infer; -export declare type JobInput = z.infer; -export default function (adapter: QueuePort): QueuePort; -export {}; diff --git a/packages/port-queue/lib/index.js b/packages/port-queue/lib/index.js deleted file mode 100644 index d8a4f5a9..00000000 --- a/packages/port-queue/lib/index.js +++ /dev/null @@ -1,66 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -var zod_1 = require("zod"); -var QueueListResponse = zod_1.z.string().array(); -var QueueCreateInput = zod_1.z.object({ - name: zod_1.z.string(), - target: zod_1.z.string().url(), - secret: zod_1.z.string().max(100).optional() -}); -var QueueResponse = zod_1.z.object({ - ok: zod_1.z.boolean(), 
- msg: zod_1.z.string().optional(), - status: zod_1.z.number().optional() -}); -var QueuePostInput = zod_1.z.object({ - name: zod_1.z.string(), - job: zod_1.z.object({}).passthrough() -}); -var QueueGetInput = zod_1.z.object({ - name: zod_1.z.string(), - status: zod_1.z.enum(['READY', 'ERROR']) -}); -var JobsResponse = zod_1.z.object({ - ok: zod_1.z.boolean(), - jobs: zod_1.z.array(zod_1.z.object({}).passthrough()).optional(), - status: zod_1.z.number().optional() -}); -var JobInput = zod_1.z.object({ - name: zod_1.z.string(), - id: zod_1.z.string() -}); -var QueuePort = zod_1.z.object({ - index: zod_1.z.function() - .args() - .returns(zod_1.z.promise(QueueListResponse)), - create: zod_1.z.function() - .args(QueueCreateInput) - .returns(zod_1.z.promise(QueueResponse)), - 'delete': zod_1.z.function() - .args(zod_1.z.string()) - .returns(zod_1.z.promise(QueueResponse)), - post: zod_1.z.function() - .args(QueuePostInput) - .returns(zod_1.z.promise(QueueResponse)), - get: zod_1.z.function() - .args(QueueGetInput) - .returns(zod_1.z.promise(JobsResponse)), - retry: zod_1.z.function() - .args(JobInput) - .returns(zod_1.z.promise(QueueResponse)), - cancel: zod_1.z.function() - .args(JobInput) - .returns(zod_1.z.promise(QueueResponse)) -}); -function default_1(adapter) { - var instance = QueuePort.parse(adapter); - // wrap the functions with validators - instance.create = QueuePort.shape.create.validate(instance.create); - instance.post = QueuePort.shape.post.validate(instance.post); - instance.delete = QueuePort.shape.delete.validate(instance.delete); - instance.get = QueuePort.shape.get.validate(instance.get); - instance.retry = QueuePort.shape.retry.validate(instance.retry); - instance.cancel = QueuePort.shape.cancel.validate(instance.cancel); - return instance; -} -exports.default = default_1; diff --git a/packages/port-queue/lib/index_test.d.ts b/packages/port-queue/lib/index_test.d.ts deleted file mode 100644 index cb0ff5c3..00000000 --- a/packages/port-queue/lib/index_test.d.ts +++ /dev/null @@ -1 +0,0 @@ -export {}; diff --git a/packages/port-queue/lib/index_test.js b/packages/port-queue/lib/index_test.js deleted file mode 100644 index 3b493bcc..00000000 --- a/packages/port-queue/lib/index_test.js +++ /dev/null @@ -1,149 +0,0 @@ -"use strict"; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __generator = (this && this.__generator) || function (thisArg, body) { - var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g; - return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; - function verb(n) { return function (v) { return step([n, v]); }; } - function step(op) { - if (f) throw new TypeError("Generator is already executing."); - while (_) try { - if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? 
y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; - if (y = 0, t) op = [op[0] & 2, t.value]; - switch (op[0]) { - case 0: case 1: t = op; break; - case 4: _.label++; return { value: op[1], done: false }; - case 5: _.label++; y = op[1]; op = [0]; continue; - case 7: op = _.ops.pop(); _.trys.pop(); continue; - default: - if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } - if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } - if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } - if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } - if (t[2]) _.ops.pop(); - _.trys.pop(); continue; - } - op = body.call(thisArg, _); - } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } - if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true }; - } -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -var tape_1 = __importDefault(require("tape")); -var index_1 = __importDefault(require("./index")); -var adapter = { - index: function () { - return Promise.resolve([]); - }, - create: function (input) { - return Promise.resolve({ - ok: true, - msg: 'success' - }); - }, - post: function (input) { - return Promise.resolve({ - ok: true, - msg: 'success' - }); - }, - 'delete': function (name) { - return Promise.resolve({ ok: true }); - }, - get: function (input) { - return Promise.resolve({ - ok: true, - jobs: [{ - id: '1', - action: 'email', - subject: 'Hello', - body: 'world', - to: 'foo@email.com', - from: 'dnr@foo.com' - }] - }); - }, - cancel: function (input) { - return Promise.resolve({ ok: true }); - }, - retry: function (input) { - return Promise.resolve({ ok: true, status: 201 }); - } -}; -var badAdapter = { - index: function () { return Promise.reject({ ok: false, msg: 'could not create list' }); }, - create: function (input) { return Promise.reject({ ok: false, msg: 'badfood' }); }, - post: function (input) { return Promise.reject({ ok: false, msg: 'badfood' }); }, - 'delete': function (name) { return Promise.reject({ ok: false }); }, - get: function (input) { return Promise.reject({ ok: false }); }, - cancel: function (input) { return Promise.reject({ ok: false }); }, - retry: function (input) { return Promise.reject({ ok: false }); } -}; -tape_1.default('create a queue success', function (t) { return __awaiter(void 0, void 0, void 0, function () { - var x, res; - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - t.plan(3); - x = index_1.default(adapter); - return [4 /*yield*/, x.create({ - name: 'test', - target: 'https://example.com', - secret: 'somesecret' - })]; - case 1: - res = _a.sent(); - t.ok(res.ok); - return [4 /*yield*/, x.post({ - name: 'test', - job: { - action: 'email', - subject: 'Hello', - body: 'world', - to: 'foo@email.com', - from: 'dnr@foo.com' - } - })]; - case 2: - res = _a.sent(); - t.ok(res.ok); - return [4 /*yield*/, x.get({ - name: 'test', - status: 'ERROR' - })]; - case 3: - res = _a.sent(); - t.ok(res.ok); - return [2 /*return*/]; - } - }); -}); }); -tape_1.default('create a queue failure', function (t) { return __awaiter(void 0, void 0, void 0, function () { - var x, res; - return __generator(this, function (_a) { - switch (_a.label) { - case 0: - t.plan(2); - x = index_1.default(badAdapter); 
- return [4 /*yield*/, x.create({ name: 'foo', target: 'bar' }).catch(function (err) { return err; })]; - case 1: - res = _a.sent(); - t.notOk(res.ok); - return [4 /*yield*/, x.post({ name: 'foo', job: {} }).catch(function (err) { return err; })]; - case 2: - res = _a.sent(); - t.notOk(res.ok); - return [2 /*return*/]; - } - }); -}); }); diff --git a/packages/port-queue/src/index.ts b/packages/port-queue/mod.js similarity index 54% rename from packages/port-queue/src/index.ts rename to packages/port-queue/mod.js index 8608e2ac..12c7742c 100644 --- a/packages/port-queue/src/index.ts +++ b/packages/port-queue/mod.js @@ -1,41 +1,39 @@ -/* eslint-disable no-redeclare */ +import { z } from "./deps.js"; -import { z } from 'zod' - -const QueueListResponse = z.string().array() +const QueueListResponse = z.string().array(); const QueueCreateInput = z.object({ name: z.string(), target: z.string().url(), - secret: z.string().max(100).optional() -}) + secret: z.string().max(100).optional(), +}); const QueueResponse = z.object({ ok: z.boolean(), msg: z.string().optional(), - status: z.number().optional() -}) + status: z.number().optional(), +}); const QueuePostInput = z.object({ name: z.string(), - job: z.object({}).passthrough() -}) + job: z.object({}).passthrough(), +}); const QueueGetInput = z.object({ name: z.string(), - status: z.enum(['READY', 'ERROR']) -}) + status: z.enum(["READY", "ERROR"]), +}); const JobsResponse = z.object({ ok: z.boolean(), jobs: z.array(z.object({}).passthrough()).optional(), - status: z.number().optional() -}) + status: z.number().optional(), +}); const JobInput = z.object({ name: z.string(), - id: z.string() -}) + id: z.string(), +}); const QueuePort = z.object({ index: z.function() @@ -58,28 +56,19 @@ const QueuePort = z.object({ .returns(z.promise(QueueResponse)), cancel: z.function() .args(JobInput) - .returns(z.promise(QueueResponse)) -}) - -export type QueuePort = z.infer -export type QueueListResponse = z.infer -export type QueueCreateInput = z.infer -export type QueueResponse = z.infer -export type QueuePostInput = z.infer -export type QueueGetInput = z.infer -export type JobsResponse = z.infer -export type JobInput = z.infer + .returns(z.promise(QueueResponse)), +}); -export default function (adapter : QueuePort) : QueuePort { - const instance = QueuePort.parse(adapter) +export function queue(adapter) { + const instance = QueuePort.parse(adapter); // wrap the functions with validators - instance.create = QueuePort.shape.create.validate(instance.create) - instance.post = QueuePort.shape.post.validate(instance.post) - instance.delete = QueuePort.shape.delete.validate(instance.delete) - instance.get = QueuePort.shape.get.validate(instance.get) - instance.retry = QueuePort.shape.retry.validate(instance.retry) - instance.cancel = QueuePort.shape.cancel.validate(instance.cancel) + instance.create = QueuePort.shape.create.validate(instance.create); + instance.post = QueuePort.shape.post.validate(instance.post); + instance.delete = QueuePort.shape.delete.validate(instance.delete); + instance.get = QueuePort.shape.get.validate(instance.get); + instance.retry = QueuePort.shape.retry.validate(instance.retry); + instance.cancel = QueuePort.shape.cancel.validate(instance.cancel); - return instance + return instance; } diff --git a/packages/port-queue/mod_test.js b/packages/port-queue/mod_test.js new file mode 100644 index 00000000..6521a48f --- /dev/null +++ b/packages/port-queue/mod_test.js @@ -0,0 +1,86 @@ +// deno-lint-ignore-file no-unused-vars + +import { queue as 
queuePort } from "./mod.js"; +import { assertEquals } from "./dev_deps.js"; + +const test = Deno.test; + +const adapter = { + index: () => { + return Promise.resolve([]); + }, + create: (input) => { + return Promise.resolve({ + ok: true, + msg: "success", + }); + }, + post: (input) => { + return Promise.resolve({ + ok: true, + msg: "success", + }); + }, + delete: (name) => { + return Promise.resolve({ ok: true }); + }, + get: (input) => { + return Promise.resolve({ + ok: true, + jobs: [{ + id: "1", + action: "email", + subject: "Hello", + body: "world", + to: "foo@email.com", + from: "dnr@foo.com", + }], + }); + }, + cancel: (input) => Promise.resolve({ ok: true }), + retry: (input) => Promise.resolve({ ok: true, status: 201 }), +}; + +const badAdapter = { + index: () => Promise.reject({ ok: false, msg: "could not create list" }), + create: (input) => Promise.reject({ ok: false, msg: "badfood" }), + post: (input) => Promise.reject({ ok: false, msg: "badfood" }), + delete: (name) => Promise.reject({ ok: false }), + get: (input) => Promise.reject({ ok: false }), + cancel: (input) => Promise.reject({ ok: false }), + retry: (input) => Promise.reject({ ok: false }), +}; + +test("create a queue success", async (t) => { + const x = queuePort(adapter); + let res = await x.create({ + name: "test", + target: "https://example.com", + secret: "somesecret", + }); + assertEquals(res.ok, true); + res = await x.post({ + name: "test", + job: { + action: "email", + subject: "Hello", + body: "world", + to: "foo@email.com", + from: "dnr@foo.com", + }, + }); + assertEquals(res.ok, true); + res = await x.get({ + name: "test", + status: "ERROR", + }); + assertEquals(res.ok, true); +}); + +test("create a queue failure", async () => { + const x = queuePort(badAdapter); + let res = await x.create({ name: "foo", target: "bar" }).catch((err) => err); + assertEquals(res.ok, undefined); + res = await x.post({ name: "foo", job: {} }).catch((err) => err); + assertEquals(res.ok, false); +}); diff --git a/packages/port-queue/package.json b/packages/port-queue/package.json index 69d51079..6ae2a3b1 100644 --- a/packages/port-queue/package.json +++ b/packages/port-queue/package.json @@ -1,19 +1,5 @@ { "name": "@hyper63/port-queue", "version": "0.1.4", - "license": "MIT", - "main": "lib/index.js", - "typings": "lib/index", - "scripts": { - "build": "tsc", - "test": "tsc && tape lib/*_test.js" - }, - "dependencies": { - "zod": "^3.0.0-alpha.33" - }, - "devDependencies": { - "@types/tape": "^4.13.0", - "tape": "^5.2.2", - "typescript": "^4.2.3" - } + "license": "MIT" } diff --git a/packages/port-queue/scripts/test.sh b/packages/port-queue/scripts/test.sh new file mode 100755 index 00000000..88fa2a08 --- /dev/null +++ b/packages/port-queue/scripts/test.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +deno fmt --check +deno test *_test.js diff --git a/packages/port-queue/src/index_test.ts b/packages/port-queue/src/index_test.ts deleted file mode 100644 index 800a2515..00000000 --- a/packages/port-queue/src/index_test.ts +++ /dev/null @@ -1,93 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ - -import test from 'tape' -import queuePort, { - QueuePort, QueueListResponse, QueueCreateInput, QueuePostInput, QueueResponse, - QueueGetInput, JobsResponse, JobInput -} from './index' - -const adapter : QueuePort = { - index: () : Promise => { - return Promise.resolve([]) - }, - create: (input: QueueCreateInput) : Promise => { - return Promise.resolve({ - ok: true, - msg: 'success' - }) - }, - post: (input: QueuePostInput) : Promise 
=> { - return Promise.resolve({ - ok: true, - msg: 'success' - }) - }, - delete: (name: string) : Promise => { - return Promise.resolve({ ok: true }) - }, - get: (input: QueueGetInput) : Promise => { - return Promise.resolve({ - ok: true, - jobs: [{ - id: '1', - action: 'email', - subject: 'Hello', - body: 'world', - to: 'foo@email.com', - from: 'dnr@foo.com' - }] - }) - }, - cancel: (input: JobInput) : Promise => - Promise.resolve({ ok: true }), - retry: (input: JobInput) : Promise => - Promise.resolve({ ok: true, status: 201 }) - -} - -const badAdapter : QueuePort = { - index: () => Promise.reject({ ok: false, msg: 'could not create list' }), - create: (input: QueueCreateInput) => Promise.reject({ ok: false, msg: 'badfood' }), - post: (input: QueuePostInput) => Promise.reject({ ok: false, msg: 'badfood' }), - delete: (name: string) => Promise.reject({ ok: false }), - get: (input: QueueGetInput) => Promise.reject({ ok: false }), - cancel: (input: JobInput) => Promise.reject({ ok: false }), - retry: (input: JobInput) => Promise.reject({ ok: false }) - -} - -test('create a queue success', async t => { - t.plan(3) - const x = queuePort(adapter) - let res = await x.create({ - name: 'test', - target: 'https://example.com', - secret: 'somesecret' - }) - t.ok(res.ok) - res = await x.post({ - name: 'test', - job: { - action: 'email', - subject: 'Hello', - body: 'world', - to: 'foo@email.com', - from: 'dnr@foo.com' - } - }) - t.ok(res.ok) - res = await x.get({ - name: 'test', - status: 'ERROR' - }) - t.ok(res.ok) -}) - -test('create a queue failure', async t => { - t.plan(2) - const x = queuePort(badAdapter) - let res = await x.create({ name: 'foo', target: 'bar' }).catch(err => err) - t.notOk(res.ok) - res = await x.post({ name: 'foo', job: {} }).catch(err => err) - t.notOk(res.ok) -}) diff --git a/packages/port-queue/tsconfig.json b/packages/port-queue/tsconfig.json deleted file mode 100644 index 1e7e37dc..00000000 --- a/packages/port-queue/tsconfig.json +++ /dev/null @@ -1,70 +0,0 @@ -{ - "compilerOptions": { - /* Visit https://aka.ms/tsconfig.json to read more about this file */ - - /* Basic Options */ - // "incremental": true, /* Enable incremental compilation */ - "target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */ - "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */ - // "lib": [], /* Specify library files to be included in the compilation. */ - // "allowJs": true, /* Allow javascript files to be compiled. */ - // "checkJs": true, /* Report errors in .js files. */ - // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ - "declaration": true, /* Generates corresponding '.d.ts' file. */ - // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ - // "sourceMap": true, /* Generates corresponding '.map' file. */ - // "outFile": "./", /* Concatenate and emit output to single file. */ - "outDir": "lib", /* Redirect output structure to the directory. */ - // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ - // "composite": true, /* Enable project compilation */ - // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ - // "removeComments": true, /* Do not emit comments to output. 
*/ - // "noEmit": true, /* Do not emit outputs. */ - // "importHelpers": true, /* Import emit helpers from 'tslib'. */ - // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ - // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ - - /* Strict Type-Checking Options */ - "strict": true, /* Enable all strict type-checking options. */ - // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ - // "strictNullChecks": true, /* Enable strict null checks. */ - // "strictFunctionTypes": true, /* Enable strict checking of function types. */ - // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ - // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ - // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ - // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ - - /* Additional Checks */ - // "noUnusedLocals": true, /* Report errors on unused locals. */ - // "noUnusedParameters": true, /* Report errors on unused parameters. */ - // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ - // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ - - /* Module Resolution Options */ - // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ - // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ - // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ - // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ - // "typeRoots": [], /* List of folders to include type definitions from. */ - // "types": [], /* Type declaration files to be included in compilation. */ - // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ - "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */ - // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ - // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ - - /* Source Map Options */ - // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ - // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ - // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ - // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ - - /* Experimental Options */ - // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ - // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ - - /* Advanced Options */ - "skipLibCheck": true, /* Skip type checking of declaration files. 
*/ - "forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */ - }, - "include": ["src/**/*.ts"] -} diff --git a/packages/port-search/README.md b/packages/port-search/README.md index 71bd3261..ff3af3bd 100644 --- a/packages/port-search/README.md +++ b/packages/port-search/README.md @@ -1,8 +1,7 @@ # hyper63 search port -hyper63 port is a module that valiadates the schema of an adapter. In this case the search -port validates the schema of a -search adapter. +hyper63 port is a module that valiadates the schema of an adapter. In this case +the search port validates the schema of a search adapter. ## Usage diff --git a/packages/port-search/deps.js b/packages/port-search/deps.js new file mode 100644 index 00000000..da312e71 --- /dev/null +++ b/packages/port-search/deps.js @@ -0,0 +1,2 @@ +// runtime dependencies here +export * as z from "https://cdn.skypack.dev/zod@^3"; diff --git a/packages/port-search/deps_lock.json b/packages/port-search/deps_lock.json new file mode 100644 index 00000000..82b6ab3f --- /dev/null +++ b/packages/port-search/deps_lock.json @@ -0,0 +1,4 @@ +{ + "https://cdn.skypack.dev/-/zod@v3.1.0-gI7iXzzTM1WELSyd5Wzr/dist=es2020,mode=imports/optimized/zod.js": "2dffcb4bf919f89e183c2ab0c8595822c1454af1676e504ec36bde1ac72b2bc1", + "https://cdn.skypack.dev/zod@^3": "db17f3b774c7bdbb70181223fb10cdd480d1761f2e22340bd11cbcda4ccfbafe" +} diff --git a/packages/port-search/dev_deps.js b/packages/port-search/dev_deps.js new file mode 100644 index 00000000..3399088b --- /dev/null +++ b/packages/port-search/dev_deps.js @@ -0,0 +1,2 @@ +// dev dependencies here +export { assert } from "https://deno.land/std@0.98.0/testing/asserts.ts"; diff --git a/packages/port-search/dev_deps_lock.json b/packages/port-search/dev_deps_lock.json new file mode 100644 index 00000000..7cf4ec42 --- /dev/null +++ b/packages/port-search/dev_deps_lock.json @@ -0,0 +1,5 @@ +{ + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c" +} diff --git a/packages/port-search/index_test.js b/packages/port-search/index_test.js deleted file mode 100644 index 35068a6a..00000000 --- a/packages/port-search/index_test.js +++ /dev/null @@ -1,34 +0,0 @@ -/* eslint-disable @typescript-eslint/no-unused-vars */ - -const test = require('tape') -const searchPort = require('./index.js') - -test('port search ok', t => { - t.plan(1) - const search = searchPort({ - createIndex: ({ index, mappings }) => Promise.resolve({ ok: true }), - deleteIndex: (index) => Promise.resolve({ ok: true }), - indexDoc: ({ index, key, doc }) => Promise.resolve({ ok: true }), - getDoc: ({ index, key }) => Promise.resolve({ ok: true, key, doc: { hello: 'world' } }), - updateDoc: ({ index, key, doc }) => Promise.resolve({ ok: true }), - removeDoc: ({ index, key }) => Promise.resolve({ ok: true }), - bulk: ({ index, docs }) => Promise.resolve({ ok: true, results: [] }), - query: ({ index, q }) => Promise.resolve({ ok: true, matches: [] }) - }) - Promise.all([ - search.createIndex({ index: 'foo', mappings: {} }), - search.deleteIndex('foo'), - search.indexDoc({ index: 'foo', key: 'bar', doc: { hello: 'world' } }), - search.getDoc({ index: 'foo', key: 'bar' }), - search.updateDoc({ index: 'foo', key: 'bar', doc: { beep: 
'boop' } }), - search.removeDoc({ index: 'foo', key: 'bar' }), - search.bulk({ index: 'foo', docs: [] }), - search.query({ index: 'foo', q: { query: 'foo' } }) - - ]).then(results => { - t.ok(true) - }).catch(e => { - console.log(e) - t.ok(false) - }) -}) diff --git a/packages/port-search/index.js b/packages/port-search/mod.js similarity index 68% rename from packages/port-search/index.js rename to packages/port-search/mod.js index b6665878..d6b98bd8 100644 --- a/packages/port-search/index.js +++ b/packages/port-search/mod.js @@ -1,76 +1,76 @@ -const z = require('zod') +import { z } from "./deps.js"; -module.exports = function (adapter) { +export function search(adapter) { const Port = z.object({ // add port methods createIndex: z.function() .args(z.object({ index: z.string(), - mappings: z.any() + mappings: z.any(), })) .returns(z.promise(z.object({ ok: z.boolean(), - msg: z.string().optional() + msg: z.string().optional(), }))), deleteIndex: z.function() .args(z.string()) .returns(z.promise(z.object({ ok: z.boolean(), - msg: z.string().optional() + msg: z.string().optional(), }))), indexDoc: z.function() .args(z.object({ index: z.string(), key: z.string(), // remember to invalidate if key === query - doc: z.any() + doc: z.any(), })) .returns(z.promise(z.object({ ok: z.boolean(), - msg: z.string().optional() + msg: z.string().optional(), }))), updateDoc: z.function() .args(z.object({ index: z.string(), - doc: z.any() + doc: z.any(), })) .returns(z.promise(z.object({ ok: z.boolean(), - msg: z.string().optional() + msg: z.string().optional(), }))), getDoc: z.function() .args(z.object({ index: z.string(), - key: z.string() + key: z.string(), })) .returns(z.promise(z.object({ ok: z.boolean(), key: z.string(), - doc: z.any() + doc: z.any(), }))), removeDoc: z.function() .args(z.object({ index: z.string(), - key: z.string() + key: z.string(), })) .returns(z.promise(z.object({ ok: z.boolean(), - msg: z.string().optional() + msg: z.string().optional(), }))), bulk: z.function() .args(z.object({ index: z.string(), docs: z.array( - z.any() - ) + z.any(), + ), })) .returns( z.promise( z.object({ ok: z.boolean(), - results: z.array(z.any()) - }) - ) + results: z.array(z.any()), + }), + ), ), query: z.function() .args(z.object({ @@ -78,25 +78,25 @@ module.exports = function (adapter) { q: z.object({ query: z.string(), fields: z.array(z.string()).optional(), - filter: z.any().optional() - }) + filter: z.any().optional(), + }), })) .returns(z.promise(z.object({ ok: z.boolean(), - matches: z.array(z.any()) - }))) - }) + matches: z.array(z.any()), + }))), + }); - const instance = Port.parse(adapter) + const instance = Port.parse(adapter); - instance.createIndex = Port.shape.createIndex.validate(instance.createIndex) - instance.deleteIndex = Port.shape.deleteIndex.validate(instance.deleteIndex) - instance.indexDoc = Port.shape.indexDoc.validate(instance.indexDoc) - instance.getDoc = Port.shape.getDoc.validate(instance.getDoc) - instance.updateDoc = Port.shape.updateDoc.validate(instance.updateDoc) - instance.removeDoc = Port.shape.removeDoc.validate(instance.removeDoc) + instance.createIndex = Port.shape.createIndex.validate(instance.createIndex); + instance.deleteIndex = Port.shape.deleteIndex.validate(instance.deleteIndex); + instance.indexDoc = Port.shape.indexDoc.validate(instance.indexDoc); + instance.getDoc = Port.shape.getDoc.validate(instance.getDoc); + instance.updateDoc = Port.shape.updateDoc.validate(instance.updateDoc); + instance.removeDoc = Port.shape.removeDoc.validate(instance.removeDoc); - 
instance.query = Port.shape.query.validate(instance.query) + instance.query = Port.shape.query.validate(instance.query); - return instance + return instance; } diff --git a/packages/port-search/mod_test.js b/packages/port-search/mod_test.js new file mode 100644 index 00000000..b6d0a04a --- /dev/null +++ b/packages/port-search/mod_test.js @@ -0,0 +1,35 @@ +// deno-lint-ignore-file no-unused-vars + +import { assert } from "./dev_deps.js"; + +import { search as searchPort } from "./mod.js"; + +Deno.test("port search ok", async () => { + const search = searchPort({ + createIndex: ({ index, mappings }) => Promise.resolve({ ok: true }), + deleteIndex: (index) => Promise.resolve({ ok: true }), + indexDoc: ({ index, key, doc }) => Promise.resolve({ ok: true }), + getDoc: ({ index, key }) => + Promise.resolve({ ok: true, key, doc: { hello: "world" } }), + updateDoc: ({ index, key, doc }) => Promise.resolve({ ok: true }), + removeDoc: ({ index, key }) => Promise.resolve({ ok: true }), + bulk: ({ index, docs }) => Promise.resolve({ ok: true, results: [] }), + query: ({ index, q }) => Promise.resolve({ ok: true, matches: [] }), + }); + + await Promise.all([ + search.createIndex({ index: "foo", mappings: {} }), + search.deleteIndex("foo"), + search.indexDoc({ index: "foo", key: "bar", doc: { hello: "world" } }), + search.getDoc({ index: "foo", key: "bar" }), + search.updateDoc({ index: "foo", key: "bar", doc: { beep: "boop" } }), + search.removeDoc({ index: "foo", key: "bar" }), + search.bulk({ index: "foo", docs: [] }), + search.query({ index: "foo", q: { query: "foo" } }), + ]).then(() => { + assert(true); + }).catch((e) => { + console.log(e); + assert(false); + }); +}); diff --git a/packages/port-search/package.json b/packages/port-search/package.json index 64340c39..b91c3b36 100644 --- a/packages/port-search/package.json +++ b/packages/port-search/package.json @@ -4,15 +4,5 @@ "description": "hyper63 service framework port for search adapters", "homepage": "https://github.com/hyper63/hyper63#readme", "repository": "https://github.com/hyper63/hyper63", - "license": "Apache-2.0", - "main": "index.js", - "scripts": { - "test": "tape index_test.js" - }, - "dependencies": { - "zod": "^3.0.0-alpha.33" - }, - "devDependencies": { - "tape": "^5.0.1" - } + "license": "Apache-2.0" } diff --git a/packages/port-search/scripts/test.sh b/packages/port-search/scripts/test.sh new file mode 100755 index 00000000..d2e41fbd --- /dev/null +++ b/packages/port-search/scripts/test.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +deno fmt --check +deno test diff --git a/packages/port-storage/deps.js b/packages/port-storage/deps.js new file mode 100644 index 00000000..da312e71 --- /dev/null +++ b/packages/port-storage/deps.js @@ -0,0 +1,2 @@ +// runtime dependencies here +export * as z from "https://cdn.skypack.dev/zod@^3"; diff --git a/packages/port-storage/deps_lock.json b/packages/port-storage/deps_lock.json new file mode 100644 index 00000000..82b6ab3f --- /dev/null +++ b/packages/port-storage/deps_lock.json @@ -0,0 +1,4 @@ +{ + "https://cdn.skypack.dev/-/zod@v3.1.0-gI7iXzzTM1WELSyd5Wzr/dist=es2020,mode=imports/optimized/zod.js": "2dffcb4bf919f89e183c2ab0c8595822c1454af1676e504ec36bde1ac72b2bc1", + "https://cdn.skypack.dev/zod@^3": "db17f3b774c7bdbb70181223fb10cdd480d1761f2e22340bd11cbcda4ccfbafe" +} diff --git a/packages/port-storage/dev_deps.js b/packages/port-storage/dev_deps.js new file mode 100644 index 00000000..3399088b --- /dev/null +++ b/packages/port-storage/dev_deps.js @@ -0,0 +1,2 @@ +// dev dependencies here
+export { assert } from "https://deno.land/std@0.98.0/testing/asserts.ts"; diff --git a/packages/port-storage/dev_deps_lock.json b/packages/port-storage/dev_deps_lock.json new file mode 100644 index 00000000..7cf4ec42 --- /dev/null +++ b/packages/port-storage/dev_deps_lock.json @@ -0,0 +1,5 @@ +{ + "https://deno.land/std@0.98.0/fmt/colors.ts": "db22b314a2ae9430ae7460ce005e0a7130e23ae1c999157e3bb77cf55800f7e4", + "https://deno.land/std@0.98.0/testing/_diff.ts": "961eaf6d9f5b0a8556c9d835bbc6fa74f5addd7d3b02728ba7936ff93364f7a3", + "https://deno.land/std@0.98.0/testing/asserts.ts": "289b6ccf3b422ffb311cbf56996960d6530f8d8c0a7281803764c5ec672f9f9c" +} diff --git a/packages/port-storage/index_test.js b/packages/port-storage/index_test.js deleted file mode 100644 index 9ef0d5aa..00000000 --- a/packages/port-storage/index_test.js +++ /dev/null @@ -1,6 +0,0 @@ -const test = require('tape') - -test('storage port tests', t => { - t.ok(true) - t.end() -}) diff --git a/packages/port-storage/index.js b/packages/port-storage/mod.js similarity index 67% rename from packages/port-storage/index.js rename to packages/port-storage/mod.js index 576257e7..cc9dc085 100644 --- a/packages/port-storage/index.js +++ b/packages/port-storage/mod.js @@ -1,62 +1,66 @@ -const z = require('zod') +import { z } from "./deps.js"; /** * @param {function} adapter - implementation detail for this port * @param {object} env - environment settings for the adapter */ -module.exports = function (adapter) { +export function storage(adapter) { const Port = z.object({ makeBucket: z.function() .args(z.string()) .returns(z.promise(z.object({ ok: z.boolean(), - msg: z.string().optional() + msg: z.string().optional(), }))), removeBucket: z.function() .args(z.string()) .returns(z.promise(z.object({ ok: z.boolean(), - msg: z.string().optional() + msg: z.string().optional(), }))), listBuckets: z.function() .args(z.void()) .returns(z.promise(z.object({ ok: z.boolean(), - buckets: z.array(z.string()) + buckets: z.array(z.string()), }))), putObject: z.function() .args(z.object({ bucket: z.string(), object: z.string(), - stream: z.any() + stream: z.any(), })) .returns(z.promise(z.object({ ok: z.boolean() }))), removeObject: z.function() .args(z.object({ bucket: z.string(), - object: z.string() + object: z.string(), })) .returns(z.promise(z.object({ ok: z.boolean() }))), getObject: z.function() .args(z.object({ bucket: z.string(), - object: z.string() + object: z.string(), })) .returns(z.promise(z.any())), listObjects: z.function() .args(z.object({ bucket: z.string(), - prefix: z.string().optional() + prefix: z.string().optional(), })) - .returns(z.promise(z.any())) - }) - const instance = Port.parse(adapter) - instance.makeBucket = Port.shape.makeBucket.validate(instance.makeBucket) - instance.removeBucket = Port.shape.removeBucket.validate(instance.removeBucket) - instance.listBuckets = Port.shape.listBuckets.validate(instance.listBuckets) - instance.putObject = Port.shape.putObject.validate(instance.putObject) - instance.removeObject = Port.shape.removeObject.validate(instance.removeObject) - instance.listObjects = Port.shape.listObjects.validate(instance.listObjects) + .returns(z.promise(z.any())), + }); + const instance = Port.parse(adapter); + instance.makeBucket = Port.shape.makeBucket.validate(instance.makeBucket); + instance.removeBucket = Port.shape.removeBucket.validate( + instance.removeBucket, + ); + instance.listBuckets = Port.shape.listBuckets.validate(instance.listBuckets); + instance.putObject = 
Port.shape.putObject.validate(instance.putObject); + instance.removeObject = Port.shape.removeObject.validate( + instance.removeObject, + ); + instance.listObjects = Port.shape.listObjects.validate(instance.listObjects); - return instance + return instance; } diff --git a/packages/port-storage/mod_test.js b/packages/port-storage/mod_test.js new file mode 100644 index 00000000..7ab3bbdd --- /dev/null +++ b/packages/port-storage/mod_test.js @@ -0,0 +1,5 @@ +import { assert } from "./dev_deps.js"; + +Deno.test("storage port tests", () => { + assert(true); +}); diff --git a/packages/port-storage/package.json b/packages/port-storage/package.json index 11b8687e..1f94e5d3 100644 --- a/packages/port-storage/package.json +++ b/packages/port-storage/package.json @@ -4,15 +4,5 @@ "description": "hyper63 service framework port for storage adapters", "homepage": "https://github.com/hyper63/hyper63#readme", "repository": "https://github.com/hyper63/hyper63", - "license": "Apache-2.0", - "main": "index.js", - "scripts": { - "test": "tape index_test.js" - }, - "dependencies": { - "zod": "^3.0.0-alpha.33" - }, - "devDependencies": { - "tape": "^5.0.1" - } + "license": "Apache-2.0" } diff --git a/packages/port-storage/scripts/test.sh b/packages/port-storage/scripts/test.sh new file mode 100755 index 00000000..09bdb23f --- /dev/null +++ b/packages/port-storage/scripts/test.sh @@ -0,0 +1,4 @@ +#!/usr/bin/env bash + +deno fmt --check +deno test diff --git a/rfcs/0000-template.md b/rfcs/0000-template.md index f074ce5c..36242727 100644 --- a/rfcs/0000-template.md +++ b/rfcs/0000-template.md @@ -11,46 +11,46 @@ ## Motivation > Why are we doing this? What use cases does it support? What is the expected -outcome? +> outcome? ## Detailed design > This is the bulk of the RFC. -> Explain the design in enough detail for somebody -familiar with the framework to understand, and for somebody familiar with the -implementation to implement. This should get into specifics and corner-cases, -and include examples of how the feature is used. Any new terminology should be -defined here. +> Explain the design in enough detail for somebody familiar with the framework +> to understand, and for somebody familiar with the implementation to implement. +> This should get into specifics and corner-cases, and include examples of how +> the feature is used. Any new terminology should be defined here. ## How we teach this > What names and terminology work best for these concepts and why? How is this -idea best presented? As a continuation of existing Atlas patterns, or as a -wholly new one? +> idea best presented? As a continuation of existing Atlas patterns, or as a +> wholly new one? > Would the acceptance of this proposal mean the Atlas guides must be -re-organized or altered? Does it change how Atlas is taught to new users -at any level? +> re-organized or altered? Does it change how Atlas is taught to new users at +> any level? -> How should this feature be introduced and taught to existing Atlas -users? +> How should this feature be introduced and taught to existing Atlas users? ## Drawbacks -> Why should we *not* do this? Please consider the impact on teaching Atlas, -on the integration of this feature with other existing and planned features, -on the impact of the API churn on existing apps, etc. +> Why should we _not_ do this? Please consider the impact on teaching Atlas, on +> the integration of this feature with other existing and planned features, on +> the impact of the API churn on existing apps, etc. 
-> There are tradeoffs to choosing any path, please attempt to identify them here. +> There are tradeoffs to choosing any path, please attempt to identify them +> here. ## Alternatives > What other designs have been considered? What is the impact of not doing this? -> This section could also include prior art, that is, how other frameworks in the same domain have solved this problem. +> This section could also include prior art, that is, how other frameworks in +> the same domain have solved this problem. ## Unresolved questions > Optional, but suggested for first drafts. What parts of the design are still -TBD? +> TBD? diff --git a/yarn.lock b/yarn.lock index a9d3fbba..2189646d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -365,22 +365,6 @@ dependencies: chalk "^4.0.0" -"@eslint/eslintrc@^0.3.0": - version "0.3.0" - resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-0.3.0.tgz#d736d6963d7003b6514e6324bec9c602ac340318" - integrity sha512-1JTKgrOKAHVivSvOYw+sJOunkBjUOvjqWk1DPja7ZFhIS2mX/4EgTT8M7eTK9jrKhL/FvXXEbQwIs3pg1xp3dg== - dependencies: - ajv "^6.12.4" - debug "^4.1.1" - espree "^7.3.0" - globals "^12.1.0" - ignore "^4.0.6" - import-fresh "^3.2.1" - js-yaml "^3.13.1" - lodash "^4.17.20" - minimatch "^3.0.4" - strip-json-comments "^3.1.1" - "@hyper63/adapter-beequeue@^0.0.1": version "0.0.1" resolved "https://registry.yarnpkg.com/@hyper63/adapter-beequeue/-/adapter-beequeue-0.0.1.tgz#0a97635c95ad72c76a2b7bce8e92485cc9583467" @@ -642,16 +626,6 @@ resolved "https://registry.yarnpkg.com/@types/http-errors/-/http-errors-1.8.0.tgz#682477dbbbd07cd032731cb3b0e7eaee3d026b69" integrity sha512-2aoSC4UUbHDj2uCsCxcG/vRMXey/m17bC7UwitVm5hn22nI8O8Y9iDpA76Orc+DWkQ4zZrOKEshCqR/jSuXAHA== -"@types/json-schema@^7.0.7": - version "7.0.7" - resolved "https://registry.yarnpkg.com/@types/json-schema/-/json-schema-7.0.7.tgz#98a993516c859eb0d5c4c8f098317a9ea68db9ad" - integrity sha512-cxWFQVseBm6O9Gbw1IWb8r6OS4OhSt3hPZLkFApLjM8TEXROBuQGLAH2i2gZpcXdLBIrpXuTDhH7Vbm1iXmNGA== - -"@types/json5@^0.0.29": - version "0.0.29" - resolved "https://registry.yarnpkg.com/@types/json5/-/json5-0.0.29.tgz#ee28707ae94e11d2b827bcbe5270bcea7f3e71ee" - integrity sha1-7ihweulOEdK4J7y+UnC86n8+ce4= - "@types/keygrip@*": version "1.0.2" resolved "https://registry.yarnpkg.com/@types/keygrip/-/keygrip-1.0.2.tgz#513abfd256d7ad0bf1ee1873606317b33b1b2a72" @@ -790,76 +764,6 @@ dependencies: "@types/node" "*" -"@typescript-eslint/eslint-plugin@^4.26.0": - version "4.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-4.26.0.tgz#12bbd6ebd5e7fabd32e48e1e60efa1f3554a3242" - integrity sha512-yA7IWp+5Qqf+TLbd8b35ySFOFzUfL7i+4If50EqvjT6w35X8Lv0eBHb6rATeWmucks37w+zV+tWnOXI9JlG6Eg== - dependencies: - "@typescript-eslint/experimental-utils" "4.26.0" - "@typescript-eslint/scope-manager" "4.26.0" - debug "^4.3.1" - functional-red-black-tree "^1.0.1" - lodash "^4.17.21" - regexpp "^3.1.0" - semver "^7.3.5" - tsutils "^3.21.0" - -"@typescript-eslint/experimental-utils@4.26.0": - version "4.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-4.26.0.tgz#ba7848b3f088659cdf71bce22454795fc55be99a" - integrity sha512-TH2FO2rdDm7AWfAVRB5RSlbUhWxGVuxPNzGT7W65zVfl8H/WeXTk1e69IrcEVsBslrQSTDKQSaJD89hwKrhdkw== - dependencies: - "@types/json-schema" "^7.0.7" - "@typescript-eslint/scope-manager" "4.26.0" - "@typescript-eslint/types" "4.26.0" - "@typescript-eslint/typescript-estree" "4.26.0" - eslint-scope "^5.1.1" - eslint-utils "^3.0.0" - 
-"@typescript-eslint/parser@^4.26.0": - version "4.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-4.26.0.tgz#31b6b732c9454f757b020dab9b6754112aa5eeaf" - integrity sha512-b4jekVJG9FfmjUfmM4VoOItQhPlnt6MPOBUL0AQbiTmm+SSpSdhHYlwayOm4IW9KLI/4/cRKtQCmDl1oE2OlPg== - dependencies: - "@typescript-eslint/scope-manager" "4.26.0" - "@typescript-eslint/types" "4.26.0" - "@typescript-eslint/typescript-estree" "4.26.0" - debug "^4.3.1" - -"@typescript-eslint/scope-manager@4.26.0": - version "4.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-4.26.0.tgz#60d1a71df162404e954b9d1c6343ff3bee496194" - integrity sha512-G6xB6mMo4xVxwMt5lEsNTz3x4qGDt0NSGmTBNBPJxNsrTXJSm21c6raeYroS2OwQsOyIXqKZv266L/Gln1BWqg== - dependencies: - "@typescript-eslint/types" "4.26.0" - "@typescript-eslint/visitor-keys" "4.26.0" - -"@typescript-eslint/types@4.26.0": - version "4.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.26.0.tgz#7c6732c0414f0a69595f4f846ebe12616243d546" - integrity sha512-rADNgXl1kS/EKnDr3G+m7fB9yeJNnR9kF7xMiXL6mSIWpr3Wg5MhxyfEXy/IlYthsqwBqHOr22boFbf/u6O88A== - -"@typescript-eslint/typescript-estree@4.26.0": - version "4.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-4.26.0.tgz#aea17a40e62dc31c63d5b1bbe9a75783f2ce7109" - integrity sha512-GHUgahPcm9GfBuy3TzdsizCcPjKOAauG9xkz9TR8kOdssz2Iz9jRCSQm6+aVFa23d5NcSpo1GdHGSQKe0tlcbg== - dependencies: - "@typescript-eslint/types" "4.26.0" - "@typescript-eslint/visitor-keys" "4.26.0" - debug "^4.3.1" - globby "^11.0.3" - is-glob "^4.0.1" - semver "^7.3.5" - tsutils "^3.21.0" - -"@typescript-eslint/visitor-keys@4.26.0": - version "4.26.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-4.26.0.tgz#26d2583169222815be4dcd1da4fe5459bc3bcc23" - integrity sha512-cw4j8lH38V1ycGBbF+aFiLUls9Z0Bw8QschP3mkth50BbWzgFS33ISIgBzUMuQ2IdahoEv/rXstr8Zhlz4B1Zg== - dependencies: - "@typescript-eslint/types" "4.26.0" - eslint-visitor-keys "^2.0.0" - "@vercel/fetch-cached-dns@^2.0.1": version "2.0.1" resolved "https://registry.yarnpkg.com/@vercel/fetch-cached-dns/-/fetch-cached-dns-2.0.1.tgz#b929ba5b4b6f7108abf49adaf03309159047c134" @@ -957,16 +861,6 @@ accepts@^1.3.5, accepts@~1.3.7: mime-types "~2.1.24" negotiator "0.6.2" -acorn-jsx@^5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.1.tgz#fc8661e11b7ac1539c47dbfea2e72b3af34d267b" - integrity sha512-K0Ptm/47OKfQRpNQ2J/oIN/3QYiK6FwW+eJbILhsdxh2WTLdl+30o8aGdTbm5JbffpFFAg/g+zi1E+jvJha5ng== - -acorn@^7.4.0: - version "7.4.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" - integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== - adio@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/adio/-/adio-1.2.1.tgz#72392eccb39564bd602afef65b0a924a5490eeeb" @@ -994,26 +888,6 @@ aggregate-error@^3.0.0: clean-stack "^2.0.0" indent-string "^4.0.0" -ajv@^6.10.0, ajv@^6.12.4: - version "6.12.6" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" - integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -ajv@^7.0.2: - version "7.0.4" - resolved 
"https://registry.yarnpkg.com/ajv/-/ajv-7.0.4.tgz#827e5f5ae32f5e5c1637db61f253a112229b5e2f" - integrity sha512-xzzzaqgEQfmuhbhAoqjJ8T/1okb6gAzXn/eQRNpAN1AEUoHJTNF9xCDRTtf/s3SKldtZfa+RJeTs+BQq+eZ/sw== - dependencies: - fast-deep-equal "^3.1.1" - json-schema-traverse "^1.0.0" - require-from-string "^2.0.2" - uri-js "^4.2.2" - ansi-colors@^4.1.1: version "4.1.1" resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348" @@ -1242,31 +1116,11 @@ array-ify@^1.0.0: resolved "https://registry.yarnpkg.com/array-ify/-/array-ify-1.0.0.tgz#9e528762b4a9066ad163a6962a364418e9626ece" integrity sha1-nlKHYrSpBmrRY6aWKjZEGOlibs4= -array-includes@^3.1.3: - version "3.1.3" - resolved "https://registry.yarnpkg.com/array-includes/-/array-includes-3.1.3.tgz#c7f619b382ad2afaf5326cddfdc0afc61af7690a" - integrity sha512-gcem1KlBU7c9rB+Rq8/3PPKsK2kjqeEBa3bD5kkQo4nYlOHQCJqIJFqBXDEfwaRuYTT4E+FxA9xez7Gf/e3Q7A== - dependencies: - call-bind "^1.0.2" - define-properties "^1.1.3" - es-abstract "^1.18.0-next.2" - get-intrinsic "^1.1.1" - is-string "^1.0.5" - array-union@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/array-union/-/array-union-2.1.0.tgz#b798420adbeb1de828d84acd8a2e23d3efe85e8d" integrity sha512-HGyxoOTYUyCM6stUe6EJgnd4EoewAI7zMdfqO+kGjnlZmBDz/cR5pf8r/cR4Wq60sL/p0IkcjUEEPwS3GFrIyw== -array.prototype.flat@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.2.4.tgz#6ef638b43312bd401b4c6199fdec7e2dc9e9a123" - integrity sha512-4470Xi3GAPAjZqFcljX2xzckv1qeKPizoNkiS0+O4IoPR2ZNpcjE0pkhdihlDouK+x6QOast26B4Q/O9DJnwSg== - dependencies: - call-bind "^1.0.0" - define-properties "^1.1.3" - es-abstract "^1.18.0-next.1" - arrify@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/arrify/-/arrify-1.0.1.tgz#898508da2226f380df904728456849c1501a4b0d" @@ -1720,7 +1574,7 @@ cross-spawn@^6.0.5: shebang-command "^1.2.0" which "^1.2.9" -cross-spawn@^7.0.2, cross-spawn@^7.0.3: +cross-spawn@^7.0.3: version "7.0.3" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== @@ -1744,7 +1598,7 @@ dargs@^7.0.0: resolved "https://registry.yarnpkg.com/dargs/-/dargs-7.0.0.tgz#04015c41de0bcb69ec84050f3d9be0caf8d6d5cc" integrity sha512-2iy1EkLdlBzQGvbweYRFxmFath8+K7+AKB0TlhHWkNuH+TmovaMH/Wp7V7R4u7f4SnX3OgLsU9t1NI9ioDnUpg== -debug@2.6.9, debug@^2.6.9: +debug@2.6.9: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== @@ -1758,14 +1612,14 @@ debug@3.1.0: dependencies: ms "2.0.0" -debug@^3.1.0, debug@^3.2.7: +debug@^3.1.0: version "3.2.7" resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.7.tgz#72580b7e9145fb39b6676f9c5e5fb100b934179a" integrity sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ== dependencies: ms "^2.1.1" -debug@^4.0.1, debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: +debug@^4.1.0, debug@^4.1.1, debug@^4.3.1: version "4.3.1" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.1.tgz#f0d229c505e0c6d8c49ac553d1b13dc183f6b2ee" integrity sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ== @@ -1811,11 +1665,6 @@ deep-equal@^2.0.5: which-collection "^1.0.1" 
which-typed-array "^1.1.2" -deep-is@^0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" - integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= - deferred-leveldown@~5.3.0: version "5.3.0" resolved "https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-5.3.0.tgz#27a997ad95408b61161aa69bd489b86c71b78058" @@ -1841,11 +1690,6 @@ delayed-stream@~1.0.0: resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= -denque@^1.4.1: - version "1.5.0" - resolved "https://registry.yarnpkg.com/denque/-/denque-1.5.0.tgz#773de0686ff2d8ec2ff92914316a47b73b1c73de" - integrity sha512-CYiCSgIF1p6EUByQPlGkKnP1M9g0ZV3qMIrqMqZqdwazygIA/YP2vrbcyl1h/WppKJTdl1F85cXIle+394iDAQ== - depd@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" @@ -1893,20 +1737,6 @@ dir-glob@^3.0.1: dependencies: path-type "^4.0.0" -doctrine@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-2.1.0.tgz#5cd01fc101621b42c4cd7f5d1a66243716d3f39d" - integrity sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw== - dependencies: - esutils "^2.0.2" - -doctrine@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" - integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== - dependencies: - esutils "^2.0.2" - dot-prop@^5.1.0: version "5.3.0" resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-5.3.0.tgz#90ccce708cd9cd82cc4dc8c3ddd9abdd55b20e88" @@ -1975,7 +1805,7 @@ end-stream@~0.1.0: dependencies: write-stream "~0.4.3" -enquirer@^2.3.5, enquirer@^2.3.6: +enquirer@^2.3.6: version "2.3.6" resolved "https://registry.yarnpkg.com/enquirer/-/enquirer-2.3.6.tgz#2a7fe5dd634a1e4125a975ec994ff5456dc3734d" integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== @@ -2038,28 +1868,6 @@ es-abstract@^1.18.0-next.1: string.prototype.trimend "^1.0.3" string.prototype.trimstart "^1.0.3" -es-abstract@^1.18.2: - version "1.18.3" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.18.3.tgz#25c4c3380a27aa203c44b2b685bba94da31b63e0" - integrity sha512-nQIr12dxV7SSxE6r6f1l3DtAeEYdsGpps13dR0TwJg1S8gyp4ZPgy3FZcHBgbiQqnoqSTb+oC+kO4UQ0C/J8vw== - dependencies: - call-bind "^1.0.2" - es-to-primitive "^1.2.1" - function-bind "^1.1.1" - get-intrinsic "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.2" - is-callable "^1.2.3" - is-negative-zero "^2.0.1" - is-regex "^1.1.3" - is-string "^1.0.6" - object-inspect "^1.10.3" - object-keys "^1.1.1" - object.assign "^4.1.2" - string.prototype.trimend "^1.0.4" - string.prototype.trimstart "^1.0.4" - unbox-primitive "^1.0.1" - es-get-iterator@^1.1.1: version "1.1.2" resolved "https://registry.yarnpkg.com/es-get-iterator/-/es-get-iterator-1.1.2.tgz#9234c54aba713486d7ebde0220864af5e2b283f7" @@ -2103,196 +1911,16 @@ escape-string-regexp@^1.0.5: resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= -eslint-config-standard@^16.0.3: - version "16.0.3" - resolved 
"https://registry.yarnpkg.com/eslint-config-standard/-/eslint-config-standard-16.0.3.tgz#6c8761e544e96c531ff92642eeb87842b8488516" - integrity sha512-x4fmJL5hGqNJKGHSjnLdgA6U6h1YW/G2dW9fA+cyVur4SK6lyue8+UgNKWlZtUDTXvgKDD/Oa3GQjmB5kjtVvg== - -eslint-import-resolver-node@^0.3.4: - version "0.3.4" - resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.4.tgz#85ffa81942c25012d8231096ddf679c03042c717" - integrity sha512-ogtf+5AB/O+nM6DIeBUNr2fuT7ot9Qg/1harBfBtaP13ekEWFQEEMP94BCB7zaNW3gyY+8SHYF00rnqYwXKWOA== - dependencies: - debug "^2.6.9" - resolve "^1.13.1" - -eslint-module-utils@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.6.1.tgz#b51be1e473dd0de1c5ea638e22429c2490ea8233" - integrity sha512-ZXI9B8cxAJIH4nfkhTwcRTEAnrVfobYqwjWy/QMCZ8rHkZHFjf9yO4BzpiF9kCSfNlMG54eKigISHpX0+AaT4A== - dependencies: - debug "^3.2.7" - pkg-dir "^2.0.0" - -eslint-plugin-es@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz#75a7cdfdccddc0589934aeeb384175f221c57893" - integrity sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ== - dependencies: - eslint-utils "^2.0.0" - regexpp "^3.0.0" - -eslint-plugin-import@^2.23.4: - version "2.23.4" - resolved "https://registry.yarnpkg.com/eslint-plugin-import/-/eslint-plugin-import-2.23.4.tgz#8dceb1ed6b73e46e50ec9a5bb2411b645e7d3d97" - integrity sha512-6/wP8zZRsnQFiR3iaPFgh5ImVRM1WN5NUWfTIRqwOdeiGJlBcSk82o1FEVq8yXmy4lkIzTo7YhHCIxlU/2HyEQ== - dependencies: - array-includes "^3.1.3" - array.prototype.flat "^1.2.4" - debug "^2.6.9" - doctrine "^2.1.0" - eslint-import-resolver-node "^0.3.4" - eslint-module-utils "^2.6.1" - find-up "^2.0.0" - has "^1.0.3" - is-core-module "^2.4.0" - minimatch "^3.0.4" - object.values "^1.1.3" - pkg-up "^2.0.0" - read-pkg-up "^3.0.0" - resolve "^1.20.0" - tsconfig-paths "^3.9.0" - -eslint-plugin-node@^11.1.0: - version "11.1.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz#c95544416ee4ada26740a30474eefc5402dc671d" - integrity sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g== - dependencies: - eslint-plugin-es "^3.0.0" - eslint-utils "^2.0.0" - ignore "^5.1.1" - minimatch "^3.0.4" - resolve "^1.10.1" - semver "^6.1.0" - -eslint-plugin-promise@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/eslint-plugin-promise/-/eslint-plugin-promise-5.1.0.tgz#fb2188fb734e4557993733b41aa1a688f46c6f24" - integrity sha512-NGmI6BH5L12pl7ScQHbg7tvtk4wPxxj8yPHH47NvSmMtFneC077PSeY3huFj06ZWZvtbfxSPt3RuOQD5XcR4ng== - -eslint-scope@^5.1.1: - version "5.1.1" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.1.tgz#e786e59a66cb92b3f6c1fb0d508aab174848f48c" - integrity sha512-2NxwbF/hZ0KpepYN0cNbo+FN6XoK7GaHlQhgx/hIZl6Va0bF45RQOOwhLIy8lQDbuCiadSLCBnH2CFYquit5bw== - dependencies: - esrecurse "^4.3.0" - estraverse "^4.1.1" - -eslint-utils@^2.0.0, eslint-utils@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-2.1.0.tgz#d2de5e03424e707dc10c74068ddedae708741b27" - integrity sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg== - dependencies: - eslint-visitor-keys "^1.1.0" - -eslint-utils@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" - integrity 
sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== - dependencies: - eslint-visitor-keys "^2.0.0" - -eslint-visitor-keys@^1.1.0, eslint-visitor-keys@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" - integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== - -eslint-visitor-keys@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.0.0.tgz#21fdc8fbcd9c795cc0321f0563702095751511a8" - integrity sha512-QudtT6av5WXels9WjIM7qz1XD1cWGvX4gGXvp/zBn9nXG02D0utdU3Em2m/QjTnrsk6bBjmCygl3rmj118msQQ== - -eslint@^7.10.0: - version "7.19.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-7.19.0.tgz#6719621b196b5fad72e43387981314e5d0dc3f41" - integrity sha512-CGlMgJY56JZ9ZSYhJuhow61lMPPjUzWmChFya71Z/jilVos7mR/jPgaEfVGgMBY5DshbKdG8Ezb8FDCHcoMEMg== - dependencies: - "@babel/code-frame" "^7.0.0" - "@eslint/eslintrc" "^0.3.0" - ajv "^6.10.0" - chalk "^4.0.0" - cross-spawn "^7.0.2" - debug "^4.0.1" - doctrine "^3.0.0" - enquirer "^2.3.5" - eslint-scope "^5.1.1" - eslint-utils "^2.1.0" - eslint-visitor-keys "^2.0.0" - espree "^7.3.1" - esquery "^1.2.0" - esutils "^2.0.2" - file-entry-cache "^6.0.0" - functional-red-black-tree "^1.0.1" - glob-parent "^5.0.0" - globals "^12.1.0" - ignore "^4.0.6" - import-fresh "^3.0.0" - imurmurhash "^0.1.4" - is-glob "^4.0.0" - js-yaml "^3.13.1" - json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.4.1" - lodash "^4.17.20" - minimatch "^3.0.4" - natural-compare "^1.4.0" - optionator "^0.9.1" - progress "^2.0.0" - regexpp "^3.1.0" - semver "^7.2.1" - strip-ansi "^6.0.0" - strip-json-comments "^3.1.0" - table "^6.0.4" - text-table "^0.2.0" - v8-compile-cache "^2.0.3" - esm@^3.2.25: version "3.2.25" resolved "https://registry.yarnpkg.com/esm/-/esm-3.2.25.tgz#342c18c29d56157688ba5ce31f8431fbb795cc10" integrity sha512-U1suiZ2oDVWv4zPO56S0NcR5QriEahGtdN2OR6FiOG4WJvcjBVFB0qI4+eKoWFH483PKGuLuu6V8Z4T5g63UVA== -espree@^7.3.0, espree@^7.3.1: - version "7.3.1" - resolved "https://registry.yarnpkg.com/espree/-/espree-7.3.1.tgz#f2df330b752c6f55019f8bd89b7660039c1bbbb6" - integrity sha512-v3JCNCE64umkFpmkFGqzVKsOT0tN1Zr+ueqLZfpV1Ob8e+CEgPWa+OxCoGH3tnhimMKIaBm4m/vaRpJ/krRz2g== - dependencies: - acorn "^7.4.0" - acorn-jsx "^5.3.1" - eslint-visitor-keys "^1.3.0" - esprima@^4.0.0: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== -esquery@^1.2.0: - version "1.3.1" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.3.1.tgz#b78b5828aa8e214e29fb74c4d5b752e1c033da57" - integrity sha512-olpvt9QG0vniUBZspVRN6lwB7hOZoTRtT+jzR+tS4ffYx2mzbw+z0XCOk44aaLYKApNX5nMm+E+P6o25ip/DHQ== - dependencies: - estraverse "^5.1.0" - -esrecurse@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" - integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== - dependencies: - estraverse "^5.2.0" - -estraverse@^4.1.1: - version "4.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" - integrity 
sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== - -estraverse@^5.1.0, estraverse@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880" - integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ== - -esutils@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - etag@~1.8.1: version "1.8.1" resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" @@ -2379,12 +2007,7 @@ faker@^5.1.0: resolved "https://registry.yarnpkg.com/faker/-/faker-5.3.1.tgz#67f8f5c170b97a76b875389e0e8b9155da7b4853" integrity sha512-sVdoApX/awJHO9DZHZsHVaJBNFiJW0n3lPs0q/nFxp/Mtya1dr2sCMktST3mdxNMHjkvKTTMAW488E+jH1eSbg== -fast-deep-equal@^3.1.1: - version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-glob@^3.0.3, fast-glob@^3.1.1: +fast-glob@^3.0.3: version "3.2.5" resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.5.tgz#7939af2a656de79a4f1901903ee8adcaa7cb9661" integrity sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg== @@ -2401,11 +2024,6 @@ fast-json-stable-stringify@^2.0.0: resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== -fast-levenshtein@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= - fast-xml-parser@^3.17.5: version "3.17.6" resolved "https://registry.yarnpkg.com/fast-xml-parser/-/fast-xml-parser-3.17.6.tgz#4f5df8cf927c3e59a10362abcfb7335c34bc5c5f" @@ -2441,13 +2059,6 @@ fetch-mock@^9.10.7, fetch-mock@^9.11.0: querystring "^0.2.0" whatwg-url "^6.5.0" -file-entry-cache@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.0.tgz#7921a89c391c6d93efec2169ac6bf300c527ea0a" - integrity sha512-fqoO76jZ3ZnYrXLDRxBR1YvOvc0k844kcOg40bgsPrE25LAb/PDqTY+ho64Xh2c8ZXgIKldchCFHczG2UVRcWA== - dependencies: - flat-cache "^3.0.4" - fill-range@^7.0.1: version "7.0.1" resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" @@ -2468,13 +2079,6 @@ finalhandler@~1.1.2: statuses "~1.5.0" unpipe "~1.0.0" -find-up@^2.0.0, find-up@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7" - integrity sha1-RdG35QbHF93UgndaK3eSCjwMV6c= - dependencies: - locate-path "^2.0.0" - find-up@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" @@ -2498,19 +2102,6 @@ find-up@^5.0.0: locate-path "^6.0.0" path-exists "^4.0.0" -flat-cache@^3.0.4: - version "3.0.4" - resolved 
"https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" - integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== - dependencies: - flatted "^3.1.0" - rimraf "^3.0.2" - -flatted@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.1.1.tgz#c4b489e80096d9df1dfc97c79871aea7c617c469" - integrity sha512-zAoAQiudy+r5SvnSw3KJy5os/oRJYHzrzja/tBDqrZtNhUw8bt6y8OBzMWcjWr+8liV8Eb6yOhw8WZ7VFZ5ZzA== - for-each@^0.3.3: version "0.3.3" resolved "https://registry.yarnpkg.com/for-each/-/for-each-0.3.3.tgz#69b447e88a0a5d32c3e7084f3f1710034b21376e" @@ -2617,13 +2208,6 @@ git-raw-commits@^2.0.0: split2 "^3.0.0" through2 "^4.0.0" -glob-parent@^5.0.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229" - integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ== - dependencies: - is-glob "^4.0.1" - glob-parent@^5.1.0: version "5.1.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" @@ -2672,13 +2256,6 @@ globals@^11.1.0: resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== -globals@^12.1.0: - version "12.4.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-12.4.0.tgz#a18813576a41b00a24a97e7f815918c2e19925f8" - integrity sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg== - dependencies: - type-fest "^0.8.1" - globby@10.0.0: version "10.0.0" resolved "https://registry.yarnpkg.com/globby/-/globby-10.0.0.tgz#abfcd0630037ae174a88590132c2f6804e291072" @@ -2693,18 +2270,6 @@ globby@10.0.0: merge2 "^1.2.3" slash "^3.0.0" -globby@^11.0.3: - version "11.0.3" - resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.3.tgz#9b1f0cb523e171dd1ad8c7b2a9fb4b644b9593cb" - integrity sha512-ffdmosjA807y7+lA1NM0jELARVmYul/715xiILEjo3hBLPTcirgQNnXECn5g3mtR8TOLCVbkfua1Hpen25/Xcg== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" - fast-glob "^3.1.1" - ignore "^5.1.4" - merge2 "^1.3.0" - slash "^3.0.0" - graceful-fs@^4.1.2: version "4.2.4" resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" @@ -2772,7 +2337,7 @@ hard-rejection@^2.1.0: resolved "https://registry.yarnpkg.com/hard-rejection/-/hard-rejection-2.1.0.tgz#1c6eda5c1685c63942766d79bb40ae773cecd883" integrity sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA== -has-bigints@^1.0.0, has-bigints@^1.0.1: +has-bigints@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.1.tgz#64fe6acb020673e3b78db035a5af69aa9d07b113" integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== @@ -2883,12 +2448,7 @@ ieee754@^1.1.13: resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== -ignore@^4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" - integrity 
sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==
-
-ignore@^5.1.1, ignore@^5.1.4:
+ignore@^5.1.1:
   version "5.1.8"
   resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.8.tgz#f150a8b50a34289b33e22f5889abd4d8016f0e57"
   integrity sha512-BMpfD7PpiETpBl/A6S498BaIJ6Y/ABT93ETbby2fP00v4EbvPBXWEoaR1UBPKs3iR53pJY7EtZk5KACI57i1Uw==
@@ -2914,11 +2474,6 @@ import-fresh@^3.0.0, import-fresh@^3.2.1:
     parent-module "^1.0.0"
     resolve-from "^4.0.0"
 
-imurmurhash@^0.1.4:
-  version "0.1.4"
-  resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea"
-  integrity sha1-khi5srkoojixPcT7a21XbyMUU+o=
-
 indent-string@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/indent-string/-/indent-string-4.0.0.tgz#624f8f4497d619b2d9768531d58f4122854d7251"
@@ -2993,13 +2548,6 @@ is-core-module@^2.1.0, is-core-module@^2.2.0:
   dependencies:
     has "^1.0.3"
 
-is-core-module@^2.4.0:
-  version "2.4.0"
-  resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.4.0.tgz#8e9fc8e15027b011418026e98f0e6f4d86305cc1"
-  integrity sha512-6A2fkfq1rfeQZjxrZJGerpLCTHRNEBiSgnu0+obeJpEPZRUooHgsizvzv0ZjJwOz3iWIHdJtVWJ/tmPr3D21/A==
-  dependencies:
-    has "^1.0.3"
-
 is-date-object@^1.0.1, is-date-object@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e"
@@ -3025,7 +2573,7 @@ is-fullwidth-code-point@^3.0.0:
   resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d"
   integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==
 
-is-glob@^4.0.0, is-glob@^4.0.1:
+is-glob@^4.0.1:
   version "4.0.1"
   resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
   integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==
@@ -3080,14 +2628,6 @@ is-regex@^1.1.1, is-regex@^1.1.2:
     call-bind "^1.0.2"
     has-symbols "^1.0.1"
 
-is-regex@^1.1.3:
-  version "1.1.3"
-  resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.3.tgz#d029f9aff6448b93ebbe3f33dac71511fdcbef9f"
-  integrity sha512-qSVXFz28HM7y+IWX6vLCsexdlvzT1PJNFSBuaQLQ5o0IEw8UDYW6/2+eCMVyIsbM8CNLX2a/QWmSpyxYEHY7CQ==
-  dependencies:
-    call-bind "^1.0.2"
-    has-symbols "^1.0.2"
-
 is-regexp@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/is-regexp/-/is-regexp-1.0.0.tgz#fd2d883545c46bac5a633e7b9a09e87fa2cb5069"
@@ -3108,11 +2648,6 @@ is-string@^1.0.5:
   resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.5.tgz#40493ed198ef3ff477b8c7f92f644ec82a5cd3a6"
   integrity sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==
 
-is-string@^1.0.6:
-  version "1.0.6"
-  resolved "https://registry.yarnpkg.com/is-string/-/is-string-1.0.6.tgz#3fe5d5992fb0d93404f32584d4b0179a71b54a5f"
-  integrity sha512-2gdzbKUuqtQ3lYNrUTQYoClPhm7oQu4UdpSZMp1/DGgkHBT8E2Z1l0yMdb6D4zNAxwDiMv8MdulKROJGNl0Q0w==
-
 is-subset@^0.1.1:
   version "0.1.1"
   resolved "https://registry.yarnpkg.com/is-subset/-/is-subset-0.1.1.tgz#8a59117d932de1de00f245fcdd39ce43f1e939a6"
@@ -3211,33 +2746,11 @@ json-parse-even-better-errors@^2.3.0:
   resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d"
   integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==
 
-json-schema-traverse@^0.4.1:
-  version "0.4.1"
-  resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
-  integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==
-
-json-schema-traverse@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2"
-  integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==
-
-json-stable-stringify-without-jsonify@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651"
-  integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=
-
 json-stream@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/json-stream/-/json-stream-1.0.0.tgz#1a3854e28d2bbeeab31cc7ddf683d2ddc5652708"
   integrity sha1-GjhU4o0rvuqzHMfd9oPS3cVlJwg=
 
-json5@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/json5/-/json5-1.0.1.tgz#779fb0018604fa854eacbf6252180d83543e3dbe"
-  integrity sha512-aKS4WQjPenRxiQsC93MNfjx+nbF4PAdYzmd/1JIj8HYzqfbu86beTuNgXDzPknWk0n0uARlyewZo4s++ES36Ow==
-  dependencies:
-    minimist "^1.2.0"
-
 json5@^2.1.2:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.0.tgz#2dfefe720c6ba525d9ebd909950f0515316c89a3"
@@ -3386,14 +2899,6 @@ levelup@4.4.0, levelup@^4.3.2:
     level-supports "~1.0.0"
     xtend "~4.0.0"
 
-levn@^0.4.1:
-  version "0.4.1"
-  resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade"
-  integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==
-  dependencies:
-    prelude-ls "^1.2.1"
-    type-check "~0.4.0"
-
 lines-and-columns@^1.1.6:
   version "1.1.6"
   resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00"
@@ -3443,14 +2948,6 @@ load-json-file@^4.0.0:
     pify "^3.0.0"
     strip-bom "^3.0.0"
 
-locate-path@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
-  integrity sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=
-  dependencies:
-    p-locate "^2.0.0"
-    path-exists "^3.0.0"
-
 locate-path@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e"
@@ -3528,7 +3025,7 @@ lodash.sortby@^4.7.0:
   resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438"
   integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=
 
-lodash@^4.17.15, lodash@^4.17.21:
+lodash@^4.17.15:
   version "4.17.21"
   resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
   integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
@@ -3566,11 +3063,6 @@ long@^4.0.0:
   resolved "https://registry.yarnpkg.com/long/-/long-4.0.0.tgz#9a7b71cfb7d361a194ea555241c92f7468d5bf28"
   integrity sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==
 
-looper@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/looper/-/looper-3.0.0.tgz#2efa54c3b1cbaba9b94aee2e5914b0be57fbb749"
-  integrity sha1-LvpUw7HLq6m5Su4uWRSwvlf7t0k=
-
 lru-cache@5.1.1:
   version "5.1.1"
   resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920"
@@ -3715,7 +3207,7 @@ minimist-options@4.1.0:
     is-plain-obj "^1.1.0"
     kind-of "^6.0.3"
 
-minimist@^1.2.0, minimist@^1.2.5:
+minimist@^1.2.5:
   version "1.2.5"
   resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602"
   integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw==
@@ -3738,11 +3230,6 @@ minio@^7.0.16:
     xml "^1.0.0"
     xml2js "^0.4.15"
 
-minisearch@^3.0.2:
-  version "3.0.2"
-  resolved "https://registry.yarnpkg.com/minisearch/-/minisearch-3.0.2.tgz#fa470114ffc7c4bcc0786b2adb7010d4b773bc32"
-  integrity sha512-7rTrJEzovKNi5LSwiIr5aCfJNNo6Lk4O9HTVzjFTMdp+dSr6UisUnEqdwj4rBgNcAcaWW5ClpXnpgTurv8PGqA==
-
 mkdirp@^0.5.1:
   version "0.5.5"
   resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"
@@ -3794,11 +3281,6 @@ napi-macros@~2.0.0:
   resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b"
   integrity sha512-A0xLykHtARfueITVDernsAWdtIMbOJgKgcluwENp3AlsKN/PloyO10HtmoqnFAQAcxPkgZN7wdfPfEd0zNGxbg==
 
-natural-compare@^1.4.0:
-  version "1.4.0"
-  resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
-  integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
-
 negotiator@0.6.2:
   version "0.6.2"
   resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb"
@@ -3881,11 +3363,6 @@ object-assign@^4, object-assign@^4.1.1:
   resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
   integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=
 
-object-inspect@^1.10.3:
-  version "1.10.3"
-  resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.10.3.tgz#c2aa7d2d09f50c99375704f7a0adf24c5782d369"
-  integrity sha512-e5mCJlSH7poANfC8z8S9s9S2IN5/4Zb3aZ33f5s8YqoazCFzNLloLU8r5VCG+G7WoqLvAAZoVMcy3tp/3X0Plw==
-
 object-inspect@^1.9.0:
   version "1.9.0"
   resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.9.0.tgz#c90521d74e1127b67266ded3394ad6116986533a"
@@ -3944,15 +3421,6 @@ object.getownpropertydescriptors@^2.1.1:
     define-properties "^1.1.3"
     es-abstract "^1.18.0-next.1"
 
-object.values@^1.1.3:
-  version "1.1.4"
-  resolved "https://registry.yarnpkg.com/object.values/-/object.values-1.1.4.tgz#0d273762833e816b693a637d30073e7051535b30"
-  integrity sha512-TnGo7j4XSnKQoK3MfvkzqKCi0nVe/D9I9IjwTNYdb/fxYHpjrluHVOgw0AF6jrRFGMPHdfuidR09tIDiIvnaSg==
-  dependencies:
-    call-bind "^1.0.2"
-    define-properties "^1.1.3"
-    es-abstract "^1.18.2"
-
 on-finished@^2.3.0, on-finished@~2.3.0:
   version "2.3.0"
   resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947"
@@ -3974,30 +3442,11 @@ onetime@^5.1.0, onetime@^5.1.2:
   dependencies:
     mimic-fn "^2.1.0"
 
-optionator@^0.9.1:
-  version "0.9.1"
-  resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499"
-  integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw==
-  dependencies:
-    deep-is "^0.1.3"
-    fast-levenshtein "^2.0.6"
-    levn "^0.4.1"
-    prelude-ls "^1.2.1"
-    type-check "^0.4.0"
-    word-wrap "^1.2.3"
-
 p-finally@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae"
   integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4=
 
-p-limit@^1.1.0:
-  version "1.3.0"
-  resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-1.3.0.tgz#b86bd5f0c25690911c7590fcbfc2010d54b3ccb8"
-  integrity sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==
-  dependencies:
-    p-try "^1.0.0"
-
 p-limit@^2.0.0, p-limit@^2.2.0:
   version "2.3.0"
   resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1"
@@ -4012,13 +3461,6 @@ p-limit@^3.0.2:
   dependencies:
     yocto-queue "^0.1.0"
 
-p-locate@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-2.0.0.tgz#20a0103b222a70c8fd39cc2e580680f3dde5ec43"
-  integrity sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=
-  dependencies:
-    p-limit "^1.1.0"
-
 p-locate@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4"
@@ -4047,11 +3489,6 @@ p-map@^4.0.0:
   dependencies:
     aggregate-error "^3.0.0"
 
-p-try@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3"
-  integrity sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=
-
 p-try@^2.0.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
@@ -4159,20 +3596,6 @@ pify@^3.0.0:
   resolved "https://registry.yarnpkg.com/pify/-/pify-3.0.0.tgz#e5a4acd2c101fdf3d9a4d07f0dbc4db49dd28176"
   integrity sha1-5aSs0sEB/fPZpNB/DbxNtJ3SgXY=
 
-pkg-dir@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b"
-  integrity sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=
-  dependencies:
-    find-up "^2.1.0"
-
-pkg-up@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/pkg-up/-/pkg-up-2.0.0.tgz#c819ac728059a461cab1c3889a2be3c49a004d7f"
-  integrity sha1-yBmscoBZpGHKscOImivjxJoATX8=
-  dependencies:
-    find-up "^2.1.0"
-
 please-upgrade-node@^3.2.0:
   version "3.2.0"
   resolved "https://registry.yarnpkg.com/please-upgrade-node/-/please-upgrade-node-3.2.0.tgz#aeddd3f994c933e4ad98b99d9a556efa0e2fe942"
@@ -4359,21 +3782,11 @@ pouchdb@^7.2.2:
     uuid "8.1.0"
     vuvuzela "1.0.3"
 
-prelude-ls@^1.2.1:
-  version "1.2.1"
-  resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396"
-  integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==
-
 process-nextick-args@~2.0.0:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
   integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
 
-progress@^2.0.0:
-  version "2.0.3"
-  resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8"
-  integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==
-
 promise-callbacks@^3.8.1:
   version "3.8.2"
   resolved "https://registry.yarnpkg.com/promise-callbacks/-/promise-callbacks-3.8.2.tgz#00babc159508f2afd86db742f1be8a6031d37001"
@@ -4400,16 +3813,6 @@ psl@^1.1.33:
   resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24"
   integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ==
 
-pull-stream-to-stream@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/pull-stream-to-stream/-/pull-stream-to-stream-2.0.0.tgz#943b67362b9b37890b2881c50175711d7bb67020"
-  integrity sha512-lhgwUn2hQalava39zAzrRhjvVOpR99mfx5tCYRMD8Is85kBvXhQ39zIonVA+2TzdOFjmTQYbPWbEH0HAG+0t0Q==
-
-pull-stream@^3.2.3, pull-stream@^3.6.14:
-  version "3.6.14"
-  resolved "https://registry.yarnpkg.com/pull-stream/-/pull-stream-3.6.14.tgz#529dbd5b86131f4a5ed636fdf7f6af00781357ee"
-  integrity sha512-KIqdvpqHHaTUA2mCYcLG1ibEbu/LCKoJZsBWyv9lSYtPkJPBq8m3Hxa103xHi6D2thj5YXa0TqK3L3GUkwgnew==
-
 punycode@^2.1.0, punycode@^2.1.1:
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec"
@@ -4460,14 +3863,6 @@ raw-body@2.4.0:
     iconv-lite "0.4.24"
     unpipe "1.0.0"
 
-read-pkg-up@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-3.0.0.tgz#3ed496685dba0f8fe118d0691dc51f4a1ff96f07"
-  integrity sha1-PtSWaF26D4/hGNBpHcUfSh/5bwc=
-  dependencies:
-    find-up "^2.0.0"
-    read-pkg "^3.0.0"
-
 read-pkg-up@^7.0.1:
   version "7.0.1"
   resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507"
@@ -4553,33 +3948,11 @@ redis-commands@^1.2.0:
   resolved "https://registry.yarnpkg.com/redis-commands/-/redis-commands-1.7.0.tgz#15a6fea2d58281e27b1cd1acfb4b293e278c3a89"
   integrity sha512-nJWqw3bTFy21hX/CPKHth6sfhZbdiHP6bTawSgQBlKOVRG7EZkfHbbHwQJnrE4vsQf0CMNE+3gJ4Fmm16vdVlQ==
 
-redis-commands@^1.5.0:
-  version "1.6.0"
-  resolved "https://registry.yarnpkg.com/redis-commands/-/redis-commands-1.6.0.tgz#36d4ca42ae9ed29815cdb30ad9f97982eba1ce23"
-  integrity sha512-2jnZ0IkjZxvguITjFTrGiLyzQZcTvaw8DAaCXxZq/dsHXz7KfMQ3OUJy7Tz9vnRtZRVz6VRCPDvruvU8Ts44wQ==
-
-redis-errors@^1.0.0, redis-errors@^1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/redis-errors/-/redis-errors-1.2.0.tgz#eb62d2adb15e4eaf4610c04afe1529384250abad"
-  integrity sha1-62LSrbFeTq9GEMBK/hUpOEJQq60=
-
-redis-mock@^0.54.0:
-  version "0.54.0"
-  resolved "https://registry.yarnpkg.com/redis-mock/-/redis-mock-0.54.0.tgz#caef81c802820811b5596d44b1587062c85bfb16"
-  integrity sha512-aJHQ63CO7/OaGyGzqCEnSccpaVRB1Hb4QzFeGj8jW8wrOQhQVQYhyOES7rc9+68mGHuLFYHDE8aFT9/EHQ9p/g==
-
 redis-parser@^2.6.0:
   version "2.6.0"
   resolved "https://registry.yarnpkg.com/redis-parser/-/redis-parser-2.6.0.tgz#52ed09dacac108f1a631c07e9b69941e7a19504b"
   integrity sha1-Uu0J2srBCPGmMcB+m2mUHnoZUEs=
 
-redis-parser@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/redis-parser/-/redis-parser-3.0.0.tgz#b66d828cdcafe6b4b8a428a7def4c6bcac31c8b4"
-  integrity sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ=
-  dependencies:
-    redis-errors "^1.0.0"
-
 redis@^2.7.1:
   version "2.8.0"
   resolved "https://registry.yarnpkg.com/redis/-/redis-2.8.0.tgz#202288e3f58c49f6079d97af7a10e1303ae14b02"
@@ -4589,16 +3962,6 @@ redis@^2.7.1:
     redis-commands "^1.2.0"
     redis-parser "^2.6.0"
 
-redis@^3.0.2:
-  version "3.0.2"
-  resolved "https://registry.yarnpkg.com/redis/-/redis-3.0.2.tgz#bd47067b8a4a3e6a2e556e57f71cc82c7360150a"
-  integrity sha512-PNhLCrjU6vKVuMOyFu7oSP296mwBkcE6lrAjruBYG5LgdSqtRBoVQIylrMyVZD/lkF24RSNNatzvYag6HRBHjQ==
-  dependencies:
-    denque "^1.4.1"
-    redis-commands "^1.5.0"
-    redis-errors "^1.2.0"
-    redis-parser "^3.0.0"
-
 regenerator-runtime@^0.13.4:
   version "0.13.7"
   resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz#cac2dacc8a1ea675feaabaeb8ae833898ae46f55"
@@ -4612,11 +3975,6 @@ regexp.prototype.flags@^1.3.0:
     call-bind "^1.0.2"
     define-properties "^1.1.3"
 
-regexpp@^3.0.0, regexpp@^3.1.0:
-  version "3.1.0"
-  resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.1.0.tgz#206d0ad0a5648cffbdb8ae46438f3dc51c9f78e2"
-  integrity sha512-ZOIzd8yVsQQA7j8GCSlPGXwg5PfmA1mrq0JP4nGhh54LaKN3xdai/vHUDu74pKwV8OxseMS65u2NImosQcSD0Q==
-
 relative-require-regex@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/relative-require-regex/-/relative-require-regex-1.0.1.tgz#6f06eecd5bc6b5370e08ab5b3b7000f2ce3e6d2f"
@@ -4627,11 +3985,6 @@ require-directory@^2.1.1:
   resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
   integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I=
 
-require-from-string@^2.0.2:
-  version "2.0.2"
-  resolved "https://registry.yarnpkg.com/require-from-string/-/require-from-string-2.0.2.tgz#89a7fdd938261267318eafe14f9c32e598c36909"
-  integrity sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==
-
 require-main-filename@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b"
@@ -4664,7 +4017,7 @@ resolve-global@1.0.0, resolve-global@^1.0.0:
   dependencies:
     global-dirs "^0.1.1"
 
-resolve@^1.1.6, resolve@^1.10.1, resolve@^1.13.1, resolve@^1.20.0:
+resolve@^1.1.6, resolve@^1.20.0:
   version "1.20.0"
   resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975"
   integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A==
@@ -4764,25 +4117,13 @@ semver-compare@^1.0.0:
   resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
   integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
 
-semver@7.3.5, semver@^7.3.4, semver@^7.3.5:
+semver@7.3.5, semver@^7.3.4:
   version "7.3.5"
   resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7"
   integrity sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==
   dependencies:
     lru-cache "^6.0.0"
 
-semver@^6.1.0:
-  version "6.3.0"
-  resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d"
-  integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw==
-
-semver@^7.2.1:
-  version "7.3.4"
-  resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.4.tgz#27aaa7d2e4ca76452f98d3add093a72c943edc97"
-  integrity sha512-tCfb2WLjqFAtXn4KEdxIhalnRtoKFN7nAwj0B3ZXCbQloV2tq5eDbcTmT68JJD3nRJq24/XgxtQKFIpQdtvmVw==
-  dependencies:
-    lru-cache "^6.0.0"
-
 send@0.17.1:
   version "0.17.1"
   resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8"
@@ -4980,14 +4321,6 @@ sprintf-js@~1.0.2:
   resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c"
   integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow=
 
-stream-to-pull-stream@^1.7.3:
-  version "1.7.3"
-  resolved "https://registry.yarnpkg.com/stream-to-pull-stream/-/stream-to-pull-stream-1.7.3.tgz#4161aa2d2eb9964de60bfa1af7feaf917e874ece"
-  integrity sha512-6sNyqJpr5dIOQdgNy/xcDWwDuzAsAwVzhzrWlAPAQ7Lkjx/rv0wgvxEyKwTq6FmNd5rjTrELt/CLmaSw7crMGg==
-  dependencies:
-    looper "^3.0.0"
-    pull-stream "^3.2.3"
-
 streamsearch@0.1.2:
   version "0.1.2"
   resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-0.1.2.tgz#808b9d0e56fc273d809ba57338e929919a1a9f1a"
@@ -5134,11 +4467,6 @@ strip-indent@^3.0.0:
   dependencies:
     min-indent "^1.0.0"
 
-strip-json-comments@^3.1.0, strip-json-comments@^3.1.1:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006"
-  integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==
-
 sublevel-pouchdb@7.2.2:
   version "7.2.2"
   resolved "https://registry.yarnpkg.com/sublevel-pouchdb/-/sublevel-pouchdb-7.2.2.tgz#49e46cd37883bf7ff5006d7c5b9bcc7bcc1f422f"
@@ -5179,16 +4507,6 @@ symbol-observable@^1.0.4:
   resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804"
   integrity sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==
 
-table@^6.0.4:
-  version "6.0.7"
-  resolved "https://registry.yarnpkg.com/table/-/table-6.0.7.tgz#e45897ffbcc1bcf9e8a87bf420f2c9e5a7a52a34"
-  integrity sha512-rxZevLGTUzWna/qBLObOe16kB2RTnnbhciwgPbMMlazz1yZGVEgnZK762xyVdVznhqxrfCeBMmMkgOOaPwjH7g==
-  dependencies:
-    ajv "^7.0.2"
-    lodash "^4.17.20"
-    slice-ansi "^4.0.0"
-    string-width "^4.2.0"
-
 tape@^5.0.1, tape@^5.1.1:
   version "5.1.1"
   resolved "https://registry.yarnpkg.com/tape/-/tape-5.1.1.tgz#51e6fc7af0c15cf39faa6aac3d0a1d2ca6fd4744"
@@ -5240,11 +4558,6 @@ text-extensions@^1.0.0:
   resolved "https://registry.yarnpkg.com/text-extensions/-/text-extensions-1.9.0.tgz#1853e45fee39c945ce6f6c36b2d659b5aabc2a26"
   integrity sha512-wiBrwC1EhBelW12Zy26JeOUkQ5mRu+5o8rpsJk5+2t+Y5vE7e842qtZDQ2g1NpX/29HdyFeJ4nSIhI47ENSxlQ==
 
-text-table@^0.2.0:
-  version "0.2.0"
-  resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4"
-  integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=
-
 through2@3.0.2, through2@^3.0.1:
   version "3.0.2"
   resolved "https://registry.yarnpkg.com/through2/-/through2-3.0.2.tgz#99f88931cfc761ec7678b41d5d7336b5b6a07bf4"
@@ -5320,35 +4633,11 @@ ts-toolbelt@^6.15.1:
   resolved "https://registry.yarnpkg.com/ts-toolbelt/-/ts-toolbelt-6.15.5.tgz#cb3b43ed725cb63644782c64fbcad7d8f28c0a83"
   integrity sha512-FZIXf1ksVyLcfr7M317jbB67XFJhOO1YqdTcuGaq9q5jLUoTikukZ+98TPjKiP2jC5CgmYdWWYs0s2nLSU0/1A==
 
-tsconfig-paths@^3.9.0:
-  version "3.9.0"
-  resolved "https://registry.yarnpkg.com/tsconfig-paths/-/tsconfig-paths-3.9.0.tgz#098547a6c4448807e8fcb8eae081064ee9a3c90b"
-  integrity sha512-dRcuzokWhajtZWkQsDVKbWyY+jgcLC5sqJhg2PSgf4ZkH2aHPvaOY8YWGhmjb68b5qqTfasSsDO9k7RUiEmZAw==
-  dependencies:
-    "@types/json5" "^0.0.29"
-    json5 "^1.0.1"
-    minimist "^1.2.0"
-    strip-bom "^3.0.0"
-
-tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3:
+tslib@^1.10.0, tslib@^1.9.0, tslib@^1.9.3:
   version "1.14.1"
   resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
   integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==
 
-tsutils@^3.21.0:
-  version "3.21.0"
-  resolved "https://registry.yarnpkg.com/tsutils/-/tsutils-3.21.0.tgz#b48717d394cea6c1e096983eed58e9d61715b623"
-  integrity sha512-mHKK3iUXL+3UF6xL5k0PEhKRUBKPBCv/+RkEOpjRWxxx27KKRBmmA60A9pgOUvMi8GKhRMPEmjBRPzs2W7O1OA==
-  dependencies:
-    tslib "^1.8.1"
-
-type-check@^0.4.0, type-check@~0.4.0:
-  version "0.4.0"
-  resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1"
-  integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==
-  dependencies:
-    prelude-ls "^1.2.1"
-
 type-fest@^0.18.0:
   version "0.18.1"
   resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.18.1.tgz#db4bc151a4a2cf4eebf9add5db75508db6cc841f"
@@ -5397,16 +4686,6 @@ unbox-primitive@^1.0.0:
     has-symbols "^1.0.0"
     which-boxed-primitive "^1.0.1"
 
-unbox-primitive@^1.0.1:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.1.tgz#085e215625ec3162574dc8859abee78a59b14471"
-  integrity sha512-tZU/3NqK3dA5gpE1KtyiJUrEB0lxnGkMFHptJ7q6ewdZ8s12QrODwNbhIJStmJkd1QDXa1NRA8aF2A1zk/Ypyw==
-  dependencies:
-    function-bind "^1.1.1"
-    has-bigints "^1.0.1"
-    has-symbols "^1.0.2"
-    which-boxed-primitive "^1.0.2"
-
 universalify@^0.1.2:
   version "0.1.2"
   resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
@@ -5422,13 +4701,6 @@ unpipe@1.0.0, unpipe@~1.0.0:
   resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
   integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw=
 
-uri-js@^4.2.2:
-  version "4.4.1"
-  resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.1.tgz#9b1a52595225859e55f669d928f88c6c57f2a77e"
-  integrity sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==
-  dependencies:
-    punycode "^2.1.0"
-
 util-deprecate@^1.0.1, util-deprecate@~1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
@@ -5460,16 +4732,11 @@ uuid@^3.1.0:
   resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee"
   integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A==
 
-uuid@^8.0.0, uuid@^8.3.1, uuid@^8.3.2:
+uuid@^8.0.0, uuid@^8.3.2:
   version "8.3.2"
   resolved "https://registry.yarnpkg.com/uuid/-/uuid-8.3.2.tgz#80d5b5ced271bb9af6c445f21a1a04c606cefbe2"
   integrity sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==
 
-v8-compile-cache@^2.0.3:
-  version "2.2.0"
-  resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.2.0.tgz#9471efa3ef9128d2f7c6a7ca39c4dd6b5055b132"
-  integrity sha512-gTpR5XQNKFwOd4clxfnhaqvfqMpqEwr4tOtCyz4MtYZX2JYhfr1JvBFKdS+7K/9rfpZR3VLX+YWBbKoxCgS43Q==
-
 validate-npm-package-license@^3.0.1:
   version "3.0.4"
   resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a"
@@ -5502,7 +4769,7 @@ whatwg-url@^6.5.0:
     tr46 "^1.0.1"
     webidl-conversions "^4.0.2"
 
-which-boxed-primitive@^1.0.1, which-boxed-primitive@^1.0.2:
+which-boxed-primitive@^1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/which-boxed-primitive/-/which-boxed-primitive-1.0.2.tgz#13757bc89b209b049fe5d86430e21cf40a89a8e6"
   integrity sha512-bwZdv0AKLpplFY2KZRX6TvyuN7ojjr7lwkg6ml0roIy9YeuSr7JS372qlNW18UQYzgYK9ziGcerWqZOmEn9VNg==
@@ -5555,11 +4822,6 @@ which@^2.0.1:
   dependencies:
     isexe "^2.0.0"
 
-word-wrap@^1.2.3:
-  version "1.2.3"
-  resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c"
-  integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==
-
 wrap-ansi@^5.1.0:
   version "5.1.0"
   resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-5.1.0.tgz#1fd1f67235d5b6d0fee781056001bfb694c03b09"