From d42eee7a077b4b921df1d4d3e7ffaa6c019187ea Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Tue, 24 Jan 2023 15:19:51 -0500 Subject: [PATCH 01/29] apk git --- Dockerfile | 2 ++ 1 file changed, 2 insertions(+) diff --git a/Dockerfile b/Dockerfile index 0af809b..0d28751 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,7 @@ FROM node:18.12.1-alpine +RUN apk update && apk --no-cache add git + RUN mkdir /app WORKDIR /app From 487603011b2de15d7ba4a75b589cf5f89fefeaf4 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Tue, 24 Jan 2023 15:37:27 -0500 Subject: [PATCH 02/29] added mysql to docker-compose --- .env.test | 1 + docker-compose.local.yml | 4 ++-- docker-compose.yml | 12 +++++++++--- package.json | 2 +- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/.env.test b/.env.test index 8929558..72d84d2 100644 --- a/.env.test +++ b/.env.test @@ -1,3 +1,4 @@ NODE_ENV=test +MYSQL_TEST_CREDENTIALS={"user":"root","host":"mysql","port":3306,"database":"mysql","ssl":"disabled"} MSSQL_CREDENTIALS:Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; MSSQL_CREDENTIALS_READ_ONLY:Server=mssql,1433;Database=master;User Id=reader;Password=re@derP@ssw0rd;trustServerCertificate=true; \ No newline at end of file diff --git a/docker-compose.local.yml b/docker-compose.local.yml index 341dbf2..e0a8ada 100644 --- a/docker-compose.local.yml +++ b/docker-compose.local.yml @@ -3,5 +3,5 @@ version: "3.7" services: mssql: image: mcr.microsoft.com/azure-sql-edge - expose: - - "1433" + ports: + - "1433:1433" \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 9e87eb9..51439de 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,11 +5,12 @@ services: build: . 
depends_on: - mssql + - mysql env_file: - .env.test networks: - db_proxy_test - command: sh -c "set -o pipefail && wait-on -d 10000 -t 30000 tcp:mssql:1433 && node ./data/seed.mssql.js && TZ=UTC NODE_ENV=TEST node_modules/.bin/mocha" + command: sh -c "set -o pipefail && wait-on -d 15000 -t 30000 tcp:mysql:3306 tcp:mssql:1433 && node ./data/seed.mssql.js && TZ=UTC NODE_ENV=TEST node_modules/.bin/mocha" mssql: image: mcr.microsoft.com/mssql/server:2019-latest @@ -20,8 +21,13 @@ services: - MSSQL_SLEEP=7 volumes: - ./data/AdventureWorks2019.bak:/var/opt/mssql/backup/test.bak - ports: - - "1433:1433" + networks: + - db_proxy_test + + mysql: + image: mariadb:10.6.4 + environment: + - MARIADB_ALLOW_EMPTY_ROOT_PASSWORD=yes networks: - db_proxy_test diff --git a/package.json b/package.json index cc0dfe6..c087568 100644 --- a/package.json +++ b/package.json @@ -51,7 +51,7 @@ "test": "mocha", "test:local": "docker-compose -f docker-compose.yml -f docker-compose.local.yml up --build", "test:ci": "docker-compose -f docker-compose.yml up --build --exit-code-from test", - "test:db": "docker-compose -f docker-compose.yml -f docker-compose.local.yml up mssql" + "test:db": "docker-compose -f docker-compose.yml -f docker-compose.local.yml up mssql mysql" }, "author": "Observable", "license": "ISC", From e31f9e9901807043f0a70850407dbfa88460fd98 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 25 Jan 2023 10:00:32 -0500 Subject: [PATCH 03/29] basic config --- .eslintrc.js => .eslintrc.cjs | 0 .github/workflows/test.yml | 47 ++++ docker-compose.yml | 4 + package.json | 3 + test/mssql.test.js | 31 +-- yarn.lock | 438 +++++++++++++++++++++++++++++++++- 6 files changed, 496 insertions(+), 27 deletions(-) rename .eslintrc.js => .eslintrc.cjs (100%) create mode 100644 .github/workflows/test.yml diff --git a/.eslintrc.js b/.eslintrc.cjs similarity index 100% rename from .eslintrc.js rename to .eslintrc.cjs diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file 
mode 100644 index 0000000..ec9fd12 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,47 @@ +name: Test +on: push + +jobs: + test: + runs-on: ubuntu-20.04 + defaults: + run: + working-directory: . + env: + DOCKER_PACKAGE: ghcr.io/${{ github.repository }}/database-proxy_test + + steps: + - uses: actions/checkout@v3 + - name: Docker login + run: echo ${GITHUB_TOKEN} | docker login -u ${GITHUB_ACTOR} --password-stdin ghcr.io + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Republish + id: republish + continue-on-error: true + if: ${{ needs.Changes.outputs.connector == 'false' }} + run: | + ../.github/retry docker pull ${DOCKER_PACKAGE}:${{ github.event.before }} + docker tag ${DOCKER_PACKAGE}:${{ github.event.before }} ${DOCKER_PACKAGE}:${GITHUB_SHA} + ../.github/retry docker push ${DOCKER_PACKAGE}:${GITHUB_SHA} + + - name: Build + if: ${{ steps.republish.outcome != 'success' }} + run: | + touch .env.test + docker-compose build + - name: Lint + if: ${{ steps.republish.outcome != 'success' }} + run: docker-compose run lint + - name: Test + if: ${{ steps.republish.outcome != 'success' }} + run: docker-compose run test + - name: Container logs + if: failure() + run: docker-compose logs --no-color --timestamps + - name: Publish + if: ${{ steps.republish.outcome != 'success' }} + run: | + docker tag database-proxy_test:latest ${DOCKER_PACKAGE}:${GITHUB_SHA} + ../.github/retry docker push ${DOCKER_PACKAGE}:${GITHUB_SHA} \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 51439de..a2f799a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,6 +1,10 @@ version: "3.7" services: + lint: + build: . + command: eslint . + test: build: . 
depends_on: diff --git a/package.json b/package.json index c087568..b147cf7 100644 --- a/package.json +++ b/package.json @@ -36,10 +36,13 @@ "@babel/preset-env": "^7.19.4", "@babel/register": "^7.18.9", "chai": "^4.3.6", + "eslint": "^8.32.0", + "eslint-plugin-prettier": "^4.2.1", "mocha": "^10.1.0", "mock-req": "^0.2.0", "mock-res": "^0.6.0", "nodemon": "^1.19.1", + "prettier": "^2.8.3", "wait-on": "^6.0.1" }, "peerDependencies": { diff --git a/test/mssql.test.js b/test/mssql.test.js index 3b12f4c..38a0542 100644 --- a/test/mssql.test.js +++ b/test/mssql.test.js @@ -32,7 +32,7 @@ describe("mssql", () => { describe("when querying", () => { it("should stream the results of simple query", () => { - return new Promise(async (resolve, reject) => { + return new Promise((resolve) => { const req = new MockReq({method: "POST", url: "/query-stream"}).end({ sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer", params: [], @@ -41,7 +41,7 @@ describe("mssql", () => { const res = new MockRes(onEnd); const index = mssql(credentials); - await index(req, res); + index(req, res); function onEnd() { const [schema, row] = this._getString().split("\n"); @@ -62,17 +62,18 @@ describe("mssql", () => { }); }); it("should handle parameter graciously", () => { - return new Promise(async (resolve, reject) => { + return new Promise((resolve) => { const testCustomerId = 3; const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", + sql: + "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", params: [testCustomerId], }); const res = new MockRes(onEnd); const index = mssql(credentials); - await index(req, res); + index(req, res); function onEnd() { const [schema, row] = this._getString().split("\n"); @@ -93,17 +94,18 @@ describe("mssql", () => { }); }); it("should replace cell reference in the SQL query", () => { - return new Promise(async (resolve, reject) => { + return new 
Promise((resolve) => { const testCustomerId = 5; const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", + sql: + "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", params: [testCustomerId], }); const res = new MockRes(onEnd); const index = mssql(credentials); - await index(req, res); + index(req, res); function onEnd() { const [schema, row] = this._getString().split("\n"); @@ -124,7 +126,7 @@ describe("mssql", () => { }); }); it("should handle duplicated column names", () => { - return new Promise(async (resolve, reject) => { + return new Promise((resolve) => { const req = new MockReq({method: "POST", url: "/query-stream"}).end({ sql: "SELECT 1 as _a1, 2 as _a1 FROM test.SalesLT.SalesOrderDetail", params: [], @@ -133,10 +135,10 @@ describe("mssql", () => { const res = new MockRes(onEnd); const index = mssql(credentials); - await index(req, res); + index(req, res); function onEnd() { - const [schema, row] = this._getString().split("\n"); + const [, row] = this._getString().split("\n"); expect(row).to.equal( JSON.stringify({ @@ -149,16 +151,17 @@ describe("mssql", () => { }); }); it("should select the last value of any detected duplicated columns", () => { - return new Promise(async (resolve, reject) => { + return new Promise((resolve) => { const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: "SELECT TOP 1 ModifiedDate, ModifiedDate FROM test.SalesLT.SalesOrderDetail", + sql: + "SELECT TOP 1 ModifiedDate, ModifiedDate FROM test.SalesLT.SalesOrderDetail", params: [], }); const res = new MockRes(onEnd); const index = mssql(credentials); - await index(req, res); + index(req, res); function onEnd() { const [schema, row] = this._getString().split("\n"); diff --git a/yarn.lock b/yarn.lock index 6c13730..abe3269 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1129,6 +1129,21 @@ uuid "^9.0.0" winston "^3.8.2" +"@eslint/eslintrc@^1.4.1": + 
version "1.4.1" + resolved "https://registry.yarnpkg.com/@eslint/eslintrc/-/eslintrc-1.4.1.tgz#af58772019a2d271b7e2d4c23ff4ddcba3ccfb3e" + integrity sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA== + dependencies: + ajv "^6.12.4" + debug "^4.3.2" + espree "^9.4.0" + globals "^13.19.0" + ignore "^5.2.0" + import-fresh "^3.2.1" + js-yaml "^4.1.0" + minimatch "^3.1.2" + strip-json-comments "^3.1.1" + "@hapi/hoek@^9.0.0": version "9.3.0" resolved "https://registry.yarnpkg.com/@hapi/hoek/-/hoek-9.3.0.tgz#8368869dcb735be2e7f5cb7647de78e167a251fb" @@ -1141,6 +1156,25 @@ dependencies: "@hapi/hoek" "^9.0.0" +"@humanwhocodes/config-array@^0.11.8": + version "0.11.8" + resolved "https://registry.yarnpkg.com/@humanwhocodes/config-array/-/config-array-0.11.8.tgz#03595ac2075a4dc0f191cc2131de14fbd7d410b9" + integrity sha512-UybHIJzJnR5Qc/MsD9Kr+RpO2h+/P1GhOwdiLPXK5TWk5sgTdu88bTD9UP+CKbPPh5Rni1u0GjAdYQLemG8g+g== + dependencies: + "@humanwhocodes/object-schema" "^1.2.1" + debug "^4.1.1" + minimatch "^3.0.5" + +"@humanwhocodes/module-importer@^1.0.1": + version "1.0.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz#af5b2691a22b44be847b0ca81641c5fb6ad0172c" + integrity sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA== + +"@humanwhocodes/object-schema@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@humanwhocodes/object-schema/-/object-schema-1.2.1.tgz#b520529ec21d8e5945a1851dfd1c32e94e39ff45" + integrity sha512-ZnQMnLV4e7hDlUvw8H+U8ASL02SS2Gn6+9Ac3wGGLIe7+je2AeAOxPY+izIPJDfFDb7eDjev0Us8MO1iFRN8hA== + "@jridgewell/gen-mapping@^0.1.0": version "0.1.1" resolved "https://registry.yarnpkg.com/@jridgewell/gen-mapping/-/gen-mapping-0.1.1.tgz#e5d2e450306a9491e3bd77e323e38d7aff315996" @@ -1186,6 +1220,27 @@ resolved "https://registry.yarnpkg.com/@js-joda/core/-/core-5.4.2.tgz#fedb8b4b98cf0750daf5802fa2a661edbf83892b" integrity 
sha512-QIDIZ9a0NfDStgD47VaTgwiPjlw1p4QPLwjOB/9+/DqIztoQopPNNAd+HdtQMHgE+ibP3dJacd8/TVL/A1RaaA== +"@nodelib/fs.scandir@2.1.5": + version "2.1.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz#7619c2eb21b25483f6d167548b4cfd5a7488c3d5" + integrity sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g== + dependencies: + "@nodelib/fs.stat" "2.0.5" + run-parallel "^1.1.9" + +"@nodelib/fs.stat@2.0.5": + version "2.0.5" + resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz#5bd262af94e9d25bd1e71b05deed44876a222e8b" + integrity sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A== + +"@nodelib/fs.walk@^1.2.8": + version "1.2.8" + resolved "https://registry.yarnpkg.com/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz#e95737e8bb6746ddedf69c556953494f196fe69a" + integrity sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg== + dependencies: + "@nodelib/fs.scandir" "2.1.5" + fastq "^1.6.0" + "@opentelemetry/api@^1.0.1": version "1.2.0" resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.2.0.tgz#89ef99401cde6208cff98760b67663726ef26686" @@ -1274,6 +1329,11 @@ abbrev@1: resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8" integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q== +acorn-jsx@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.3.2.tgz#7ed5bb55908b3b2f1bc55c6af1653bada7f07937" + integrity sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ== + acorn-walk@^8.2.0: version "8.2.0" resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-8.2.0.tgz#741210f2e2426454508853a2f44d0ab83b7f69c1" @@ -1284,6 +1344,11 @@ acorn@^8.7.0: resolved 
"https://registry.yarnpkg.com/acorn/-/acorn-8.8.1.tgz#0a3f9cbecc4ec3bea6f0a80b66ae8dd2da250b73" integrity sha512-7zFpHzhnqYKrkYdUjF1HI1bzd0VygEGX8lFk4k5zVMqHEoES+P+7TKI+EvLO9WVMJ8eekdO0aDEK044xTXwPPA== +acorn@^8.8.0: + version "8.8.2" + resolved "https://registry.yarnpkg.com/acorn/-/acorn-8.8.2.tgz#1b2f25db02af965399b9776b0c2c391276d37c4a" + integrity sha512-xjIYgE8HBrkpd/sJqOGNspf8uHG+NOHGOw6a/Urj8taM2EXfdNAH2oFcPeIFfsv3+kz/mJrS5VuMqbNLjCa2vw== + agent-base@6, agent-base@^6.0.0, agent-base@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" @@ -1291,6 +1356,16 @@ agent-base@6, agent-base@^6.0.0, agent-base@^6.0.2: dependencies: debug "4" +ajv@^6.10.0, ajv@^6.12.4: + version "6.12.6" + resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.6.tgz#baf5a62e802b07d977034586f8c3baf5adf26df4" + integrity sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g== + dependencies: + fast-deep-equal "^3.1.1" + fast-json-stable-stringify "^2.0.0" + json-schema-traverse "^0.4.1" + uri-js "^4.2.2" + ajv@^8.11.0: version "8.11.0" resolved "https://registry.yarnpkg.com/ajv/-/ajv-8.11.0.tgz#977e91dd96ca669f54a11e23e378e33b884a565f" @@ -1729,6 +1804,11 @@ call-bind@^1.0.0, call-bind@^1.0.2: function-bind "^1.1.1" get-intrinsic "^1.0.2" +callsites@^3.0.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" + integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== + camelcase@^4.0.0: version "4.1.0" resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd" @@ -1776,7 +1856,7 @@ chalk@^2.0.0, chalk@^2.0.1: escape-string-regexp "^1.0.5" supports-color "^5.3.0" -chalk@^4.1.0: +chalk@^4.0.0, chalk@^4.1.0: version "4.1.2" resolved 
"https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01" integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA== @@ -2013,6 +2093,15 @@ cross-spawn@^5.0.1: shebang-command "^1.2.0" which "^1.2.9" +cross-spawn@^7.0.2: + version "7.0.3" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" + integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== + dependencies: + path-key "^3.1.0" + shebang-command "^2.0.0" + which "^2.0.1" + crypto-random-string@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/crypto-random-string/-/crypto-random-string-1.0.0.tgz#a230f64f568310e1498009940790ec99545bca7e" @@ -2023,7 +2112,7 @@ data-uri-to-buffer@3: resolved "https://registry.yarnpkg.com/data-uri-to-buffer/-/data-uri-to-buffer-3.0.1.tgz#594b8973938c5bc2c33046535785341abc4f3636" integrity sha512-WboRycPNsVw3B3TL559F7kuBUM4d8CgMEvk6xEJlOp7OBPjt6G7z8WMWlD2rOFZLk6OYfFIUGsCOWzcQH9K2og== -debug@4, debug@4.3.4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.3: +debug@4, debug@4.3.4, debug@^4.1.0, debug@^4.1.1, debug@^4.3.2, debug@^4.3.3: version "4.3.4" resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.4.tgz#1319f6579357f2338d3337d2cdd4914bb5dcc865" integrity sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ== @@ -2071,7 +2160,7 @@ deep-extend@^0.6.0: resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac" integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA== -deep-is@~0.1.3: +deep-is@^0.1.3, deep-is@~0.1.3: version "0.1.4" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.4.tgz#a6f2dce612fadd2ef1f519b73551f17e85199831" integrity 
sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== @@ -2165,6 +2254,13 @@ digest-header@^1.0.0: dependencies: utility "^1.17.0" +doctrine@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" + integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== + dependencies: + esutils "^2.0.2" + dot-prop@^4.2.1: version "4.2.1" resolved "https://registry.yarnpkg.com/dot-prop/-/dot-prop-4.2.1.tgz#45884194a71fc2cda71cbb4bceb3a4dd2f433ba4" @@ -2278,7 +2374,7 @@ escape-html@^1.0.3: resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" integrity sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow== -escape-string-regexp@4.0.0: +escape-string-regexp@4.0.0, escape-string-regexp@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" integrity sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA== @@ -2300,16 +2396,121 @@ escodegen@^1.8.1: optionalDependencies: source-map "~0.6.1" +eslint-plugin-prettier@^4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-4.2.1.tgz#651cbb88b1dab98bfd42f017a12fa6b2d993f94b" + integrity sha512-f/0rXLXUt0oFYs8ra4w49wYZBG5GKZpAYsJSm6rnYL5uVDjd+zowwMwVZHnAjf4edNrKpCDYfXDgmRE/Ak7QyQ== + dependencies: + prettier-linter-helpers "^1.0.0" + +eslint-scope@^7.1.1: + version "7.1.1" + resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-7.1.1.tgz#fff34894c2f65e5226d3041ac480b4513a163642" + integrity sha512-QKQM/UXpIiHcLqJ5AOyIW7XZmzjkzQXYE54n1++wb0u9V/abW3l9uQnxX8Z5Xd18xyKIMTUAyQ0k1e8pz6LUrw== + dependencies: + esrecurse "^4.3.0" + estraverse "^5.2.0" + 
+eslint-utils@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-3.0.0.tgz#8aebaface7345bb33559db0a1f13a1d2d48c3672" + integrity sha512-uuQC43IGctw68pJA1RgbQS8/NP7rch6Cwd4j3ZBtgo4/8Flj4eGE7ZYSZRN3iq5pVUv6GPdW5Z1RFleo84uLDA== + dependencies: + eslint-visitor-keys "^2.0.0" + +eslint-visitor-keys@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-2.1.0.tgz#f65328259305927392c938ed44eb0a5c9b2bd303" + integrity sha512-0rSmRBzXgDzIsD6mGdJgevzgezI534Cer5L/vyMX0kHzT/jiB43jRhd9YUlMGYLQy2zprNmoT8qasCGtY+QaKw== + +eslint-visitor-keys@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" + integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== + +eslint@^8.32.0: + version "8.32.0" + resolved "https://registry.yarnpkg.com/eslint/-/eslint-8.32.0.tgz#d9690056bb6f1a302bd991e7090f5b68fbaea861" + integrity sha512-nETVXpnthqKPFyuY2FNjz/bEd6nbosRgKbkgS/y1C7LJop96gYHWpiguLecMHQ2XCPxn77DS0P+68WzG6vkZSQ== + dependencies: + "@eslint/eslintrc" "^1.4.1" + "@humanwhocodes/config-array" "^0.11.8" + "@humanwhocodes/module-importer" "^1.0.1" + "@nodelib/fs.walk" "^1.2.8" + ajv "^6.10.0" + chalk "^4.0.0" + cross-spawn "^7.0.2" + debug "^4.3.2" + doctrine "^3.0.0" + escape-string-regexp "^4.0.0" + eslint-scope "^7.1.1" + eslint-utils "^3.0.0" + eslint-visitor-keys "^3.3.0" + espree "^9.4.0" + esquery "^1.4.0" + esutils "^2.0.2" + fast-deep-equal "^3.1.3" + file-entry-cache "^6.0.1" + find-up "^5.0.0" + glob-parent "^6.0.2" + globals "^13.19.0" + grapheme-splitter "^1.0.4" + ignore "^5.2.0" + import-fresh "^3.0.0" + imurmurhash "^0.1.4" + is-glob "^4.0.0" + is-path-inside "^3.0.3" + js-sdsl "^4.1.4" + js-yaml "^4.1.0" + json-stable-stringify-without-jsonify "^1.0.1" + levn "^0.4.1" + lodash.merge "^4.6.2" + minimatch "^3.1.2" + 
natural-compare "^1.4.0" + optionator "^0.9.1" + regexpp "^3.2.0" + strip-ansi "^6.0.1" + strip-json-comments "^3.1.0" + text-table "^0.2.0" + +espree@^9.4.0: + version "9.4.1" + resolved "https://registry.yarnpkg.com/espree/-/espree-9.4.1.tgz#51d6092615567a2c2cff7833445e37c28c0065bd" + integrity sha512-XwctdmTO6SIvCzd9810yyNzIrOrqNYV9Koizx4C/mRhf9uq0o4yHoCEU/670pOxOL/MSraektvSAji79kX90Vg== + dependencies: + acorn "^8.8.0" + acorn-jsx "^5.3.2" + eslint-visitor-keys "^3.3.0" + esprima@^4.0.0, esprima@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== +esquery@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.4.0.tgz#2148ffc38b82e8c7057dfed48425b3e61f0f24a5" + integrity sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w== + dependencies: + estraverse "^5.1.0" + +esrecurse@^4.3.0: + version "4.3.0" + resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" + integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== + dependencies: + estraverse "^5.2.0" + estraverse@^4.2.0: version "4.3.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== +estraverse@^5.1.0, estraverse@^5.2.0: + version "5.3.0" + resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.3.0.tgz#2eea5290702f26ab8fe5370370ff86c965d21123" + integrity sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA== + esutils@^2.0.2: version "2.0.3" resolved 
"https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" @@ -2392,21 +2593,45 @@ extglob@^2.0.4: snapdragon "^0.8.1" to-regex "^3.0.1" -fast-deep-equal@^3.1.1: +fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: version "3.1.3" resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== -fast-levenshtein@~2.0.6: +fast-diff@^1.1.2: + version "1.2.0" + resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.2.0.tgz#73ee11982d86caaf7959828d519cfe927fac5f03" + integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w== + +fast-json-stable-stringify@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" + integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== + +fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: version "2.0.6" resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== +fastq@^1.6.0: + version "1.15.0" + resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.15.0.tgz#d04d07c6a2a68fe4599fea8d2e103a937fae6b3a" + integrity sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw== + dependencies: + reusify "^1.0.4" + fecha@^4.2.0: version "4.2.3" resolved "https://registry.yarnpkg.com/fecha/-/fecha-4.2.3.tgz#4d9ccdbc61e8629b259fdca67e65891448d569fd" integrity sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw== +file-entry-cache@^6.0.1: + version "6.0.1" + resolved 
"https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-6.0.1.tgz#211b2dd9659cb0394b073e7323ac3c933d522027" + integrity sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg== + dependencies: + flat-cache "^3.0.4" + file-uri-to-path@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" @@ -2443,7 +2668,7 @@ find-cache-dir@^2.0.0: make-dir "^2.0.0" pkg-dir "^3.0.0" -find-up@5.0.0: +find-up@5.0.0, find-up@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-5.0.0.tgz#4c92819ecb7083561e4f4a240a86be5198f536fc" integrity sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng== @@ -2458,11 +2683,24 @@ find-up@^3.0.0: dependencies: locate-path "^3.0.0" +flat-cache@^3.0.4: + version "3.0.4" + resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11" + integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg== + dependencies: + flatted "^3.1.0" + rimraf "^3.0.2" + flat@^5.0.2: version "5.0.2" resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241" integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ== +flatted@^3.1.0: + version "3.2.7" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== + fn.name@1.x.x: version "1.1.0" resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" @@ -2653,6 +2891,13 @@ glob-parent@^3.1.0: is-glob "^3.1.0" path-dirname "^1.0.0" +glob-parent@^6.0.2: + version "6.0.2" + resolved 
"https://registry.yarnpkg.com/glob-parent/-/glob-parent-6.0.2.tgz#6d237d99083950c79290f24c7642a3de9a28f9e3" + integrity sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A== + dependencies: + is-glob "^4.0.3" + glob-parent@~5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" @@ -2696,6 +2941,13 @@ globals@^11.1.0: resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== +globals@^13.19.0: + version "13.19.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.19.0.tgz#7a42de8e6ad4f7242fbcca27ea5b23aca367b5c8" + integrity sha512-dkQ957uSRWHw7CFXLUtUHQI3g3aWApYhfNR2O6jn/907riyTYKVBmxYVROkBcY614FSSeSJh7Xm7SrUWCxvJMQ== + dependencies: + type-fest "^0.20.2" + globalthis@^1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/globalthis/-/globalthis-1.0.3.tgz#5852882a52b80dc301b0660273e1ed082f0b6ccf" @@ -2725,6 +2977,11 @@ graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0: resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== +grapheme-splitter@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz#9cf3a665c6247479896834af35cf1dbb4400767e" + integrity sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ== + has-bigints@^1.0.1, has-bigints@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" @@ -2893,6 +3150,19 @@ ignore-by-default@^1.0.1: resolved 
"https://registry.yarnpkg.com/ignore-by-default/-/ignore-by-default-1.0.1.tgz#48ca6d72f6c6a3af00a9ad4ae6876be3889e2b09" integrity sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA== +ignore@^5.2.0: + version "5.2.4" + resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.2.4.tgz#a291c0c6178ff1b960befe47fcdec301674a6324" + integrity sha512-MAb38BcSbH0eHNBxn7ql2NH/kX33OkB3lZ1BNdh7ENeRChHTYsTvWrMubiIAMNS2llXEEgZ1MUOBtXChP3kaFQ== + +import-fresh@^3.0.0, import-fresh@^3.2.1: + version "3.3.0" + resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.3.0.tgz#37162c25fcb9ebaa2e6e53d5b4d88ce17d9e0c2b" + integrity sha512-veYYhQa+D1QBKznvhUHxb8faxlrwUnxseDAbAp457E0wLNio2bOSKnjYDhMj+YiAq61xrMGhQk9iXVk5FzgQMw== + dependencies: + parent-module "^1.0.0" + resolve-from "^4.0.0" + import-lazy@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/import-lazy/-/import-lazy-2.1.0.tgz#05698e3d45c88e8d7e9d92cb0584e77f096f3e43" @@ -3105,7 +3375,7 @@ is-glob@^3.1.0: dependencies: is-extglob "^2.1.0" -is-glob@^4.0.0, is-glob@^4.0.1, is-glob@~4.0.1: +is-glob@^4.0.0, is-glob@^4.0.1, is-glob@^4.0.3, is-glob@~4.0.1: version "4.0.3" resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.3.tgz#64f61e42cbbb2eec2071a9dac0b28ba1e65d5084" integrity sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg== @@ -3161,6 +3431,11 @@ is-path-inside@^1.0.0: dependencies: path-is-inside "^1.0.1" +is-path-inside@^3.0.3: + version "3.0.3" + resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-3.0.3.tgz#d231362e53a07ff2b0e0ea7fed049161ffd16283" + integrity sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ== + is-plain-obj@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-2.1.0.tgz#45e42e37fccf1f40da8e5f76ee21515840c09287" @@ -3320,12 +3595,17 @@ js-md4@^0.3.2: resolved 
"https://registry.yarnpkg.com/js-md4/-/js-md4-0.3.2.tgz#cd3b3dc045b0c404556c81ddb5756c23e59d7cf5" integrity sha512-/GDnfQYsltsjRswQhN9fhv3EMw2sCpUdrdxyWDOUK7eyD++r3gRhzgiQgc/x4MAv2i1iuQ4lxO5mvqM3vj4bwA== +js-sdsl@^4.1.4: + version "4.3.0" + resolved "https://registry.yarnpkg.com/js-sdsl/-/js-sdsl-4.3.0.tgz#aeefe32a451f7af88425b11fdb5f58c90ae1d711" + integrity sha512-mifzlm2+5nZ+lEcLJMoBK0/IH/bDg8XnJfd/Wq6IP+xoCjLZsTOnV2QpxlVbX9bMnkl5PdEjNtBJ9Cj1NjifhQ== + js-tokens@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -js-yaml@4.1.0: +js-yaml@4.1.0, js-yaml@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-4.1.0.tgz#c1fb65f8f5017901cdd2c951864ba18458a10602" integrity sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA== @@ -3347,11 +3627,21 @@ jsesc@~0.5.0: resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA== +json-schema-traverse@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" + integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== + json-schema-traverse@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz#ae7bcb3656ab77a73ba5c49bf654f38e6b6860e2" integrity sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug== +json-stable-stringify-without-jsonify@^1.0.1: + version "1.0.1" + resolved 
"https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" + integrity sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw== + json5@^2.2.1: version "2.2.1" resolved "https://registry.yarnpkg.com/json5/-/json5-2.2.1.tgz#655d50ed1e6f95ad1a3caababd2b0efda10b395c" @@ -3455,6 +3745,14 @@ latest-version@^3.0.0: dependencies: package-json "^4.0.0" +levn@^0.4.1: + version "0.4.1" + resolved "https://registry.yarnpkg.com/levn/-/levn-0.4.1.tgz#ae4562c007473b932a6200d403268dd2fffc6ade" + integrity sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ== + dependencies: + prelude-ls "^1.2.1" + type-check "~0.4.0" + levn@~0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" @@ -3513,6 +3811,11 @@ lodash.isstring@^4.0.1: resolved "https://registry.yarnpkg.com/lodash.isstring/-/lodash.isstring-4.0.1.tgz#d527dfb5456eca7cc9bb95d5daeaf88ba54a5451" integrity sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw== +lodash.merge@^4.6.2: + version "4.6.2" + resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" + integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== + lodash.once@^4.0.0: version "4.1.1" resolved "https://registry.yarnpkg.com/lodash.once/-/lodash.once-4.1.1.tgz#0dd3971213c7c56df880977d504c88fb471a97ac" @@ -3668,7 +3971,7 @@ minimatch@5.0.1: dependencies: brace-expansion "^2.0.1" -minimatch@^3.0.4, minimatch@^3.1.1: +minimatch@^3.0.4, minimatch@^3.0.5, minimatch@^3.1.1, minimatch@^3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.1.2.tgz#19cd194bfd3e428f049a70817c038d89ab4be35b" integrity 
sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw== @@ -3846,6 +4149,11 @@ native-duplexpair@^1.0.0: resolved "https://registry.yarnpkg.com/native-duplexpair/-/native-duplexpair-1.0.0.tgz#7899078e64bf3c8a3d732601b3d40ff05db58fa0" integrity sha512-E7QQoM+3jvNtlmyfqRZ0/U75VFgCls+fSkbml2MpgWkWyz3ox8Y58gNhfuziuQYGNNQAbFZJQck55LHCnCK6CA== +natural-compare@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" + integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw== + netmask@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/netmask/-/netmask-2.0.2.tgz#8b01a07644065d536383835823bc52004ebac5e7" @@ -4013,6 +4321,18 @@ optionator@^0.8.1: type-check "~0.3.2" word-wrap "~1.2.3" +optionator@^0.9.1: + version "0.9.1" + resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.9.1.tgz#4f236a6373dae0566a6d43e1326674f50c291499" + integrity sha512-74RlY5FCnhq4jRxVUPKDaRwrVNXMqsGsiW6AJw4XK8hmtm10wC0ypZBLw5IIp85NZMr91+qd1RvvENwg7jjRFw== + dependencies: + deep-is "^0.1.3" + fast-levenshtein "^2.0.6" + levn "^0.4.1" + prelude-ls "^1.2.1" + type-check "^0.4.0" + word-wrap "^1.2.3" + os-name@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/os-name/-/os-name-1.0.3.tgz#1b379f64835af7c5a7f498b357cb95215c159edf" @@ -4105,6 +4425,13 @@ packet-reader@1.0.0: resolved "https://registry.yarnpkg.com/packet-reader/-/packet-reader-1.0.0.tgz#9238e5480dedabacfe1fe3f2771063f164157d74" integrity sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ== +parent-module@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" + integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== + dependencies: + 
callsites "^3.0.0" + parse-passwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" @@ -4145,6 +4472,11 @@ path-key@^2.0.0: resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" integrity sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw== +path-key@^3.1.0: + version "3.1.1" + resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" + integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== + path-parse@^1.0.7: version "1.0.7" resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.7.tgz#fbc114b60ca42b30d9daf5858e4bd68bbedb6735" @@ -4284,6 +4616,11 @@ postgres-interval@^1.1.0: dependencies: xtend "^4.0.0" +prelude-ls@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.2.1.tgz#debc6489d7a6e6b0e7611888cec880337d316396" + integrity sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g== + prelude-ls@~1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" @@ -4294,6 +4631,18 @@ prepend-http@^1.0.1: resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" integrity sha512-PhmXi5XmoyKw1Un4E+opM2KcsJInDvKyuOumcjjw3waw86ZNjHwVUOOWLc4bCzLdcKNaWBH9e99sbWzDQsVaYg== +prettier-linter-helpers@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz#d23d41fe1375646de2d0104d3454a3008802cf7b" + integrity sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w== + dependencies: + fast-diff "^1.1.2" + +prettier@^2.8.3: + version "2.8.3" + resolved 
"https://registry.yarnpkg.com/prettier/-/prettier-2.8.3.tgz#ab697b1d3dd46fb4626fbe2f543afe0cc98d8632" + integrity sha512-tJ/oJ4amDihPoufT5sM0Z1SKEuKay8LfVAMlbbhnnkvt6BUserZylqo2PN+p9KeljLr0OHa2rXHU1T8reeoTrw== + process-nextick-args@~2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" @@ -4385,6 +4734,11 @@ querystringify@^2.1.1: resolved "https://registry.yarnpkg.com/querystringify/-/querystringify-2.2.0.tgz#3345941b4153cb9d082d8eee4cda2016a9aef7f6" integrity sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ== +queue-microtask@^1.2.2: + version "1.2.3" + resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" + integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== + randombytes@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" @@ -4511,6 +4865,11 @@ regexp.prototype.flags@^1.4.3: define-properties "^1.1.3" functions-have-names "^1.2.2" +regexpp@^3.2.0: + version "3.2.0" + resolved "https://registry.yarnpkg.com/regexpp/-/regexpp-3.2.0.tgz#0425a2768d8f23bad70ca4b90461fa2f1213e1b2" + integrity sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg== + regexpu-core@^5.1.0: version "5.2.1" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-5.2.1.tgz#a69c26f324c1e962e9ffd0b88b055caba8089139" @@ -4585,6 +4944,11 @@ requires-port@^1.0.0: resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff" integrity sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ== +resolve-from@^4.0.0: + version "4.0.0" + resolved 
"https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" + integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== + resolve-url@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" @@ -4604,18 +4968,30 @@ ret@~0.1.10: resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== +reusify@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" + integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== + rfdc@^1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/rfdc/-/rfdc-1.3.0.tgz#d0b7c441ab2720d05dc4cf26e01c89631d9da08b" integrity sha512-V2hovdzFbOi77/WajaSMXk2OLm+xNIeQdMMuB7icj7bk6zi2F8GGAxigcnDFpJHbNyNcgyJDiP+8nOrY5cZGrA== -rimraf@^3.0.0: +rimraf@^3.0.0, rimraf@^3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== dependencies: glob "^7.1.3" +run-parallel@^1.1.9: + version "1.2.0" + resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" + integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== + dependencies: + queue-microtask "^1.2.2" + rxjs@^7.5.4: version "7.5.7" resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-7.5.7.tgz#2ec0d57fdc89ece220d2e702730ae8f1e49def39" @@ -4744,11 +5120,23 @@ shebang-command@^1.2.0: dependencies: shebang-regex "^1.0.0" +shebang-command@^2.0.0: + version "2.0.0" + 
resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" + integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== + dependencies: + shebang-regex "^3.0.0" + shebang-regex@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" integrity sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ== +shebang-regex@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" + integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== + side-channel@^1.0.4: version "1.0.4" resolved "https://registry.yarnpkg.com/side-channel/-/side-channel-1.0.4.tgz#efce5c8fdc104ee751b25c58d4290011fa5ea2cf" @@ -5042,7 +5430,7 @@ strip-eof@^1.0.0: resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" integrity sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q== -strip-json-comments@3.1.1: +strip-json-comments@3.1.1, strip-json-comments@^3.1.0, strip-json-comments@^3.1.1: version "3.1.1" resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== @@ -5119,6 +5507,11 @@ text-hex@1.0.x: resolved "https://registry.yarnpkg.com/text-hex/-/text-hex-1.0.0.tgz#69dc9c1b17446ee79a92bf5b884bb4b9127506f5" integrity sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg== +text-table@^0.2.0: + version "0.2.0" + resolved 
"https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" + integrity sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw== + thenify-all@^1.0.0: version "1.6.0" resolved "https://registry.yarnpkg.com/thenify-all/-/thenify-all-1.6.0.tgz#1a1918d402d8fc3f98fbf234db0bcc8cc10e9726" @@ -5245,6 +5638,13 @@ tunnel@^0.0.6: resolved "https://registry.yarnpkg.com/tunnel/-/tunnel-0.0.6.tgz#72f1314b34a5b192db012324df2cc587ca47f92c" integrity sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg== +type-check@^0.4.0, type-check@~0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.4.0.tgz#07b8203bfa7056c0657050e3ccd2c37730bab8f1" + integrity sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew== + dependencies: + prelude-ls "^1.2.1" + type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" @@ -5257,6 +5657,11 @@ type-detect@^4.0.0, type-detect@^4.0.5: resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== +type-fest@^0.20.2: + version "0.20.2" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.20.2.tgz#1bf207f4b28f91583666cb5fbd327887301cd5f4" + integrity sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ== + type-fest@^0.3.0: version "0.3.1" resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.3.1.tgz#63d00d204e059474fe5e1b7c011112bbd1dc29e1" @@ -5556,6 +5961,13 @@ which@^1.2.9: dependencies: isexe "^2.0.0" +which@^2.0.1: + version "2.0.2" + resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" + 
integrity sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== + dependencies: + isexe "^2.0.0" + widest-line@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/widest-line/-/widest-line-2.0.1.tgz#7438764730ec7ef4381ce4df82fb98a53142a3fc" @@ -5596,7 +6008,7 @@ winston@^3.1.0, winston@^3.8.2: triple-beam "^1.3.0" winston-transport "^4.5.0" -word-wrap@~1.2.3: +word-wrap@^1.2.3, word-wrap@~1.2.3: version "1.2.3" resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== From fa8695ecaf4c003b6e80cff178e1cd5daafabdb6 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 25 Jan 2023 10:05:58 -0500 Subject: [PATCH 04/29] fix eslint errors --- .eslintrc.cjs | 2 +- lib/databricks.js | 6 ++---- lib/oracle.js | 2 -- 3 files changed, 3 insertions(+), 7 deletions(-) diff --git a/.eslintrc.cjs b/.eslintrc.cjs index e76ab19..5b44d71 100644 --- a/.eslintrc.cjs +++ b/.eslintrc.cjs @@ -1,7 +1,7 @@ module.exports = { parserOptions: { sourceType: "module", - ecmaVersion: 2018 + ecmaVersion: 2022 }, env: { node: true, diff --git a/lib/databricks.js b/lib/databricks.js index 073e169..8efb1c5 100644 --- a/lib/databricks.js +++ b/lib/databricks.js @@ -253,14 +253,14 @@ export async function queryStream(req, res, connection) { res.write(`${JSON.stringify(responseSchema)}`); res.write("\n"); - await new Promise(async (resolve, reject) => { + await new Promise((resolve, reject) => { const stream = new Readable.from(rows); stream.once("data", () => { clearInterval(keepAlive); }); - stream.on("close", (error) => { + stream.on("close", () => { resolve(); stream.destroy(); }); @@ -345,8 +345,6 @@ export async function check(req, res, connection) { }); return {ok: true}; - } catch (e) { - throw e; } finally { if (connection) { try { diff --git a/lib/oracle.js b/lib/oracle.js index 
6acee3c..b04e24c 100644 --- a/lib/oracle.js +++ b/lib/oracle.js @@ -204,8 +204,6 @@ export async function check(req, res, pool) { ); return {ok: true}; - } catch (e) { - throw e; } finally { if (connection) { try { From b795f3eb661f8776a9e7f256f8e2c2cc95b01cc0 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 25 Jan 2023 10:13:52 -0500 Subject: [PATCH 05/29] test.yml --- .github/workflows/test.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index ec9fd12..c0ee1b6 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -28,9 +28,7 @@ jobs: - name: Build if: ${{ steps.republish.outcome != 'success' }} - run: | - touch .env.test - docker-compose build + run: docker-compose build - name: Lint if: ${{ steps.republish.outcome != 'success' }} run: docker-compose run lint From 2194d4f5113fb600d1f64ce7a9cc8b5dcaf0ee1f Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 25 Jan 2023 10:17:47 -0500 Subject: [PATCH 06/29] clean up env --- .env.test | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.env.test b/.env.test index 72d84d2..d95a9ae 100644 --- a/.env.test +++ b/.env.test @@ -1,4 +1,3 @@ NODE_ENV=test MYSQL_TEST_CREDENTIALS={"user":"root","host":"mysql","port":3306,"database":"mysql","ssl":"disabled"} -MSSQL_CREDENTIALS:Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; -MSSQL_CREDENTIALS_READ_ONLY:Server=mssql,1433;Database=master;User Id=reader;Password=re@derP@ssw0rd;trustServerCertificate=true; \ No newline at end of file +MSSQL_CREDENTIALS:Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; \ No newline at end of file From 013ec0c1525f15a4b977c354f0753001776bc1e5 Mon Sep 17 00:00:00 2001 From: Sylvestre Date: Wed, 25 Jan 2023 15:02:25 -0500 Subject: [PATCH 07/29] Update .env.test Co-authored-by: Visnu Pitiyanuvath --- .env.test | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/.env.test b/.env.test index d95a9ae..4e7edc3 100644 --- a/.env.test +++ b/.env.test @@ -1,3 +1,3 @@ NODE_ENV=test MYSQL_TEST_CREDENTIALS={"user":"root","host":"mysql","port":3306,"database":"mysql","ssl":"disabled"} -MSSQL_CREDENTIALS:Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; \ No newline at end of file +MSSQL_CREDENTIALS=Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; \ No newline at end of file From 258733b48d22554e04672312fdba8d209c85bae8 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 25 Jan 2023 15:06:04 -0500 Subject: [PATCH 08/29] no need to publish --- .github/workflows/test.yml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index c0ee1b6..d823299 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -37,9 +37,4 @@ jobs: run: docker-compose run test - name: Container logs if: failure() - run: docker-compose logs --no-color --timestamps - - name: Publish - if: ${{ steps.republish.outcome != 'success' }} - run: | - docker tag database-proxy_test:latest ${DOCKER_PACKAGE}:${GITHUB_SHA} - ../.github/retry docker push ${DOCKER_PACKAGE}:${GITHUB_SHA} \ No newline at end of file + run: docker-compose logs --no-color --timestamps \ No newline at end of file From ce53c3ff5b3d7f1ea18225ca755b63cd3a422770 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 25 Jan 2023 15:58:59 -0500 Subject: [PATCH 09/29] added mysql tests --- .env.test | 2 +- .env.test.js | 1 + .eslintrc.cjs | 2 +- docker-compose.local.yml | 6 +- test/mysql.test.js | 215 +++++++++++++++++++++++++++++++++++++++ 5 files changed, 223 insertions(+), 3 deletions(-) create mode 100644 test/mysql.test.js diff --git a/.env.test b/.env.test index 4e7edc3..db2bc58 100644 --- a/.env.test +++ b/.env.test @@ -1,3 +1,3 @@ NODE_ENV=test 
-MYSQL_TEST_CREDENTIALS={"user":"root","host":"mysql","port":3306,"database":"mysql","ssl":"disabled"} +MYSQL_CREDENTIALS=mysql://root@mysql:3306/mysql?sslMode=DISABLED MSSQL_CREDENTIALS=Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; \ No newline at end of file diff --git a/.env.test.js b/.env.test.js index f096681..17f565b 100644 --- a/.env.test.js +++ b/.env.test.js @@ -1,4 +1,5 @@ export const MSSQL_CREDENTIALS = env("MSSQL_CREDENTIALS"); +export const MYSQL_CREDENTIALS = env("MYSQL_CREDENTIALS"); export const NODE_ENV = env("NODE_ENV"); function env(key, defaultValue) { diff --git a/.eslintrc.cjs b/.eslintrc.cjs index 5b44d71..b199af2 100644 --- a/.eslintrc.cjs +++ b/.eslintrc.cjs @@ -12,7 +12,7 @@ module.exports = { { files: ["*.test.js"], env: { - jest: true + mocha: true } } ] diff --git a/docker-compose.local.yml b/docker-compose.local.yml index e0a8ada..0f44c34 100644 --- a/docker-compose.local.yml +++ b/docker-compose.local.yml @@ -4,4 +4,8 @@ services: mssql: image: mcr.microsoft.com/azure-sql-edge ports: - - "1433:1433" \ No newline at end of file + - "1433:1433" + + mysql: + ports: + - "3306:3306" \ No newline at end of file diff --git a/test/mysql.test.js b/test/mysql.test.js new file mode 100644 index 0000000..c84b62a --- /dev/null +++ b/test/mysql.test.js @@ -0,0 +1,215 @@ +import assert from "node:assert"; +import MockReq from "mock-req"; +import MockRes from "mock-res"; +import logger from "../middleware/logger.js"; +import mysql from "../lib/mysql.js"; + +import {MYSQL_CREDENTIALS} from "../.env.test.js"; +const index = logger(mysql(MYSQL_CREDENTIALS)); + +describe("MySQL", () => { + describe("when checking", () => { + it("should do MySQL credential check", async () => { + const req = new MockReq({method: "POST", url: "/check"}); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal( + /User has too permissive grants/.test(error.message), + true + ); + } + 
}); + }); + + describe("when querying", () => { + it("should resolves MySQL requests", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: ` + select c1 + from (select 'hello' as c1 union all select 2 as c1) as foo + where c1 = ?`, + params: ["hello"], + }); + const res = new MockRes(); + await index(req, res); + + const {data, schema} = res._getJSON(); + + assert.deepEqual(data, [{c1: "hello"}]); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: {c1: {type: ["null", "string"]}}, + }, + }); + }); + + it("should handle MySQL errors", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: "SELECT * FROM users", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Table 'mysql.users' doesn't exist"); + } + }); + + it("should handle MySQL empty query", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: "", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Query was empty"); + } + }); + + it("should handle MySQL empty results", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: `SELECT 1 AS c1 LIMIT 0`, + }); + const res = new MockRes(); + + await index(req, res); + + const {data, schema} = res._getJSON(); + assert.deepEqual(data, []); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: {c1: {type: ["null", "integer"], long: true}}, + }, + }); + }); + }); + + describe("when check the dataTypeSchema", () => { + it("should provide the right MySQL types", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: + "select 1 as c1, 3.14 as c2, 0xdeadbeef as c3, 'hello' as c4, DATE '2019-01-01' as c5, 1234567890 as c6", + }); + 
const res = new MockRes(); + + await index(req, res); + const {data, schema} = res._getJSON(); + assert.deepEqual(data, [ + { + c1: 1, + c2: 3.14, + c3: {type: "Buffer", data: [222, 173, 190, 239]}, + c4: "hello", + c5: "2019-01-01T00:00:00.000Z", + c6: 1234567890, + }, + ]); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: { + c1: {type: ["null", "integer"], long: true}, + c2: {type: ["null", "number"], newdecimal: true}, + c3: {type: ["null", "object"], buffer: true}, + c4: {type: ["null", "string"]}, + c5: {type: ["null", "string"], date: true}, + c6: {type: ["null", "string"], bigint: true}, + }, + }, + }); + }); + }); + + describe("when streaming", () => { + it("should handle MySQL stream requests", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: ` + select c1 + from (select 'hello' as c1 union all select 2 as c1) as foo + where c1 = ?`, + params: ["hello"], + }); + + const res = new MockRes(); + + await index(req, res); + const response = res._getString(); + + assert.equal( + response, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {c1: {type: ["null", "string"]}}, + }, + }) + + "\n" + + JSON.stringify({c1: "hello"}) + + "\n" + ); + }); + + it("should handle MySQL stream errors", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT * FROM users", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Table 'mysql.users' doesn't exist"); + } + }); + + it("should hande MySQL stream empty query", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Query was empty"); + } + }); + + it("MySQL stream 
empty results", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT 1 AS c1 LIMIT 0", + }); + const res = new MockRes(); + + await index(req, res); + const response = res._getString(); + + assert.equal( + response, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {c1: {type: ["null", "integer"], long: true}}, + }, + }) + "\n\n" + ); + }); + }); +}); From 289988184f862af51c362e89b9320765325e3085 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 25 Jan 2023 17:17:25 -0500 Subject: [PATCH 10/29] forcing exits --- docker-compose.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker-compose.yml b/docker-compose.yml index a2f799a..9a8352a 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -14,7 +14,7 @@ services: - .env.test networks: - db_proxy_test - command: sh -c "set -o pipefail && wait-on -d 15000 -t 30000 tcp:mysql:3306 tcp:mssql:1433 && node ./data/seed.mssql.js && TZ=UTC NODE_ENV=TEST node_modules/.bin/mocha" + command: sh -c "set -o pipefail && wait-on -d 15000 -t 30000 tcp:mysql:3306 tcp:mssql:1433 && node ./data/seed.mssql.js && TZ=UTC NODE_ENV=TEST node_modules/.bin/mocha --exit" mssql: image: mcr.microsoft.com/mssql/server:2019-latest From aa0327f397df6d6e7988abd18b8a8efb1d5f3f50 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Thu, 26 Jan 2023 09:32:05 -0500 Subject: [PATCH 11/29] cleaned up sql server tests --- test/mssql.test.js | 314 ++++++++++++++++++++++----------------------- 1 file changed, 155 insertions(+), 159 deletions(-) diff --git a/test/mssql.test.js b/test/mssql.test.js index 38a0542..c3e8b07 100644 --- a/test/mssql.test.js +++ b/test/mssql.test.js @@ -1,4 +1,4 @@ -import {expect} from "chai"; +import assert from "node:assert"; import MockReq from "mock-req"; import MockRes from "mock-res"; @@ -6,194 +6,190 @@ import {MSSQL_CREDENTIALS} from "../.env.test.js"; import mssql, {dataTypeSchema} from "../lib/mssql.js"; const 
credentials = MSSQL_CREDENTIALS; -describe("mssql", () => { +describe("SQL Server", () => { describe("when checking", () => { - describe("with system admin user", () => { - it("should throw a too permissive error", () => { - const req = new MockReq({ - method: "POST", - url: "/check", - }); - const res = new MockRes(); - const index = mssql(credentials); - - return index(req, res).then( - () => Promise.reject("Expect call to throw!"), - (err) => { - expect(err.statusCode).to.equal(200); - expect( - err.message.includes("User has too permissive grants") - ).to.equal(true); - } - ); + it("should throw a too permissive error", async () => { + const req = new MockReq({ + method: "POST", + url: "/check", }); + const res = new MockRes(); + const index = mssql(credentials); + + try { + await index(req, res); + } catch (error) { + assert.equal( + /User has too permissive grants/.test(error.message), + true + ); + } }); }); - describe("when querying", () => { - it("should stream the results of simple query", () => { - return new Promise((resolve) => { - const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer", - params: [], - }); + describe("when streaming", () => { + it("should stream the results of simple query", (done) => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer", + params: [], + }); - const res = new MockRes(onEnd); + const res = new MockRes(onEnd); - const index = mssql(credentials); - index(req, res); + const index = mssql(credentials); + index(req, res); - function onEnd() { - const [schema, row] = this._getString().split("\n"); + function onEnd() { + const [schema, row] = this._getString().split("\n"); - expect(schema).to.equal( - JSON.stringify({ - type: "array", - items: { - type: "object", - properties: {CustomerID: {type: ["null", "integer"]}}, - }, - }) - ); - 
expect(row).to.equal(JSON.stringify({CustomerID: 12})); + assert.equal( + schema, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {CustomerID: {type: ["null", "integer"]}}, + }, + }) + ); + assert.equal(row, JSON.stringify({CustomerID: 12})); + done(); + } + }); - resolve(); - } + it("should handle parameter graciously", (done) => { + const testCustomerId = 3; + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: + "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", + params: [testCustomerId], }); + + const res = new MockRes(onEnd); + + const index = mssql(credentials); + index(req, res); + + function onEnd() { + const [schema, row] = this._getString().split("\n"); + + assert.equal( + schema, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {CustomerID: {type: ["null", "integer"]}}, + }, + }) + ); + assert.equal(row, JSON.stringify({CustomerID: testCustomerId})); + + done(); + } }); - it("should handle parameter graciously", () => { - return new Promise((resolve) => { - const testCustomerId = 3; - const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: - "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", - params: [testCustomerId], - }); - - const res = new MockRes(onEnd); - - const index = mssql(credentials); - index(req, res); - - function onEnd() { - const [schema, row] = this._getString().split("\n"); - - expect(schema).to.equal( - JSON.stringify({ - type: "array", - items: { - type: "object", - properties: {CustomerID: {type: ["null", "integer"]}}, - }, - }) - ); - expect(row).to.equal(JSON.stringify({CustomerID: testCustomerId})); - resolve(); - } + it("should replace cell reference in the SQL query", (done) => { + const testCustomerId = 5; + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: + "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", + params: 
[testCustomerId], }); + + const res = new MockRes(onEnd); + + const index = mssql(credentials); + index(req, res); + + function onEnd() { + const [schema, row] = this._getString().split("\n"); + + assert.equal( + schema, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {CustomerID: {type: ["null", "integer"]}}, + }, + }) + ); + assert.equal(row, JSON.stringify({CustomerID: testCustomerId})); + + done(); + } }); - it("should replace cell reference in the SQL query", () => { - return new Promise((resolve) => { - const testCustomerId = 5; - const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: - "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", - params: [testCustomerId], - }); - - const res = new MockRes(onEnd); - - const index = mssql(credentials); - index(req, res); - - function onEnd() { - const [schema, row] = this._getString().split("\n"); - - expect(schema).to.equal( - JSON.stringify({ - type: "array", - items: { - type: "object", - properties: {CustomerID: {type: ["null", "integer"]}}, - }, - }) - ); - expect(row).to.equal(JSON.stringify({CustomerID: testCustomerId})); - resolve(); - } + it("should handle duplicated column names", (done) => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT 1 as _a1, 2 as _a1 FROM test.SalesLT.SalesOrderDetail", + params: [], }); - }); - it("should handle duplicated column names", () => { - return new Promise((resolve) => { - const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: "SELECT 1 as _a1, 2 as _a1 FROM test.SalesLT.SalesOrderDetail", - params: [], - }); - const res = new MockRes(onEnd); + const res = new MockRes(onEnd); - const index = mssql(credentials); - index(req, res); + const index = mssql(credentials); + index(req, res); - function onEnd() { - const [, row] = this._getString().split("\n"); + function onEnd() { + const [, row] = this._getString().split("\n"); - 
expect(row).to.equal( - JSON.stringify({ - _a1: 2, - }) - ); + assert.equal( + row, + JSON.stringify({ + _a1: 2, + }) + ); - resolve(); - } - }); + done(); + } }); - it("should select the last value of any detected duplicated columns", () => { - return new Promise((resolve) => { - const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: - "SELECT TOP 1 ModifiedDate, ModifiedDate FROM test.SalesLT.SalesOrderDetail", - params: [], - }); - - const res = new MockRes(onEnd); - - const index = mssql(credentials); - index(req, res); - - function onEnd() { - const [schema, row] = this._getString().split("\n"); - - expect(schema).to.equal( - JSON.stringify({ - type: "array", - items: { - type: "object", - properties: { - ModifiedDate: {type: ["null", "string"], date: true}, - }, - }, - }) - ); - expect(row).to.equal( - JSON.stringify({ - ModifiedDate: "2008-06-01T00:00:00.000Z", - }) - ); - - resolve(); - } + + it("should select the last value of any detected duplicated columns", (done) => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: + "SELECT TOP 1 ModifiedDate, ModifiedDate FROM test.SalesLT.SalesOrderDetail", + params: [], }); + + const res = new MockRes(onEnd); + + const index = mssql(credentials); + index(req, res); + + function onEnd() { + const [schema, row] = this._getString().split("\n"); + + assert.equal( + schema, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: { + ModifiedDate: {type: ["null", "string"], date: true}, + }, + }, + }) + ); + assert.equal( + row, + JSON.stringify({ + ModifiedDate: "2008-06-01T00:00:00.000Z", + }) + ); + + done(); + } }); }); describe("when check the dataTypeSchema", () => { it("should TYPES.Image.name to object", () => { const {type} = dataTypeSchema({type: "Image"}); - expect(type[0]).to.equal("null"); - expect(type[1]).to.equal("object"); + assert.equal(type[0], "null"); + assert.equal(type[1], "object"); }); }); }); From 
a08e89b2b40eca35476997aa31d38c67d4ceb2ed Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Tue, 31 Jan 2023 15:18:31 -0500 Subject: [PATCH 12/29] WIP --- lib/mysql.js | 2 +- lib/postgres.js | 189 ++++++++++++++++++++++++++++++++++++++++++++++-- 2 files changed, 184 insertions(+), 7 deletions(-) diff --git a/lib/mysql.js b/lib/mysql.js index e4725f9..8bd584c 100644 --- a/lib/mysql.js +++ b/lib/mysql.js @@ -170,7 +170,7 @@ export default (url) => async (req, res) => { throw notFound(); }; -function schema(fields) { +function schema(fields = {}) { return { type: "array", items: { diff --git a/lib/postgres.js b/lib/postgres.js index c6c9ce8..b7c931b 100644 --- a/lib/postgres.js +++ b/lib/postgres.js @@ -2,6 +2,8 @@ import {json} from "micro"; import pg from "pg"; import QueryStream from "pg-query-stream"; import JSONStream from "JSONStream"; +import {validateQueryPayload} from "./validate.js"; +import {badRequest, failedCheck} from "./errors.js"; const {Pool} = pg; @@ -43,6 +45,163 @@ export default (url) => { }; }; +export async function query(req, res, pool) { + const body = await json(req); + if (!validateQueryPayload(body)) throw badRequest(); + const {sql, params} = body; + const client = await pool.connect(); + const keepAlive = setInterval(() => res.write("\n"), 25e3); + + try { + let rowCount = 0; + let bytes = 0; + const queryStream = new QueryStream(sql, params); + try { + const stream = await client.query(queryStream); + + await new Promise((resolve, reject) => { + stream + .on("end", resolve) + .on("error", reject) + .once("readable", () => clearInterval(keepAlive)) + .once("readable", () => { + res.write(`{"schema":${JSON.stringify(schema(queryStream))}`); + }) + .pipe(JSONStream.stringify(`,"data":[`, ",", "]}")) + .on("data", (chunk) => { + bytes += chunk.length; + rowCount++; + if (rowCount && rowCount % 2e3 === 0) + req.log({ + progress: { + rows: rowCount, + fields: queryStream.cursor._result.fields.length, + bytes, + done: false, + }, + }); + }) + 
.pipe(res); + }); + } catch (error) { + if (!error.statusCode) error.statusCode = 400; + throw error; + } + req.log({ + progress: { + rows: rowCount, + fields: queryStream.cursor._result.fields.length, + bytes, + done: true, + }, + }); + } finally { + clearInterval(keepAlive); + client.release(); + } +} + +export async function queryStream(req, res, pool) { + const body = await json(req); + if (!validateQueryPayload(body)) throw badRequest(); + const {sql, params} = body; + const client = await pool.connect(); + res.setHeader("Content-Type", "text/plain"); + const keepAlive = setInterval(() => res.write("\n"), 25e3); + + try { + let rowCount = 0; + let bytes = 0; + + const queryStream = new QueryStream(sql, params); + req.on("close", () => queryStream.cursor.close()); + try { + const stream = await client.query(queryStream); + + await new Promise((resolve, reject) => { + stream + .on("end", resolve) + .on("error", reject) + .once("readable", () => clearInterval(keepAlive)) + .once("readable", () => { + res.write(JSON.stringify(schema(queryStream))); + res.write("\n"); + }) + .pipe(JSONStream.stringify("", "\n", "\n")) + .on("data", (chunk) => { + bytes += chunk.length; + rowCount++; + if (rowCount % 2e3 === 0) { + req.log({ + progress: { + rows: rowCount, + fields: queryStream.cursor._result.fields.length, + bytes, + done: false, + }, + }); + } + }) + .pipe(res); + }); + } catch (error) { + if (!error.statusCode) error.statusCode = 400; + throw error; + } + req.log({ + progress: { + rows: rowCount, + fields: queryStream.cursor._result.fields.length, + bytes, + done: true, + }, + }); + } finally { + clearInterval(keepAlive); + client.release(); + } +} + +export async function check(req, res, pool) { + // TODO: use table_privileges and column_privileges to ensure public + // privileges aren't too permissive? 
+ const {rows} = await pool.query(` + SELECT DISTINCT privilege_type + FROM information_schema.role_table_grants + WHERE grantee = user + + UNION + + SELECT DISTINCT privilege_type + FROM information_schema.role_column_grants + WHERE grantee = user + `); + + const privileges = rows.map((r) => r.privilege_type); + const permissive = privileges.filter((p) => p !== "SELECT"); + if (permissive.length) + throw failedCheck( + `User has too permissive privileges: ${permissive.join(", ")}` + ); + + return {ok: true}; +} + +function schema(queryStream) { + return { + type: "array", + items: { + type: "object", + properties: queryStream.cursor._result.fields.reduce( + (schema, {name, dataTypeID}) => ( + (schema[name] = dataTypeSchema(dataTypeID)), schema + ), + {} + ), + }, + }; +} + // https://www.postgresql.org/docs/9.6/datatype.html const array = ["null", "array"], boolean = ["null", "boolean"], @@ -53,15 +212,22 @@ const array = ["null", "array"], function dataTypeSchema(dataTypeID) { switch (dataTypeID) { // https://github.com/brianc/node-pg-types/blob/master/lib/textParsers.js#L166 + case 18: + return {type: string, char: true}; case 20: //parseBigInteger // int8 return {type: string, bigint: true}; case 21: //parseInteger // int2 + return {type: integer, int16: true}; case 23: //parseInteger // int4 + return {type: integer, int32: true}; + case 24: + return {type: string, regproc: true}; case 26: //parseInteger // oid - return {type: integer}; + return {type: integer, oid: true}; case 700: //parseFloat // float4/real + return {type: number, float32: true}; case 701: //parseFloat // float8/double - return {type: number}; + return {type: number, float64: true}; case 16: //parseBool return {type: boolean}; case 1082: //parseDate // date @@ -76,24 +242,34 @@ function dataTypeSchema(dataTypeID) { return {type: object}; case 1000: //parseBoolArray return {type: array, items: {type: boolean}}; - case 1001: //parseByteAArray + case 1001: //parseByteArray return {type: 
array, items: {type: object, buffer: true}}; + case 1002: + return {type: array, items: {type: string, char: true}}; case 1005: //parseIntegerArray // _int2 + return {type: array, items: {type: integer, int16: true}}; case 1007: //parseIntegerArray // _int4 + return {type: array, items: {type: integer, int32: true}}; case 1028: //parseIntegerArray // oid[] - return {type: array, items: {type: integer}}; + return {type: array, items: {type: integer, oid: true}}; case 1016: //parseBigIntegerArray // _int8 return {type: array, items: {type: string, bigint: true}}; case 1017: //parsePointArray // point[] return {type: array, items: {type: object}}; case 1021: //parseFloatArray // _float4 + return {type: array, items: {type: number, float32: true}}; case 1022: //parseFloatArray // _float8 + return {type: array, items: {type: number, float64: true}}; case 1231: //parseFloatArray // _numeric - return {type: array, items: {type: number}}; + return {type: array, items: {type: string, numeric: true}}; case 1014: //parseStringArray //char + return {type: array, items: {type: string, char: true}}; case 1015: //parseStringArray //varchar + return {type: array, items: {type: string, varchar: true}}; case 1008: //parseStringArray + return {type: array, items: {type: string, regproc: true}}; case 1009: //parseStringArray + return {type: array, items: {type: string, text: true}}; case 1040: //parseStringArray // macaddr[] case 1041: //parseStringArray // inet[] return {type: array, items: {type: string}}; @@ -121,8 +297,9 @@ function dataTypeSchema(dataTypeID) { return {type: array, items: {type: string}}; // https://github.com/brianc/node-pg-types/blob/master/lib/binaryParsers.js#L236 case 1700: //parseNumeric - return {type: number}; + return {type: string, numeric: true}; case 25: //parseText + return {type: string, text: true}; default: return {type: string}; } From 5e1b6c8657de4c78e7a2f3b06bad1e09fba5f917 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Thu, 26 Jan 2023 
14:45:52 -0500 Subject: [PATCH 13/29] added pools and mysql pools implementation --- lib/mysql.js | 17 +++++++++++++++- lib/pools.js | 55 ++++++++++++++++++++++++++++++++++++++++++++++++++++ package.json | 2 ++ yarn.lock | 50 +++++++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 123 insertions(+), 1 deletion(-) create mode 100644 lib/pools.js diff --git a/lib/mysql.js b/lib/mysql.js index bc719b4..aa9609b 100644 --- a/lib/mysql.js +++ b/lib/mysql.js @@ -1,11 +1,26 @@ import JSONStream from "JSONStream"; import {json} from "micro"; -import mysql, {createConnection} from "mysql2"; +import mysql, {createConnection, createPool} from "mysql2"; import {failedCheck} from "./errors.js"; import {notFound} from "./errors.js"; +import Pools from "./pools.js"; const {Types, ConnectionConfig} = mysql; +export const pools = new Pools(({host, port, database, user, password, ssl}) => + createPool({ + host, + port, + database, + user, + password, + ssl: ssl === "required" ? {} : false, + connectTimeout: 25e3, + connectionLimit: 30, + decimalNumbers: true, + }) +); + export async function query(req, res, pool) { const {sql, params} = await json(req); const keepAlive = setInterval(() => res.write("\n"), 25e3); diff --git a/lib/pools.js b/lib/pools.js new file mode 100644 index 0000000..fa872b5 --- /dev/null +++ b/lib/pools.js @@ -0,0 +1,55 @@ +import LRU from "lru-cache"; +import * as Sentry from "@sentry/node"; + +const ttl = 1000 * 60 * 10; // 10m + +export default class Pools { + constructor(createPool) { + this.createPool = createPool; + this.cache = new LRU({ + max: 100, + ttl, + updateAgeOnGet: true, + dispose(_key, pool) { + pool.end(); + }, + }); + + let loop; + (loop = () => { + this.cache.purgeStale(); + this.timeout = setTimeout(loop, ttl / 2); + })(); + } + + async get(credentials) { + const key = JSON.stringify(credentials); + if (this.cache.has(key)) return this.cache.get(key); + const pool = await this.createPool(credentials); + + pool.on("error", (error) => 
{ + // We need to attach a handler otherwise the process could exit, but we + // just don't care about these errors because the client will get cleaned + // up already. For debugging purposes, we'll add a Sentry breadcrumb if + // something else errors more loudly. + Sentry.addBreadcrumb({ + message: error.message, + category: "pool", + level: "error", + data: error, + }); + }); + + this.cache.set(key, pool); + return pool; + } + + del(credentials) { + this.cache.del(JSON.stringify(credentials)); + } + + end() { + if (this.timeout) clearTimeout(this.timeout); + for (const pool of this.cache.values()) pool.end(); + } +} diff --git a/package.json b/package.json index bd2a811..1334ef6 100644 --- a/package.json +++ b/package.json @@ -19,8 +19,10 @@ }, "dependencies": { "@databricks/sql": "https://github.com/observablehq/databricks-sql-nodejs", + "@sentry/node": "^7.33.0", "JSONStream": "^1.3.5", "ajv": "^8.11.0", + "lru-cache": "^7.14.1", "micro": "^9.3.4", "mssql": "^9.0.1", "mysql2": "^3.0.1", diff --git a/yarn.lock b/yarn.lock index 6c13730..19ded06 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1191,6 +1191,41 @@ resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.2.0.tgz#89ef99401cde6208cff98760b67663726ef26686" integrity sha512-0nBr+VZNKm9tvNDZFstI3Pq1fCTEDK5OZTnVKNvBNAKgd0yIvmwsP4m61rEv7ZP+tOUjWJhROpxK5MsnlF911g== +"@sentry/core@7.33.0": + version "7.33.0" + resolved "https://registry.yarnpkg.com/@sentry/core/-/core-7.33.0.tgz#7cba1670c041fae02794729b74e9fb9d1f519755" + integrity sha512-mrSgUnXjxHVi0cVea1lv7gC/Y66ya2a3atCHaPEij/+l+3APg5d0Ixt1zMx5YllMiZKf6wpxlZ0uwXcqdAAw+w== + dependencies: + "@sentry/types" "7.33.0" + "@sentry/utils" "7.33.0" + tslib "^1.9.3" + +"@sentry/node@^7.33.0": + version "7.33.0" + resolved "https://registry.yarnpkg.com/@sentry/node/-/node-7.33.0.tgz#d5c7c7094543dd9819422dfc69952ed40416bfab" + integrity sha512-isQVF9LLSG4EZLHiSJ3chgK6f3ZBdGxm8fX6YGm8HWz07CubJddes3yBPLPRNXrRLd7X3SK8pPcK5oc3LIKqAw== + dependencies: + 
"@sentry/core" "7.33.0" + "@sentry/types" "7.33.0" + "@sentry/utils" "7.33.0" + cookie "^0.4.1" + https-proxy-agent "^5.0.0" + lru_map "^0.3.3" + tslib "^1.9.3" + +"@sentry/types@7.33.0": + version "7.33.0" + resolved "https://registry.yarnpkg.com/@sentry/types/-/types-7.33.0.tgz#7d4893a783360a868382e5194b50dbf034ba23c0" + integrity sha512-5kkmYjtBWSbPxfYGiXdZFPS6xpFBNlXvDqeX4NpCFXz6/LiEDn6tZ61kuCSFb8MZlyqyCX5WsP3aiI2FJfpGIA== + +"@sentry/utils@7.33.0": + version "7.33.0" + resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-7.33.0.tgz#e6910139328b49b9cc21186521bdb10390dfd915" + integrity sha512-msp02GV1gOfaN5FjKjWxI00rtbYLXEE5cTGldhs/Dt9KI63dDk1nwPDkSLhg6joqRItAq0thlBh6un717HdWbg== + dependencies: + "@sentry/types" "7.33.0" + tslib "^1.9.3" + "@sideway/address@^4.1.3": version "4.1.4" resolved "https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0" @@ -1975,6 +2010,11 @@ convert-source-map@^1.7.0: resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== +cookie@^0.4.1: + version "0.4.2" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432" + integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA== + copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" @@ -3584,6 +3624,11 @@ lru-cache@^7.14.1: resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.1.tgz#8da8d2f5f59827edb388e63e459ac23d6d408fea" integrity sha512-ysxwsnTKdAx96aTRdhDOCQfDgbHnt8SK0KY8SEjO0wHinhWOFTESbjVCMPbU1uGXg/ch4lifqx0wfjOawU2+WA== +lru_map@^0.3.3: + version "0.3.3" + resolved 
"https://registry.yarnpkg.com/lru_map/-/lru_map-0.3.3.tgz#b5c8351b9464cbd750335a79650a0ec0e56118dd" + integrity sha512-Pn9cox5CsMYngeDbmChANltQl+5pi6XmTrraMSzhPmMBbmgcxmqWry0U3PGapCU1yB4/LqCcom7qhHZiF/jGfQ== + make-dir@^1.0.0: version "1.3.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c" @@ -5235,6 +5280,11 @@ triple-beam@^1.3.0: resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.3.0.tgz#a595214c7298db8339eeeee083e4d10bd8cb8dd9" integrity sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw== +tslib@^1.9.3: + version "1.14.1" + resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" + integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== + tslib@^2.0.1, tslib@^2.1.0, tslib@^2.2.0: version "2.4.0" resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" From 9a0b4a4d9746499c4dc7fcd3b4e8918d7dbf2e2d Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Thu, 26 Jan 2023 16:23:12 -0500 Subject: [PATCH 14/29] add pools for oracle --- lib/oracle.js | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/lib/oracle.js b/lib/oracle.js index 6acee3c..0045684 100644 --- a/lib/oracle.js +++ b/lib/oracle.js @@ -4,6 +4,7 @@ import {Transform} from "stream"; import {badRequest, failedCheck} from "./errors.js"; import {validateQueryPayload} from "./validate.js"; +import Pools from "./pools.js"; const READ_ONLY = new Set(["SELECT", "USAGE", "CONNECT"]); export class OracleSingleton { @@ -217,6 +218,23 @@ export async function check(req, res, pool) { } } +export const pools = new Pools(async (credentials) => { + const oracledb = await OracleSingleton.getInstance(); + credentials.connectionString = decodeURI(credentials.connectionString); + const pool = await oracledb.createPool(credentials); + + 
Object.defineProperty(pool, "end", { + value() { + // We must ensure there is no query still running before we close the pool. + if (this._connectionsOut === 0) { + this.close(); + } + }, + }); + + return pool; +}); + export default async ({url, username, password}) => { OracleSingleton.initialize(); // We do not want to import the oracledb library until we are sure that the user is looking to use Oracle. From d71a0b6a1fcbf98185a850d830131d9af0059894 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Thu, 26 Jan 2023 16:39:48 -0500 Subject: [PATCH 15/29] pools for mssql postgres and snowflake --- lib/mssql.js | 11 +++++++++-- lib/postgres.js | 17 +++++++++++++++++ lib/snowflake.js | 35 ++++++++++++++++++++++++++++------- 3 files changed, 54 insertions(+), 9 deletions(-) diff --git a/lib/mssql.js b/lib/mssql.js index ce2c4f1..4dc10b2 100644 --- a/lib/mssql.js +++ b/lib/mssql.js @@ -5,6 +5,7 @@ import {Transform} from "stream"; import {failedCheck, badRequest, notImplemented} from "./errors.js"; import {validateQueryPayload} from "./validate.js"; +import Pools from "./pools.js"; const TYPES = mssql.TYPES; const READ_ONLY = new Set(["SELECT", "USAGE", "CONNECT"]); @@ -124,8 +125,6 @@ export async function check(req, res, pool) { return {ok: true}; } -export const ConnectionPool = mssql.ConnectionPool; - export default (credentials) => { const pool = new mssql.ConnectionPool(credentials); @@ -144,6 +143,14 @@ export default (credentials) => { }; }; +export const pools = new Pools((credentials) => + Object.defineProperty(new mssql.ConnectionPool(credentials), "end", { + value() { + this.close(); + }, + }) +); + // See https://github.com/tediousjs/node-mssql/blob/66587d97c9ce21bffba8ca360c72a540f2bc47a6/lib/datatypes.js#L6 const boolean = ["null", "boolean"], integer = ["null", "integer"], diff --git a/lib/postgres.js b/lib/postgres.js index c6c9ce8..bbd1bc1 100644 --- a/lib/postgres.js +++ b/lib/postgres.js @@ -3,8 +3,25 @@ import pg from "pg"; import QueryStream 
from "pg-query-stream"; import JSONStream from "JSONStream"; +import Pools from "./pools.js"; + const {Pool} = pg; +export const pools = new Pools( + ({host, port, database, user, password, ssl}) => + new pg.Pool({ + host, + port, + database, + user, + password, + ssl: ssl === "required" ? {rejectUnauthorized: false} : false, + connectionTimeoutMillis: 25e3, + statement_timeout: 240e3, + max: 30, + }) +); + export default (url) => { const pool = new Pool({connectionString: url}); diff --git a/lib/snowflake.js b/lib/snowflake.js index 5503b65..5cdaae2 100644 --- a/lib/snowflake.js +++ b/lib/snowflake.js @@ -3,11 +3,32 @@ import {URL} from "url"; import JSONStream from "JSONStream"; import snowflake from "snowflake-sdk"; -export default url => { +import Pools from "./pools.js"; + +export const pools = new Pools( + ({host, user, password, database, schema, warehouse, role}) => + Object.defineProperty( + snowflake.createConnection({ + account: host, + username: user, + password, + database, + schema, + warehouse, + role, + }), + "end", + { + value() { + this.destroy(); + }, + } + ) +); + +export default (url) => { url = new URL(url); - const {host, username, password, pathname, searchParams} = new URL( - url - ); + const {host, username, password, pathname, searchParams} = new URL(url); const connection = snowflake.createConnection({ account: host, username, @@ -15,7 +36,7 @@ export default url => { database: pathname.slice(1), schema: searchParams.get("schema"), warehouse: searchParams.get("warehouse"), - role: searchParams.get("role") + role: searchParams.get("role"), }); const connecting = new WeakSet(); @@ -61,8 +82,8 @@ export default url => { (schema[column.getName()] = dataTypeSchema(column)), schema ), {} - ) - } + ), + }, }; res.end(`,"schema":${JSON.stringify(schema)}}`); }; From 8fa9b9c4c631dc71be061d21540b79dff9ab2cc3 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Thu, 26 Jan 2023 16:47:27 -0500 Subject: [PATCH 16/29] delete method is deprecated --- 
lib/pools.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/pools.js b/lib/pools.js index fa872b5..d9f0f9e 100644 --- a/lib/pools.js +++ b/lib/pools.js @@ -45,7 +45,7 @@ export default class Pools { } del(credentials) { - this.cache.del(JSON.stringify(credentials)); + this.cache.delete(JSON.stringify(credentials)); } end() { From a921b2fd93d1a36537052316d2fd647077f41ec6 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Thu, 26 Jan 2023 16:53:33 -0500 Subject: [PATCH 17/29] install client libraries --- package.json | 4 ++-- yarn.lock | 18 +++++++++--------- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/package.json b/package.json index 1334ef6..a7c1110 100644 --- a/package.json +++ b/package.json @@ -27,10 +27,10 @@ "mssql": "^9.0.1", "mysql2": "^3.0.1", "open": "^6.3.0", - "pg": "^8.7.1", + "pg": "^8.7.3", "pg-query-stream": "^4.2.1", "serialize-error": "^4.1.0", - "snowflake-sdk": "^1.5.0", + "snowflake-sdk": "^1.6.11", "yargs": "^13.2.4" }, "devDependencies": { diff --git a/yarn.lock b/yarn.lock index 19ded06..bedd255 100644 --- a/yarn.lock +++ b/yarn.lock @@ -190,7 +190,7 @@ jsonwebtoken "^8.5.1" uuid "^8.3.0" -"@azure/storage-blob@^12.5.0": +"@azure/storage-blob@^12.11.0": version "12.12.0" resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.12.0.tgz#25e277c885692d5adcd8c2a949789b2837a74c59" integrity sha512-o/Mf6lkyYG/eBW4/hXB9864RxVNmAkcKHjsGR6Inlp5hupa3exjSyH2KjO3tLO//YGA+tS+17hM2bxRl9Sn16g== @@ -3797,7 +3797,7 @@ moment-timezone@^0.5.15: dependencies: moment ">= 2.9.0" -"moment@>= 2.9.0", moment@^2.29.3: +"moment@>= 2.9.0", moment@^2.29.4: version "2.29.4" resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108" integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w== @@ -4250,7 +4250,7 @@ pg-types@^2.1.0: postgres-date "~1.0.4" postgres-interval "^1.1.0" -pg@^8.7.1: +pg@^8.7.3: version 
"8.8.0" resolved "https://registry.yarnpkg.com/pg/-/pg-8.8.0.tgz#a77f41f9d9ede7009abfca54667c775a240da686" integrity sha512-UXYN0ziKj+AeNNP7VDMwrehpACThH7LUl/p8TDFpEUuSejCUIwGSfxpHsPvtM6/WXFy6SU4E5RG4IJV/TZAGjw== @@ -4855,12 +4855,12 @@ snapdragon@^0.8.1: source-map-resolve "^0.5.0" use "^3.1.0" -snowflake-sdk@^1.5.0: - version "1.6.14" - resolved "https://registry.yarnpkg.com/snowflake-sdk/-/snowflake-sdk-1.6.14.tgz#7e1fd52df738c3cdb74277b7b2b35a40394c1941" - integrity sha512-sKg17Yz1/aydKxlA4unlprH+uw9ZsvRezdUmamLjNlvsXQsw+pok4PoMeCKtWs2OSVFnX0VO3eSacCPglQrAQA== +snowflake-sdk@^1.6.11: + version "1.6.17" + resolved "https://registry.yarnpkg.com/snowflake-sdk/-/snowflake-sdk-1.6.17.tgz#dc002b44b6545d6a036a2ce3c1f34dd4f4353c42" + integrity sha512-ui2zRPbXIK3qaW8zuQCy+mHTieN6U5yeoJmKkyhdnh/8qKOqfR9Csj5nv+bXm8Y64kG24sybtIfCzf8C9abUMQ== dependencies: - "@azure/storage-blob" "^12.5.0" + "@azure/storage-blob" "^12.11.0" "@techteamer/ocsp" "1.0.0" agent-base "^6.0.2" asn1.js-rfc2560 "^5.0.0" @@ -4880,7 +4880,7 @@ snowflake-sdk@^1.5.0: mime-types "^2.1.29" mkdirp "^1.0.3" mock-require "^3.0.3" - moment "^2.29.3" + moment "^2.29.4" moment-timezone "^0.5.15" open "^7.3.1" python-struct "^1.1.3" From 77de4f681cfe8749faad64ede4e634b56741a2a2 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Thu, 26 Jan 2023 17:11:22 -0500 Subject: [PATCH 18/29] good version of LRU --- lib/pools.js | 10 +++++----- package.json | 2 +- yarn.lock | 12 ++++++++++++ 3 files changed, 18 insertions(+), 6 deletions(-) diff --git a/lib/pools.js b/lib/pools.js index d9f0f9e..e616f60 100644 --- a/lib/pools.js +++ b/lib/pools.js @@ -1,14 +1,14 @@ import LRU from "lru-cache"; import * as Sentry from "@sentry/node"; -const ttl = 1000 * 60 * 10; // 10m +const maxAge = 1000 * 60 * 10; // 10m export default class Pools { constructor(createPool) { this.createPool = createPool; this.cache = new LRU({ max: 100, - ttl, + maxAge, updateAgeOnGet: true, dispose(_key, pool) { pool.end(); @@ -17,8 +17,8 @@ export 
default class Pools { let loop; (loop = () => { - this.cache.purgeStale(); - this.timeout = setTimeout(loop, ttl / 2); + this.cache.prune(); + this.timeout = setTimeout(loop, maxAge / 2); })(); } @@ -45,7 +45,7 @@ export default class Pools { } del(credentials) { - this.cache.delete(JSON.stringify(credentials)); + this.cache.del(JSON.stringify(credentials)); } end() { diff --git a/package.json b/package.json index a7c1110..2514251 100644 --- a/package.json +++ b/package.json @@ -22,7 +22,7 @@ "@sentry/node": "^7.33.0", "JSONStream": "^1.3.5", "ajv": "^8.11.0", - "lru-cache": "^7.14.1", + "lru-cache": "^6.0.0", "micro": "^9.3.4", "mssql": "^9.0.1", "mysql2": "^3.0.1", diff --git a/yarn.lock b/yarn.lock index bedd255..87aa7c3 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3619,6 +3619,13 @@ lru-cache@^5.1.1: dependencies: yallist "^3.0.2" +lru-cache@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94" + integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA== + dependencies: + yallist "^4.0.0" + lru-cache@^7.14.1: version "7.14.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.1.tgz#8da8d2f5f59827edb388e63e459ac23d6d408fea" @@ -5756,6 +5763,11 @@ yallist@^3.0.2: resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd" integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g== +yallist@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" + integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== + yargs-parser@20.2.4: version "20.2.4" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.4.tgz#b42890f14566796f85ae8e3a25290d205f154a54" From 
05fe898aa4df89938de24d03e9d69c09ca7f30cb Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Tue, 31 Jan 2023 10:33:22 -0500 Subject: [PATCH 19/29] fix ajv config --- lib/validate.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/validate.js b/lib/validate.js index 0312380..1b9c48c 100644 --- a/lib/validate.js +++ b/lib/validate.js @@ -1,6 +1,6 @@ import Ajv from "ajv"; -const ajv = new Ajv(); +const ajv = new Ajv({allowUnionTypes: true}); export const validateQueryPayload = ajv.compile({ type: "object", @@ -18,6 +18,6 @@ export const validateDescribeColumnsPayload = ajv.compile({ properties: { catalog: {type: "string", minLength: 1, maxLength: 32 * 1000}, schema: {type: "string", minLength: 1, maxLength: 32 * 1000}, - table: {type: "string", minLength: 1, maxLength: 32 * 1000} + table: {type: "string", minLength: 1, maxLength: 32 * 1000}, }, -}) +}); From 9360cf1644f22685bdb97f86fcf04aa38d80ed80 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 1 Feb 2023 11:13:26 -0500 Subject: [PATCH 20/29] migrated postgres tests --- .env.test | 3 +- .env.test.js | 1 + data/data.sql | 132 +++ data/schema.sql | 1688 ++++++++++++++++++++++++++++++++++++++ docker-compose.local.yml | 6 +- docker-compose.yml | 13 +- lib/postgres.js | 44 +- test/mssql.test.js | 13 +- test/mysql.test.js | 99 ++- test/postgres.test.js | 309 +++++++ 10 files changed, 2225 insertions(+), 83 deletions(-) create mode 100644 data/data.sql create mode 100644 data/schema.sql create mode 100644 test/postgres.test.js diff --git a/.env.test b/.env.test index db2bc58..78d04f2 100644 --- a/.env.test +++ b/.env.test @@ -1,3 +1,4 @@ NODE_ENV=test MYSQL_CREDENTIALS=mysql://root@mysql:3306/mysql?sslMode=DISABLED -MSSQL_CREDENTIALS=Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; \ No newline at end of file +MSSQL_CREDENTIALS=Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; 
+POSTGRES_CREDENTIALS=postgres://postgres@postgres:5432/postgres?sslmode=disable \ No newline at end of file diff --git a/.env.test.js b/.env.test.js index 17f565b..764aff0 100644 --- a/.env.test.js +++ b/.env.test.js @@ -1,5 +1,6 @@ export const MSSQL_CREDENTIALS = env("MSSQL_CREDENTIALS"); export const MYSQL_CREDENTIALS = env("MYSQL_CREDENTIALS"); +export const POSTGRES_CREDENTIALS = env("POSTGRES_CREDENTIALS"); export const NODE_ENV = env("NODE_ENV"); function env(key, defaultValue) { diff --git a/data/data.sql b/data/data.sql new file mode 100644 index 0000000..879dfe1 --- /dev/null +++ b/data/data.sql @@ -0,0 +1,132 @@ +INSERT INTO users(avatar_url, login, name, type, bio, home_url, github_id, active, stripe_customer_id, email, flag_create_team, flag_data_connectors, github_login) VALUES + ('https://avatars2.githubusercontent.com/u/43?v=4', 'example', 'Example User', 'individual', 'An example user.', '', 43, TRUE, NULL, '', FALSE, FALSE, NULL), + ('https://avatars2.githubusercontent.com/u/32314?v=4', 'tmcw', 'Tom MacWright', 'individual', 'creator of open source, like @documentationjs @simple-statistics & more', 'https://macwright.org/', 32314, TRUE, NULL, 'tom@observablehq.com', TRUE, TRUE, 'tmcw'), + ('https://avatars2.githubusercontent.com/u/230541?v=4', 'mbostock', 'Mike Bostock', 'individual', 'Code and data for humans. Founder @observablehq. Creator @d3. Former @nytgraphics. 
Pronounced BOSS-tock.', 'https://bost.ocks.org/mike/', 230541, TRUE, NULL, 'mike@observablehq.com', TRUE, TRUE, 'mbostock'), + ('https://avatars2.githubusercontent.com/u/230542?v=4', 'title-changer', 'Title Changer', 'individual', '', '', 230542, TRUE, NULL, '', TRUE, FALSE, NULL), + ('https://avatars2.githubusercontent.com/u/4001?v=4', 'banny', 'Banny McBannerson', 'individual', 'An example bad, inactive user.', '', 4001, FALSE, NULL, '', FALSE, FALSE, 'banny'), + ('https://avatars2.githubusercontent.com/u/101?v=4', 'alice', 'Alice', 'individual', '', '', 101, TRUE, NULL, 'alice@example.com', TRUE, FALSE, 'alice'), + ('https://avatars2.githubusercontent.com/u/202?v=4', 'bob', 'Bob', 'individual', '', '', 202, TRUE, NULL, 'bob@example.com', TRUE, FALSE, 'bob'), + ('https://avatars2.githubusercontent.com/u/303?v=4', 'carlos', 'Carlos', 'individual', '', '', 303, TRUE, NULL, 'carlos@example.com', TRUE, FALSE, 'carlos'), + ('https://avatars2.githubusercontent.com/u/30080011?v=4', 'observablehq', 'Observable', 'team', 'A better way to code.', 'https://observablehq.com/', NULL, TRUE, 'cus_DJH71LZJ68KEBh', 'billing@observablehq.com', FALSE, TRUE, NULL), + ('https://avatars2.githubusercontent.com/u/30080012?v=4', 'letters', 'Letters', 'team', 'A team for ephemeral users created with createUser()', 'https://letters.com/', NULL, TRUE, 'cus_DJH71LZJ68KEBf', 'letters@letters.com', FALSE, FALSE, 'letters'), + ('https://avatars2.githubusercontent.com/u/303?v=4', 'team', 'Team team', 'team', 'A team with no aggregate tests', 'https://example.com/', NULL, TRUE, 'cus_DJH71LZJ68KEBf', 'team@example.com', FALSE, TRUE, 'example-team'); + +INSERT INTO team_members(team_id, user_id, role) VALUES + ((SELECT id FROM users WHERE login = 'observablehq'), (SELECT id FROM users WHERE login = 'mbostock'), 'owner'), + ((SELECT id FROM users WHERE login = 'observablehq'), (SELECT id FROM users WHERE login = 'tmcw'), 'member'), + ((SELECT id FROM users WHERE login = 'team'), (SELECT id FROM 
users WHERE login = 'alice'), 'owner'), + ((SELECT id FROM users WHERE login = 'team'), (SELECT id FROM users WHERE login = 'bob'), 'member'), + ((SELECT id FROM users WHERE login = 'team'), (SELECT id FROM users WHERE login = 'carlos'), 'viewer'); + +INSERT INTO documents(user_id, slug, trashed, trash_time, publish_time, likes) VALUES + ((SELECT id FROM users WHERE login = 'mbostock'), 'hello-world', FALSE, NULL, '2017-10-11 01:02', 0), + ((SELECT id FROM users WHERE login = 'mbostock'), 'another-test', FALSE, NULL, '2017-10-11 02:04', 0), + ((SELECT id FROM users WHERE login = 'mbostock'), NULL, FALSE, NULL, NULL, 0), + ((SELECT id FROM users WHERE login = 'tmcw'), 'trash-old', TRUE, NOW() - INTERVAL '1 hours', '2017-10-11 04:02', 0), + ((SELECT id FROM users WHERE login = 'tmcw'), 'trash-new', TRUE, NOW() + INTERVAL '1 hours', '2017-10-11 04:02', 0), + ((SELECT id FROM users WHERE login = 'tmcw'), 'hello-tom', FALSE, NULL, NOW() - INTERVAL '1 hours', 2), + ((SELECT id FROM users WHERE login = 'example'), 'three', FALSE, NULL, '2017-10-11 05:02', 0), + ((SELECT id FROM users WHERE login = 'banny'), 'spam', TRUE, NOW() + INTERVAL '1 hours', NOW() - INTERVAL '10 hours', 0), + ((SELECT id FROM users WHERE login = 'observablehq'), 'team-notebook', FALSE, NULL, '2017-11-11 11:11', 5), + ((SELECT id FROM users WHERE login = 'observablehq'), NULL, FALSE, NULL, NULL, 5), + ((SELECT id FROM users WHERE login = 'title-changer'), NULL, FALSE, NULL, NULL, 0); + +INSERT INTO documents(user_id, title, slug, trashed, trash_time, fork_id, fork_version) VALUES + ((SELECT id FROM users WHERE login = 'tmcw'), 'Hello, world!', 'hello-fork', FALSE, NULL, (SELECT id FROM documents WHERE slug = 'hello-world'), 3), + ((SELECT id FROM users WHERE login = 'example'), 'Hello, world!', 'trivial-fork', FALSE, NULL, (SELECT id FROM documents WHERE slug = 'hello-world'), 3); + +INSERT INTO document_events(id, user_id, version, time, type, node_id, new_node_value, new_node_pinned) VALUES + 
((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 0, '2017-10-11 01:00', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 1, '2017-10-11 01:01', 'insert_node', 1, 'md`# Hello, world!`', FALSE), + ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 2, '2017-10-11 01:02', 'modify_title', NULL, 'Hello, world!', NULL), + ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 3, '2017-10-11 01:03', 'insert_node', 3, 'md`I am a paragraph.`', FALSE), + ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 4, '2017-10-11 01:04', 'modify_node', 3, 'md`I am a new paragraph.`', NULL), + ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 5, '2017-10-11 01:05', 'pin_node', 3, NULL, NULL), + ((SELECT id FROM documents WHERE slug = 'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 0, '2017-10-11 02:00', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE slug = 'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 1, '2017-10-11 02:01', 'insert_node', 1, 'md`# Another Test`', FALSE), + ((SELECT id FROM documents WHERE slug = 'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 2, '2017-10-11 02:02', 'modify_title', NULL, 'Another Test', NULL), + ((SELECT id FROM documents WHERE slug = 'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 3, '2017-10-11 02:03', 'insert_node', 3, 'md`First.`', FALSE), + ((SELECT id FROM documents WHERE slug = 'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 4, '2017-10-11 02:04', 'insert_node', 4, 'md`I like D3.js.`', FALSE), + ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 
'mbostock')), (SELECT id FROM users WHERE login = 'mbostock'), 0, '2017-10-11 03:00', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'mbostock')), (SELECT id FROM users WHERE login = 'mbostock'), 1, '2017-10-11 03:01', 'insert_node', 1, 'md`# Hello World`', FALSE), + ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'mbostock')), (SELECT id FROM users WHERE login = 'mbostock'), 2, '2017-10-11 03:02', 'modify_title', NULL, 'Hello World', NULL), + ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'mbostock')), (SELECT id FROM users WHERE login = 'mbostock'), 3, '2017-10-11 03:03', 'insert_node', 3, 'md`I am a paragraph.`', FALSE), + ((SELECT id FROM documents WHERE slug = 'trash-old'), (SELECT id FROM users WHERE login = 'tmcw'), 0, '2017-10-11 04:00', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE slug = 'trash-old'), (SELECT id FROM users WHERE login = 'tmcw'), 1, '2017-10-11 04:01', 'modify_title', 1, '`Trash Old`', NULL), + ((SELECT id FROM documents WHERE slug = 'trash-new'), (SELECT id FROM users WHERE login = 'tmcw'), 0, '2017-10-11 04:00', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE slug = 'trash-new'), (SELECT id FROM users WHERE login = 'tmcw'), 1, '2017-10-11 04:01', 'modify_title', 1, '`Trash New`', NULL), + ((SELECT id FROM documents WHERE slug = 'hello-tom'), (SELECT id FROM users WHERE login = 'tmcw'), 0, '2017-10-11 04:00', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE slug = 'hello-tom'), (SELECT id FROM users WHERE login = 'tmcw'), 1, '2017-10-11 04:01', 'modify_title', 1, 'Hello, Tom!', NULL), + ((SELECT id FROM documents WHERE slug = 'three'), (SELECT id FROM users WHERE login = 'example'), 0, '2017-10-11 05:00', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE slug = 'three'), (SELECT id FROM users WHERE 
login = 'example'), 1, '2017-10-11 05:01', 'insert_node', 1, 'md`# Three`', FALSE), + ((SELECT id FROM documents WHERE slug = 'three'), (SELECT id FROM users WHERE login = 'example'), 2, '2017-10-11 05:02', 'modify_title', 2, 'Three', NULL), + ((SELECT id FROM documents WHERE slug = 'hello-fork'), (SELECT id FROM users WHERE login = 'tmcw'), 3, '2017-10-11 05:04', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE slug = 'hello-fork'), (SELECT id FROM users WHERE login = 'tmcw'), 4, '2017-10-11 05:03', 'insert_node', 4, 'md`I am a forked document.`', FALSE), + ((SELECT id FROM documents WHERE slug = 'trivial-fork'), (SELECT id FROM users WHERE login = 'example'), 3, '2017-10-12 05:04', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 0, '2017-10-11 06:00', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 1, '2017-10-11 06:01', 'insert_node', 1, 'md`# Buy Viagra!`', FALSE), + ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 2, '2017-10-11 06:02', 'modify_title', NULL, 'Buy Viagra!', NULL), + ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 3, '2017-10-11 06:03', 'insert_node', 3, 'md`I am completely legitimate content.`', FALSE), + ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 4, '2017-10-11 06:04', 'modify_node', 3, 'md`Please click [here](http://spam.com/).`', NULL), + ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 5, '2017-10-11 06:05', 'pin_node', 3, NULL, NULL), + ((SELECT id FROM documents WHERE slug = 'team-notebook'), (SELECT id FROM users WHERE login = 'tmcw'), 0, '2017-10-11 05:00', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE slug = 'team-notebook'), (SELECT id FROM users WHERE login = 
'tmcw'), 1, '2017-10-11 05:01', 'insert_node', 1, 'md`# Team Notebook`', FALSE), + ((SELECT id FROM documents WHERE slug = 'team-notebook'), (SELECT id FROM users WHERE login = 'tmcw'), 2, '2017-10-11 05:02', 'modify_title', 2, 'Team Notebook', NULL), + ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'observablehq')), (SELECT id FROM users WHERE login = 'mbostock'), 0, '2017-10-11 05:00', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'observablehq')), (SELECT id FROM users WHERE login = 'mbostock'), 1, '2017-10-11 05:01', 'insert_node', 1, 'md`# Team Unpublished`', FALSE), + ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'observablehq')), (SELECT id FROM users WHERE login = 'mbostock'), 2, '2017-10-11 05:02', 'modify_title', 2, 'Team Unpublished', NULL), + ((SELECT id FROM documents WHERE user_id = (SELECT id FROM users WHERE login = 'title-changer')), (SELECT id FROM users WHERE login = 'title-changer'), 0, '2018-11-12 00:00', 'create', NULL, NULL, NULL), + ((SELECT id FROM documents WHERE user_id = (SELECT id FROM users WHERE login = 'title-changer')), (SELECT id FROM users WHERE login = 'title-changer'), 1, '2018-11-12 00:00', 'modify_title', NULL, 'first', NULL), + ((SELECT id FROM documents WHERE user_id = (SELECT id FROM users WHERE login = 'title-changer')), (SELECT id FROM users WHERE login = 'title-changer'), 2, '2018-11-12 00:00', 'modify_title', NULL, 'second', NULL); + +INSERT INTO document_publishes(id, user_id, version, title, time) VALUES + ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 2, 'Hello, world!', '2017-10-11 01:02'), + ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 3, 'Hello, world!', '2017-10-11 01:03'), + ((SELECT id FROM documents WHERE slug = 
'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 4, 'Another Test', '2017-10-11 02:04'), + ((SELECT id FROM documents WHERE slug = 'trash-old'), (SELECT id FROM users WHERE login = 'tmcw'), 1, 'Trash Old', '2017-10-11 04:02'), + ((SELECT id FROM documents WHERE slug = 'trash-new'), (SELECT id FROM users WHERE login = 'tmcw'), 1, 'Trash New', '2017-10-11 04:02'), + ((SELECT id FROM documents WHERE slug = 'hello-tom'), (SELECT id FROM users WHERE login = 'tmcw'), 1, 'Hello, Tom!', '2017-10-11 05:02'), + ((SELECT id FROM documents WHERE slug = 'trivial-fork'), (SELECT id FROM users WHERE login = 'example'), 3, 'Hello, world!', '2017-10-12 05:02'), + ((SELECT id FROM documents WHERE slug = 'three'), (SELECT id FROM users WHERE login = 'example'), 2, 'Three', '2017-10-11 05:02'), + ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 5, 'Buy Viagra!', '2017-10-11 06:10'), + ((SELECT id FROM documents WHERE slug = 'team-notebook'), (SELECT id FROM users WHERE login = 'mbostock'), 2, 'Team Notebook', '2017-11-11 12:11'), + ((SELECT id FROM documents WHERE user_id = (SELECT id FROM users WHERE login = 'title-changer')), (SELECT id FROM users WHERE login = 'title-changer'), 1, 'first', '2018-11-12 00:01'); + +INSERT INTO document_aliases(id, user_id, slug) VALUES + ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 'hello-world'), + ((SELECT id FROM documents WHERE slug = 'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 'another-test'), + ((SELECT id FROM documents WHERE slug = 'trash-old'), (SELECT id FROM users WHERE login = 'tmcw'), 'trash-old'), + ((SELECT id FROM documents WHERE slug = 'trash-new'), (SELECT id FROM users WHERE login = 'tmcw'), 'trash-new'), + ((SELECT id FROM documents WHERE slug = 'hello-tom'), (SELECT id FROM users WHERE login = 'tmcw'), 'hello-tom'), + ((SELECT id FROM documents WHERE slug = 'three'), (SELECT id FROM 
users WHERE login = 'example'), 'three'), + ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 'spam'), + ((SELECT id FROM documents WHERE slug = 'team-notebook'), (SELECT id FROM users WHERE login = 'observablehq'), 'team-notebook'), + ((SELECT id FROM documents WHERE slug = 'hello-fork'), (SELECT id FROM users WHERE login = 'tmcw'), 'hello-fork'), + ((SELECT id FROM documents WHERE slug = 'trivial-fork'), (SELECT id FROM users WHERE login = 'example'), 'trivial-fork'); + +UPDATE document_thumbnails + SET assigned = TRUE; + +UPDATE document_thumbnails + SET hash = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' + WHERE id IN (SELECT id FROM documents WHERE slug IN ('hello-world', 'another-test')); + +INSERT INTO collections(slug, title, description, update_time, user_id, type) VALUES + ('examples', 'Examples', 'A collection for tests', '2017-11-23 06:00', (SELECT id FROM users WHERE login = 'observablehq'), 'public'), + ('kittens', 'Kittens', 'Like cats, but cute', '2017-11-23 06:01', (SELECT id FROM users WHERE login = 'observablehq'), 'public'), + ('private-kittens', 'Private Kittens', 'Like cats, but cute, and also private', '2017-11-23 06:10', (SELECT id FROM users WHERE login = 'observablehq'), 'private'), + ('empty', 'Empty', 'An empty collection', '2017-11-23 06:03', (SELECT id FROM users WHERE login = 'observablehq'), 'public'), + ('pizza', 'Pizzas', 'Everything is pizza', '2017-11-23 06:04', (SELECT id FROM users WHERE login = 'observablehq'), 'public'); + +INSERT INTO collection_documents(id, document_id, update_time) VALUES + ((SELECT id FROM collections WHERE slug = 'examples'), (SELECT id FROM documents WHERE slug = 'hello-world'), '2017-10-11 01:01'), + ((SELECT id FROM collections WHERE slug = 'examples'), (SELECT id FROM documents WHERE slug = 'another-test'), '2017-10-11 01:02'), + ((SELECT id FROM collections WHERE slug = 'examples'), (SELECT id FROM documents WHERE slug = 
'hello-tom'), '2017-10-11 01:03'), + ((SELECT id FROM collections WHERE slug = 'examples'), (SELECT id FROM documents WHERE slug = 'trash-new'), '2017-10-11 01:04'), + ((SELECT id FROM collections WHERE slug = 'examples'), (SELECT id FROM documents WHERE slug = 'team-notebook'), '2017-10-11 01:05'), + ((SELECT id FROM collections WHERE slug = 'kittens'), (SELECT id FROM documents WHERE slug = 'hello-world'), '2017-10-11 01:06'), + ((SELECT id FROM collections WHERE slug = 'kittens'), (SELECT id FROM documents WHERE slug = 'another-test'), '2017-10-11 01:07'), + ((SELECT id FROM collections WHERE slug = 'kittens'), (SELECT id FROM documents WHERE slug = 'hello-tom'), '2017-10-11 01:08'), + ((SELECT id FROM collections WHERE slug = 'private-kittens'), (SELECT id FROM documents WHERE slug = 'hello-tom'), '2017-10-11 01:08'), + ((SELECT id FROM collections WHERE slug = 'private-kittens'), (SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'observablehq')), '2017-10-11 01:08'), + ((SELECT id FROM collections WHERE slug = 'private-kittens'), (SELECT id FROM documents WHERE slug = 'spam'), '2017-10-11 01:08'); diff --git a/data/schema.sql b/data/schema.sql new file mode 100644 index 0000000..ca8874c --- /dev/null +++ b/data/schema.sql @@ -0,0 +1,1688 @@ +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 9.6.12 +-- Dumped by pg_dump version 9.6.12 + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- Name: plpgsql; Type: EXTENSION; Schema: -; Owner: - +-- + +CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog; + + +-- +-- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner: - +-- + +COMMENT ON EXTENSION plpgsql IS 
'PL/pgSQL procedural language'; + + +-- +-- Name: pg_stat_statements; Type: EXTENSION; Schema: -; Owner: - +-- + +CREATE EXTENSION IF NOT EXISTS pg_stat_statements WITH SCHEMA public; + + +-- +-- Name: EXTENSION pg_stat_statements; Type: COMMENT; Schema: -; Owner: - +-- + +COMMENT ON EXTENSION pg_stat_statements IS 'track execution statistics of all SQL statements executed'; + + +-- +-- Name: pgcrypto; Type: EXTENSION; Schema: -; Owner: - +-- + +CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public; + + +-- +-- Name: EXTENSION pgcrypto; Type: COMMENT; Schema: -; Owner: - +-- + +COMMENT ON EXTENSION pgcrypto IS 'cryptographic functions'; + + +-- +-- Name: collection_type; Type: TYPE; Schema: public; Owner: - +-- + +CREATE TYPE public.collection_type AS ENUM ( + 'public', + 'private' +); + + +-- +-- Name: data_connector_type; Type: TYPE; Schema: public; Owner: - +-- + +CREATE TYPE public.data_connector_type AS ENUM ( + 'mysql', + 'postgres' +); + + +-- +-- Name: document_event_type; Type: TYPE; Schema: public; Owner: - +-- + +CREATE TYPE public.document_event_type AS ENUM ( + 'create', + 'modify_title', + 'insert_node', + 'remove_node', + 'modify_node', + 'move_node', + 'noop', + 'pin_node', + 'unpin_node', + 'revert', + 'insert_comment', + 'modify_comment', + 'remove_comment' +); + + +-- +-- Name: team_role; Type: TYPE; Schema: public; Owner: - +-- + +CREATE TYPE public.team_role AS ENUM ( + 'member', + 'owner', + 'viewer' +); + + +-- +-- Name: user_type; Type: TYPE; Schema: public; Owner: - +-- + +CREATE TYPE public.user_type AS ENUM ( + 'individual', + 'team' +); + + +-- +-- Name: ban(character varying); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.ban(user_login character varying) RETURNS boolean + LANGUAGE plpgsql + AS $$ + BEGIN + UPDATE users + SET active = TRUE + WHERE login = user_login; + UPDATE documents + SET trashed = TRUE, trash_time = NOW() + INTERVAL '45 days' + WHERE user_id = (SELECT id FROM users WHERE login = 
user_login) + AND trashed = FALSE; + RETURN true; + END +$$; + + +-- +-- Name: cancel_document_thumbnails(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.cancel_document_thumbnails() RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + DELETE FROM document_thumbnails + WHERE assigned = FALSE + AND id = NEW.id + AND version <> NEW.version + AND version NOT IN (SELECT p.version FROM document_publishes p WHERE p.id = NEW.id ORDER BY time DESC LIMIT 1) + AND version NOT IN (SELECT d.version FROM documents d WHERE d.id = NEW.id); + RETURN NEW; + END; +$$; + + +-- +-- Name: compute_document_version_ranges(character, integer, integer); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.compute_document_version_ranges(document_id character, start_version integer, end_version integer) RETURNS TABLE(from_version integer, to_version integer) + LANGUAGE plpgsql + AS $$ +DECLARE +revert_from_version int; +revert_to_version int; +BEGIN + to_version := end_version; + LOOP + SELECT e.version, e.node_id + INTO revert_from_version, revert_to_version + FROM document_events e + WHERE e.id = document_id + AND e.type = 'revert' + AND e.version <= to_version + AND e.version >= start_version + ORDER BY e.version DESC + LIMIT 1; + IF revert_to_version IS NULL THEN + from_version := start_version; + RETURN NEXT; + RETURN; + END IF; + IF revert_from_version < end_version THEN + from_version := revert_from_version + 1; + RETURN NEXT; + END IF; + to_version := revert_to_version; + END LOOP; +END +$$; + + +-- +-- Name: decrement_document_likes(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.decrement_document_likes() RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + UPDATE documents + SET likes = likes - 1 + WHERE id = OLD.document_id; + RETURN OLD; + END; +$$; + + +-- +-- Name: disallow_document_data_connectors(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.disallow_document_data_connectors() 
RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + DELETE FROM data_connectors_documents + WHERE document_id = NEW.id; + RETURN NEW; + END; +$$; + + +-- +-- Name: disallow_document_secrets(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.disallow_document_secrets() RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + UPDATE documents + SET allow_secrets = FALSE + WHERE id = NEW.id + AND allow_secrets = TRUE; + RETURN NEW; + END; +$$; + + +-- +-- Name: global_id(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.global_id() RETURNS trigger + LANGUAGE plpgsql + AS $$ + DECLARE + id CHAR(16); + query TEXT; + found INT; + BEGIN + IF NEW.id IS NULL THEN + query := 'SELECT 1 FROM ' || quote_ident(TG_TABLE_NAME) || ' WHERE id='; + LOOP + id := encode(gen_random_bytes(8), 'hex'); + EXECUTE query || quote_literal(id) INTO found; + IF found IS NULL THEN EXIT; END IF; + END LOOP; + NEW.id = id; + END IF; + RETURN NEW; + END; +$$; + + +-- +-- Name: increment_document_likes(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.increment_document_likes() RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + UPDATE documents + SET likes = likes + 1 + WHERE id = NEW.document_id; + RETURN NEW; + END; +$$; + + +-- +-- Name: index_document(character, integer); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.index_document(doc_id character, doc_version integer) RETURNS void + LANGUAGE plpgsql + AS $$ + DECLARE + is_published BOOLEAN; + BEGIN + SELECT TRUE + INTO is_published + FROM document_publishes p + JOIN documents d ON (p.id = d.id) + WHERE p.id = doc_id + AND p.version = doc_version + AND d.slug IS NOT NULL; + IF EXISTS(SELECT 1 FROM document_vectors v WHERE v.id = doc_id AND v.version = doc_version) THEN + IF is_published IS TRUE THEN + DELETE FROM document_vectors WHERE id = doc_id AND version <> doc_version AND published IS TRUE; + UPDATE document_vectors SET published = TRUE WHERE id 
= doc_id AND version = doc_version; + END IF; + RETURN; + END IF; + WITH RECURSIVE ancestors AS ( + (SELECT d.id, d.version AS ancestor_version, d.fork_version, d.fork_id FROM documents d WHERE d.id = doc_id) + UNION (SELECT d.id, a.fork_version AS ancestor_version, d.fork_version, d.fork_id FROM ancestors a JOIN documents d ON (d.id = a.fork_id))), + ranges AS ( + SELECT a.id, r.from_version, r.to_version FROM ancestors a + LEFT JOIN LATERAL (SELECT from_version, to_version FROM compute_document_version_ranges(a.id, COALESCE(a.fork_version, 0), LEAST(doc_version, a.ancestor_version))) r ON TRUE), + texts AS (SELECT + COALESCE(string_agg(e.new_node_value, CHR(10) || CHR(10)), '') AS value + FROM ( + WITH events AS (SELECT e.type, e.version, e.node_id, e.new_node_value + FROM ancestors a + JOIN document_events e ON (e.id = a.id) + JOIN ranges r ON (e.id = r.id AND r.from_version <= e.version AND r.to_version >= e.version) + WHERE e.type IN ('insert_node', 'remove_node', 'modify_node') + ) + SELECT e.* + FROM events e + WHERE NOT EXISTS ( + SELECT 1 + FROM events o + WHERE e.type IN ('insert_node', 'modify_node') + AND o.type IN ('remove_node', 'modify_node') + AND o.node_id = e.node_id + AND o.version > e.version + ) + ORDER BY e.version ASC + ) e) + INSERT INTO document_vectors(id, version, published, vector) + SELECT + d.id, doc_version, is_published IS TRUE, + setweight(to_tsvector('simple', COALESCE((CASE WHEN is_published THEN p.title ELSE d.title END), '')), 'A') || + setweight(to_tsvector('simple', COALESCE(u.name, '') || ' ' || COALESCE(u.login, '')), 'A') || + setweight(to_tsvector('simple', translate(t.value, '.[]{}()*/+', ' ')), 'B') AS vector + FROM texts t + JOIN documents d ON (d.id = doc_id) + JOIN users u ON (u.id = d.user_id) + LEFT JOIN document_publishes p ON (d.slug IS NOT NULL AND d.id = p.id AND p.version = doc_version) + ON CONFLICT(id, published) DO UPDATE SET version = EXCLUDED.version, vector = EXCLUDED.vector; + RETURN; + END; +$$; + + +-- 
+-- Name: index_published_document(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.index_published_document() RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + PERFORM index_document(NEW.id, NEW.version); + RETURN NEW; + END; +$$; + + +-- +-- Name: insert_document_alias(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.insert_document_alias() RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + INSERT INTO document_aliases(id, user_id, slug) + VALUES (NEW.id, NEW.user_id, NEW.slug) + ON CONFLICT DO NOTHING; + RETURN NEW; + END; +$$; + + +-- +-- Name: insert_document_publish_thumbnail(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.insert_document_publish_thumbnail() RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + DELETE FROM document_thumbnails + WHERE assigned = FALSE + AND id = NEW.id + AND version <> NEW.version + AND version NOT IN (SELECT d.version FROM documents d WHERE d.id = NEW.id); + INSERT INTO document_thumbnails(id, version, event_time) + SELECT id, version, time + FROM document_events + WHERE id = NEW.id + AND version = NEW.version + ON CONFLICT (id, version) DO NOTHING; + RETURN NEW; + END; +$$; + + +-- +-- Name: notify_document_comments(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.notify_document_comments() RETURNS trigger + LANGUAGE plpgsql + AS $$ + DECLARE + subject document_comments%ROWTYPE; + deleted BOOLEAN; + BEGIN + CASE TG_OP + WHEN 'INSERT' THEN + subject = NEW; + deleted = FALSE; + WHEN 'UPDATE' THEN + subject = NEW; + deleted = FALSE; + WHEN 'DELETE' THEN + subject = OLD; + deleted = TRUE; + END CASE; + PERFORM pg_notify('document_comments', json_build_object('id', subject.document_id, 'comment_id', subject.id, 'deleted', deleted)::TEXT); + RETURN subject; + END; +$$; + + +-- +-- Name: notify_document_presence(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.notify_document_presence() RETURNS trigger + 
LANGUAGE plpgsql + AS $$ + DECLARE + subject document_presence%ROWTYPE; + BEGIN + CASE TG_OP + WHEN 'INSERT' THEN subject = NEW; + WHEN 'UPDATE' THEN subject = NEW; + WHEN 'DELETE' THEN subject = OLD; + END CASE; + PERFORM pg_notify('document_presence', json_build_object('id', subject.id, 'user_id', subject.user_id)::TEXT); + RETURN subject; + END; +$$; + + +-- +-- Name: notify_document_publish(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.notify_document_publish() RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + PERFORM pg_notify('document_publishes', json_build_object('id', NEW.id, 'version', NEW.version)::TEXT); + RETURN NEW; + END; +$$; + + +-- +-- Name: title_score(text, text); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.title_score(query text, title text) RETURNS double precision + LANGUAGE plpgsql + AS $$ + DECLARE + count integer; + query_words text[] := string_to_array(regexp_replace(query, '\W', ' ', 'g'), ' '); + title_words text[] := string_to_array(regexp_replace(title, '\W', ' ', 'g'), ' '); + BEGIN + count := (SELECT COUNT(*) FROM (SELECT UNNEST(query_words) INTERSECT SELECT UNNEST(title_words)) s); + RETURN count / ARRAY_LENGTH(query_words, 1)::FLOAT; +END +$$; + + +-- +-- Name: unban(character varying); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.unban(user_login character varying) RETURNS boolean + LANGUAGE plpgsql + AS $$ + BEGIN + UPDATE users + SET active = FALSE + WHERE login = user_login; + UPDATE documents + SET trashed = FALSE, trash_time = NULL + WHERE user_id = (SELECT id FROM users WHERE login = user_login) + AND trash_time > NOW() + INTERVAL '30 days'; + RETURN true; + END +$$; + + +-- +-- Name: update_document_head(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.update_document_head() RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + UPDATE documents + SET update_time = NEW.time, version = NEW.version, title = CASE + WHEN 
NEW.type = 'modify_title' THEN NEW.new_node_value + WHEN NEW.type = 'revert' THEN ( + WITH RECURSIVE lineage AS ( + (SELECT d.id, d.version + 1 AS version, d.fork_id, d.fork_version + FROM documents d WHERE d.id = NEW.id) + UNION (SELECT d.id, d.version, d.fork_id, d.fork_version + FROM lineage l JOIN documents d ON (d.id = l.fork_id))), + ranges AS ( + SELECT l.id, r.from_version, r.to_version FROM lineage l + LEFT JOIN LATERAL (SELECT from_version, to_version FROM compute_document_version_ranges(l.id, COALESCE(l.fork_version, 0), l.version)) r ON TRUE) + SELECT e.new_node_value + FROM document_events e + JOIN lineage l ON ( + (e.id = l.id AND l.id = NEW.id) + OR (e.id = l.fork_id AND e.version <= l.fork_version) + ) + JOIN ranges r ON (e.id = r.id AND r.from_version <= e.version AND r.to_version >= e.version) + WHERE e.type = 'modify_title' + ORDER BY e.version DESC + LIMIT 1) + ELSE title + END + WHERE id = NEW.id + AND version < NEW.version; + RETURN NEW; + END; +$$; + + +-- +-- Name: update_document_publish_time(); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.update_document_publish_time() RETURNS trigger + LANGUAGE plpgsql + AS $$ + BEGIN + UPDATE documents + SET publish_time = NEW.time + WHERE slug IS NULL + AND id = NEW.id; + RETURN NEW; + END; +$$; + + +-- +-- Name: user_is_type(character, public.user_type); Type: FUNCTION; Schema: public; Owner: - +-- + +CREATE FUNCTION public.user_is_type(character, public.user_type) RETURNS boolean + LANGUAGE sql + AS $_$ + SELECT EXISTS ( + SELECT 1 FROM users WHERE id = $1 AND type = $2 + ); +$_$; + + +SET default_tablespace = ''; + +SET default_with_oids = false; + +-- +-- Name: collection_documents; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.collection_documents ( + id character(16) NOT NULL, + document_id character(16) NOT NULL, + update_time timestamp without time zone DEFAULT now() +); + + +-- +-- Name: collections; Type: TABLE; Schema: public; Owner: - +-- + 
+CREATE TABLE public.collections ( + id character(16) NOT NULL, + slug character varying(255) NOT NULL, + title character varying(255) NOT NULL, + description character varying(255) NOT NULL, + update_time timestamp without time zone DEFAULT now(), + chronological boolean DEFAULT false NOT NULL, + user_id character(16) NOT NULL, + pinned boolean DEFAULT false NOT NULL, + type public.collection_type DEFAULT 'private'::public.collection_type NOT NULL, + custom_thumbnail character varying(64) DEFAULT NULL::character varying +); + + +-- +-- Name: data_connectors; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.data_connectors ( + user_id character(16) NOT NULL, + name character varying(255) NOT NULL, + type public.data_connector_type NOT NULL, + credentials_iv bytea NOT NULL, + credentials_red bytea, + credentials_blue bytea +); + + +-- +-- Name: data_connectors_documents; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.data_connectors_documents ( + document_id character(16) NOT NULL, + data_connector_user_id character(16) NOT NULL, + data_connector_name character varying(255) NOT NULL +); + + +-- +-- Name: document_aliases; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.document_aliases ( + id character(16) NOT NULL, + user_id character(16) NOT NULL, + slug character varying(255) NOT NULL +); + + +-- +-- Name: document_comments; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.document_comments ( + id character(16) NOT NULL, + user_id character(16) NOT NULL, + document_id character(16) NOT NULL, + node_id integer NOT NULL, + document_version integer NOT NULL, + content text NOT NULL, + create_time timestamp without time zone DEFAULT now() NOT NULL, + update_time timestamp without time zone, + resolved boolean DEFAULT false NOT NULL +); + + +-- +-- Name: document_events; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.document_events ( + id character(16) NOT NULL, + version integer NOT 
NULL, + type public.document_event_type NOT NULL, + "time" timestamp without time zone DEFAULT now(), + node_id integer, + new_node_value text, + new_next_node_id integer, + user_id character(16), + original_document_id character(16), + original_node_id integer, + new_node_pinned boolean +); + + +-- +-- Name: document_merges; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.document_merges ( + from_id character(16) NOT NULL, + from_version integer NOT NULL, + to_id character(16) NOT NULL, + to_start_version integer NOT NULL, + to_end_version integer NOT NULL, + user_id character(16) NOT NULL, + "time" timestamp without time zone DEFAULT now() +); + + +-- +-- Name: document_presence; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.document_presence ( + id character(16) NOT NULL, + user_id character(16), + "time" timestamp without time zone DEFAULT now() NOT NULL, + client_id bigint NOT NULL +); + + +-- +-- Name: document_presence_client_id_seq; Type: SEQUENCE; Schema: public; Owner: - +-- + +CREATE SEQUENCE public.document_presence_client_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +-- +-- Name: document_presence_client_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - +-- + +ALTER SEQUENCE public.document_presence_client_id_seq OWNED BY public.document_presence.client_id; + + +-- +-- Name: document_publishes; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.document_publishes ( + id character(16) NOT NULL, + version integer NOT NULL, + user_id character(16), + "time" timestamp without time zone DEFAULT now(), + title character varying(255) DEFAULT ''::character varying, + public boolean DEFAULT false NOT NULL +); + + +-- +-- Name: document_suggestions; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.document_suggestions ( + id character(16) NOT NULL, + user_id character(16) NOT NULL, + from_id character(16) NOT NULL, + to_id character(16) NOT NULL, + 
create_time timestamp without time zone DEFAULT now() NOT NULL, + close_time timestamp without time zone, + description character varying(255) DEFAULT ''::character varying NOT NULL, + closer_id character(16), + to_merge_end_version integer +); + + +-- +-- Name: document_thumbnails; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.document_thumbnails ( + id character(16) NOT NULL, + version integer NOT NULL, + hash character varying(64) DEFAULT NULL::character varying, + event_time timestamp without time zone NOT NULL, + assigned boolean DEFAULT false +); + + +-- +-- Name: document_vectors; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.document_vectors ( + id character(16) NOT NULL, + version integer NOT NULL, + vector tsvector NOT NULL, + published boolean DEFAULT false NOT NULL +); + + +-- +-- Name: documents; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.documents ( + id character(16) NOT NULL, + user_id character(16) NOT NULL, + version integer DEFAULT '-1'::integer NOT NULL, + slug character varying(255) DEFAULT NULL::character varying, + trashed boolean DEFAULT false, + title character varying(255) DEFAULT ''::character varying, + update_time timestamp without time zone DEFAULT now(), + trash_time timestamp without time zone, + publish_time timestamp without time zone, + fork_id character(16) DEFAULT NULL::bpchar, + fork_version integer, + access_key character(16) DEFAULT encode(public.gen_random_bytes(8), 'hex'::text) NOT NULL, + likes integer DEFAULT 0 NOT NULL, + allow_secrets boolean DEFAULT false NOT NULL, + custom_thumbnail character varying(64) DEFAULT NULL::character varying +); + + +-- +-- Name: schema_migrations; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.schema_migrations ( + name character varying(255) NOT NULL, + start_time timestamp without time zone DEFAULT now(), + end_time timestamp without time zone +); + + +-- +-- Name: team_invitations; Type: TABLE; Schema: public; 
Owner: - +-- + +CREATE TABLE public.team_invitations ( + team_id character(16) NOT NULL, + owner_id character(16) NOT NULL, + email character varying(255) NOT NULL, + create_time timestamp without time zone DEFAULT now() NOT NULL, + accept_time timestamp without time zone, + expire_time timestamp without time zone DEFAULT (now() + '3 days'::interval) NOT NULL, + id character(16) NOT NULL, + role public.team_role DEFAULT 'member'::public.team_role, + CONSTRAINT check_invitation_owner CHECK (public.user_is_type(owner_id, 'individual'::public.user_type)), + CONSTRAINT check_invitation_team CHECK (public.user_is_type(team_id, 'team'::public.user_type)) +); + + +-- +-- Name: team_members; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.team_members ( + team_id character(16) NOT NULL, + user_id character(16) NOT NULL, + role public.team_role DEFAULT 'member'::public.team_role NOT NULL, + CONSTRAINT check_membership_team CHECK (public.user_is_type(team_id, 'team'::public.user_type)), + CONSTRAINT check_membership_user CHECK (public.user_is_type(user_id, 'individual'::public.user_type)) +); + + +-- +-- Name: user_email_confirmations; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.user_email_confirmations ( + id character(16) NOT NULL, + email character varying(255) NOT NULL, + create_time timestamp without time zone DEFAULT now(), + accept_time timestamp without time zone, + user_id character(16), + expire_time timestamp without time zone DEFAULT (now() + '3 days'::interval) NOT NULL +); + + +-- +-- Name: user_likes; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.user_likes ( + id character(16) NOT NULL, + document_id character(16) NOT NULL, + "time" timestamp without time zone DEFAULT now() +); + + +-- +-- Name: user_secrets; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.user_secrets ( + id character(16) NOT NULL, + name character varying(255) NOT NULL, + iv bytea NOT NULL, + value_red bytea, + 
value_blue bytea +); + + +-- +-- Name: users; Type: TABLE; Schema: public; Owner: - +-- + +CREATE TABLE public.users ( + id character(16) NOT NULL, + avatar_url character varying(255) DEFAULT NULL::character varying, + login character varying(40), + name character varying(255) DEFAULT ''::character varying NOT NULL, + create_time timestamp without time zone DEFAULT now(), + bio text DEFAULT ''::text NOT NULL, + home_url character varying(255) DEFAULT ''::character varying NOT NULL, + github_id bigint, + update_time timestamp without time zone DEFAULT now(), + email character varying(255) DEFAULT ''::character varying NOT NULL, + type public.user_type DEFAULT 'individual'::public.user_type NOT NULL, + setting_dark_mode boolean DEFAULT false NOT NULL, + stripe_customer_id character varying, + delinquent boolean DEFAULT false NOT NULL, + flag_create_team boolean DEFAULT false, + active boolean DEFAULT true, + setting_autoclose_pairs boolean DEFAULT false NOT NULL, + twitter_id bigint, + google_id numeric(50,0), + github_login character varying(40), + setting_always_on_autocomplete boolean DEFAULT true NOT NULL, + flag_data_connectors boolean DEFAULT false NOT NULL, + CONSTRAINT user_login_lower CHECK (((login)::text = lower((login)::text))) +); + + +-- +-- Name: document_presence client_id; Type: DEFAULT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_presence ALTER COLUMN client_id SET DEFAULT nextval('public.document_presence_client_id_seq'::regclass); + + +-- +-- Name: collection_documents collection_documents_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.collection_documents + ADD CONSTRAINT collection_documents_pkey PRIMARY KEY (id, document_id); + + +-- +-- Name: collections collections_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.collections + ADD CONSTRAINT collections_pkey PRIMARY KEY (id); + + +-- +-- Name: collections collections_user_slug; Type: CONSTRAINT; Schema: 
public; Owner: - +-- + +ALTER TABLE ONLY public.collections + ADD CONSTRAINT collections_user_slug UNIQUE (user_id, slug); + + +-- +-- Name: data_connectors_documents data_connectors_documents_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.data_connectors_documents + ADD CONSTRAINT data_connectors_documents_pkey PRIMARY KEY (document_id, data_connector_user_id, data_connector_name); + + +-- +-- Name: data_connectors data_connectors_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.data_connectors + ADD CONSTRAINT data_connectors_pkey PRIMARY KEY (user_id, name); + + +-- +-- Name: document_aliases document_aliases_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_aliases + ADD CONSTRAINT document_aliases_pkey PRIMARY KEY (user_id, slug); + + +-- +-- Name: document_comments document_comments_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_comments + ADD CONSTRAINT document_comments_pkey PRIMARY KEY (id); + + +-- +-- Name: document_events document_events_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_events + ADD CONSTRAINT document_events_pkey PRIMARY KEY (id, version); + + +-- +-- Name: document_merges document_merges_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_merges + ADD CONSTRAINT document_merges_pkey PRIMARY KEY (to_id, to_end_version); + + +-- +-- Name: document_presence document_presence_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_presence + ADD CONSTRAINT document_presence_pkey PRIMARY KEY (client_id); + + +-- +-- Name: document_publishes document_publishes_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_publishes + ADD CONSTRAINT document_publishes_pkey PRIMARY KEY (id, version); + + +-- +-- Name: document_suggestions document_suggestions_pkey; 
Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_suggestions + ADD CONSTRAINT document_suggestions_pkey PRIMARY KEY (id); + + +-- +-- Name: document_thumbnails document_thumbnails_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_thumbnails + ADD CONSTRAINT document_thumbnails_pkey PRIMARY KEY (id, version); + + +-- +-- Name: documents document_user_slug; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.documents + ADD CONSTRAINT document_user_slug UNIQUE (user_id, slug); + + +-- +-- Name: document_vectors document_vectors_published; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_vectors + ADD CONSTRAINT document_vectors_published UNIQUE (id, published); + + +-- +-- Name: document_vectors document_vectors_version; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_vectors + ADD CONSTRAINT document_vectors_version PRIMARY KEY (id, version); + + +-- +-- Name: documents documents_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.documents + ADD CONSTRAINT documents_pkey PRIMARY KEY (id); + + +-- +-- Name: schema_migrations schema_migrations_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.schema_migrations + ADD CONSTRAINT schema_migrations_pkey PRIMARY KEY (name); + + +-- +-- Name: team_invitations team_invitations_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.team_invitations + ADD CONSTRAINT team_invitations_pkey PRIMARY KEY (id); + + +-- +-- Name: team_members team_members_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.team_members + ADD CONSTRAINT team_members_pkey PRIMARY KEY (team_id, user_id); + + +-- +-- Name: user_email_confirmations user_email_confirmations_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.user_email_confirmations + ADD 
CONSTRAINT user_email_confirmations_pkey PRIMARY KEY (id); + + +-- +-- Name: users user_github_id; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT user_github_id UNIQUE (github_id); + + +-- +-- Name: users user_google_id; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT user_google_id UNIQUE (google_id); + + +-- +-- Name: user_likes user_likes_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.user_likes + ADD CONSTRAINT user_likes_pkey PRIMARY KEY (id, document_id); + + +-- +-- Name: users user_login; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT user_login UNIQUE (login); + + +-- +-- Name: user_secrets user_secrets_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.user_secrets + ADD CONSTRAINT user_secrets_pkey PRIMARY KEY (id, name); + + +-- +-- Name: users user_twitter_id; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT user_twitter_id UNIQUE (twitter_id); + + +-- +-- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_pkey PRIMARY KEY (id); + + +-- +-- Name: collection_documents_id_update_time; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX collection_documents_id_update_time ON public.collection_documents USING btree (id, update_time); + + +-- +-- Name: document_creator_index; Type: INDEX; Schema: public; Owner: - +-- + +CREATE UNIQUE INDEX document_creator_index ON public.document_events USING btree (id, type) WHERE (type = 'create'::public.document_event_type); + + +-- +-- Name: document_event_id_time; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX document_event_id_time ON public.document_events USING btree (id, "time"); + + +-- +-- Name: document_events_time; Type: INDEX; Schema: public; Owner: - +-- + 
+CREATE INDEX document_events_time ON public.document_events USING btree ("time"); + + +-- +-- Name: document_events_user_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX document_events_user_id ON public.document_events USING btree (user_id); + + +-- +-- Name: document_publish_id_time; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX document_publish_id_time ON public.document_publishes USING btree (id, "time"); + + +-- +-- Name: document_publish_time; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX document_publish_time ON public.documents USING btree (publish_time) WHERE ((publish_time IS NOT NULL) AND (trashed = false)); + + +-- +-- Name: document_suggestions_from_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE UNIQUE INDEX document_suggestions_from_id ON public.document_suggestions USING btree (from_id) WHERE (close_time IS NULL); + + +-- +-- Name: document_suggestions_to_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX document_suggestions_to_id ON public.document_suggestions USING btree (to_id) WHERE (close_time IS NULL); + + +-- +-- Name: document_suggestions_user_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX document_suggestions_user_id ON public.document_suggestions USING btree (user_id); + + +-- +-- Name: document_thumbnail_unassigned_event_time; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX document_thumbnail_unassigned_event_time ON public.document_thumbnails USING btree (event_time) WHERE (assigned = false); + + +-- +-- Name: document_user_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX document_user_id ON public.documents USING btree (user_id); + + +-- +-- Name: document_vectors_gin_index; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX document_vectors_gin_index ON public.document_vectors USING gin (vector); + + +-- +-- Name: documents_fork_id; Type: INDEX; Schema: public; Owner: - +-- + +CREATE INDEX documents_fork_id ON public.documents USING 
btree (fork_id) WHERE (fork_id IS NOT NULL); + + +-- +-- Name: document_comments notify_document_comments; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER notify_document_comments AFTER INSERT OR DELETE OR UPDATE ON public.document_comments FOR EACH ROW EXECUTE PROCEDURE public.notify_document_comments(); + + +-- +-- Name: document_presence notify_document_presence; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER notify_document_presence BEFORE INSERT OR DELETE OR UPDATE ON public.document_presence FOR EACH ROW EXECUTE PROCEDURE public.notify_document_presence(); + + +-- +-- Name: document_thumbnails trigger_cancel_document_thumbnails; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_cancel_document_thumbnails BEFORE INSERT ON public.document_thumbnails FOR EACH ROW EXECUTE PROCEDURE public.cancel_document_thumbnails(); + + +-- +-- Name: collections trigger_collections_id; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_collections_id BEFORE INSERT ON public.collections FOR EACH ROW EXECUTE PROCEDURE public.global_id(); + + +-- +-- Name: documents trigger_document_alias; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_document_alias AFTER UPDATE ON public.documents FOR EACH ROW WHEN (((new.slug IS NOT NULL) AND (((old.slug)::text IS DISTINCT FROM (new.slug)::text) OR (old.user_id IS DISTINCT FROM new.user_id)))) EXECUTE PROCEDURE public.insert_document_alias(); + + +-- +-- Name: document_comments trigger_document_comments_id; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_document_comments_id BEFORE INSERT ON public.document_comments FOR EACH ROW EXECUTE PROCEDURE public.global_id(); + + +-- +-- Name: document_publishes trigger_document_disallow_data_connectors; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_document_disallow_data_connectors BEFORE INSERT ON public.document_publishes FOR EACH ROW EXECUTE PROCEDURE 
public.disallow_document_data_connectors(); + + +-- +-- Name: document_publishes trigger_document_disallow_secrets; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_document_disallow_secrets BEFORE INSERT ON public.document_publishes FOR EACH ROW EXECUTE PROCEDURE public.disallow_document_secrets(); + + +-- +-- Name: document_events trigger_document_head; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_document_head AFTER INSERT ON public.document_events FOR EACH ROW EXECUTE PROCEDURE public.update_document_head(); + + +-- +-- Name: documents trigger_document_id; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_document_id BEFORE INSERT ON public.documents FOR EACH ROW EXECUTE PROCEDURE public.global_id(); + + +-- +-- Name: document_publishes trigger_document_publish; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE CONSTRAINT TRIGGER trigger_document_publish AFTER INSERT ON public.document_publishes DEFERRABLE INITIALLY DEFERRED FOR EACH ROW EXECUTE PROCEDURE public.notify_document_publish(); + + +-- +-- Name: document_publishes trigger_document_publish_thumbnail; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_document_publish_thumbnail BEFORE INSERT ON public.document_publishes FOR EACH ROW EXECUTE PROCEDURE public.insert_document_publish_thumbnail(); + + +-- +-- Name: document_publishes trigger_document_publish_time; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_document_publish_time BEFORE INSERT ON public.document_publishes FOR EACH ROW EXECUTE PROCEDURE public.update_document_publish_time(); + + +-- +-- Name: document_suggestions trigger_document_suggestions_id; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_document_suggestions_id BEFORE INSERT ON public.document_suggestions FOR EACH ROW EXECUTE PROCEDURE public.global_id(); + + +-- +-- Name: document_publishes trigger_index_published_document; Type: TRIGGER; Schema: 
public; Owner: - +-- + +CREATE CONSTRAINT TRIGGER trigger_index_published_document AFTER INSERT ON public.document_publishes DEFERRABLE INITIALLY DEFERRED FOR EACH ROW EXECUTE PROCEDURE public.index_published_document(); + + +-- +-- Name: team_invitations trigger_team_invitation_id; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_team_invitation_id BEFORE INSERT ON public.team_invitations FOR EACH ROW EXECUTE PROCEDURE public.global_id(); + + +-- +-- Name: user_email_confirmations trigger_user_email_confirmations_id; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_user_email_confirmations_id BEFORE INSERT ON public.user_email_confirmations FOR EACH ROW EXECUTE PROCEDURE public.global_id(); + + +-- +-- Name: users trigger_user_id; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_user_id BEFORE INSERT ON public.users FOR EACH ROW EXECUTE PROCEDURE public.global_id(); + + +-- +-- Name: user_likes trigger_user_like_delete; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_user_like_delete AFTER DELETE ON public.user_likes FOR EACH ROW EXECUTE PROCEDURE public.decrement_document_likes(); + + +-- +-- Name: user_likes trigger_user_like_insert; Type: TRIGGER; Schema: public; Owner: - +-- + +CREATE TRIGGER trigger_user_like_insert AFTER INSERT ON public.user_likes FOR EACH ROW EXECUTE PROCEDURE public.increment_document_likes(); + + +-- +-- Name: collection_documents collection_documents_document_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.collection_documents + ADD CONSTRAINT collection_documents_document_id_fkey FOREIGN KEY (document_id) REFERENCES public.documents(id); + + +-- +-- Name: collection_documents collection_documents_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.collection_documents + ADD CONSTRAINT collection_documents_id_fkey FOREIGN KEY (id) REFERENCES public.collections(id) ON DELETE CASCADE; 
+ + +-- +-- Name: data_connectors_documents data_connectors_documents_data_connector_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.data_connectors_documents + ADD CONSTRAINT data_connectors_documents_data_connector_user_id_fkey FOREIGN KEY (data_connector_user_id) REFERENCES public.users(id); + + +-- +-- Name: data_connectors_documents data_connectors_documents_data_connector_user_id_fkey1; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.data_connectors_documents + ADD CONSTRAINT data_connectors_documents_data_connector_user_id_fkey1 FOREIGN KEY (data_connector_user_id, data_connector_name) REFERENCES public.data_connectors(user_id, name) ON DELETE CASCADE; + + +-- +-- Name: data_connectors_documents data_connectors_documents_document_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.data_connectors_documents + ADD CONSTRAINT data_connectors_documents_document_id_fkey FOREIGN KEY (document_id) REFERENCES public.documents(id) ON DELETE CASCADE; + + +-- +-- Name: data_connectors data_connectors_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.data_connectors + ADD CONSTRAINT data_connectors_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + + +-- +-- Name: document_aliases document_aliases_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_aliases + ADD CONSTRAINT document_aliases_id_fkey FOREIGN KEY (id) REFERENCES public.documents(id); + + +-- +-- Name: document_aliases document_aliases_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_aliases + ADD CONSTRAINT document_aliases_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: document_comments document_comments_document_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY 
public.document_comments + ADD CONSTRAINT document_comments_document_id_fkey FOREIGN KEY (document_id) REFERENCES public.documents(id); + + +-- +-- Name: document_comments document_comments_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_comments + ADD CONSTRAINT document_comments_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: document_events document_events_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_events + ADD CONSTRAINT document_events_id_fkey FOREIGN KEY (id) REFERENCES public.documents(id); + + +-- +-- Name: document_events document_events_original_document_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_events + ADD CONSTRAINT document_events_original_document_id_fkey FOREIGN KEY (original_document_id) REFERENCES public.documents(id); + + +-- +-- Name: document_events document_events_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_events + ADD CONSTRAINT document_events_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: document_merges document_merges_from_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_merges + ADD CONSTRAINT document_merges_from_id_fkey FOREIGN KEY (from_id) REFERENCES public.documents(id); + + +-- +-- Name: document_merges document_merges_to_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_merges + ADD CONSTRAINT document_merges_to_id_fkey FOREIGN KEY (to_id) REFERENCES public.documents(id); + + +-- +-- Name: document_merges document_merges_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_merges + ADD CONSTRAINT document_merges_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: document_presence 
document_presence_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_presence + ADD CONSTRAINT document_presence_id_fkey FOREIGN KEY (id) REFERENCES public.documents(id); + + +-- +-- Name: document_presence document_presence_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_presence + ADD CONSTRAINT document_presence_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: document_publishes document_publishes_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_publishes + ADD CONSTRAINT document_publishes_id_fkey FOREIGN KEY (id) REFERENCES public.documents(id); + + +-- +-- Name: document_publishes document_publishes_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_publishes + ADD CONSTRAINT document_publishes_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: document_suggestions document_suggestions_closer_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_suggestions + ADD CONSTRAINT document_suggestions_closer_id_fkey FOREIGN KEY (closer_id) REFERENCES public.users(id); + + +-- +-- Name: document_suggestions document_suggestions_from_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_suggestions + ADD CONSTRAINT document_suggestions_from_id_fkey FOREIGN KEY (from_id) REFERENCES public.documents(id); + + +-- +-- Name: document_suggestions document_suggestions_to_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_suggestions + ADD CONSTRAINT document_suggestions_to_id_fkey FOREIGN KEY (to_id) REFERENCES public.documents(id); + + +-- +-- Name: document_suggestions document_suggestions_to_id_fkey1; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_suggestions 
+ ADD CONSTRAINT document_suggestions_to_id_fkey1 FOREIGN KEY (to_id, to_merge_end_version) REFERENCES public.document_merges(to_id, to_end_version); + + +-- +-- Name: document_suggestions document_suggestions_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_suggestions + ADD CONSTRAINT document_suggestions_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: document_thumbnails document_thumbnails_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_thumbnails + ADD CONSTRAINT document_thumbnails_id_fkey FOREIGN KEY (id) REFERENCES public.documents(id); + + +-- +-- Name: document_vectors document_vectors_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.document_vectors + ADD CONSTRAINT document_vectors_id_fkey FOREIGN KEY (id) REFERENCES public.documents(id); + + +-- +-- Name: documents documents_fork_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.documents + ADD CONSTRAINT documents_fork_id_fkey FOREIGN KEY (fork_id) REFERENCES public.documents(id); + + +-- +-- Name: documents documents_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.documents + ADD CONSTRAINT documents_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: team_invitations team_invitations_owner_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.team_invitations + ADD CONSTRAINT team_invitations_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES public.users(id); + + +-- +-- Name: team_invitations team_invitations_team_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.team_invitations + ADD CONSTRAINT team_invitations_team_id_fkey FOREIGN KEY (team_id) REFERENCES public.users(id); + + +-- +-- Name: team_members team_members_team_id_fkey; Type: FK CONSTRAINT; Schema: 
public; Owner: - +-- + +ALTER TABLE ONLY public.team_members + ADD CONSTRAINT team_members_team_id_fkey FOREIGN KEY (team_id) REFERENCES public.users(id); + + +-- +-- Name: team_members team_members_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.team_members + ADD CONSTRAINT team_members_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: user_email_confirmations user_email_confirmations_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.user_email_confirmations + ADD CONSTRAINT user_email_confirmations_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); + + +-- +-- Name: user_likes user_likes_document_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.user_likes + ADD CONSTRAINT user_likes_document_id_fkey FOREIGN KEY (document_id) REFERENCES public.documents(id); + + +-- +-- Name: user_likes user_likes_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.user_likes + ADD CONSTRAINT user_likes_id_fkey FOREIGN KEY (id) REFERENCES public.users(id); + + +-- +-- Name: user_secrets user_secrets_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - +-- + +ALTER TABLE ONLY public.user_secrets + ADD CONSTRAINT user_secrets_id_fkey FOREIGN KEY (id) REFERENCES public.users(id); + + +-- +-- PostgreSQL database dump complete +-- + diff --git a/docker-compose.local.yml b/docker-compose.local.yml index 0f44c34..2c99138 100644 --- a/docker-compose.local.yml +++ b/docker-compose.local.yml @@ -8,4 +8,8 @@ services: mysql: ports: - - "3306:3306" \ No newline at end of file + - "3306:3306" + + postgres: + ports: + - "5432:5432" \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 9a8352a..08da47c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,11 +10,12 @@ services: depends_on: - mssql - mysql + - postgres env_file: - .env.test networks: - 
db_proxy_test - command: sh -c "set -o pipefail && wait-on -d 15000 -t 30000 tcp:mysql:3306 tcp:mssql:1433 && node ./data/seed.mssql.js && TZ=UTC NODE_ENV=TEST node_modules/.bin/mocha --exit" + command: sh -c "set -o pipefail && wait-on -d 15000 -t 30000 tcp:mysql:3306 tcp:mssql:1433 tcp:postgres:5432 && node ./data/seed.mssql.js && TZ=UTC NODE_ENV=TEST node_modules/.bin/mocha --exit" mssql: image: mcr.microsoft.com/mssql/server:2019-latest @@ -35,6 +36,16 @@ services: networks: - db_proxy_test + postgres: + image: postgres:13.8-alpine3.16 + volumes: + - ./data/schema.sql:/docker-entrypoint-initdb.d/00-schema.sql:ro + - ./data/data.sql:/docker-entrypoint-initdb.d/01-data.sql:ro + environment: + - POSTGRES_HOST_AUTH_METHOD=trust + networks: + - db_proxy_test + networks: db_proxy_test: name: db_proxy_test diff --git a/lib/postgres.js b/lib/postgres.js index b519a08..df93622 100644 --- a/lib/postgres.js +++ b/lib/postgres.js @@ -3,7 +3,7 @@ import pg from "pg"; import QueryStream from "pg-query-stream"; import JSONStream from "JSONStream"; import {validateQueryPayload} from "./validate.js"; -import {badRequest, failedCheck} from "./errors.js"; +import {badRequest, failedCheck, notFound} from "./errors.js"; import Pools from "./pools.js"; @@ -24,42 +24,16 @@ export const pools = new Pools( }) ); -export default (url) => { - const pool = new Pool({connectionString: url}); +export default (url) => async (req, res) => { + const connection = new Pool({connectionString: url}); - return async function query(req, res) { - const {sql, params} = await json(req); - const client = await pool.connect(); - - try { - const queryStream = new QueryStream(sql, params); - const stream = await client.query(queryStream); - - await new Promise((resolve, reject) => { - stream - .on("end", resolve) - .on("error", reject) - .pipe(JSONStream.stringify(`{"data":[`, ",", "]")) - .pipe(res, {end: false}); - }); + if (req.method === "POST") { + if (req.url === "/query") return query(req, res, 
connection); + if (req.url === "/query-stream") return queryStream(req, res, connection); + if (req.url === "/check") return check(req, res, connection); + } - const schema = { - type: "array", - items: { - type: "object", - properties: queryStream.cursor._result.fields.reduce( - (schema, {name, dataTypeID}) => ( - (schema[name] = dataTypeSchema(dataTypeID)), schema - ), - {} - ), - }, - }; - res.end(`,"schema":${JSON.stringify(schema)}}`); - } finally { - client.release(); - } - }; + throw notFound(); }; export async function query(req, res, pool) { diff --git a/test/mssql.test.js b/test/mssql.test.js index c3e8b07..069a4c4 100644 --- a/test/mssql.test.js +++ b/test/mssql.test.js @@ -3,10 +3,12 @@ import MockReq from "mock-req"; import MockRes from "mock-res"; import {MSSQL_CREDENTIALS} from "../.env.test.js"; -import mssql, {dataTypeSchema} from "../lib/mssql.js"; +import mssql, {dataTypeSchema, pools} from "../lib/mssql.js"; const credentials = MSSQL_CREDENTIALS; describe("SQL Server", () => { + after(() => pools.end()); + describe("when checking", () => { it("should throw a too permissive error", async () => { const req = new MockReq({ @@ -60,8 +62,7 @@ describe("SQL Server", () => { it("should handle parameter graciously", (done) => { const testCustomerId = 3; const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: - "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", + sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", params: [testCustomerId], }); @@ -92,8 +93,7 @@ describe("SQL Server", () => { it("should replace cell reference in the SQL query", (done) => { const testCustomerId = 5; const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: - "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", + sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", params: [testCustomerId], }); @@ -148,8 +148,7 @@ describe("SQL Server", () 
=> { it("should select the last value of any detected duplicated columns", (done) => { const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: - "SELECT TOP 1 ModifiedDate, ModifiedDate FROM test.SalesLT.SalesOrderDetail", + sql: "SELECT TOP 1 ModifiedDate, ModifiedDate FROM test.SalesLT.SalesOrderDetail", params: [], }); diff --git a/test/mysql.test.js b/test/mysql.test.js index c84b62a..57a5b9e 100644 --- a/test/mysql.test.js +++ b/test/mysql.test.js @@ -2,12 +2,14 @@ import assert from "node:assert"; import MockReq from "mock-req"; import MockRes from "mock-res"; import logger from "../middleware/logger.js"; -import mysql from "../lib/mysql.js"; +import mysql, {pools} from "../lib/mysql.js"; import {MYSQL_CREDENTIALS} from "../.env.test.js"; const index = logger(mysql(MYSQL_CREDENTIALS)); describe("MySQL", () => { + after(() => pools.end()); + describe("when checking", () => { it("should do MySQL credential check", async () => { const req = new MockReq({method: "POST", url: "/check"}); @@ -96,43 +98,6 @@ describe("MySQL", () => { }); }); - describe("when check the dataTypeSchema", () => { - it("should provide the right MySQL types", async () => { - const req = new MockReq({method: "POST", url: "/query"}).end({ - sql: - "select 1 as c1, 3.14 as c2, 0xdeadbeef as c3, 'hello' as c4, DATE '2019-01-01' as c5, 1234567890 as c6", - }); - const res = new MockRes(); - - await index(req, res); - const {data, schema} = res._getJSON(); - assert.deepEqual(data, [ - { - c1: 1, - c2: 3.14, - c3: {type: "Buffer", data: [222, 173, 190, 239]}, - c4: "hello", - c5: "2019-01-01T00:00:00.000Z", - c6: 1234567890, - }, - ]); - assert.deepEqual(schema, { - type: "array", - items: { - type: "object", - properties: { - c1: {type: ["null", "integer"], long: true}, - c2: {type: ["null", "number"], newdecimal: true}, - c3: {type: ["null", "object"], buffer: true}, - c4: {type: ["null", "string"]}, - c5: {type: ["null", "string"], date: true}, - c6: {type: ["null", 
"string"], bigint: true}, - }, - }, - }); - }); - }); - describe("when streaming", () => { it("should handle MySQL stream requests", async () => { const req = new MockReq({method: "POST", url: "/query-stream"}).end({ @@ -212,4 +177,62 @@ describe("MySQL", () => { ); }); }); + + describe("when check the dataTypeSchema", () => { + it("should provide the right MySQL types", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: "select 1 as c1, 3.14 as c2, 0xdeadbeef as c3, 'hello' as c4, DATE '2019-01-01' as c5, 1234567890 as c6", + }); + const res = new MockRes(); + + await index(req, res); + const {data, schema} = res._getJSON(); + assert.deepEqual(data, [ + { + c1: 1, + c2: 3.14, + c3: {type: "Buffer", data: [222, 173, 190, 239]}, + c4: "hello", + c5: "2019-01-01T00:00:00.000Z", + c6: 1234567890, + }, + ]); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: { + c1: {type: ["null", "integer"], long: true}, + c2: {type: ["null", "number"], newdecimal: true}, + c3: {type: ["null", "object"], buffer: true}, + c4: {type: ["null", "string"]}, + c5: {type: ["null", "string"], date: true}, + c6: {type: ["null", "string"], bigint: true}, + }, + }, + }); + }); + + it("should handle query not returning any fields", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "FLUSH PRIVILEGES", + }); + const res = new MockRes(); + + await index(req, res); + const response = res._getString(); + const [schema] = response.split("\n"); + + assert.deepEqual( + schema, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {}, + }, + }) + ); + }); + }); }); diff --git a/test/postgres.test.js b/test/postgres.test.js new file mode 100644 index 0000000..51e5087 --- /dev/null +++ b/test/postgres.test.js @@ -0,0 +1,309 @@ +import assert from "node:assert"; +import MockReq from "mock-req"; +import MockRes from "mock-res"; +import logger from 
"../middleware/logger.js"; +import pg, {pools} from "../lib/postgres.js"; + +import {POSTGRES_CREDENTIALS} from "../.env.test.js"; +const index = logger(pg(POSTGRES_CREDENTIALS)); + +describe("postgreSQL", () => { + after(() => pools.end()); + + describe("when checking", () => { + it("should perform Postgres credential check", async () => { + const req = new MockReq({method: "POST", url: "/check"}); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal( + error.message, + "User has too permissive privileges: DELETE, INSERT, REFERENCES, TRIGGER, TRUNCATE, UPDATE" + ); + } + }); + }); + + describe("when querying", () => { + it("should resolve Postgres requests", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: ` + with foo as ( + select 1 as c1 union all select 2 as c1 + ) + select c1 + from foo + where c1 = $1`, + params: [1], + }); + const res = new MockRes(); + + await index(req, res); + + const {data, schema} = res._getJSON(); + assert.deepEqual(data, [{c1: 1}]); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: {c1: {type: ["null", "integer"], int32: true}}, + }, + }); + }); + + it("should handle Postgres errors", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: "SELECT * FROM gibberish", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, 'relation "gibberish" does not exist'); + } + }); + + it("should handle Postgres empty query", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: "", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Bad request"); + } + }); + + it("should handle Postgres empty results", async () => { + const req = new MockReq({method: 
"POST", url: "/query"}).end({ + sql: `SELECT 1 AS c1 LIMIT 0`, + }); + const res = new MockRes(); + + await index(req, res); + + const {data, schema} = res._getJSON(); + assert.deepEqual(data, []); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: {c1: {type: ["null", "integer"], int32: true}}, + }, + }); + }); + }); + + describe("when streaming", () => { + it("should handle Postgres stream requests", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: ` + with foo as ( + select 1 as c1 union all select 2 as c1 + ) + select c1 + from foo + where c1 = $1`, + params: [1], + }); + const res = new MockRes(); + + await index(req, res); + const response = res._getString(); + + assert.equal( + response, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {c1: {type: ["null", "integer"], int32: true}}, + }, + }) + + "\n" + + `{"c1":1}\n` + ); + }); + + it("should handle Postgres stream empty query", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Bad request"); + } + }); + + it("should handle Postgres stream errors", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT * FROM gibberish", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, 'relation "gibberish" does not exist'); + } + }); + + it("should handle Postgres stream empty query", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Bad request"); + } + 
}); + + it("should handle Postgres stream empty results", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT 1 AS c1 LIMIT 0", + }); + const res = new MockRes(); + + await index(req, res); + const response = res._getString(); + + assert.equal( + response, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {c1: {type: ["null", "integer"], int32: true}}, + }, + }) + "\n\n" + ); + }); + }); + + describe("when inferring the dataTypeSchema", () => { + it("should handle Postgres simple types", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: `select + 1 as c1, + 3.14 as c2, + E'\\\\xDEADBEEF'::bytea as c3, + 'hello' as c4, + DATE '2019-01-01' as c5, + true as c6, + '{"a": 1}'::json as c7, + '{"b": 2}'::jsonb as c8 + `, + }); + const res = new MockRes(); + + await index(req, res); + const {data, schema} = res._getJSON(); + assert.deepEqual(data, [ + { + c1: 1, + c2: "3.14", + c3: {type: "Buffer", data: [222, 173, 190, 239]}, + c4: "hello", + c5: "2019-01-01T00:00:00.000Z", + c6: true, + c7: {a: 1}, + c8: {b: 2}, + }, + ]); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: { + c1: {type: ["null", "integer"], int32: true}, + c2: {type: ["null", "string"], numeric: true}, + c3: {type: ["null", "object"], buffer: true}, + c4: {type: ["null", "string"], text: true}, + c5: {type: ["null", "string"], date: true}, + c6: {type: ["null", "boolean"]}, + c7: {type: ["null", "object"]}, + c8: {type: ["null", "object"]}, + }, + }, + }); + }); + + it("should handle Postgres array types", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: `select + '{1, 2, 3}'::int[] as c1, + '{2.18, 3.14, 6.22}'::float[] as c2, + '{"\\\\xDEADBEEF", "\\\\xFACEFEED"}'::bytea[] as c3, + '{"hello", "goodbye"}'::varchar[] as c4, + '{"2019-01-01"}'::timestamp[] as c5, + '{true, false, true}'::bool[] as c6, + '{"{\\"a\\": 
1}", "{\\"b\\": 2}"}'::json[] as c7 + `, + }); + const res = new MockRes(); + + await index(req, res); + const {data, schema} = res._getJSON(); + assert.deepEqual(data, [ + { + c1: [1, 2, 3], + c2: [2.18, 3.14, 6.22], + c3: [ + {type: "Buffer", data: [222, 173, 190, 239]}, + {type: "Buffer", data: [250, 206, 254, 237]}, + ], + c4: ["hello", "goodbye"], + c5: ["2019-01-01T00:00:00.000Z"], + c6: [true, false, true], + c7: [{a: 1}, {b: 2}], + }, + ]); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: { + c1: { + type: ["null", "array"], + items: {type: ["null", "integer"], int32: true}, + }, + c2: { + type: ["null", "array"], + items: {type: ["null", "number"], float64: true}, + }, + c3: { + type: ["null", "array"], + items: {type: ["null", "object"], buffer: true}, + }, + c4: { + type: ["null", "array"], + items: {type: ["null", "string"], varchar: true}, + }, + c5: { + type: ["null", "array"], + items: {type: ["null", "string"], date: true}, + }, + c6: {type: ["null", "array"], items: {type: ["null", "boolean"]}}, + c7: {type: ["null", "array"], items: {type: ["null", "object"]}}, + }, + }, + }); + }); + }); +}); From f17c4c3438a66b90c218369d8aabfabffcf6e2bd Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 1 Feb 2023 11:38:48 -0500 Subject: [PATCH 21/29] no need to seed postgres --- data/data.sql | 132 ---- data/schema.sql | 1688 -------------------------------------------- docker-compose.yml | 3 - 3 files changed, 1823 deletions(-) delete mode 100644 data/data.sql delete mode 100644 data/schema.sql diff --git a/data/data.sql b/data/data.sql deleted file mode 100644 index 879dfe1..0000000 --- a/data/data.sql +++ /dev/null @@ -1,132 +0,0 @@ -INSERT INTO users(avatar_url, login, name, type, bio, home_url, github_id, active, stripe_customer_id, email, flag_create_team, flag_data_connectors, github_login) VALUES - ('https://avatars2.githubusercontent.com/u/43?v=4', 'example', 'Example User', 'individual', 'An example user.', 
'', 43, TRUE, NULL, '', FALSE, FALSE, NULL), - ('https://avatars2.githubusercontent.com/u/32314?v=4', 'tmcw', 'Tom MacWright', 'individual', 'creator of open source, like @documentationjs @simple-statistics & more', 'https://macwright.org/', 32314, TRUE, NULL, 'tom@observablehq.com', TRUE, TRUE, 'tmcw'), - ('https://avatars2.githubusercontent.com/u/230541?v=4', 'mbostock', 'Mike Bostock', 'individual', 'Code and data for humans. Founder @observablehq. Creator @d3. Former @nytgraphics. Pronounced BOSS-tock.', 'https://bost.ocks.org/mike/', 230541, TRUE, NULL, 'mike@observablehq.com', TRUE, TRUE, 'mbostock'), - ('https://avatars2.githubusercontent.com/u/230542?v=4', 'title-changer', 'Title Changer', 'individual', '', '', 230542, TRUE, NULL, '', TRUE, FALSE, NULL), - ('https://avatars2.githubusercontent.com/u/4001?v=4', 'banny', 'Banny McBannerson', 'individual', 'An example bad, inactive user.', '', 4001, FALSE, NULL, '', FALSE, FALSE, 'banny'), - ('https://avatars2.githubusercontent.com/u/101?v=4', 'alice', 'Alice', 'individual', '', '', 101, TRUE, NULL, 'alice@example.com', TRUE, FALSE, 'alice'), - ('https://avatars2.githubusercontent.com/u/202?v=4', 'bob', 'Bob', 'individual', '', '', 202, TRUE, NULL, 'bob@example.com', TRUE, FALSE, 'bob'), - ('https://avatars2.githubusercontent.com/u/303?v=4', 'carlos', 'Carlos', 'individual', '', '', 303, TRUE, NULL, 'carlos@example.com', TRUE, FALSE, 'carlos'), - ('https://avatars2.githubusercontent.com/u/30080011?v=4', 'observablehq', 'Observable', 'team', 'A better way to code.', 'https://observablehq.com/', NULL, TRUE, 'cus_DJH71LZJ68KEBh', 'billing@observablehq.com', FALSE, TRUE, NULL), - ('https://avatars2.githubusercontent.com/u/30080012?v=4', 'letters', 'Letters', 'team', 'A team for ephemeral users created with createUser()', 'https://letters.com/', NULL, TRUE, 'cus_DJH71LZJ68KEBf', 'letters@letters.com', FALSE, FALSE, 'letters'), - ('https://avatars2.githubusercontent.com/u/303?v=4', 'team', 'Team team', 'team', 'A 
team with no aggregate tests', 'https://example.com/', NULL, TRUE, 'cus_DJH71LZJ68KEBf', 'team@example.com', FALSE, TRUE, 'example-team'); - -INSERT INTO team_members(team_id, user_id, role) VALUES - ((SELECT id FROM users WHERE login = 'observablehq'), (SELECT id FROM users WHERE login = 'mbostock'), 'owner'), - ((SELECT id FROM users WHERE login = 'observablehq'), (SELECT id FROM users WHERE login = 'tmcw'), 'member'), - ((SELECT id FROM users WHERE login = 'team'), (SELECT id FROM users WHERE login = 'alice'), 'owner'), - ((SELECT id FROM users WHERE login = 'team'), (SELECT id FROM users WHERE login = 'bob'), 'member'), - ((SELECT id FROM users WHERE login = 'team'), (SELECT id FROM users WHERE login = 'carlos'), 'viewer'); - -INSERT INTO documents(user_id, slug, trashed, trash_time, publish_time, likes) VALUES - ((SELECT id FROM users WHERE login = 'mbostock'), 'hello-world', FALSE, NULL, '2017-10-11 01:02', 0), - ((SELECT id FROM users WHERE login = 'mbostock'), 'another-test', FALSE, NULL, '2017-10-11 02:04', 0), - ((SELECT id FROM users WHERE login = 'mbostock'), NULL, FALSE, NULL, NULL, 0), - ((SELECT id FROM users WHERE login = 'tmcw'), 'trash-old', TRUE, NOW() - INTERVAL '1 hours', '2017-10-11 04:02', 0), - ((SELECT id FROM users WHERE login = 'tmcw'), 'trash-new', TRUE, NOW() + INTERVAL '1 hours', '2017-10-11 04:02', 0), - ((SELECT id FROM users WHERE login = 'tmcw'), 'hello-tom', FALSE, NULL, NOW() - INTERVAL '1 hours', 2), - ((SELECT id FROM users WHERE login = 'example'), 'three', FALSE, NULL, '2017-10-11 05:02', 0), - ((SELECT id FROM users WHERE login = 'banny'), 'spam', TRUE, NOW() + INTERVAL '1 hours', NOW() - INTERVAL '10 hours', 0), - ((SELECT id FROM users WHERE login = 'observablehq'), 'team-notebook', FALSE, NULL, '2017-11-11 11:11', 5), - ((SELECT id FROM users WHERE login = 'observablehq'), NULL, FALSE, NULL, NULL, 5), - ((SELECT id FROM users WHERE login = 'title-changer'), NULL, FALSE, NULL, NULL, 0); - -INSERT INTO documents(user_id, 
title, slug, trashed, trash_time, fork_id, fork_version) VALUES - ((SELECT id FROM users WHERE login = 'tmcw'), 'Hello, world!', 'hello-fork', FALSE, NULL, (SELECT id FROM documents WHERE slug = 'hello-world'), 3), - ((SELECT id FROM users WHERE login = 'example'), 'Hello, world!', 'trivial-fork', FALSE, NULL, (SELECT id FROM documents WHERE slug = 'hello-world'), 3); - -INSERT INTO document_events(id, user_id, version, time, type, node_id, new_node_value, new_node_pinned) VALUES - ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 0, '2017-10-11 01:00', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 1, '2017-10-11 01:01', 'insert_node', 1, 'md`# Hello, world!`', FALSE), - ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 2, '2017-10-11 01:02', 'modify_title', NULL, 'Hello, world!', NULL), - ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 3, '2017-10-11 01:03', 'insert_node', 3, 'md`I am a paragraph.`', FALSE), - ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 4, '2017-10-11 01:04', 'modify_node', 3, 'md`I am a new paragraph.`', NULL), - ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 5, '2017-10-11 01:05', 'pin_node', 3, NULL, NULL), - ((SELECT id FROM documents WHERE slug = 'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 0, '2017-10-11 02:00', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE slug = 'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 1, '2017-10-11 02:01', 'insert_node', 1, 'md`# Another Test`', FALSE), - ((SELECT id FROM documents WHERE slug = 'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 2, '2017-10-11 02:02', 
'modify_title', NULL, 'Another Test', NULL), - ((SELECT id FROM documents WHERE slug = 'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 3, '2017-10-11 02:03', 'insert_node', 3, 'md`First.`', FALSE), - ((SELECT id FROM documents WHERE slug = 'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 4, '2017-10-11 02:04', 'insert_node', 4, 'md`I like D3.js.`', FALSE), - ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'mbostock')), (SELECT id FROM users WHERE login = 'mbostock'), 0, '2017-10-11 03:00', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'mbostock')), (SELECT id FROM users WHERE login = 'mbostock'), 1, '2017-10-11 03:01', 'insert_node', 1, 'md`# Hello World`', FALSE), - ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'mbostock')), (SELECT id FROM users WHERE login = 'mbostock'), 2, '2017-10-11 03:02', 'modify_title', NULL, 'Hello World', NULL), - ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'mbostock')), (SELECT id FROM users WHERE login = 'mbostock'), 3, '2017-10-11 03:03', 'insert_node', 3, 'md`I am a paragraph.`', FALSE), - ((SELECT id FROM documents WHERE slug = 'trash-old'), (SELECT id FROM users WHERE login = 'tmcw'), 0, '2017-10-11 04:00', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE slug = 'trash-old'), (SELECT id FROM users WHERE login = 'tmcw'), 1, '2017-10-11 04:01', 'modify_title', 1, '`Trash Old`', NULL), - ((SELECT id FROM documents WHERE slug = 'trash-new'), (SELECT id FROM users WHERE login = 'tmcw'), 0, '2017-10-11 04:00', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE slug = 'trash-new'), (SELECT id FROM users WHERE login = 'tmcw'), 1, '2017-10-11 04:01', 'modify_title', 1, '`Trash New`', NULL), - ((SELECT id FROM documents WHERE slug = 'hello-tom'), 
(SELECT id FROM users WHERE login = 'tmcw'), 0, '2017-10-11 04:00', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE slug = 'hello-tom'), (SELECT id FROM users WHERE login = 'tmcw'), 1, '2017-10-11 04:01', 'modify_title', 1, 'Hello, Tom!', NULL), - ((SELECT id FROM documents WHERE slug = 'three'), (SELECT id FROM users WHERE login = 'example'), 0, '2017-10-11 05:00', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE slug = 'three'), (SELECT id FROM users WHERE login = 'example'), 1, '2017-10-11 05:01', 'insert_node', 1, 'md`# Three`', FALSE), - ((SELECT id FROM documents WHERE slug = 'three'), (SELECT id FROM users WHERE login = 'example'), 2, '2017-10-11 05:02', 'modify_title', 2, 'Three', NULL), - ((SELECT id FROM documents WHERE slug = 'hello-fork'), (SELECT id FROM users WHERE login = 'tmcw'), 3, '2017-10-11 05:04', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE slug = 'hello-fork'), (SELECT id FROM users WHERE login = 'tmcw'), 4, '2017-10-11 05:03', 'insert_node', 4, 'md`I am a forked document.`', FALSE), - ((SELECT id FROM documents WHERE slug = 'trivial-fork'), (SELECT id FROM users WHERE login = 'example'), 3, '2017-10-12 05:04', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 0, '2017-10-11 06:00', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 1, '2017-10-11 06:01', 'insert_node', 1, 'md`# Buy Viagra!`', FALSE), - ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 2, '2017-10-11 06:02', 'modify_title', NULL, 'Buy Viagra!', NULL), - ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 3, '2017-10-11 06:03', 'insert_node', 3, 'md`I am completely legitimate content.`', FALSE), - ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 4, 
'2017-10-11 06:04', 'modify_node', 3, 'md`Please click [here](http://spam.com/).`', NULL), - ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 5, '2017-10-11 06:05', 'pin_node', 3, NULL, NULL), - ((SELECT id FROM documents WHERE slug = 'team-notebook'), (SELECT id FROM users WHERE login = 'tmcw'), 0, '2017-10-11 05:00', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE slug = 'team-notebook'), (SELECT id FROM users WHERE login = 'tmcw'), 1, '2017-10-11 05:01', 'insert_node', 1, 'md`# Team Notebook`', FALSE), - ((SELECT id FROM documents WHERE slug = 'team-notebook'), (SELECT id FROM users WHERE login = 'tmcw'), 2, '2017-10-11 05:02', 'modify_title', 2, 'Team Notebook', NULL), - ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'observablehq')), (SELECT id FROM users WHERE login = 'mbostock'), 0, '2017-10-11 05:00', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'observablehq')), (SELECT id FROM users WHERE login = 'mbostock'), 1, '2017-10-11 05:01', 'insert_node', 1, 'md`# Team Unpublished`', FALSE), - ((SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'observablehq')), (SELECT id FROM users WHERE login = 'mbostock'), 2, '2017-10-11 05:02', 'modify_title', 2, 'Team Unpublished', NULL), - ((SELECT id FROM documents WHERE user_id = (SELECT id FROM users WHERE login = 'title-changer')), (SELECT id FROM users WHERE login = 'title-changer'), 0, '2018-11-12 00:00', 'create', NULL, NULL, NULL), - ((SELECT id FROM documents WHERE user_id = (SELECT id FROM users WHERE login = 'title-changer')), (SELECT id FROM users WHERE login = 'title-changer'), 1, '2018-11-12 00:00', 'modify_title', NULL, 'first', NULL), - ((SELECT id FROM documents WHERE user_id = (SELECT id FROM users WHERE login = 'title-changer')), (SELECT id FROM users WHERE login = 
'title-changer'), 2, '2018-11-12 00:00', 'modify_title', NULL, 'second', NULL); - -INSERT INTO document_publishes(id, user_id, version, title, time) VALUES - ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 2, 'Hello, world!', '2017-10-11 01:02'), - ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 3, 'Hello, world!', '2017-10-11 01:03'), - ((SELECT id FROM documents WHERE slug = 'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 4, 'Another Test', '2017-10-11 02:04'), - ((SELECT id FROM documents WHERE slug = 'trash-old'), (SELECT id FROM users WHERE login = 'tmcw'), 1, 'Trash Old', '2017-10-11 04:02'), - ((SELECT id FROM documents WHERE slug = 'trash-new'), (SELECT id FROM users WHERE login = 'tmcw'), 1, 'Trash New', '2017-10-11 04:02'), - ((SELECT id FROM documents WHERE slug = 'hello-tom'), (SELECT id FROM users WHERE login = 'tmcw'), 1, 'Hello, Tom!', '2017-10-11 05:02'), - ((SELECT id FROM documents WHERE slug = 'trivial-fork'), (SELECT id FROM users WHERE login = 'example'), 3, 'Hello, world!', '2017-10-12 05:02'), - ((SELECT id FROM documents WHERE slug = 'three'), (SELECT id FROM users WHERE login = 'example'), 2, 'Three', '2017-10-11 05:02'), - ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 5, 'Buy Viagra!', '2017-10-11 06:10'), - ((SELECT id FROM documents WHERE slug = 'team-notebook'), (SELECT id FROM users WHERE login = 'mbostock'), 2, 'Team Notebook', '2017-11-11 12:11'), - ((SELECT id FROM documents WHERE user_id = (SELECT id FROM users WHERE login = 'title-changer')), (SELECT id FROM users WHERE login = 'title-changer'), 1, 'first', '2018-11-12 00:01'); - -INSERT INTO document_aliases(id, user_id, slug) VALUES - ((SELECT id FROM documents WHERE slug = 'hello-world'), (SELECT id FROM users WHERE login = 'mbostock'), 'hello-world'), - ((SELECT id FROM documents WHERE slug = 
'another-test'), (SELECT id FROM users WHERE login = 'mbostock'), 'another-test'), - ((SELECT id FROM documents WHERE slug = 'trash-old'), (SELECT id FROM users WHERE login = 'tmcw'), 'trash-old'), - ((SELECT id FROM documents WHERE slug = 'trash-new'), (SELECT id FROM users WHERE login = 'tmcw'), 'trash-new'), - ((SELECT id FROM documents WHERE slug = 'hello-tom'), (SELECT id FROM users WHERE login = 'tmcw'), 'hello-tom'), - ((SELECT id FROM documents WHERE slug = 'three'), (SELECT id FROM users WHERE login = 'example'), 'three'), - ((SELECT id FROM documents WHERE slug = 'spam'), (SELECT id FROM users WHERE login = 'banny'), 'spam'), - ((SELECT id FROM documents WHERE slug = 'team-notebook'), (SELECT id FROM users WHERE login = 'observablehq'), 'team-notebook'), - ((SELECT id FROM documents WHERE slug = 'hello-fork'), (SELECT id FROM users WHERE login = 'tmcw'), 'hello-fork'), - ((SELECT id FROM documents WHERE slug = 'trivial-fork'), (SELECT id FROM users WHERE login = 'example'), 'trivial-fork'); - -UPDATE document_thumbnails - SET assigned = TRUE; - -UPDATE document_thumbnails - SET hash = 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855' - WHERE id IN (SELECT id FROM documents WHERE slug IN ('hello-world', 'another-test')); - -INSERT INTO collections(slug, title, description, update_time, user_id, type) VALUES - ('examples', 'Examples', 'A collection for tests', '2017-11-23 06:00', (SELECT id FROM users WHERE login = 'observablehq'), 'public'), - ('kittens', 'Kittens', 'Like cats, but cute', '2017-11-23 06:01', (SELECT id FROM users WHERE login = 'observablehq'), 'public'), - ('private-kittens', 'Private Kittens', 'Like cats, but cute, and also private', '2017-11-23 06:10', (SELECT id FROM users WHERE login = 'observablehq'), 'private'), - ('empty', 'Empty', 'An empty collection', '2017-11-23 06:03', (SELECT id FROM users WHERE login = 'observablehq'), 'public'), - ('pizza', 'Pizzas', 'Everything is pizza', '2017-11-23 06:04', (SELECT id FROM 
users WHERE login = 'observablehq'), 'public'); - -INSERT INTO collection_documents(id, document_id, update_time) VALUES - ((SELECT id FROM collections WHERE slug = 'examples'), (SELECT id FROM documents WHERE slug = 'hello-world'), '2017-10-11 01:01'), - ((SELECT id FROM collections WHERE slug = 'examples'), (SELECT id FROM documents WHERE slug = 'another-test'), '2017-10-11 01:02'), - ((SELECT id FROM collections WHERE slug = 'examples'), (SELECT id FROM documents WHERE slug = 'hello-tom'), '2017-10-11 01:03'), - ((SELECT id FROM collections WHERE slug = 'examples'), (SELECT id FROM documents WHERE slug = 'trash-new'), '2017-10-11 01:04'), - ((SELECT id FROM collections WHERE slug = 'examples'), (SELECT id FROM documents WHERE slug = 'team-notebook'), '2017-10-11 01:05'), - ((SELECT id FROM collections WHERE slug = 'kittens'), (SELECT id FROM documents WHERE slug = 'hello-world'), '2017-10-11 01:06'), - ((SELECT id FROM collections WHERE slug = 'kittens'), (SELECT id FROM documents WHERE slug = 'another-test'), '2017-10-11 01:07'), - ((SELECT id FROM collections WHERE slug = 'kittens'), (SELECT id FROM documents WHERE slug = 'hello-tom'), '2017-10-11 01:08'), - ((SELECT id FROM collections WHERE slug = 'private-kittens'), (SELECT id FROM documents WHERE slug = 'hello-tom'), '2017-10-11 01:08'), - ((SELECT id FROM collections WHERE slug = 'private-kittens'), (SELECT id FROM documents WHERE slug IS NULL AND user_id = (SELECT id FROM users WHERE login = 'observablehq')), '2017-10-11 01:08'), - ((SELECT id FROM collections WHERE slug = 'private-kittens'), (SELECT id FROM documents WHERE slug = 'spam'), '2017-10-11 01:08'); diff --git a/data/schema.sql b/data/schema.sql deleted file mode 100644 index ca8874c..0000000 --- a/data/schema.sql +++ /dev/null @@ -1,1688 +0,0 @@ --- --- PostgreSQL database dump --- - --- Dumped from database version 9.6.12 --- Dumped by pg_dump version 9.6.12 - -SET statement_timeout = 0; -SET lock_timeout = 0; -SET 
idle_in_transaction_session_timeout = 0; -SET client_encoding = 'UTF8'; -SET standard_conforming_strings = on; -SELECT pg_catalog.set_config('search_path', '', false); -SET check_function_bodies = false; -SET client_min_messages = warning; -SET row_security = off; - --- --- Name: plpgsql; Type: EXTENSION; Schema: -; Owner: - --- - -CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog; - - --- --- Name: EXTENSION plpgsql; Type: COMMENT; Schema: -; Owner: - --- - -COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language'; - - --- --- Name: pg_stat_statements; Type: EXTENSION; Schema: -; Owner: - --- - -CREATE EXTENSION IF NOT EXISTS pg_stat_statements WITH SCHEMA public; - - --- --- Name: EXTENSION pg_stat_statements; Type: COMMENT; Schema: -; Owner: - --- - -COMMENT ON EXTENSION pg_stat_statements IS 'track execution statistics of all SQL statements executed'; - - --- --- Name: pgcrypto; Type: EXTENSION; Schema: -; Owner: - --- - -CREATE EXTENSION IF NOT EXISTS pgcrypto WITH SCHEMA public; - - --- --- Name: EXTENSION pgcrypto; Type: COMMENT; Schema: -; Owner: - --- - -COMMENT ON EXTENSION pgcrypto IS 'cryptographic functions'; - - --- --- Name: collection_type; Type: TYPE; Schema: public; Owner: - --- - -CREATE TYPE public.collection_type AS ENUM ( - 'public', - 'private' -); - - --- --- Name: data_connector_type; Type: TYPE; Schema: public; Owner: - --- - -CREATE TYPE public.data_connector_type AS ENUM ( - 'mysql', - 'postgres' -); - - --- --- Name: document_event_type; Type: TYPE; Schema: public; Owner: - --- - -CREATE TYPE public.document_event_type AS ENUM ( - 'create', - 'modify_title', - 'insert_node', - 'remove_node', - 'modify_node', - 'move_node', - 'noop', - 'pin_node', - 'unpin_node', - 'revert', - 'insert_comment', - 'modify_comment', - 'remove_comment' -); - - --- --- Name: team_role; Type: TYPE; Schema: public; Owner: - --- - -CREATE TYPE public.team_role AS ENUM ( - 'member', - 'owner', - 'viewer' -); - - --- --- Name: user_type; Type: 
TYPE; Schema: public; Owner: - --- - -CREATE TYPE public.user_type AS ENUM ( - 'individual', - 'team' -); - - --- --- Name: ban(character varying); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.ban(user_login character varying) RETURNS boolean - LANGUAGE plpgsql - AS $$ - BEGIN - UPDATE users - SET active = TRUE - WHERE login = user_login; - UPDATE documents - SET trashed = TRUE, trash_time = NOW() + INTERVAL '45 days' - WHERE user_id = (SELECT id FROM users WHERE login = user_login) - AND trashed = FALSE; - RETURN true; - END -$$; - - --- --- Name: cancel_document_thumbnails(); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.cancel_document_thumbnails() RETURNS trigger - LANGUAGE plpgsql - AS $$ - BEGIN - DELETE FROM document_thumbnails - WHERE assigned = FALSE - AND id = NEW.id - AND version <> NEW.version - AND version NOT IN (SELECT p.version FROM document_publishes p WHERE p.id = NEW.id ORDER BY time DESC LIMIT 1) - AND version NOT IN (SELECT d.version FROM documents d WHERE d.id = NEW.id); - RETURN NEW; - END; -$$; - - --- --- Name: compute_document_version_ranges(character, integer, integer); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.compute_document_version_ranges(document_id character, start_version integer, end_version integer) RETURNS TABLE(from_version integer, to_version integer) - LANGUAGE plpgsql - AS $$ -DECLARE -revert_from_version int; -revert_to_version int; -BEGIN - to_version := end_version; - LOOP - SELECT e.version, e.node_id - INTO revert_from_version, revert_to_version - FROM document_events e - WHERE e.id = document_id - AND e.type = 'revert' - AND e.version <= to_version - AND e.version >= start_version - ORDER BY e.version DESC - LIMIT 1; - IF revert_to_version IS NULL THEN - from_version := start_version; - RETURN NEXT; - RETURN; - END IF; - IF revert_from_version < end_version THEN - from_version := revert_from_version + 1; - RETURN NEXT; - END IF; - 
to_version := revert_to_version; - END LOOP; -END -$$; - - --- --- Name: decrement_document_likes(); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.decrement_document_likes() RETURNS trigger - LANGUAGE plpgsql - AS $$ - BEGIN - UPDATE documents - SET likes = likes - 1 - WHERE id = OLD.document_id; - RETURN OLD; - END; -$$; - - --- --- Name: disallow_document_data_connectors(); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.disallow_document_data_connectors() RETURNS trigger - LANGUAGE plpgsql - AS $$ - BEGIN - DELETE FROM data_connectors_documents - WHERE document_id = NEW.id; - RETURN NEW; - END; -$$; - - --- --- Name: disallow_document_secrets(); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.disallow_document_secrets() RETURNS trigger - LANGUAGE plpgsql - AS $$ - BEGIN - UPDATE documents - SET allow_secrets = FALSE - WHERE id = NEW.id - AND allow_secrets = TRUE; - RETURN NEW; - END; -$$; - - --- --- Name: global_id(); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.global_id() RETURNS trigger - LANGUAGE plpgsql - AS $$ - DECLARE - id CHAR(16); - query TEXT; - found INT; - BEGIN - IF NEW.id IS NULL THEN - query := 'SELECT 1 FROM ' || quote_ident(TG_TABLE_NAME) || ' WHERE id='; - LOOP - id := encode(gen_random_bytes(8), 'hex'); - EXECUTE query || quote_literal(id) INTO found; - IF found IS NULL THEN EXIT; END IF; - END LOOP; - NEW.id = id; - END IF; - RETURN NEW; - END; -$$; - - --- --- Name: increment_document_likes(); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.increment_document_likes() RETURNS trigger - LANGUAGE plpgsql - AS $$ - BEGIN - UPDATE documents - SET likes = likes + 1 - WHERE id = NEW.document_id; - RETURN NEW; - END; -$$; - - --- --- Name: index_document(character, integer); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.index_document(doc_id character, doc_version integer) RETURNS void - LANGUAGE plpgsql 
- AS $$ - DECLARE - is_published BOOLEAN; - BEGIN - SELECT TRUE - INTO is_published - FROM document_publishes p - JOIN documents d ON (p.id = d.id) - WHERE p.id = doc_id - AND p.version = doc_version - AND d.slug IS NOT NULL; - IF EXISTS(SELECT 1 FROM document_vectors v WHERE v.id = doc_id AND v.version = doc_version) THEN - IF is_published IS TRUE THEN - DELETE FROM document_vectors WHERE id = doc_id AND version <> doc_version AND published IS TRUE; - UPDATE document_vectors SET published = TRUE WHERE id = doc_id AND version = doc_version; - END IF; - RETURN; - END IF; - WITH RECURSIVE ancestors AS ( - (SELECT d.id, d.version AS ancestor_version, d.fork_version, d.fork_id FROM documents d WHERE d.id = doc_id) - UNION (SELECT d.id, a.fork_version AS ancestor_version, d.fork_version, d.fork_id FROM ancestors a JOIN documents d ON (d.id = a.fork_id))), - ranges AS ( - SELECT a.id, r.from_version, r.to_version FROM ancestors a - LEFT JOIN LATERAL (SELECT from_version, to_version FROM compute_document_version_ranges(a.id, COALESCE(a.fork_version, 0), LEAST(doc_version, a.ancestor_version))) r ON TRUE), - texts AS (SELECT - COALESCE(string_agg(e.new_node_value, CHR(10) || CHR(10)), '') AS value - FROM ( - WITH events AS (SELECT e.type, e.version, e.node_id, e.new_node_value - FROM ancestors a - JOIN document_events e ON (e.id = a.id) - JOIN ranges r ON (e.id = r.id AND r.from_version <= e.version AND r.to_version >= e.version) - WHERE e.type IN ('insert_node', 'remove_node', 'modify_node') - ) - SELECT e.* - FROM events e - WHERE NOT EXISTS ( - SELECT 1 - FROM events o - WHERE e.type IN ('insert_node', 'modify_node') - AND o.type IN ('remove_node', 'modify_node') - AND o.node_id = e.node_id - AND o.version > e.version - ) - ORDER BY e.version ASC - ) e) - INSERT INTO document_vectors(id, version, published, vector) - SELECT - d.id, doc_version, is_published IS TRUE, - setweight(to_tsvector('simple', COALESCE((CASE WHEN is_published THEN p.title ELSE d.title END), '')), 
'A') || - setweight(to_tsvector('simple', COALESCE(u.name, '') || ' ' || COALESCE(u.login, '')), 'A') || - setweight(to_tsvector('simple', translate(t.value, '.[]{}()*/+', ' ')), 'B') AS vector - FROM texts t - JOIN documents d ON (d.id = doc_id) - JOIN users u ON (u.id = d.user_id) - LEFT JOIN document_publishes p ON (d.slug IS NOT NULL AND d.id = p.id AND p.version = doc_version) - ON CONFLICT(id, published) DO UPDATE SET version = EXCLUDED.version, vector = EXCLUDED.vector; - RETURN; - END; -$$; - - --- --- Name: index_published_document(); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.index_published_document() RETURNS trigger - LANGUAGE plpgsql - AS $$ - BEGIN - PERFORM index_document(NEW.id, NEW.version); - RETURN NEW; - END; -$$; - - --- --- Name: insert_document_alias(); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.insert_document_alias() RETURNS trigger - LANGUAGE plpgsql - AS $$ - BEGIN - INSERT INTO document_aliases(id, user_id, slug) - VALUES (NEW.id, NEW.user_id, NEW.slug) - ON CONFLICT DO NOTHING; - RETURN NEW; - END; -$$; - - --- --- Name: insert_document_publish_thumbnail(); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.insert_document_publish_thumbnail() RETURNS trigger - LANGUAGE plpgsql - AS $$ - BEGIN - DELETE FROM document_thumbnails - WHERE assigned = FALSE - AND id = NEW.id - AND version <> NEW.version - AND version NOT IN (SELECT d.version FROM documents d WHERE d.id = NEW.id); - INSERT INTO document_thumbnails(id, version, event_time) - SELECT id, version, time - FROM document_events - WHERE id = NEW.id - AND version = NEW.version - ON CONFLICT (id, version) DO NOTHING; - RETURN NEW; - END; -$$; - - --- --- Name: notify_document_comments(); Type: FUNCTION; Schema: public; Owner: - --- - -CREATE FUNCTION public.notify_document_comments() RETURNS trigger - LANGUAGE plpgsql - AS $$ - DECLARE - subject document_comments%ROWTYPE; - deleted BOOLEAN; - BEGIN - CASE TG_OP - 
WHEN 'INSERT' THEN
      subject = NEW;
      deleted = FALSE;
    WHEN 'UPDATE' THEN
      subject = NEW;
      deleted = FALSE;
    WHEN 'DELETE' THEN
      subject = OLD;
      deleted = TRUE;
    END CASE;
    PERFORM pg_notify('document_comments', json_build_object('id', subject.document_id, 'comment_id', subject.id, 'deleted', deleted)::TEXT);
    RETURN subject;
  END;
$$;


--
-- Name: notify_document_presence(); Type: FUNCTION; Schema: public; Owner: -
--

-- Trigger helper: broadcast presence changes on the 'document_presence'
-- channel so connected listeners can update live-collaboration state.
CREATE FUNCTION public.notify_document_presence() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
  DECLARE
    subject document_presence%ROWTYPE;
  BEGIN
    CASE TG_OP
      WHEN 'INSERT' THEN subject = NEW;
      WHEN 'UPDATE' THEN subject = NEW;
      WHEN 'DELETE' THEN subject = OLD;
    END CASE;
    PERFORM pg_notify('document_presence', json_build_object('id', subject.id, 'user_id', subject.user_id)::TEXT);
    RETURN subject;
  END;
$$;


--
-- Name: notify_document_publish(); Type: FUNCTION; Schema: public; Owner: -
--

-- Trigger helper: announce a new publish on the 'document_publishes' channel.
CREATE FUNCTION public.notify_document_publish() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
  BEGIN
    PERFORM pg_notify('document_publishes', json_build_object('id', NEW.id, 'version', NEW.version)::TEXT);
    RETURN NEW;
  END;
$$;


--
-- Name: title_score(text, text); Type: FUNCTION; Schema: public; Owner: -
--

-- Fraction of the query's words (split on non-word characters) that also
-- appear in the title; used for ranking search results.
-- NOTE(review): returns NULL when the query is empty (ARRAY_LENGTH of an
-- empty array is NULL) — presumably callers treat NULL as zero relevance.
CREATE FUNCTION public.title_score(query text, title text) RETURNS double precision
    LANGUAGE plpgsql
    AS $$
  DECLARE
    count integer;
    query_words text[] := string_to_array(regexp_replace(query, '\W', ' ', 'g'), ' ');
    title_words text[] := string_to_array(regexp_replace(title, '\W', ' ', 'g'), ' ');
  BEGIN
    count := (SELECT COUNT(*) FROM (SELECT UNNEST(query_words) INTERSECT SELECT UNNEST(title_words)) s);
    RETURN count / ARRAY_LENGTH(query_words, 1)::FLOAT;
END
$$;


--
-- Name: unban(character varying); Type: FUNCTION; Schema: public; Owner: -
--

-- Reverses ban(): re-activates the account and un-trashes the documents that
-- ban() trashed. The trash_time > NOW() + 30 days filter only matches the
-- 45-day window set by ban(), so documents the user trashed themselves
-- (presumably with a shorter window) are left alone.
CREATE FUNCTION public.unban(user_login character varying) RETURNS boolean
    LANGUAGE plpgsql
    AS $$
  BEGIN
    UPDATE users
    -- Fixed: was FALSE, which *deactivated* the account being unbanned.
    -- Restoring an account must set users.active back to its TRUE default.
    SET active = TRUE
    WHERE login = user_login;
    UPDATE documents
    SET trashed = FALSE, trash_time = NULL
    WHERE user_id = (SELECT id FROM users WHERE login = user_login)
    AND trash_time > NOW() + INTERVAL '30 days';
    RETURN true;
  END
$$;


--
-- Name: update_document_head(); Type: FUNCTION; Schema: public; Owner: -
--

-- Trigger helper: after a document event is inserted, advance the documents
-- row (update_time, version) and recompute the title. For a 'revert' event the
-- title is recovered by replaying the newest still-effective 'modify_title'
-- event across the document's fork lineage, restricted to the version ranges
-- produced by compute_document_version_ranges().
CREATE FUNCTION public.update_document_head() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
  BEGIN
    UPDATE documents
    SET update_time = NEW.time, version = NEW.version, title = CASE
      WHEN NEW.type = 'modify_title' THEN NEW.new_node_value
      WHEN NEW.type = 'revert' THEN (
        WITH RECURSIVE lineage AS (
            (SELECT d.id, d.version + 1 AS version, d.fork_id, d.fork_version
              FROM documents d WHERE d.id = NEW.id)
          UNION (SELECT d.id, d.version, d.fork_id, d.fork_version
              FROM lineage l JOIN documents d ON (d.id = l.fork_id))),
          ranges AS (
            SELECT l.id, r.from_version, r.to_version FROM lineage l
              LEFT JOIN LATERAL (SELECT from_version, to_version FROM compute_document_version_ranges(l.id, COALESCE(l.fork_version, 0), l.version)) r ON TRUE)
        SELECT e.new_node_value
        FROM document_events e
        JOIN lineage l ON (
          (e.id = l.id AND l.id = NEW.id)
          OR (e.id = l.fork_id AND e.version <= l.fork_version)
        )
        JOIN ranges r ON (e.id = r.id AND r.from_version <= e.version AND r.to_version >= e.version)
        WHERE e.type = 'modify_title'
        ORDER BY e.version DESC
        LIMIT 1)
      ELSE title
    END
    WHERE id = NEW.id
    AND version < NEW.version;
    RETURN NEW;
  END;
$$;


--
-- Name: update_document_publish_time(); Type: FUNCTION; Schema: public; Owner: -
--

-- Trigger helper: on publish, stamp documents.publish_time — but only for
-- documents without a slug (slug IS NULL).
CREATE FUNCTION public.update_document_publish_time() RETURNS trigger
    LANGUAGE plpgsql
    AS $$
  BEGIN
    UPDATE documents
    SET publish_time = NEW.time
    WHERE slug IS NULL
    AND id = NEW.id;
    RETURN NEW;
  END;
$$;


--
-- Name: user_is_type(character, public.user_type); Type: FUNCTION; Schema: public; Owner: -
--

-- True when the user $1 exists with type $2; used by team CHECK constraints.
CREATE FUNCTION public.user_is_type(character, public.user_type) RETURNS
boolean - LANGUAGE sql - AS $_$ - SELECT EXISTS ( - SELECT 1 FROM users WHERE id = $1 AND type = $2 - ); -$_$; - - -SET default_tablespace = ''; - -SET default_with_oids = false; - --- --- Name: collection_documents; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.collection_documents ( - id character(16) NOT NULL, - document_id character(16) NOT NULL, - update_time timestamp without time zone DEFAULT now() -); - - --- --- Name: collections; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.collections ( - id character(16) NOT NULL, - slug character varying(255) NOT NULL, - title character varying(255) NOT NULL, - description character varying(255) NOT NULL, - update_time timestamp without time zone DEFAULT now(), - chronological boolean DEFAULT false NOT NULL, - user_id character(16) NOT NULL, - pinned boolean DEFAULT false NOT NULL, - type public.collection_type DEFAULT 'private'::public.collection_type NOT NULL, - custom_thumbnail character varying(64) DEFAULT NULL::character varying -); - - --- --- Name: data_connectors; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.data_connectors ( - user_id character(16) NOT NULL, - name character varying(255) NOT NULL, - type public.data_connector_type NOT NULL, - credentials_iv bytea NOT NULL, - credentials_red bytea, - credentials_blue bytea -); - - --- --- Name: data_connectors_documents; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.data_connectors_documents ( - document_id character(16) NOT NULL, - data_connector_user_id character(16) NOT NULL, - data_connector_name character varying(255) NOT NULL -); - - --- --- Name: document_aliases; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.document_aliases ( - id character(16) NOT NULL, - user_id character(16) NOT NULL, - slug character varying(255) NOT NULL -); - - --- --- Name: document_comments; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.document_comments ( - id 
character(16) NOT NULL, - user_id character(16) NOT NULL, - document_id character(16) NOT NULL, - node_id integer NOT NULL, - document_version integer NOT NULL, - content text NOT NULL, - create_time timestamp without time zone DEFAULT now() NOT NULL, - update_time timestamp without time zone, - resolved boolean DEFAULT false NOT NULL -); - - --- --- Name: document_events; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.document_events ( - id character(16) NOT NULL, - version integer NOT NULL, - type public.document_event_type NOT NULL, - "time" timestamp without time zone DEFAULT now(), - node_id integer, - new_node_value text, - new_next_node_id integer, - user_id character(16), - original_document_id character(16), - original_node_id integer, - new_node_pinned boolean -); - - --- --- Name: document_merges; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.document_merges ( - from_id character(16) NOT NULL, - from_version integer NOT NULL, - to_id character(16) NOT NULL, - to_start_version integer NOT NULL, - to_end_version integer NOT NULL, - user_id character(16) NOT NULL, - "time" timestamp without time zone DEFAULT now() -); - - --- --- Name: document_presence; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.document_presence ( - id character(16) NOT NULL, - user_id character(16), - "time" timestamp without time zone DEFAULT now() NOT NULL, - client_id bigint NOT NULL -); - - --- --- Name: document_presence_client_id_seq; Type: SEQUENCE; Schema: public; Owner: - --- - -CREATE SEQUENCE public.document_presence_client_id_seq - START WITH 1 - INCREMENT BY 1 - NO MINVALUE - NO MAXVALUE - CACHE 1; - - --- --- Name: document_presence_client_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: - --- - -ALTER SEQUENCE public.document_presence_client_id_seq OWNED BY public.document_presence.client_id; - - --- --- Name: document_publishes; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE 
public.document_publishes ( - id character(16) NOT NULL, - version integer NOT NULL, - user_id character(16), - "time" timestamp without time zone DEFAULT now(), - title character varying(255) DEFAULT ''::character varying, - public boolean DEFAULT false NOT NULL -); - - --- --- Name: document_suggestions; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.document_suggestions ( - id character(16) NOT NULL, - user_id character(16) NOT NULL, - from_id character(16) NOT NULL, - to_id character(16) NOT NULL, - create_time timestamp without time zone DEFAULT now() NOT NULL, - close_time timestamp without time zone, - description character varying(255) DEFAULT ''::character varying NOT NULL, - closer_id character(16), - to_merge_end_version integer -); - - --- --- Name: document_thumbnails; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.document_thumbnails ( - id character(16) NOT NULL, - version integer NOT NULL, - hash character varying(64) DEFAULT NULL::character varying, - event_time timestamp without time zone NOT NULL, - assigned boolean DEFAULT false -); - - --- --- Name: document_vectors; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.document_vectors ( - id character(16) NOT NULL, - version integer NOT NULL, - vector tsvector NOT NULL, - published boolean DEFAULT false NOT NULL -); - - --- --- Name: documents; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.documents ( - id character(16) NOT NULL, - user_id character(16) NOT NULL, - version integer DEFAULT '-1'::integer NOT NULL, - slug character varying(255) DEFAULT NULL::character varying, - trashed boolean DEFAULT false, - title character varying(255) DEFAULT ''::character varying, - update_time timestamp without time zone DEFAULT now(), - trash_time timestamp without time zone, - publish_time timestamp without time zone, - fork_id character(16) DEFAULT NULL::bpchar, - fork_version integer, - access_key character(16) DEFAULT 
encode(public.gen_random_bytes(8), 'hex'::text) NOT NULL, - likes integer DEFAULT 0 NOT NULL, - allow_secrets boolean DEFAULT false NOT NULL, - custom_thumbnail character varying(64) DEFAULT NULL::character varying -); - - --- --- Name: schema_migrations; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.schema_migrations ( - name character varying(255) NOT NULL, - start_time timestamp without time zone DEFAULT now(), - end_time timestamp without time zone -); - - --- --- Name: team_invitations; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.team_invitations ( - team_id character(16) NOT NULL, - owner_id character(16) NOT NULL, - email character varying(255) NOT NULL, - create_time timestamp without time zone DEFAULT now() NOT NULL, - accept_time timestamp without time zone, - expire_time timestamp without time zone DEFAULT (now() + '3 days'::interval) NOT NULL, - id character(16) NOT NULL, - role public.team_role DEFAULT 'member'::public.team_role, - CONSTRAINT check_invitation_owner CHECK (public.user_is_type(owner_id, 'individual'::public.user_type)), - CONSTRAINT check_invitation_team CHECK (public.user_is_type(team_id, 'team'::public.user_type)) -); - - --- --- Name: team_members; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.team_members ( - team_id character(16) NOT NULL, - user_id character(16) NOT NULL, - role public.team_role DEFAULT 'member'::public.team_role NOT NULL, - CONSTRAINT check_membership_team CHECK (public.user_is_type(team_id, 'team'::public.user_type)), - CONSTRAINT check_membership_user CHECK (public.user_is_type(user_id, 'individual'::public.user_type)) -); - - --- --- Name: user_email_confirmations; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.user_email_confirmations ( - id character(16) NOT NULL, - email character varying(255) NOT NULL, - create_time timestamp without time zone DEFAULT now(), - accept_time timestamp without time zone, - user_id character(16), - 
expire_time timestamp without time zone DEFAULT (now() + '3 days'::interval) NOT NULL -); - - --- --- Name: user_likes; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.user_likes ( - id character(16) NOT NULL, - document_id character(16) NOT NULL, - "time" timestamp without time zone DEFAULT now() -); - - --- --- Name: user_secrets; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.user_secrets ( - id character(16) NOT NULL, - name character varying(255) NOT NULL, - iv bytea NOT NULL, - value_red bytea, - value_blue bytea -); - - --- --- Name: users; Type: TABLE; Schema: public; Owner: - --- - -CREATE TABLE public.users ( - id character(16) NOT NULL, - avatar_url character varying(255) DEFAULT NULL::character varying, - login character varying(40), - name character varying(255) DEFAULT ''::character varying NOT NULL, - create_time timestamp without time zone DEFAULT now(), - bio text DEFAULT ''::text NOT NULL, - home_url character varying(255) DEFAULT ''::character varying NOT NULL, - github_id bigint, - update_time timestamp without time zone DEFAULT now(), - email character varying(255) DEFAULT ''::character varying NOT NULL, - type public.user_type DEFAULT 'individual'::public.user_type NOT NULL, - setting_dark_mode boolean DEFAULT false NOT NULL, - stripe_customer_id character varying, - delinquent boolean DEFAULT false NOT NULL, - flag_create_team boolean DEFAULT false, - active boolean DEFAULT true, - setting_autoclose_pairs boolean DEFAULT false NOT NULL, - twitter_id bigint, - google_id numeric(50,0), - github_login character varying(40), - setting_always_on_autocomplete boolean DEFAULT true NOT NULL, - flag_data_connectors boolean DEFAULT false NOT NULL, - CONSTRAINT user_login_lower CHECK (((login)::text = lower((login)::text))) -); - - --- --- Name: document_presence client_id; Type: DEFAULT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_presence ALTER COLUMN client_id SET DEFAULT 
nextval('public.document_presence_client_id_seq'::regclass); - - --- --- Name: collection_documents collection_documents_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.collection_documents - ADD CONSTRAINT collection_documents_pkey PRIMARY KEY (id, document_id); - - --- --- Name: collections collections_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.collections - ADD CONSTRAINT collections_pkey PRIMARY KEY (id); - - --- --- Name: collections collections_user_slug; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.collections - ADD CONSTRAINT collections_user_slug UNIQUE (user_id, slug); - - --- --- Name: data_connectors_documents data_connectors_documents_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.data_connectors_documents - ADD CONSTRAINT data_connectors_documents_pkey PRIMARY KEY (document_id, data_connector_user_id, data_connector_name); - - --- --- Name: data_connectors data_connectors_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.data_connectors - ADD CONSTRAINT data_connectors_pkey PRIMARY KEY (user_id, name); - - --- --- Name: document_aliases document_aliases_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_aliases - ADD CONSTRAINT document_aliases_pkey PRIMARY KEY (user_id, slug); - - --- --- Name: document_comments document_comments_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_comments - ADD CONSTRAINT document_comments_pkey PRIMARY KEY (id); - - --- --- Name: document_events document_events_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_events - ADD CONSTRAINT document_events_pkey PRIMARY KEY (id, version); - - --- --- Name: document_merges document_merges_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_merges - ADD CONSTRAINT 
document_merges_pkey PRIMARY KEY (to_id, to_end_version); - - --- --- Name: document_presence document_presence_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_presence - ADD CONSTRAINT document_presence_pkey PRIMARY KEY (client_id); - - --- --- Name: document_publishes document_publishes_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_publishes - ADD CONSTRAINT document_publishes_pkey PRIMARY KEY (id, version); - - --- --- Name: document_suggestions document_suggestions_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_suggestions - ADD CONSTRAINT document_suggestions_pkey PRIMARY KEY (id); - - --- --- Name: document_thumbnails document_thumbnails_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_thumbnails - ADD CONSTRAINT document_thumbnails_pkey PRIMARY KEY (id, version); - - --- --- Name: documents document_user_slug; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.documents - ADD CONSTRAINT document_user_slug UNIQUE (user_id, slug); - - --- --- Name: document_vectors document_vectors_published; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_vectors - ADD CONSTRAINT document_vectors_published UNIQUE (id, published); - - --- --- Name: document_vectors document_vectors_version; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_vectors - ADD CONSTRAINT document_vectors_version PRIMARY KEY (id, version); - - --- --- Name: documents documents_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.documents - ADD CONSTRAINT documents_pkey PRIMARY KEY (id); - - --- --- Name: schema_migrations schema_migrations_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.schema_migrations - ADD CONSTRAINT schema_migrations_pkey PRIMARY KEY (name); - - --- --- Name: 
team_invitations team_invitations_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.team_invitations - ADD CONSTRAINT team_invitations_pkey PRIMARY KEY (id); - - --- --- Name: team_members team_members_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.team_members - ADD CONSTRAINT team_members_pkey PRIMARY KEY (team_id, user_id); - - --- --- Name: user_email_confirmations user_email_confirmations_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.user_email_confirmations - ADD CONSTRAINT user_email_confirmations_pkey PRIMARY KEY (id); - - --- --- Name: users user_github_id; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.users - ADD CONSTRAINT user_github_id UNIQUE (github_id); - - --- --- Name: users user_google_id; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.users - ADD CONSTRAINT user_google_id UNIQUE (google_id); - - --- --- Name: user_likes user_likes_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.user_likes - ADD CONSTRAINT user_likes_pkey PRIMARY KEY (id, document_id); - - --- --- Name: users user_login; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.users - ADD CONSTRAINT user_login UNIQUE (login); - - --- --- Name: user_secrets user_secrets_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.user_secrets - ADD CONSTRAINT user_secrets_pkey PRIMARY KEY (id, name); - - --- --- Name: users user_twitter_id; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.users - ADD CONSTRAINT user_twitter_id UNIQUE (twitter_id); - - --- --- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.users - ADD CONSTRAINT users_pkey PRIMARY KEY (id); - - --- --- Name: collection_documents_id_update_time; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX 
collection_documents_id_update_time ON public.collection_documents USING btree (id, update_time); - - --- --- Name: document_creator_index; Type: INDEX; Schema: public; Owner: - --- - -CREATE UNIQUE INDEX document_creator_index ON public.document_events USING btree (id, type) WHERE (type = 'create'::public.document_event_type); - - --- --- Name: document_event_id_time; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX document_event_id_time ON public.document_events USING btree (id, "time"); - - --- --- Name: document_events_time; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX document_events_time ON public.document_events USING btree ("time"); - - --- --- Name: document_events_user_id; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX document_events_user_id ON public.document_events USING btree (user_id); - - --- --- Name: document_publish_id_time; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX document_publish_id_time ON public.document_publishes USING btree (id, "time"); - - --- --- Name: document_publish_time; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX document_publish_time ON public.documents USING btree (publish_time) WHERE ((publish_time IS NOT NULL) AND (trashed = false)); - - --- --- Name: document_suggestions_from_id; Type: INDEX; Schema: public; Owner: - --- - -CREATE UNIQUE INDEX document_suggestions_from_id ON public.document_suggestions USING btree (from_id) WHERE (close_time IS NULL); - - --- --- Name: document_suggestions_to_id; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX document_suggestions_to_id ON public.document_suggestions USING btree (to_id) WHERE (close_time IS NULL); - - --- --- Name: document_suggestions_user_id; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX document_suggestions_user_id ON public.document_suggestions USING btree (user_id); - - --- --- Name: document_thumbnail_unassigned_event_time; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX 
document_thumbnail_unassigned_event_time ON public.document_thumbnails USING btree (event_time) WHERE (assigned = false); - - --- --- Name: document_user_id; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX document_user_id ON public.documents USING btree (user_id); - - --- --- Name: document_vectors_gin_index; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX document_vectors_gin_index ON public.document_vectors USING gin (vector); - - --- --- Name: documents_fork_id; Type: INDEX; Schema: public; Owner: - --- - -CREATE INDEX documents_fork_id ON public.documents USING btree (fork_id) WHERE (fork_id IS NOT NULL); - - --- --- Name: document_comments notify_document_comments; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER notify_document_comments AFTER INSERT OR DELETE OR UPDATE ON public.document_comments FOR EACH ROW EXECUTE PROCEDURE public.notify_document_comments(); - - --- --- Name: document_presence notify_document_presence; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER notify_document_presence BEFORE INSERT OR DELETE OR UPDATE ON public.document_presence FOR EACH ROW EXECUTE PROCEDURE public.notify_document_presence(); - - --- --- Name: document_thumbnails trigger_cancel_document_thumbnails; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_cancel_document_thumbnails BEFORE INSERT ON public.document_thumbnails FOR EACH ROW EXECUTE PROCEDURE public.cancel_document_thumbnails(); - - --- --- Name: collections trigger_collections_id; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_collections_id BEFORE INSERT ON public.collections FOR EACH ROW EXECUTE PROCEDURE public.global_id(); - - --- --- Name: documents trigger_document_alias; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_document_alias AFTER UPDATE ON public.documents FOR EACH ROW WHEN (((new.slug IS NOT NULL) AND (((old.slug)::text IS DISTINCT FROM (new.slug)::text) OR (old.user_id IS DISTINCT 
FROM new.user_id)))) EXECUTE PROCEDURE public.insert_document_alias(); - - --- --- Name: document_comments trigger_document_comments_id; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_document_comments_id BEFORE INSERT ON public.document_comments FOR EACH ROW EXECUTE PROCEDURE public.global_id(); - - --- --- Name: document_publishes trigger_document_disallow_data_connectors; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_document_disallow_data_connectors BEFORE INSERT ON public.document_publishes FOR EACH ROW EXECUTE PROCEDURE public.disallow_document_data_connectors(); - - --- --- Name: document_publishes trigger_document_disallow_secrets; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_document_disallow_secrets BEFORE INSERT ON public.document_publishes FOR EACH ROW EXECUTE PROCEDURE public.disallow_document_secrets(); - - --- --- Name: document_events trigger_document_head; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_document_head AFTER INSERT ON public.document_events FOR EACH ROW EXECUTE PROCEDURE public.update_document_head(); - - --- --- Name: documents trigger_document_id; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_document_id BEFORE INSERT ON public.documents FOR EACH ROW EXECUTE PROCEDURE public.global_id(); - - --- --- Name: document_publishes trigger_document_publish; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE CONSTRAINT TRIGGER trigger_document_publish AFTER INSERT ON public.document_publishes DEFERRABLE INITIALLY DEFERRED FOR EACH ROW EXECUTE PROCEDURE public.notify_document_publish(); - - --- --- Name: document_publishes trigger_document_publish_thumbnail; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_document_publish_thumbnail BEFORE INSERT ON public.document_publishes FOR EACH ROW EXECUTE PROCEDURE public.insert_document_publish_thumbnail(); - - --- --- Name: document_publishes 
trigger_document_publish_time; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_document_publish_time BEFORE INSERT ON public.document_publishes FOR EACH ROW EXECUTE PROCEDURE public.update_document_publish_time(); - - --- --- Name: document_suggestions trigger_document_suggestions_id; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_document_suggestions_id BEFORE INSERT ON public.document_suggestions FOR EACH ROW EXECUTE PROCEDURE public.global_id(); - - --- --- Name: document_publishes trigger_index_published_document; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE CONSTRAINT TRIGGER trigger_index_published_document AFTER INSERT ON public.document_publishes DEFERRABLE INITIALLY DEFERRED FOR EACH ROW EXECUTE PROCEDURE public.index_published_document(); - - --- --- Name: team_invitations trigger_team_invitation_id; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_team_invitation_id BEFORE INSERT ON public.team_invitations FOR EACH ROW EXECUTE PROCEDURE public.global_id(); - - --- --- Name: user_email_confirmations trigger_user_email_confirmations_id; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_user_email_confirmations_id BEFORE INSERT ON public.user_email_confirmations FOR EACH ROW EXECUTE PROCEDURE public.global_id(); - - --- --- Name: users trigger_user_id; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_user_id BEFORE INSERT ON public.users FOR EACH ROW EXECUTE PROCEDURE public.global_id(); - - --- --- Name: user_likes trigger_user_like_delete; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_user_like_delete AFTER DELETE ON public.user_likes FOR EACH ROW EXECUTE PROCEDURE public.decrement_document_likes(); - - --- --- Name: user_likes trigger_user_like_insert; Type: TRIGGER; Schema: public; Owner: - --- - -CREATE TRIGGER trigger_user_like_insert AFTER INSERT ON public.user_likes FOR EACH ROW EXECUTE PROCEDURE 
public.increment_document_likes(); - - --- --- Name: collection_documents collection_documents_document_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.collection_documents - ADD CONSTRAINT collection_documents_document_id_fkey FOREIGN KEY (document_id) REFERENCES public.documents(id); - - --- --- Name: collection_documents collection_documents_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.collection_documents - ADD CONSTRAINT collection_documents_id_fkey FOREIGN KEY (id) REFERENCES public.collections(id) ON DELETE CASCADE; - - --- --- Name: data_connectors_documents data_connectors_documents_data_connector_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.data_connectors_documents - ADD CONSTRAINT data_connectors_documents_data_connector_user_id_fkey FOREIGN KEY (data_connector_user_id) REFERENCES public.users(id); - - --- --- Name: data_connectors_documents data_connectors_documents_data_connector_user_id_fkey1; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.data_connectors_documents - ADD CONSTRAINT data_connectors_documents_data_connector_user_id_fkey1 FOREIGN KEY (data_connector_user_id, data_connector_name) REFERENCES public.data_connectors(user_id, name) ON DELETE CASCADE; - - --- --- Name: data_connectors_documents data_connectors_documents_document_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.data_connectors_documents - ADD CONSTRAINT data_connectors_documents_document_id_fkey FOREIGN KEY (document_id) REFERENCES public.documents(id) ON DELETE CASCADE; - - --- --- Name: data_connectors data_connectors_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.data_connectors - ADD CONSTRAINT data_connectors_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; - - --- --- Name: document_aliases 
document_aliases_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_aliases - ADD CONSTRAINT document_aliases_id_fkey FOREIGN KEY (id) REFERENCES public.documents(id); - - --- --- Name: document_aliases document_aliases_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_aliases - ADD CONSTRAINT document_aliases_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); - - --- --- Name: document_comments document_comments_document_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_comments - ADD CONSTRAINT document_comments_document_id_fkey FOREIGN KEY (document_id) REFERENCES public.documents(id); - - --- --- Name: document_comments document_comments_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_comments - ADD CONSTRAINT document_comments_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); - - --- --- Name: document_events document_events_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_events - ADD CONSTRAINT document_events_id_fkey FOREIGN KEY (id) REFERENCES public.documents(id); - - --- --- Name: document_events document_events_original_document_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_events - ADD CONSTRAINT document_events_original_document_id_fkey FOREIGN KEY (original_document_id) REFERENCES public.documents(id); - - --- --- Name: document_events document_events_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_events - ADD CONSTRAINT document_events_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); - - --- --- Name: document_merges document_merges_from_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_merges - ADD CONSTRAINT 
document_merges_from_id_fkey FOREIGN KEY (from_id) REFERENCES public.documents(id); - - --- --- Name: document_merges document_merges_to_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_merges - ADD CONSTRAINT document_merges_to_id_fkey FOREIGN KEY (to_id) REFERENCES public.documents(id); - - --- --- Name: document_merges document_merges_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_merges - ADD CONSTRAINT document_merges_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); - - --- --- Name: document_presence document_presence_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_presence - ADD CONSTRAINT document_presence_id_fkey FOREIGN KEY (id) REFERENCES public.documents(id); - - --- --- Name: document_presence document_presence_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_presence - ADD CONSTRAINT document_presence_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); - - --- --- Name: document_publishes document_publishes_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_publishes - ADD CONSTRAINT document_publishes_id_fkey FOREIGN KEY (id) REFERENCES public.documents(id); - - --- --- Name: document_publishes document_publishes_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_publishes - ADD CONSTRAINT document_publishes_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); - - --- --- Name: document_suggestions document_suggestions_closer_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_suggestions - ADD CONSTRAINT document_suggestions_closer_id_fkey FOREIGN KEY (closer_id) REFERENCES public.users(id); - - --- --- Name: document_suggestions document_suggestions_from_id_fkey; Type: FK CONSTRAINT; 
Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_suggestions - ADD CONSTRAINT document_suggestions_from_id_fkey FOREIGN KEY (from_id) REFERENCES public.documents(id); - - --- --- Name: document_suggestions document_suggestions_to_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_suggestions - ADD CONSTRAINT document_suggestions_to_id_fkey FOREIGN KEY (to_id) REFERENCES public.documents(id); - - --- --- Name: document_suggestions document_suggestions_to_id_fkey1; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_suggestions - ADD CONSTRAINT document_suggestions_to_id_fkey1 FOREIGN KEY (to_id, to_merge_end_version) REFERENCES public.document_merges(to_id, to_end_version); - - --- --- Name: document_suggestions document_suggestions_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_suggestions - ADD CONSTRAINT document_suggestions_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); - - --- --- Name: document_thumbnails document_thumbnails_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_thumbnails - ADD CONSTRAINT document_thumbnails_id_fkey FOREIGN KEY (id) REFERENCES public.documents(id); - - --- --- Name: document_vectors document_vectors_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.document_vectors - ADD CONSTRAINT document_vectors_id_fkey FOREIGN KEY (id) REFERENCES public.documents(id); - - --- --- Name: documents documents_fork_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.documents - ADD CONSTRAINT documents_fork_id_fkey FOREIGN KEY (fork_id) REFERENCES public.documents(id); - - --- --- Name: documents documents_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.documents - ADD CONSTRAINT documents_user_id_fkey FOREIGN KEY (user_id) 
REFERENCES public.users(id); - - --- --- Name: team_invitations team_invitations_owner_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.team_invitations - ADD CONSTRAINT team_invitations_owner_id_fkey FOREIGN KEY (owner_id) REFERENCES public.users(id); - - --- --- Name: team_invitations team_invitations_team_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.team_invitations - ADD CONSTRAINT team_invitations_team_id_fkey FOREIGN KEY (team_id) REFERENCES public.users(id); - - --- --- Name: team_members team_members_team_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.team_members - ADD CONSTRAINT team_members_team_id_fkey FOREIGN KEY (team_id) REFERENCES public.users(id); - - --- --- Name: team_members team_members_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.team_members - ADD CONSTRAINT team_members_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); - - --- --- Name: user_email_confirmations user_email_confirmations_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.user_email_confirmations - ADD CONSTRAINT user_email_confirmations_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id); - - --- --- Name: user_likes user_likes_document_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.user_likes - ADD CONSTRAINT user_likes_document_id_fkey FOREIGN KEY (document_id) REFERENCES public.documents(id); - - --- --- Name: user_likes user_likes_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.user_likes - ADD CONSTRAINT user_likes_id_fkey FOREIGN KEY (id) REFERENCES public.users(id); - - --- --- Name: user_secrets user_secrets_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: - --- - -ALTER TABLE ONLY public.user_secrets - ADD CONSTRAINT user_secrets_id_fkey FOREIGN KEY (id) REFERENCES 
public.users(id); - - --- --- PostgreSQL database dump complete --- - diff --git a/docker-compose.yml b/docker-compose.yml index 08da47c..b7f1de0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -38,9 +38,6 @@ services: postgres: image: postgres:13.8-alpine3.16 - volumes: - - ./data/schema.sql:/docker-entrypoint-initdb.d/00-schema.sql:ro - - ./data/data.sql:/docker-entrypoint-initdb.d/01-data.sql:ro environment: - POSTGRES_HOST_AUTH_METHOD=trust networks: From f437aa1ece3a97562383984ca9d7905e56db788d Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 1 Feb 2023 15:34:14 -0500 Subject: [PATCH 22/29] migrated snowflakes method an tests --- .env.test.js | 1 + lib/snowflake.js | 251 ++++++++++++++++++++++++++++++++++++++--------- 2 files changed, 206 insertions(+), 46 deletions(-) diff --git a/.env.test.js b/.env.test.js index 764aff0..49d6b10 100644 --- a/.env.test.js +++ b/.env.test.js @@ -1,6 +1,7 @@ export const MSSQL_CREDENTIALS = env("MSSQL_CREDENTIALS"); export const MYSQL_CREDENTIALS = env("MYSQL_CREDENTIALS"); export const POSTGRES_CREDENTIALS = env("POSTGRES_CREDENTIALS"); +export const SNOWFLAKE_CREDENTIALS = env("SNOWFLAKE_CREDENTIALS"); export const NODE_ENV = env("NODE_ENV"); function env(key, defaultValue) { diff --git a/lib/snowflake.js b/lib/snowflake.js index 5cdaae2..4e2273a 100644 --- a/lib/snowflake.js +++ b/lib/snowflake.js @@ -2,8 +2,11 @@ import {json} from "micro"; import {URL} from "url"; import JSONStream from "JSONStream"; import snowflake from "snowflake-sdk"; +import {Transform} from "stream"; import Pools from "./pools.js"; +import {validateQueryPayload} from "./validate.js"; +import {badRequest, failedCheck} from "./errors.js"; export const pools = new Pools( ({host, user, password, database, schema, warehouse, role}) => @@ -26,23 +29,23 @@ export const pools = new Pools( ) ); -export default (url) => { - url = new URL(url); - const {host, username, password, pathname, searchParams} = new URL(url); - const connection = 
snowflake.createConnection({ - account: host, - username, - password, - database: pathname.slice(1), - schema: searchParams.get("schema"), - warehouse: searchParams.get("warehouse"), - role: searchParams.get("role"), - }); +export default (url) => async (req, res) => { + if (req.method === "POST") { + url = new URL(url); + + const {host, username, password, pathname, searchParams} = new URL(url); - const connecting = new WeakSet(); - return async function query(req, res) { - const body = await json(req); - const {sql, params} = body; + const connection = snowflake.createConnection({ + account: host, + username, + password, + database: pathname.slice(1), + schema: searchParams.get("schema"), + warehouse: searchParams.get("warehouse"), + role: searchParams.get("role"), + }); + + const connecting = new WeakSet(); const client = await new Promise((resolve, reject) => { if (connection.isUp() || connecting.has(connection)) @@ -55,39 +58,195 @@ export default (url) => { connecting.add(connection); }); - const statement = client.execute({sqlText: sql, binds: params}); - try { - const stream = statement.streamRows(); + if (req.url === "/query") return query(req, res, client); + if (req.url === "/query-stream") return queryStream(req, res, client); + if (req.url === "/check") return check(req, res, client); + } +}; - await new Promise((resolve, reject) => { - stream - .once("end", resolve) - .on("error", reject) - .pipe(JSONStream.stringify(`{"data":[`, ",", "]")) - .pipe(res, {end: false}); - }); - } catch (error) { - if (!error.statusCode) error.statusCode = 400; - throw error; - } - - const schema = { - type: "array", - items: { - type: "object", - properties: statement - .getColumns() - .reduce( - (schema, column) => ( - (schema[column.getName()] = dataTypeSchema(column)), schema - ), - {} - ), +export async function query(req, res, client) { + const body = await json(req); + if (!validateQueryPayload(body)) throw badRequest(); + const {sql, params} = body; + const 
keepAlive = setInterval(() => res.write("\n"), 25e3); + + const statement = client.execute({sqlText: sql, binds: params}); + try { + let rowCount = 0; + let bytes = 0; + + const stream = statement.streamRows(); + await new Promise((resolve, reject) => { + let dateColumns = []; + stream + .on("end", resolve) + .on("error", reject) + .once("readable", () => clearInterval(keepAlive)) + .once("readable", () => { + res.write(`{"schema":${JSON.stringify(schema(statement))}`); + dateColumns = statement + .getColumns() + .filter((c) => dataTypeSchema(c).date) + .map((c) => c.getName()); + }) + .pipe( + new Transform({ + objectMode: true, + transform(chunk, encoding, cb) { + for (const c of dateColumns) + if (chunk[c] !== null) chunk[c] = new Date(chunk[c]); + cb(null, chunk); + }, + }) + ) + .pipe(JSONStream.stringify(`,"data":[`, ",", "]}")) + .on("data", (chunk) => { + bytes += chunk.length; + rowCount++; + if (rowCount % 2e3 === 0) { + req.log({ + progress: { + rows: rowCount, + fields: statement.getColumns().length, + bytes, + done: false, + }, + }); + } + }) + .pipe(res); + }); + req.log({ + progress: { + rows: rowCount, + fields: statement.getColumns().length, + bytes, + done: true, + }, + }); + } catch (error) { + if (!error.statusCode) error.statusCode = 400; + throw error; + } finally { + clearInterval(keepAlive); + } +} + +export async function queryStream(req, res, client) { + const body = await json(req); + if (!validateQueryPayload(body)) throw badRequest(); + const {sql, params} = body; + res.setHeader("Content-Type", "text/plain"); + const keepAlive = setInterval(() => res.write("\n"), 25e3); + + const statement = client.execute({sqlText: sql, binds: params}); + try { + let rowCount = 0; + let bytes = 0; + + const stream = statement.streamRows(); + await new Promise((resolve, reject) => { + let dateColumns = []; + stream + .on("end", resolve) + .on("error", reject) + .once("readable", () => clearInterval(keepAlive)) + .once("readable", () => { + 
res.write(JSON.stringify(schema(statement))); + res.write("\n"); + dateColumns = statement + .getColumns() + .filter((c) => dataTypeSchema(c).date) + .map((c) => c.getName()); + }) + .pipe( + new Transform({ + objectMode: true, + transform(chunk, encoding, cb) { + for (const c of dateColumns) + if (chunk[c] !== null) chunk[c] = new Date(chunk[c]); + cb(null, chunk); + }, + }) + ) + .pipe(JSONStream.stringify("", "\n", "\n")) + .on("data", (chunk) => { + bytes += chunk.length; + rowCount++; + if (rowCount % 2e3 === 0) { + req.log({ + progress: { + rows: rowCount, + fields: statement.getColumns().length, + bytes, + done: false, + }, + }); + } + }) + .pipe(res); + }); + req.log({ + progress: { + rows: rowCount, + fields: statement.getColumns().length, + bytes, + done: true, + }, + }); + } catch (error) { + if (!error.statusCode) error.statusCode = 400; + throw error; + } finally { + clearInterval(keepAlive); + } +} + +const READ_ONLY = new Set(["SELECT", "USAGE", "REFERENCE_USAGE"]); +export async function check(req, res, client) { + const [{ROLE: role}] = await new Promise((resolve, reject) => { + client.execute({ + sqlText: `SELECT CURRENT_ROLE() AS ROLE`, + complete(err, _, rows) { + err ? reject(err) : resolve(rows); + }, + }); + }); + const rows = await new Promise((resolve, reject) => { + client.execute({ + sqlText: `SHOW GRANTS TO ROLE ${role}`, + complete(err, _, rows) { + err ? 
reject(err) : resolve(rows); }, - }; - res.end(`,"schema":${JSON.stringify(schema)}}`); + }); + }); + + const privileges = rows.map((r) => r.privilege); + const permissive = new Set(privileges.filter((p) => !READ_ONLY.has(p))); + if (permissive.size) + throw failedCheck( + `User has too permissive privileges: ${[...permissive].join(", ")}` + ); + + return {ok: true}; +} + +function schema(statement) { + return { + type: "array", + items: { + type: "object", + properties: statement + .getColumns() + .reduce( + (schema, column) => ( + (schema[column.getName()] = dataTypeSchema(column)), schema + ), + {} + ), + }, }; -}; +} // https://github.com/snowflakedb/snowflake-connector-nodejs/blob/master/lib/connection/result/data_types.js const array = ["null", "array"], From 1e1bbb4833ef3bdd59f94af239d02a566c994837 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 1 Feb 2023 16:55:40 -0500 Subject: [PATCH 23/29] snoflake testing infra --- .env.test | 4 +++- .github/workflows/test.yml | 2 +- .gitignore | 2 ++ docker-compose.local.yml | 4 ++++ docker-compose.yml | 2 ++ 5 files changed, 12 insertions(+), 2 deletions(-) diff --git a/.env.test b/.env.test index 78d04f2..fbfcfd0 100644 --- a/.env.test +++ b/.env.test @@ -1,4 +1,6 @@ NODE_ENV=test MYSQL_CREDENTIALS=mysql://root@mysql:3306/mysql?sslMode=DISABLED MSSQL_CREDENTIALS=Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; -POSTGRES_CREDENTIALS=postgres://postgres@postgres:5432/postgres?sslmode=disable \ No newline at end of file +POSTGRES_CREDENTIALS=postgres://postgres@postgres:5432/postgres?sslmode=disable + + diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index d823299..2a010b6 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -16,7 +16,7 @@ jobs: run: echo ${GITHUB_TOKEN} | docker login -u ${GITHUB_ACTOR} --password-stdin ghcr.io env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - + SNOWFLAKE_CREDENTIALS: ${{ 
secrets.SNOWFLAKE_CREDENTIALS }} - name: Republish id: republish continue-on-error: true diff --git a/.gitignore b/.gitignore index b0bd3c0..932744a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,4 @@ node_modules ssl/localhost.csr + +*.secret \ No newline at end of file diff --git a/docker-compose.local.yml b/docker-compose.local.yml index 2c99138..18d2e09 100644 --- a/docker-compose.local.yml +++ b/docker-compose.local.yml @@ -1,6 +1,10 @@ version: "3.7" services: + test: + env_file: + - .env.secret + mssql: image: mcr.microsoft.com/azure-sql-edge ports: diff --git a/docker-compose.yml b/docker-compose.yml index b7f1de0..0f29f1b 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -13,6 +13,8 @@ services: - postgres env_file: - .env.test + environment: + - SNOWFLAKE_CREDENTIALS networks: - db_proxy_test command: sh -c "set -o pipefail && wait-on -d 15000 -t 30000 tcp:mysql:3306 tcp:mssql:1433 tcp:postgres:5432 && node ./data/seed.mssql.js && TZ=UTC NODE_ENV=TEST node_modules/.bin/mocha --exit" From 1fc5fc3599a5616cbc80c2c66e9c1cf44699ca5a Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 1 Feb 2023 18:07:25 -0500 Subject: [PATCH 24/29] SNOWFLAKE_TEST_CREDENTIALS --- .env.test | 2 -- .env.test.js | 2 +- docker-compose.yml | 2 +- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.env.test b/.env.test index fbfcfd0..11d18ce 100644 --- a/.env.test +++ b/.env.test @@ -2,5 +2,3 @@ NODE_ENV=test MYSQL_CREDENTIALS=mysql://root@mysql:3306/mysql?sslMode=DISABLED MSSQL_CREDENTIALS=Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; POSTGRES_CREDENTIALS=postgres://postgres@postgres:5432/postgres?sslmode=disable - - diff --git a/.env.test.js b/.env.test.js index 49d6b10..6b07bf6 100644 --- a/.env.test.js +++ b/.env.test.js @@ -1,7 +1,7 @@ export const MSSQL_CREDENTIALS = env("MSSQL_CREDENTIALS"); export const MYSQL_CREDENTIALS = env("MYSQL_CREDENTIALS"); export const POSTGRES_CREDENTIALS = 
env("POSTGRES_CREDENTIALS"); -export const SNOWFLAKE_CREDENTIALS = env("SNOWFLAKE_CREDENTIALS"); +export const SNOWFLAKE_TEST_CREDENTIALS = env("SNOWFLAKE_TEST_CREDENTIALS"); export const NODE_ENV = env("NODE_ENV"); function env(key, defaultValue) { diff --git a/docker-compose.yml b/docker-compose.yml index 0f29f1b..8e02f04 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -14,7 +14,7 @@ services: env_file: - .env.test environment: - - SNOWFLAKE_CREDENTIALS + - SNOWFLAKE_TEST_CREDENTIALS networks: - db_proxy_test command: sh -c "set -o pipefail && wait-on -d 15000 -t 30000 tcp:mysql:3306 tcp:mssql:1433 tcp:postgres:5432 && node ./data/seed.mssql.js && TZ=UTC NODE_ENV=TEST node_modules/.bin/mocha --exit" From 1a13eea511ffb12774bd6372c9bf89efc0045323 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 1 Feb 2023 18:07:46 -0500 Subject: [PATCH 25/29] snowflake tests --- test/snowflake.test.js | 344 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 344 insertions(+) create mode 100644 test/snowflake.test.js diff --git a/test/snowflake.test.js b/test/snowflake.test.js new file mode 100644 index 0000000..4d64f48 --- /dev/null +++ b/test/snowflake.test.js @@ -0,0 +1,344 @@ +import assert from "node:assert"; +import MockReq from "mock-req"; +import MockRes from "mock-res"; +import snowflake, {pools} from "../lib/snowflake.js"; +import logger from "../middleware/logger.js"; + +import {SNOWFLAKE_TEST_CREDENTIALS} from "../.env.test.js"; +const index = logger(snowflake(SNOWFLAKE_TEST_CREDENTIALS)); + +describe("Snowflake", function () { + this.timeout(50000); + after(() => pools.end()); + + describe("when checking", () => { + it("should handle Snowflake credential check", async () => { + const req = new MockReq({method: "POST", url: "/check"}); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.match(error.message, /^User has too permissive privileges: /); + } + }); + }); + + describe("when querying", () => { + 
it("should handle Snowflake requests", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: ` + with foo as ( + select 1 as c1 union all select 2 as c1 + ) + select c1 + from foo + where c1 = ?`, + params: [1], + }); + const res = new MockRes(); + + await index(req, res); + + const {data, schema} = res._getJSON(); + assert.deepEqual(data, [{C1: 1}]); + assert.deepEqual(schema, { + type: "array", + items: {type: "object", properties: {C1: {type: ["null", "integer"]}}}, + }); + }); + + it("should handle Snowflake errors", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: "SELECT * FROM gibberish", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal( + error.message, + "SQL compilation error:\nObject 'GIBBERISH' does not exist or not authorized." + ); + } + }); + + it("should handle Snowflake empty query", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: "", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Bad request"); + } + }); + + it("should handle Snowflake empty results", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: `SELECT 1 AS c1 LIMIT 0`, + }); + const res = new MockRes(); + + await index(req, res); + + const {data, schema} = res._getJSON(); + assert.deepEqual(data, []); + assert.deepEqual(schema, { + type: "array", + items: {type: "object", properties: {C1: {type: ["null", "integer"]}}}, + }); + }); + }); + + describe("when streaming", () => { + it("should handle Snowflake stream requests", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: ` + with foo as ( + select 1 as c1 union all select 2 as c1 + ) + select c1 + from foo + where c1 = ?`, + params: [1], + }); + const 
res = new MockRes(); + + await index(req, res); + const response = res._getString(); + + assert.equal( + response, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {C1: {type: ["null", "integer"]}}, + }, + }) + + "\n" + + JSON.stringify({C1: 1}) + + "\n" + ); + }); + + it("should handle Snowflake stream errors", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT * FROM users", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal( + error.message, + "SQL compilation error:\nObject 'USERS' does not exist or not authorized." + ); + } + }); + + it("should handle Snowflake stream empty query", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Bad request"); + } + }); + + it("should handle Snowflake stream empty results", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT 1 AS c1 LIMIT 0", + }); + const res = new MockRes(); + + await index(req, res); + const response = res._getString(); + + assert.equal( + response, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {C1: {type: ["null", "integer"]}}, + }, + }) + "\n\n" + ); + }); + }); + + describe("when inferring the dataTypeSchema", () => { + it("should handle Snowflake simple types", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: `select + 1 as c1, + 3.14 as c2, + to_binary('DEADBEEF') as c3, + 'hello' as c4, + TIMESTAMP '2019-01-01' as c5, + true as c6, + to_object(parse_json('{"a": 1}')) as c7 + `, + }); + const res = new MockRes(); + + await index(req, res); + const {data, schema} = res._getJSON(); + assert.deepEqual(data, [ + { 
+ C1: 1, + C2: 3.14, + C3: {type: "Buffer", data: [222, 173, 190, 239]}, + C4: "hello", + C5: "2019-01-01T00:00:00.000Z", + C6: true, + C7: {a: 1}, + }, + ]); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: { + C1: {type: ["null", "integer"]}, + C2: {type: ["null", "number"]}, + C3: {type: ["null", "object"], buffer: true}, + C4: {type: ["null", "string"]}, + C5: {type: ["null", "string"], date: true}, + C6: {type: ["null", "boolean"]}, + C7: {type: ["null", "object"]}, + }, + }, + }); + }); + + it("should handle Snowflake date, time, time zones", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: `select + TO_DATE('2020-01-01') as date, + TO_TIMESTAMP_NTZ('2020-01-01 01:23:45') as datetime, -- timestamp_ntz + TO_TIME('01:23:45') as time, + TO_TIMESTAMP('2020-01-01 01:23:45') as timestamp, -- timestamp_ntz + TO_TIMESTAMP_LTZ('2020-01-01 01:23:45') as timestamp_ltz, + TO_TIMESTAMP_NTZ('2020-01-01 01:23:45') as timestamp_ntz, + TO_TIMESTAMP_TZ('2020-01-01 01:23:45') as timestamp_tz, + TO_DATE(null) as null_date + `, + }); + const res = new MockRes(); + + await index(req, res); + const {data, schema} = res._getJSON(); + + assert.deepEqual(data, [ + { + DATE: "2020-01-01T00:00:00.000Z", + DATETIME: "2020-01-01T01:23:45.000Z", + TIME: "01:23:45", + TIMESTAMP: "2020-01-01T01:23:45.000Z", + TIMESTAMP_LTZ: "2020-01-01T09:23:45.000Z", + TIMESTAMP_NTZ: "2020-01-01T01:23:45.000Z", + TIMESTAMP_TZ: "2020-01-01T09:23:45.000Z", + NULL_DATE: null, + }, + ]); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: { + DATE: {type: ["null", "string"], date: true}, + DATETIME: {type: ["null", "string"], date: true}, + TIME: {type: ["null", "string"]}, + TIMESTAMP: {type: ["null", "string"], date: true}, + TIMESTAMP_LTZ: {type: ["null", "string"], date: true}, + TIMESTAMP_NTZ: {type: ["null", "string"], date: true}, + TIMESTAMP_TZ: {type: ["null", "string"], date: true}, + 
NULL_DATE: {type: ["null", "string"], date: true}, + }, + }, + }); + }); + }); + + describe("when connecting to Snowflake", () => { + it("shouldn't attempt concurrent connections", async () => { + // Ensure a cold connection state + pools.del(SNOWFLAKE_TEST_CREDENTIALS); + + const req1 = new MockReq({method: "POST", url: "/query"}).end({ + sql: "select 1", + }); + const res1 = new MockRes(); + const req2 = new MockReq({method: "POST", url: "/query"}).end({ + sql: "select 2", + }); + const res2 = new MockRes(); + + await Promise.all([index(req1, res1), index(req2, res2)]); + + const {data: data1, schema: schema1} = res1._getJSON(); + assert.deepEqual(data1, [{1: 1}]); + assert.deepEqual(schema1, { + type: "array", + items: { + type: "object", + properties: { + 1: {type: ["null", "integer"]}, + }, + }, + }); + + const {data: data2, schema: schema2} = res2._getJSON(); + assert.deepEqual(data2, [{2: 2}]); + assert.deepEqual(schema2, { + type: "array", + items: { + type: "object", + properties: { + 2: {type: ["null", "integer"]}, + }, + }, + }); + }); + + it("should recreates connection on connect error (slow)", async () => { + const badCredentials = snowflake("snowflake://hi@hi/hi"); + const req = new MockReq({method: "POST", url: "/check"}); + const res = new MockRes(); + + try { + await badCredentials(req, res); + } catch (error) { + assert.equal( + error.message, + "Request to Snowflake failed.", + "First failure" + ); + } + try { + await badCredentials(req, res); + } catch (error) { + assert.equal( + error.message, + "Request to Snowflake failed.", + "Second failure is identical" + ); + } + }); + }); +}); From bc86874fe04a3488eb4099e16d7ed550920903a9 Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 1 Feb 2023 18:33:17 -0500 Subject: [PATCH 26/29] consistent naming convention for test credentials env variables --- .env.test | 6 +++--- .env.test.js | 6 +++--- data/seed.mssql.js | 4 ++-- test/mssql.test.js | 4 ++-- test/mysql.test.js | 4 ++-- 
test/postgres.test.js | 4 ++-- 6 files changed, 14 insertions(+), 14 deletions(-) diff --git a/.env.test b/.env.test index 11d18ce..866a9a5 100644 --- a/.env.test +++ b/.env.test @@ -1,4 +1,4 @@ NODE_ENV=test -MYSQL_CREDENTIALS=mysql://root@mysql:3306/mysql?sslMode=DISABLED -MSSQL_CREDENTIALS=Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; -POSTGRES_CREDENTIALS=postgres://postgres@postgres:5432/postgres?sslmode=disable +MYSQL_TEST_CREDENTIALS=mysql://root@mysql:3306/mysql?sslMode=DISABLED +MSSQL_TEST_CREDENTIALS=Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; +POSTGRES_TEST_CREDENTIALS=postgres://postgres@postgres:5432/postgres?sslmode=disable diff --git a/.env.test.js b/.env.test.js index 6b07bf6..d3ccbeb 100644 --- a/.env.test.js +++ b/.env.test.js @@ -1,6 +1,6 @@ -export const MSSQL_CREDENTIALS = env("MSSQL_CREDENTIALS"); -export const MYSQL_CREDENTIALS = env("MYSQL_CREDENTIALS"); -export const POSTGRES_CREDENTIALS = env("POSTGRES_CREDENTIALS"); +export const MSSQL_TEST_CREDENTIALS = env("MSSQL_TEST_CREDENTIALS"); +export const MYSQL_TEST_CREDENTIALS = env("MYSQL_TEST_CREDENTIALS"); +export const POSTGRES_TEST_CREDENTIALS = env("POSTGRES_TEST_CREDENTIALS"); export const SNOWFLAKE_TEST_CREDENTIALS = env("SNOWFLAKE_TEST_CREDENTIALS"); export const NODE_ENV = env("NODE_ENV"); diff --git a/data/seed.mssql.js b/data/seed.mssql.js index c12f934..3e6a598 100644 --- a/data/seed.mssql.js +++ b/data/seed.mssql.js @@ -1,7 +1,7 @@ import mssql from "mssql"; -import {MSSQL_CREDENTIALS} from "../.env.test.js"; +import {MSSQL_TEST_CREDENTIALS} from "../.env.test.js"; -const credentials = MSSQL_CREDENTIALS; +const credentials = MSSQL_TEST_CREDENTIALS; const seed = async () => { await mssql.connect(credentials); diff --git a/test/mssql.test.js b/test/mssql.test.js index 069a4c4..5447977 100644 --- a/test/mssql.test.js +++ b/test/mssql.test.js @@ -2,10 +2,10 @@ import assert from 
"node:assert"; import MockReq from "mock-req"; import MockRes from "mock-res"; -import {MSSQL_CREDENTIALS} from "../.env.test.js"; +import {MSSQL_TEST_CREDENTIALS} from "../.env.test.js"; import mssql, {dataTypeSchema, pools} from "../lib/mssql.js"; -const credentials = MSSQL_CREDENTIALS; +const credentials = MSSQL_TEST_CREDENTIALS; describe("SQL Server", () => { after(() => pools.end()); diff --git a/test/mysql.test.js b/test/mysql.test.js index 57a5b9e..e4b2d57 100644 --- a/test/mysql.test.js +++ b/test/mysql.test.js @@ -4,8 +4,8 @@ import MockRes from "mock-res"; import logger from "../middleware/logger.js"; import mysql, {pools} from "../lib/mysql.js"; -import {MYSQL_CREDENTIALS} from "../.env.test.js"; -const index = logger(mysql(MYSQL_CREDENTIALS)); +import {MYSQL_TEST_CREDENTIALS} from "../.env.test.js"; +const index = logger(mysql(MYSQL_TEST_CREDENTIALS)); describe("MySQL", () => { after(() => pools.end()); diff --git a/test/postgres.test.js b/test/postgres.test.js index 51e5087..e51e7c3 100644 --- a/test/postgres.test.js +++ b/test/postgres.test.js @@ -4,8 +4,8 @@ import MockRes from "mock-res"; import logger from "../middleware/logger.js"; import pg, {pools} from "../lib/postgres.js"; -import {POSTGRES_CREDENTIALS} from "../.env.test.js"; -const index = logger(pg(POSTGRES_CREDENTIALS)); +import {POSTGRES_TEST_CREDENTIALS} from "../.env.test.js"; +const index = logger(pg(POSTGRES_TEST_CREDENTIALS)); describe("postgreSQL", () => { after(() => pools.end()); From 83dfa5c2f2b4f501f4355cc386afb71db2e0771f Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 1 Feb 2023 18:54:31 -0500 Subject: [PATCH 27/29] fix snowflage credentials name --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2a010b6..b8ac2f3 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -16,7 +16,7 @@ jobs: run: echo ${GITHUB_TOKEN} | docker login -u 
${GITHUB_ACTOR} --password-stdin ghcr.io env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SNOWFLAKE_CREDENTIALS: ${{ secrets.SNOWFLAKE_CREDENTIALS }} + SNOWFLAKE_TEST_CREDENTIALS: ${{ secrets.SNOWFLAKE_TEST_CREDENTIALS }} - name: Republish id: republish continue-on-error: true From 9dc999a1560940496d494a58abfbad148b09a48c Mon Sep 17 00:00:00 2001 From: Sylvestre Gug Date: Wed, 1 Feb 2023 18:58:52 -0500 Subject: [PATCH 28/29] env variable at the right place --- .github/workflows/test.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index b8ac2f3..339a411 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -16,7 +16,6 @@ jobs: run: echo ${GITHUB_TOKEN} | docker login -u ${GITHUB_ACTOR} --password-stdin ghcr.io env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - SNOWFLAKE_TEST_CREDENTIALS: ${{ secrets.SNOWFLAKE_TEST_CREDENTIALS }} - name: Republish id: republish continue-on-error: true @@ -35,6 +34,8 @@ jobs: - name: Test if: ${{ steps.republish.outcome != 'success' }} run: docker-compose run test + env: + SNOWFLAKE_TEST_CREDENTIALS: ${{ secrets.SNOWFLAKE_TEST_CREDENTIALS }} - name: Container logs if: failure() run: docker-compose logs --no-color --timestamps \ No newline at end of file From 84c7036a3cd426951313ee4f20dc68910ca8b878 Mon Sep 17 00:00:00 2001 From: Wiltse Carpenter Date: Wed, 8 Nov 2023 15:09:22 -0800 Subject: [PATCH 29/29] Merge a few minor changes --- lib/oracle.js | 4 ++-- lib/snowflake.js | 12 +++++++++--- lib/validate.js | 2 +- 3 files changed, 12 insertions(+), 6 deletions(-) diff --git a/lib/oracle.js b/lib/oracle.js index 477dd43..967b68b 100644 --- a/lib/oracle.js +++ b/lib/oracle.js @@ -238,8 +238,8 @@ export default async ({url, username, password}) => { // We do not want to import the oracledb library until we are sure that the user is looking to use Oracle. // Installing the oracledb library is a pain, so we want to avoid it if possible. 
const config = { - username: username, - password: password, + username, + password, connectionString: decodeURI(url), }; diff --git a/lib/snowflake.js b/lib/snowflake.js index 4e2273a..8c787bb 100644 --- a/lib/snowflake.js +++ b/lib/snowflake.js @@ -6,7 +6,7 @@ import {Transform} from "stream"; import Pools from "./pools.js"; import {validateQueryPayload} from "./validate.js"; -import {badRequest, failedCheck} from "./errors.js"; +import {badRequest, failedCheck, notFound} from "./errors.js"; export const pools = new Pools( ({host, user, password, database, schema, warehouse, role}) => @@ -57,11 +57,12 @@ export default (url) => async (req, res) => { }); connecting.add(connection); }); - if (req.url === "/query") return query(req, res, client); if (req.url === "/query-stream") return queryStream(req, res, client); if (req.url === "/check") return check(req, res, client); } + + throw notFound(); }; export async function query(req, res, client) { @@ -263,7 +264,12 @@ function dataTypeSchema(column) { return {type: boolean}; case "fixed": case "real": - return {type: column.getScale() ? number : integer}; + return { + type: + column.getScale() === null || column.getScale() > 0 + ? number + : integer, + }; case "date": case "timestamp_ltz": case "timestamp_ntz": diff --git a/lib/validate.js b/lib/validate.js index 35cf042..75d2892 100644 --- a/lib/validate.js +++ b/lib/validate.js @@ -8,7 +8,7 @@ export const validateQueryPayload = ajv.compile({ required: ["sql"], properties: { sql: {type: "string", minLength: 1, maxLength: 32 * 1000}, - params: {anyOf: [{type: ["object"]}, {type: ["array"]}]} + params: {anyOf: [{type: ["object"]}, {type: ["array"]}]}, }, }); export const validateDescribeColumnsPayload = ajv.compile({