diff --git a/.env.test b/.env.test index 8929558..866a9a5 100644 --- a/.env.test +++ b/.env.test @@ -1,3 +1,4 @@ NODE_ENV=test -MSSQL_CREDENTIALS:Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; -MSSQL_CREDENTIALS_READ_ONLY:Server=mssql,1433;Database=master;User Id=reader;Password=re@derP@ssw0rd;trustServerCertificate=true; \ No newline at end of file +MYSQL_TEST_CREDENTIALS=mysql://root@mysql:3306/mysql?sslMode=DISABLED +MSSQL_TEST_CREDENTIALS=Server=mssql,1433;Database=master;User Id=sa;Password=Pass@word;trustServerCertificate=true; +POSTGRES_TEST_CREDENTIALS=postgres://postgres@postgres:5432/postgres?sslmode=disable diff --git a/.env.test.js b/.env.test.js index f096681..d3ccbeb 100644 --- a/.env.test.js +++ b/.env.test.js @@ -1,4 +1,7 @@ -export const MSSQL_CREDENTIALS = env("MSSQL_CREDENTIALS"); +export const MSSQL_TEST_CREDENTIALS = env("MSSQL_TEST_CREDENTIALS"); +export const MYSQL_TEST_CREDENTIALS = env("MYSQL_TEST_CREDENTIALS"); +export const POSTGRES_TEST_CREDENTIALS = env("POSTGRES_TEST_CREDENTIALS"); +export const SNOWFLAKE_TEST_CREDENTIALS = env("SNOWFLAKE_TEST_CREDENTIALS"); export const NODE_ENV = env("NODE_ENV"); function env(key, defaultValue) { diff --git a/.eslintrc.json b/.eslintrc.json index 117a86c..d7761a3 100644 --- a/.eslintrc.json +++ b/.eslintrc.json @@ -1,7 +1,7 @@ { "parserOptions": { "sourceType": "module", - "ecmaVersion": 2018 + "ecmaVersion": 2022 }, "env": { "node": true, @@ -18,7 +18,7 @@ { "files": ["*.test.js"], "env": { - "jest": true + "mocha": true } } ], diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..339a411 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,41 @@ +name: Test +on: push + +jobs: + test: + runs-on: ubuntu-20.04 + defaults: + run: + working-directory: . 
+ env: + DOCKER_PACKAGE: ghcr.io/${{ github.repository }}/database-proxy_test + + steps: + - uses: actions/checkout@v3 + - name: Docker login + run: echo ${GITHUB_TOKEN} | docker login -u ${GITHUB_ACTOR} --password-stdin ghcr.io + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Republish + id: republish + continue-on-error: true + if: ${{ needs.Changes.outputs.connector == 'false' }} + run: | + ../.github/retry docker pull ${DOCKER_PACKAGE}:${{ github.event.before }} + docker tag ${DOCKER_PACKAGE}:${{ github.event.before }} ${DOCKER_PACKAGE}:${GITHUB_SHA} + ../.github/retry docker push ${DOCKER_PACKAGE}:${GITHUB_SHA} + + - name: Build + if: ${{ steps.republish.outcome != 'success' }} + run: docker-compose build + - name: Lint + if: ${{ steps.republish.outcome != 'success' }} + run: docker-compose run lint + - name: Test + if: ${{ steps.republish.outcome != 'success' }} + run: docker-compose run test + env: + SNOWFLAKE_TEST_CREDENTIALS: ${{ secrets.SNOWFLAKE_TEST_CREDENTIALS }} + - name: Container logs + if: failure() + run: docker-compose logs --no-color --timestamps \ No newline at end of file diff --git a/.gitignore b/.gitignore index b0bd3c0..932744a 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,4 @@ node_modules ssl/localhost.csr + +*.secret \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 0af809b..0d28751 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,7 @@ FROM node:18.12.1-alpine +RUN apk update && apk --no-cache add git + RUN mkdir /app WORKDIR /app diff --git a/data/seed.mssql.js b/data/seed.mssql.js index c12f934..3e6a598 100644 --- a/data/seed.mssql.js +++ b/data/seed.mssql.js @@ -1,7 +1,7 @@ import mssql from "mssql"; -import {MSSQL_CREDENTIALS} from "../.env.test.js"; +import {MSSQL_TEST_CREDENTIALS} from "../.env.test.js"; -const credentials = MSSQL_CREDENTIALS; +const credentials = MSSQL_TEST_CREDENTIALS; const seed = async () => { await mssql.connect(credentials); diff --git a/docker-compose.local.yml b/docker-compose.local.yml index 341dbf2..18d2e09 100644 --- a/docker-compose.local.yml +++ b/docker-compose.local.yml @@ -1,7 +1,19 @@ version: "3.7" services: + test: + env_file: + - .env.secret + mssql: image: mcr.microsoft.com/azure-sql-edge - expose: - - "1433" + ports: + - "1433:1433" + + mysql: + ports: + - "3306:3306" + + postgres: + ports: + - "5432:5432" \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml index 9e87eb9..8e02f04 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,15 +1,23 @@ version: "3.7" services: + lint: + build: . + command: eslint . + test: build: . 
depends_on: - mssql + - mysql + - postgres env_file: - .env.test + environment: + - SNOWFLAKE_TEST_CREDENTIALS networks: - db_proxy_test - command: sh -c "set -o pipefail && wait-on -d 10000 -t 30000 tcp:mssql:1433 && node ./data/seed.mssql.js && TZ=UTC NODE_ENV=TEST node_modules/.bin/mocha" + command: sh -c "set -o pipefail && wait-on -d 15000 -t 30000 tcp:mysql:3306 tcp:mssql:1433 tcp:postgres:5432 && node ./data/seed.mssql.js && TZ=UTC NODE_ENV=TEST node_modules/.bin/mocha --exit" mssql: image: mcr.microsoft.com/mssql/server:2019-latest @@ -20,8 +28,20 @@ services: - MSSQL_SLEEP=7 volumes: - ./data/AdventureWorks2019.bak:/var/opt/mssql/backup/test.bak - ports: - - "1433:1433" + networks: + - db_proxy_test + + mysql: + image: mariadb:10.6.4 + environment: + - MARIADB_ALLOW_EMPTY_ROOT_PASSWORD=yes + networks: + - db_proxy_test + + postgres: + image: postgres:13.8-alpine3.16 + environment: + - POSTGRES_HOST_AUTH_METHOD=trust networks: - db_proxy_test diff --git a/lib/databricks.js b/lib/databricks.js index 073e169..8efb1c5 100644 --- a/lib/databricks.js +++ b/lib/databricks.js @@ -253,14 +253,14 @@ export async function queryStream(req, res, connection) { res.write(`${JSON.stringify(responseSchema)}`); res.write("\n"); - await new Promise(async (resolve, reject) => { + await new Promise((resolve, reject) => { const stream = new Readable.from(rows); stream.once("data", () => { clearInterval(keepAlive); }); - stream.on("close", (error) => { + stream.on("close", () => { resolve(); stream.destroy(); }); @@ -345,8 +345,6 @@ export async function check(req, res, connection) { }); return {ok: true}; - } catch (e) { - throw e; } finally { if (connection) { try { diff --git a/lib/mssql.js b/lib/mssql.js index ce2c4f1..4dc10b2 100644 --- a/lib/mssql.js +++ b/lib/mssql.js @@ -5,6 +5,7 @@ import {Transform} from "stream"; import {failedCheck, badRequest, notImplemented} from "./errors.js"; import {validateQueryPayload} from "./validate.js"; +import Pools from "./pools.js"; const TYPES = mssql.TYPES; const READ_ONLY = new Set(["SELECT", "USAGE", "CONNECT"]); @@ -124,8 +125,6 @@ export async function check(req, res, pool) { return {ok: true}; } -export const ConnectionPool = mssql.ConnectionPool; - export default (credentials) => { const pool = new mssql.ConnectionPool(credentials); @@ -144,6 +143,14 @@ export default (credentials) => { }; }; +export const pools = new Pools((credentials) => + Object.defineProperty(new mssql.ConnectionPool(credentials), "end", { + value() { + this.close(); + }, + }) +); + // See https://github.com/tediousjs/node-mssql/blob/66587d97c9ce21bffba8ca360c72a540f2bc47a6/lib/datatypes.js#L6 const boolean = ["null", "boolean"], integer = ["null", "integer"], diff --git a/lib/mysql.js b/lib/mysql.js index bc719b4..aa9609b 100644 --- a/lib/mysql.js +++ b/lib/mysql.js @@ -1,11 +1,26 @@ import JSONStream from "JSONStream"; import {json} from "micro"; -import mysql, {createConnection} from "mysql2"; +import mysql, {createConnection, createPool} from "mysql2"; import {failedCheck} from "./errors.js"; import {notFound} from "./errors.js"; +import Pools from "./pools.js"; const {Types, ConnectionConfig} = mysql; +export const pools = new Pools(({host, port, database, user, password, ssl}) => + createPool({ + host, + port, + database, + user, + password, + ssl: ssl === "required" ? 
{} : false, + connectTimeout: 25e3, + connectionLimit: 30, + decimalNumbers: true, + }) +); + export async function query(req, res, pool) { const {sql, params} = await json(req); const keepAlive = setInterval(() => res.write("\n"), 25e3); diff --git a/lib/oracle.js b/lib/oracle.js index 6acee3c..967b68b 100644 --- a/lib/oracle.js +++ b/lib/oracle.js @@ -4,6 +4,7 @@ import {Transform} from "stream"; import {badRequest, failedCheck} from "./errors.js"; import {validateQueryPayload} from "./validate.js"; +import Pools from "./pools.js"; const READ_ONLY = new Set(["SELECT", "USAGE", "CONNECT"]); export class OracleSingleton { @@ -204,8 +205,6 @@ export async function check(req, res, pool) { ); return {ok: true}; - } catch (e) { - throw e; } finally { if (connection) { try { @@ -217,13 +216,30 @@ export async function check(req, res, pool) { } } +export const pools = new Pools(async (credentials) => { + const oracledb = await OracleSingleton.getInstance(); + credentials.connectionString = decodeURI(credentials.connectionString); + const pool = await oracledb.createPool(credentials); + + Object.defineProperty(pool, "end", { + value() { + // We must ensure there is no query still running before we close the pool. + if (this._connectionsOut === 0) { + this.close(); + } + }, + }); + + return pool; +}); + export default async ({url, username, password}) => { OracleSingleton.initialize(); // We do not want to import the oracledb library until we are sure that the user is looking to use Oracle. // Installing the oracledb library is a pain, so we want to avoid it if possible. const config = { - username: username, - password: password, + username, + password, connectionString: decodeURI(url), }; diff --git a/lib/pools.js b/lib/pools.js new file mode 100644 index 0000000..e616f60 --- /dev/null +++ b/lib/pools.js @@ -0,0 +1,55 @@ +import LRU from "lru-cache"; +import * as Sentry from "@sentry/node"; + +const maxAge = 1000 * 60 * 10; // 10m + +export default class Pools { + constructor(createPool) { + this.createPool = createPool; + this.cache = new LRU({ + max: 100, + maxAge, + updateAgeOnGet: true, + dispose(_key, pool) { + pool.end(); + }, + }); + + let loop; + (loop = () => { + this.cache.prune(); + this.timeout = setTimeout(loop, maxAge / 2); + })(); + } + + async get(credentials) { + const key = JSON.stringify(credentials); + if (this.cache.has(key)) return this.cache.get(key); + const pool = await this.createPool(credentials); + + pool.on("error", (error) => { + // We need to attach a handler otherwise the process could exit, but we + // just don't care about these errors because the client will get cleaned + // up already. For debugging purposes, we'll add a Sentry breadcrumb if + // something else errors more loudly. 
+ Sentry.addBreadcrumb({ + message: error.message, + category: "pool", + level: "error", + data: error, + }); + }); + + this.cache.set(key, pool); + return pool; + } + + del(credentials) { + this.cache.del(JSON.stringify(credentials)); + } + + end() { + if (this.timeout) clearTimeout(this.timeout); + for (const pool of this.cache.values()) pool.end(); + } +} diff --git a/lib/postgres.js b/lib/postgres.js index c6c9ce8..df93622 100644 --- a/lib/postgres.js +++ b/lib/postgres.js @@ -2,46 +2,196 @@ import {json} from "micro"; import pg from "pg"; import QueryStream from "pg-query-stream"; import JSONStream from "JSONStream"; +import {validateQueryPayload} from "./validate.js"; +import {badRequest, failedCheck, notFound} from "./errors.js"; + +import Pools from "./pools.js"; const {Pool} = pg; -export default (url) => { - const pool = new Pool({connectionString: url}); +export const pools = new Pools( + ({host, port, database, user, password, ssl}) => + new pg.Pool({ + host, + port, + database, + user, + password, + ssl: ssl === "required" ? {rejectUnauthorized: false} : false, + connectionTimeoutMillis: 25e3, + statement_timeout: 240e3, + max: 30, + }) +); + +export default (url) => async (req, res) => { + const connection = new Pool({connectionString: url}); + + if (req.method === "POST") { + if (req.url === "/query") return query(req, res, connection); + if (req.url === "/query-stream") return queryStream(req, res, connection); + if (req.url === "/check") return check(req, res, connection); + } + + throw notFound(); +}; - return async function query(req, res) { - const {sql, params} = await json(req); - const client = await pool.connect(); +export async function query(req, res, pool) { + const body = await json(req); + if (!validateQueryPayload(body)) throw badRequest(); + const {sql, params} = body; + const client = await pool.connect(); + const keepAlive = setInterval(() => res.write("\n"), 25e3); + try { + let rowCount = 0; + let bytes = 0; + const queryStream = new QueryStream(sql, params); try { - const queryStream = new QueryStream(sql, params); const stream = await client.query(queryStream); await new Promise((resolve, reject) => { stream .on("end", resolve) .on("error", reject) - .pipe(JSONStream.stringify(`{"data":[`, ",", "]")) - .pipe(res, {end: false}); + .once("readable", () => clearInterval(keepAlive)) + .once("readable", () => { + res.write(`{"schema":${JSON.stringify(schema(queryStream))}`); + }) + .pipe(JSONStream.stringify(`,"data":[`, ",", "]}")) + .on("data", (chunk) => { + bytes += chunk.length; + rowCount++; + if (rowCount && rowCount % 2e3 === 0) + req.log({ + progress: { + rows: rowCount, + fields: queryStream.cursor._result.fields.length, + bytes, + done: false, + }, + }); + }) + .pipe(res); }); + } catch (error) { + if (!error.statusCode) error.statusCode = 400; + throw error; + } + req.log({ + progress: { + rows: rowCount, + fields: queryStream.cursor._result.fields.length, + bytes, + done: true, + }, + }); + } finally { + clearInterval(keepAlive); + client.release(); + } +} + +export async function queryStream(req, res, pool) { + const body = await json(req); + if (!validateQueryPayload(body)) throw badRequest(); + const {sql, params} = body; + const client = await pool.connect(); + res.setHeader("Content-Type", "text/plain"); + const keepAlive = setInterval(() => res.write("\n"), 25e3); - const schema = { - type: "array", - items: { - type: "object", - properties: queryStream.cursor._result.fields.reduce( - (schema, {name, dataTypeID}) => ( - (schema[name] = 
dataTypeSchema(dataTypeID)), schema - ), - {} - ), - }, - }; - res.end(`,"schema":${JSON.stringify(schema)}}`); - } finally { - client.release(); + try { + let rowCount = 0; + let bytes = 0; + + const queryStream = new QueryStream(sql, params); + req.on("close", () => queryStream.cursor.close()); + try { + const stream = await client.query(queryStream); + + await new Promise((resolve, reject) => { + stream + .on("end", resolve) + .on("error", reject) + .once("readable", () => clearInterval(keepAlive)) + .once("readable", () => { + res.write(JSON.stringify(schema(queryStream))); + res.write("\n"); + }) + .pipe(JSONStream.stringify("", "\n", "\n")) + .on("data", (chunk) => { + bytes += chunk.length; + rowCount++; + if (rowCount % 2e3 === 0) { + req.log({ + progress: { + rows: rowCount, + fields: queryStream.cursor._result.fields.length, + bytes, + done: false, + }, + }); + } + }) + .pipe(res); + }); + } catch (error) { + if (!error.statusCode) error.statusCode = 400; + throw error; } + req.log({ + progress: { + rows: rowCount, + fields: queryStream.cursor._result.fields.length, + bytes, + done: true, + }, + }); + } finally { + clearInterval(keepAlive); + client.release(); + } +} + +export async function check(req, res, pool) { + // TODO: use table_privileges and column_privileges to ensure public + // privileges aren't too permissive? + const {rows} = await pool.query(` + SELECT DISTINCT privilege_type + FROM information_schema.role_table_grants + WHERE grantee = user + + UNION + + SELECT DISTINCT privilege_type + FROM information_schema.role_column_grants + WHERE grantee = user + `); + + const privileges = rows.map((r) => r.privilege_type); + const permissive = privileges.filter((p) => p !== "SELECT"); + if (permissive.length) + throw failedCheck( + `User has too permissive privileges: ${permissive.join(", ")}` + ); + + return {ok: true}; +} + +function schema(queryStream) { + return { + type: "array", + items: { + type: "object", + properties: queryStream.cursor._result.fields.reduce( + (schema, {name, dataTypeID}) => ( + (schema[name] = dataTypeSchema(dataTypeID)), schema + ), + {} + ), + }, }; -}; +} // https://www.postgresql.org/docs/9.6/datatype.html const array = ["null", "array"], @@ -53,15 +203,22 @@ const array = ["null", "array"], function dataTypeSchema(dataTypeID) { switch (dataTypeID) { // https://github.com/brianc/node-pg-types/blob/master/lib/textParsers.js#L166 + case 18: + return {type: string, char: true}; case 20: //parseBigInteger // int8 return {type: string, bigint: true}; case 21: //parseInteger // int2 + return {type: integer, int16: true}; case 23: //parseInteger // int4 + return {type: integer, int32: true}; + case 24: + return {type: string, regproc: true}; case 26: //parseInteger // oid - return {type: integer}; + return {type: integer, oid: true}; case 700: //parseFloat // float4/real + return {type: number, float32: true}; case 701: //parseFloat // float8/double - return {type: number}; + return {type: number, float64: true}; case 16: //parseBool return {type: boolean}; case 1082: //parseDate // date @@ -76,24 +233,34 @@ function dataTypeSchema(dataTypeID) { return {type: object}; case 1000: //parseBoolArray return {type: array, items: {type: boolean}}; - case 1001: //parseByteAArray + case 1001: //parseByteArray return {type: array, items: {type: object, buffer: true}}; + case 1002: + return {type: array, items: {type: string, char: true}}; case 1005: //parseIntegerArray // _int2 + return {type: array, items: {type: integer, int16: true}}; case 1007: 
//parseIntegerArray // _int4 + return {type: array, items: {type: integer, int32: true}}; case 1028: //parseIntegerArray // oid[] - return {type: array, items: {type: integer}}; + return {type: array, items: {type: integer, oid: true}}; case 1016: //parseBigIntegerArray // _int8 return {type: array, items: {type: string, bigint: true}}; case 1017: //parsePointArray // point[] return {type: array, items: {type: object}}; case 1021: //parseFloatArray // _float4 + return {type: array, items: {type: number, float32: true}}; case 1022: //parseFloatArray // _float8 + return {type: array, items: {type: number, float64: true}}; case 1231: //parseFloatArray // _numeric - return {type: array, items: {type: number}}; + return {type: array, items: {type: string, numeric: true}}; case 1014: //parseStringArray //char + return {type: array, items: {type: string, char: true}}; case 1015: //parseStringArray //varchar + return {type: array, items: {type: string, varchar: true}}; case 1008: //parseStringArray + return {type: array, items: {type: string, regproc: true}}; case 1009: //parseStringArray + return {type: array, items: {type: string, text: true}}; case 1040: //parseStringArray // macaddr[] case 1041: //parseStringArray // inet[] return {type: array, items: {type: string}}; @@ -121,8 +288,9 @@ function dataTypeSchema(dataTypeID) { return {type: array, items: {type: string}}; // https://github.com/brianc/node-pg-types/blob/master/lib/binaryParsers.js#L236 case 1700: //parseNumeric - return {type: number}; + return {type: string, numeric: true}; case 25: //parseText + return {type: string, text: true}; default: return {type: string}; } diff --git a/lib/snowflake.js b/lib/snowflake.js index 5503b65..8c787bb 100644 --- a/lib/snowflake.js +++ b/lib/snowflake.js @@ -2,26 +2,50 @@ import {json} from "micro"; import {URL} from "url"; import JSONStream from "JSONStream"; import snowflake from "snowflake-sdk"; +import {Transform} from "stream"; -export default url => { - url = new URL(url); - const {host, username, password, pathname, searchParams} = new URL( - url - ); - const connection = snowflake.createConnection({ - account: host, - username, - password, - database: pathname.slice(1), - schema: searchParams.get("schema"), - warehouse: searchParams.get("warehouse"), - role: searchParams.get("role") - }); +import Pools from "./pools.js"; +import {validateQueryPayload} from "./validate.js"; +import {badRequest, failedCheck, notFound} from "./errors.js"; + +export const pools = new Pools( + ({host, user, password, database, schema, warehouse, role}) => + Object.defineProperty( + snowflake.createConnection({ + account: host, + username: user, + password, + database, + schema, + warehouse, + role, + }), + "end", + { + value() { + this.destroy(); + }, + } + ) +); - const connecting = new WeakSet(); - return async function query(req, res) { - const body = await json(req); - const {sql, params} = body; +export default (url) => async (req, res) => { + if (req.method === "POST") { + url = new URL(url); + + const {host, username, password, pathname, searchParams} = new URL(url); + + const connection = snowflake.createConnection({ + account: host, + username, + password, + database: pathname.slice(1), + schema: searchParams.get("schema"), + warehouse: searchParams.get("warehouse"), + role: searchParams.get("role"), + }); + + const connecting = new WeakSet(); const client = await new Promise((resolve, reject) => { if (connection.isUp() || connecting.has(connection)) @@ -33,40 +57,197 @@ export default url => { 
}); connecting.add(connection); }); + if (req.url === "/query") return query(req, res, client); + if (req.url === "/query-stream") return queryStream(req, res, client); + if (req.url === "/check") return check(req, res, client); + } + + throw notFound(); +}; - const statement = client.execute({sqlText: sql, binds: params}); - try { - const stream = statement.streamRows(); +export async function query(req, res, client) { + const body = await json(req); + if (!validateQueryPayload(body)) throw badRequest(); + const {sql, params} = body; + const keepAlive = setInterval(() => res.write("\n"), 25e3); - await new Promise((resolve, reject) => { - stream - .once("end", resolve) - .on("error", reject) - .pipe(JSONStream.stringify(`{"data":[`, ",", "]")) - .pipe(res, {end: false}); - }); - } catch (error) { - if (!error.statusCode) error.statusCode = 400; - throw error; - } - - const schema = { - type: "array", - items: { - type: "object", - properties: statement - .getColumns() - .reduce( - (schema, column) => ( - (schema[column.getName()] = dataTypeSchema(column)), schema - ), - {} - ) - } - }; - res.end(`,"schema":${JSON.stringify(schema)}}`); + const statement = client.execute({sqlText: sql, binds: params}); + try { + let rowCount = 0; + let bytes = 0; + + const stream = statement.streamRows(); + await new Promise((resolve, reject) => { + let dateColumns = []; + stream + .on("end", resolve) + .on("error", reject) + .once("readable", () => clearInterval(keepAlive)) + .once("readable", () => { + res.write(`{"schema":${JSON.stringify(schema(statement))}`); + dateColumns = statement + .getColumns() + .filter((c) => dataTypeSchema(c).date) + .map((c) => c.getName()); + }) + .pipe( + new Transform({ + objectMode: true, + transform(chunk, encoding, cb) { + for (const c of dateColumns) + if (chunk[c] !== null) chunk[c] = new Date(chunk[c]); + cb(null, chunk); + }, + }) + ) + .pipe(JSONStream.stringify(`,"data":[`, ",", "]}")) + .on("data", (chunk) => { + bytes += chunk.length; + rowCount++; + if (rowCount % 2e3 === 0) { + req.log({ + progress: { + rows: rowCount, + fields: statement.getColumns().length, + bytes, + done: false, + }, + }); + } + }) + .pipe(res); + }); + req.log({ + progress: { + rows: rowCount, + fields: statement.getColumns().length, + bytes, + done: true, + }, + }); + } catch (error) { + if (!error.statusCode) error.statusCode = 400; + throw error; + } finally { + clearInterval(keepAlive); + } +} + +export async function queryStream(req, res, client) { + const body = await json(req); + if (!validateQueryPayload(body)) throw badRequest(); + const {sql, params} = body; + res.setHeader("Content-Type", "text/plain"); + const keepAlive = setInterval(() => res.write("\n"), 25e3); + + const statement = client.execute({sqlText: sql, binds: params}); + try { + let rowCount = 0; + let bytes = 0; + + const stream = statement.streamRows(); + await new Promise((resolve, reject) => { + let dateColumns = []; + stream + .on("end", resolve) + .on("error", reject) + .once("readable", () => clearInterval(keepAlive)) + .once("readable", () => { + res.write(JSON.stringify(schema(statement))); + res.write("\n"); + dateColumns = statement + .getColumns() + .filter((c) => dataTypeSchema(c).date) + .map((c) => c.getName()); + }) + .pipe( + new Transform({ + objectMode: true, + transform(chunk, encoding, cb) { + for (const c of dateColumns) + if (chunk[c] !== null) chunk[c] = new Date(chunk[c]); + cb(null, chunk); + }, + }) + ) + .pipe(JSONStream.stringify("", "\n", "\n")) + .on("data", (chunk) => { + bytes += 
chunk.length; + rowCount++; + if (rowCount % 2e3 === 0) { + req.log({ + progress: { + rows: rowCount, + fields: statement.getColumns().length, + bytes, + done: false, + }, + }); + } + }) + .pipe(res); + }); + req.log({ + progress: { + rows: rowCount, + fields: statement.getColumns().length, + bytes, + done: true, + }, + }); + } catch (error) { + if (!error.statusCode) error.statusCode = 400; + throw error; + } finally { + clearInterval(keepAlive); + } +} + +const READ_ONLY = new Set(["SELECT", "USAGE", "REFERENCE_USAGE"]); +export async function check(req, res, client) { + const [{ROLE: role}] = await new Promise((resolve, reject) => { + client.execute({ + sqlText: `SELECT CURRENT_ROLE() AS ROLE`, + complete(err, _, rows) { + err ? reject(err) : resolve(rows); + }, + }); + }); + const rows = await new Promise((resolve, reject) => { + client.execute({ + sqlText: `SHOW GRANTS TO ROLE ${role}`, + complete(err, _, rows) { + err ? reject(err) : resolve(rows); + }, + }); + }); + + const privileges = rows.map((r) => r.privilege); + const permissive = new Set(privileges.filter((p) => !READ_ONLY.has(p))); + if (permissive.size) + throw failedCheck( + `User has too permissive privileges: ${[...permissive].join(", ")}` + ); + + return {ok: true}; +} + +function schema(statement) { + return { + type: "array", + items: { + type: "object", + properties: statement + .getColumns() + .reduce( + (schema, column) => ( + (schema[column.getName()] = dataTypeSchema(column)), schema + ), + {} + ), + }, }; -}; +} // https://github.com/snowflakedb/snowflake-connector-nodejs/blob/master/lib/connection/result/data_types.js const array = ["null", "array"], @@ -83,7 +264,12 @@ function dataTypeSchema(column) { return {type: boolean}; case "fixed": case "real": - return {type: column.getScale() ? number : integer}; + return { + type: + column.getScale() === null || column.getScale() > 0 + ? 
number + : integer, + }; case "date": case "timestamp_ltz": case "timestamp_ntz": diff --git a/lib/validate.js b/lib/validate.js index d4661a3..75d2892 100644 --- a/lib/validate.js +++ b/lib/validate.js @@ -1,6 +1,6 @@ import Ajv from "ajv"; -const ajv = new Ajv(); +const ajv = new Ajv({allowUnionTypes: true}); export const validateQueryPayload = ajv.compile({ type: "object", @@ -8,7 +8,7 @@ export const validateQueryPayload = ajv.compile({ required: ["sql"], properties: { sql: {type: "string", minLength: 1, maxLength: 32 * 1000}, - params: {anyOf: [{type: ["object"]}, {type: ["array"]}]} + params: {anyOf: [{type: ["object"]}, {type: ["array"]}]}, }, }); export const validateDescribeColumnsPayload = ajv.compile({ @@ -18,6 +18,6 @@ export const validateDescribeColumnsPayload = ajv.compile({ properties: { catalog: {type: "string", minLength: 1, maxLength: 32 * 1000}, schema: {type: "string", minLength: 1, maxLength: 32 * 1000}, - table: {type: "string", minLength: 1, maxLength: 32 * 1000} + table: {type: "string", minLength: 1, maxLength: 32 * 1000}, }, -}) +}); diff --git a/package.json b/package.json index e17ac9b..59a9aa3 100644 --- a/package.json +++ b/package.json @@ -19,16 +19,18 @@ }, "dependencies": { "@databricks/sql": "https://github.com/observablehq/databricks-sql-nodejs", + "@sentry/node": "^7.33.0", "JSONStream": "^1.3.5", "ajv": "^8.11.0", + "lru-cache": "^6.0.0", "micro": "^9.3.4", "mssql": "^9.1.1", "mysql2": "^3.0.1", "open": "^6.3.0", - "pg": "^8.7.1", + "pg": "^8.7.3", "pg-query-stream": "^4.2.1", "serialize-error": "^4.1.0", - "snowflake-sdk": "^1.5.0", + "snowflake-sdk": "^1.6.11", "yargs": "^13.2.4" }, "devDependencies": { @@ -55,7 +57,7 @@ "test": "mocha", "test:local": "docker-compose -f docker-compose.yml -f docker-compose.local.yml up --build", "test:ci": "docker-compose -f docker-compose.yml up --build --exit-code-from test", - "test:db": "docker-compose -f docker-compose.yml -f docker-compose.local.yml up mssql" + "test:db": "docker-compose -f docker-compose.yml -f docker-compose.local.yml up mssql mysql" }, "author": "Observable, Inc.", "license": "ISC", diff --git a/test/mssql.test.js b/test/mssql.test.js index 3b12f4c..5447977 100644 --- a/test/mssql.test.js +++ b/test/mssql.test.js @@ -1,196 +1,194 @@ -import {expect} from "chai"; +import assert from "node:assert"; import MockReq from "mock-req"; import MockRes from "mock-res"; -import {MSSQL_CREDENTIALS} from "../.env.test.js"; -import mssql, {dataTypeSchema} from "../lib/mssql.js"; +import {MSSQL_TEST_CREDENTIALS} from "../.env.test.js"; +import mssql, {dataTypeSchema, pools} from "../lib/mssql.js"; + +const credentials = MSSQL_TEST_CREDENTIALS; +describe("SQL Server", () => { + after(() => pools.end()); -const credentials = MSSQL_CREDENTIALS; -describe("mssql", () => { describe("when checking", () => { - describe("with system admin user", () => { - it("should throw a too permissive error", () => { - const req = new MockReq({ - method: "POST", - url: "/check", - }); - const res = new MockRes(); - const index = mssql(credentials); - - return index(req, res).then( - () => Promise.reject("Expect call to throw!"), - (err) => { - expect(err.statusCode).to.equal(200); - expect( - err.message.includes("User has too permissive grants") - ).to.equal(true); - } - ); + it("should throw a too permissive error", async () => { + const req = new MockReq({ + method: "POST", + url: "/check", }); + const res = new MockRes(); + const index = mssql(credentials); + + try { + await index(req, res); + } catch (error) { + 
assert.equal( + /User has too permissive grants/.test(error.message), + true + ); + } }); }); - describe("when querying", () => { - it("should stream the results of simple query", () => { - return new Promise(async (resolve, reject) => { - const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer", - params: [], - }); + describe("when streaming", () => { + it("should stream the results of simple query", (done) => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer", + params: [], + }); - const res = new MockRes(onEnd); + const res = new MockRes(onEnd); - const index = mssql(credentials); - await index(req, res); + const index = mssql(credentials); + index(req, res); - function onEnd() { - const [schema, row] = this._getString().split("\n"); + function onEnd() { + const [schema, row] = this._getString().split("\n"); - expect(schema).to.equal( - JSON.stringify({ - type: "array", - items: { - type: "object", - properties: {CustomerID: {type: ["null", "integer"]}}, - }, - }) - ); - expect(row).to.equal(JSON.stringify({CustomerID: 12})); + assert.equal( + schema, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {CustomerID: {type: ["null", "integer"]}}, + }, + }) + ); + assert.equal(row, JSON.stringify({CustomerID: 12})); + done(); + } + }); - resolve(); - } + it("should handle parameter graciously", (done) => { + const testCustomerId = 3; + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", + params: [testCustomerId], }); - }); - it("should handle parameter graciously", () => { - return new Promise(async (resolve, reject) => { - const testCustomerId = 3; - const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", - params: [testCustomerId], - }); - const res = new MockRes(onEnd); + const res = new MockRes(onEnd); - const index = mssql(credentials); - await index(req, res); + const index = mssql(credentials); + index(req, res); - function onEnd() { - const [schema, row] = this._getString().split("\n"); + function onEnd() { + const [schema, row] = this._getString().split("\n"); - expect(schema).to.equal( - JSON.stringify({ - type: "array", - items: { - type: "object", - properties: {CustomerID: {type: ["null", "integer"]}}, - }, - }) - ); - expect(row).to.equal(JSON.stringify({CustomerID: testCustomerId})); + assert.equal( + schema, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {CustomerID: {type: ["null", "integer"]}}, + }, + }) + ); + assert.equal(row, JSON.stringify({CustomerID: testCustomerId})); - resolve(); - } - }); + done(); + } }); - it("should replace cell reference in the SQL query", () => { - return new Promise(async (resolve, reject) => { - const testCustomerId = 5; - const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", - params: [testCustomerId], - }); - const res = new MockRes(onEnd); + it("should replace cell reference in the SQL query", (done) => { + const testCustomerId = 5; + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT TOP 2 CustomerID FROM test.SalesLT.Customer WHERE CustomerID=@1", + params: [testCustomerId], + }); - const index = 
mssql(credentials); - await index(req, res); + const res = new MockRes(onEnd); - function onEnd() { - const [schema, row] = this._getString().split("\n"); + const index = mssql(credentials); + index(req, res); - expect(schema).to.equal( - JSON.stringify({ - type: "array", - items: { - type: "object", - properties: {CustomerID: {type: ["null", "integer"]}}, - }, - }) - ); - expect(row).to.equal(JSON.stringify({CustomerID: testCustomerId})); + function onEnd() { + const [schema, row] = this._getString().split("\n"); - resolve(); - } - }); + assert.equal( + schema, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {CustomerID: {type: ["null", "integer"]}}, + }, + }) + ); + assert.equal(row, JSON.stringify({CustomerID: testCustomerId})); + + done(); + } }); - it("should handle duplicated column names", () => { - return new Promise(async (resolve, reject) => { - const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: "SELECT 1 as _a1, 2 as _a1 FROM test.SalesLT.SalesOrderDetail", - params: [], - }); - const res = new MockRes(onEnd); + it("should handle duplicated column names", (done) => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT 1 as _a1, 2 as _a1 FROM test.SalesLT.SalesOrderDetail", + params: [], + }); - const index = mssql(credentials); - await index(req, res); + const res = new MockRes(onEnd); - function onEnd() { - const [schema, row] = this._getString().split("\n"); + const index = mssql(credentials); + index(req, res); - expect(row).to.equal( - JSON.stringify({ - _a1: 2, - }) - ); + function onEnd() { + const [, row] = this._getString().split("\n"); - resolve(); - } - }); + assert.equal( + row, + JSON.stringify({ + _a1: 2, + }) + ); + + done(); + } }); - it("should select the last value of any detected duplicated columns", () => { - return new Promise(async (resolve, reject) => { - const req = new MockReq({method: "POST", url: "/query-stream"}).end({ - sql: "SELECT TOP 1 ModifiedDate, ModifiedDate FROM test.SalesLT.SalesOrderDetail", - params: [], - }); - const res = new MockRes(onEnd); + it("should select the last value of any detected duplicated columns", (done) => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT TOP 1 ModifiedDate, ModifiedDate FROM test.SalesLT.SalesOrderDetail", + params: [], + }); - const index = mssql(credentials); - await index(req, res); + const res = new MockRes(onEnd); + + const index = mssql(credentials); + index(req, res); + + function onEnd() { + const [schema, row] = this._getString().split("\n"); - function onEnd() { - const [schema, row] = this._getString().split("\n"); - - expect(schema).to.equal( - JSON.stringify({ - type: "array", - items: { - type: "object", - properties: { - ModifiedDate: {type: ["null", "string"], date: true}, - }, + assert.equal( + schema, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: { + ModifiedDate: {type: ["null", "string"], date: true}, }, - }) - ); - expect(row).to.equal( - JSON.stringify({ - ModifiedDate: "2008-06-01T00:00:00.000Z", - }) - ); - - resolve(); - } - }); + }, + }) + ); + assert.equal( + row, + JSON.stringify({ + ModifiedDate: "2008-06-01T00:00:00.000Z", + }) + ); + + done(); + } }); }); describe("when check the dataTypeSchema", () => { it("should TYPES.Image.name to object", () => { const {type} = dataTypeSchema({type: "Image"}); - expect(type[0]).to.equal("null"); - expect(type[1]).to.equal("object"); + assert.equal(type[0], "null"); + 
assert.equal(type[1], "object");
     });
   });
 });
diff --git a/test/mysql.test.js b/test/mysql.test.js
new file mode 100644
index 0000000..e4b2d57
--- /dev/null
+++ b/test/mysql.test.js
@@ -0,0 +1,238 @@
+import assert from "node:assert";
+import MockReq from "mock-req";
+import MockRes from "mock-res";
+import logger from "../middleware/logger.js";
+import mysql, {pools} from "../lib/mysql.js";
+
+import {MYSQL_TEST_CREDENTIALS} from "../.env.test.js";
+const index = logger(mysql(MYSQL_TEST_CREDENTIALS));
+
+describe("MySQL", () => {
+  after(() => pools.end());
+
+  describe("when checking", () => {
+    it("should do MySQL credential check", async () => {
+      const req = new MockReq({method: "POST", url: "/check"});
+      const res = new MockRes();
+
+      try {
+        await index(req, res);
+      } catch (error) {
+        assert.equal(
+          /User has too permissive grants/.test(error.message),
+          true
+        );
+      }
+    });
+  });
+
+  describe("when querying", () => {
+    it("should resolve MySQL requests", async () => {
+      const req = new MockReq({method: "POST", url: "/query"}).end({
+        sql: `
+        select c1
+        from (select 'hello' as c1 union all select 2 as c1) as foo
+        where c1 = ?`,
+        params: ["hello"],
+      });
+      const res = new MockRes();
+      await index(req, res);
+
+      const {data, schema} = res._getJSON();
+
+      assert.deepEqual(data, [{c1: "hello"}]);
+      assert.deepEqual(schema, {
+        type: "array",
+        items: {
+          type: "object",
+          properties: {c1: {type: ["null", "string"]}},
+        },
+      });
+    });
+
+    it("should handle MySQL errors", async () => {
+      const req = new MockReq({method: "POST", url: "/query"}).end({
+        sql: "SELECT * FROM users",
+      });
+      const res = new MockRes();
+
+      try {
+        await index(req, res);
+      } catch (error) {
+        assert.equal(error.statusCode, 400);
+        assert.equal(error.message, "Table 'mysql.users' doesn't exist");
+      }
+    });
+
+    it("should handle MySQL empty query", async () => {
+      const req = new MockReq({method: "POST", url: "/query"}).end({
+        sql: "",
+      });
+      const res = new MockRes();
+
+      try {
+        await index(req, res);
+      } catch (error) {
+        assert.equal(error.statusCode, 400);
+        assert.equal(error.message, "Query was empty");
+      }
+    });
+
+    it("should handle MySQL empty results", async () => {
+      const req = new MockReq({method: "POST", url: "/query"}).end({
+        sql: `SELECT 1 AS c1 LIMIT 0`,
+      });
+      const res = new MockRes();
+
+      await index(req, res);
+
+      const {data, schema} = res._getJSON();
+      assert.deepEqual(data, []);
+      assert.deepEqual(schema, {
+        type: "array",
+        items: {
+          type: "object",
+          properties: {c1: {type: ["null", "integer"], long: true}},
+        },
+      });
+    });
+  });
+
+  describe("when streaming", () => {
+    it("should handle MySQL stream requests", async () => {
+      const req = new MockReq({method: "POST", url: "/query-stream"}).end({
+        sql: `
+        select c1
+        from (select 'hello' as c1 union all select 2 as c1) as foo
+        where c1 = ?`,
+        params: ["hello"],
+      });
+
+      const res = new MockRes();
+
+      await index(req, res);
+      const response = res._getString();
+
+      assert.equal(
+        response,
+        JSON.stringify({
+          type: "array",
+          items: {
+            type: "object",
+            properties: {c1: {type: ["null", "string"]}},
+          },
+        }) +
+          "\n" +
+          JSON.stringify({c1: "hello"}) +
+          "\n"
+      );
+    });
+
+    it("should handle MySQL stream errors", async () => {
+      const req = new MockReq({method: "POST", url: "/query-stream"}).end({
+        sql: "SELECT * FROM users",
+      });
+      const res = new MockRes();
+
+      try {
+        await index(req, res);
+      } catch (error) {
+        assert.equal(error.statusCode, 400);
+        assert.equal(error.message, "Table 'mysql.users' doesn't exist");
+      }
+    });
+
+    it("should handle MySQL stream empty query", async () => {
+      const req = new MockReq({method: "POST", url: "/query-stream"}).end({
+        sql: "",
+      });
+      const res = new MockRes();
+
+      try {
+        await index(req, res);
+      } catch (error) {
+        assert.equal(error.statusCode, 400);
+        assert.equal(error.message, "Query was empty");
+      }
+    });
+
+    it("should handle MySQL stream empty results", async () => {
+      const req = new MockReq({method: "POST", url: "/query-stream"}).end({
+        sql: "SELECT 1 AS c1 LIMIT 0",
+      });
+      const res = new MockRes();
+
+      await index(req, res);
+      const response = res._getString();
+
+      assert.equal(
+        response,
+        JSON.stringify({
+          type: "array",
+          items: {
+            type: "object",
+            properties: {c1: {type: ["null", "integer"], long: true}},
+          },
+        }) + "\n\n"
+      );
+    });
+  });
+
+  describe("when checking the dataTypeSchema", () => {
+    it("should provide the right MySQL types", async () => {
+      const req = new MockReq({method: "POST", url: "/query"}).end({
+        sql: "select 1 as c1, 3.14 as c2, 0xdeadbeef as c3, 'hello' as c4, DATE '2019-01-01' as c5, 1234567890 as c6",
+      });
+      const res = new MockRes();
+
+      await index(req, res);
+      const {data, schema} = res._getJSON();
+      assert.deepEqual(data, [
+        {
+          c1: 1,
+          c2: 3.14,
+          c3: {type: "Buffer", data: [222, 173, 190, 239]},
+          c4: "hello",
+          c5: "2019-01-01T00:00:00.000Z",
+          c6: 1234567890,
+        },
+      ]);
+      assert.deepEqual(schema, {
+        type: "array",
+        items: {
+          type: "object",
+          properties: {
+            c1: {type: ["null", "integer"], long: true},
+            c2: {type: ["null", "number"], newdecimal: true},
+            c3: {type: ["null", "object"], buffer: true},
+            c4: {type: ["null", "string"]},
+            c5: {type: ["null", "string"], date: true},
+            c6: {type: ["null", "string"], bigint: true},
+          },
+        },
+      });
+    });
+
+    it("should handle query not returning any fields", async () => {
+      const req = new MockReq({method: "POST", url: "/query-stream"}).end({
+        sql: "FLUSH PRIVILEGES",
+      });
+      const res = new MockRes();
+
+      await index(req, res);
+      const response = res._getString();
+      const [schema] = response.split("\n");
+
+      assert.deepEqual(
+        schema,
+        JSON.stringify({
+          type: "array",
+          items: {
+            type: "object",
+            properties: {},
+          },
+        })
+      );
+    });
+  });
+});
diff --git a/test/postgres.test.js b/test/postgres.test.js
new file mode 100644
index 0000000..e51e7c3
--- /dev/null
+++ b/test/postgres.test.js
@@ -0,0 +1,309 @@
+import assert from "node:assert";
+import MockReq from "mock-req";
+import MockRes from "mock-res";
+import logger from "../middleware/logger.js";
+import pg, {pools} from "../lib/postgres.js";
+
+import {POSTGRES_TEST_CREDENTIALS} from "../.env.test.js";
+const index = logger(pg(POSTGRES_TEST_CREDENTIALS));
+
+describe("PostgreSQL", () => {
+  after(() => pools.end());
+
+  describe("when checking", () => {
+    it("should perform Postgres credential check", async () => {
+      const req = new MockReq({method: "POST", url: "/check"});
+      const res = new MockRes();
+
+      try {
+        await index(req, res);
+      } catch (error) {
+        assert.equal(
+          error.message,
+          "User has too permissive privileges: DELETE, INSERT, REFERENCES, TRIGGER, TRUNCATE, UPDATE"
+        );
+      }
+    });
+  });
+
+  describe("when querying", () => {
+    it("should resolve Postgres requests", async () => {
+      const req = new MockReq({method: "POST", url: "/query"}).end({
+        sql: `
+        with foo as (
+          select 1 as c1 union all select 2 as c1
+        )
+        select c1
+        from foo
+        where c1 = $1`,
+        params: [1],
+      });
+      const res = new MockRes();
+
+      await index(req, res);
+
+      const {data, schema} = res._getJSON();
+      assert.deepEqual(data, 
[{c1: 1}]); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: {c1: {type: ["null", "integer"], int32: true}}, + }, + }); + }); + + it("should handle Postgres errors", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: "SELECT * FROM gibberish", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, 'relation "gibberish" does not exist'); + } + }); + + it("should handle Postgres empty query", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: "", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Bad request"); + } + }); + + it("should handle Postgres empty results", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: `SELECT 1 AS c1 LIMIT 0`, + }); + const res = new MockRes(); + + await index(req, res); + + const {data, schema} = res._getJSON(); + assert.deepEqual(data, []); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: {c1: {type: ["null", "integer"], int32: true}}, + }, + }); + }); + }); + + describe("when streaming", () => { + it("should handle Postgres stream requests", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: ` + with foo as ( + select 1 as c1 union all select 2 as c1 + ) + select c1 + from foo + where c1 = $1`, + params: [1], + }); + const res = new MockRes(); + + await index(req, res); + const response = res._getString(); + + assert.equal( + response, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {c1: {type: ["null", "integer"], int32: true}}, + }, + }) + + "\n" + + `{"c1":1}\n` + ); + }); + + it("should handle Postgres stream empty query", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Bad request"); + } + }); + + it("should handle Postgres stream errors", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT * FROM gibberish", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, 'relation "gibberish" does not exist'); + } + }); + + it("should handle Postgres stream empty query", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Bad request"); + } + }); + + it("should handle Postgres stream empty results", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT 1 AS c1 LIMIT 0", + }); + const res = new MockRes(); + + await index(req, res); + const response = res._getString(); + + assert.equal( + response, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {c1: {type: ["null", "integer"], int32: true}}, + }, + }) + "\n\n" + ); + }); + }); + + describe("when inferring the dataTypeSchema", () => { + it("should handle Postgres simple types", async () => { + const req = new 
MockReq({method: "POST", url: "/query"}).end({ + sql: `select + 1 as c1, + 3.14 as c2, + E'\\\\xDEADBEEF'::bytea as c3, + 'hello' as c4, + DATE '2019-01-01' as c5, + true as c6, + '{"a": 1}'::json as c7, + '{"b": 2}'::jsonb as c8 + `, + }); + const res = new MockRes(); + + await index(req, res); + const {data, schema} = res._getJSON(); + assert.deepEqual(data, [ + { + c1: 1, + c2: "3.14", + c3: {type: "Buffer", data: [222, 173, 190, 239]}, + c4: "hello", + c5: "2019-01-01T00:00:00.000Z", + c6: true, + c7: {a: 1}, + c8: {b: 2}, + }, + ]); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: { + c1: {type: ["null", "integer"], int32: true}, + c2: {type: ["null", "string"], numeric: true}, + c3: {type: ["null", "object"], buffer: true}, + c4: {type: ["null", "string"], text: true}, + c5: {type: ["null", "string"], date: true}, + c6: {type: ["null", "boolean"]}, + c7: {type: ["null", "object"]}, + c8: {type: ["null", "object"]}, + }, + }, + }); + }); + + it("should handle Postgres array types", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: `select + '{1, 2, 3}'::int[] as c1, + '{2.18, 3.14, 6.22}'::float[] as c2, + '{"\\\\xDEADBEEF", "\\\\xFACEFEED"}'::bytea[] as c3, + '{"hello", "goodbye"}'::varchar[] as c4, + '{"2019-01-01"}'::timestamp[] as c5, + '{true, false, true}'::bool[] as c6, + '{"{\\"a\\": 1}", "{\\"b\\": 2}"}'::json[] as c7 + `, + }); + const res = new MockRes(); + + await index(req, res); + const {data, schema} = res._getJSON(); + assert.deepEqual(data, [ + { + c1: [1, 2, 3], + c2: [2.18, 3.14, 6.22], + c3: [ + {type: "Buffer", data: [222, 173, 190, 239]}, + {type: "Buffer", data: [250, 206, 254, 237]}, + ], + c4: ["hello", "goodbye"], + c5: ["2019-01-01T00:00:00.000Z"], + c6: [true, false, true], + c7: [{a: 1}, {b: 2}], + }, + ]); + assert.deepEqual(schema, { + type: "array", + items: { + type: "object", + properties: { + c1: { + type: ["null", "array"], + items: {type: ["null", "integer"], int32: true}, + }, + c2: { + type: ["null", "array"], + items: {type: ["null", "number"], float64: true}, + }, + c3: { + type: ["null", "array"], + items: {type: ["null", "object"], buffer: true}, + }, + c4: { + type: ["null", "array"], + items: {type: ["null", "string"], varchar: true}, + }, + c5: { + type: ["null", "array"], + items: {type: ["null", "string"], date: true}, + }, + c6: {type: ["null", "array"], items: {type: ["null", "boolean"]}}, + c7: {type: ["null", "array"], items: {type: ["null", "object"]}}, + }, + }, + }); + }); + }); +}); diff --git a/test/snowflake.test.js b/test/snowflake.test.js new file mode 100644 index 0000000..4d64f48 --- /dev/null +++ b/test/snowflake.test.js @@ -0,0 +1,344 @@ +import assert from "node:assert"; +import MockReq from "mock-req"; +import MockRes from "mock-res"; +import snowflake, {pools} from "../lib/snowflake.js"; +import logger from "../middleware/logger.js"; + +import {SNOWFLAKE_TEST_CREDENTIALS} from "../.env.test.js"; +const index = logger(snowflake(SNOWFLAKE_TEST_CREDENTIALS)); + +describe("Snowflake", function () { + this.timeout(50000); + after(() => pools.end()); + + describe("when checking", () => { + it("should handle Snowflake credential check", async () => { + const req = new MockReq({method: "POST", url: "/check"}); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.match(error.message, /^User has too permissive privileges: /); + } + }); + }); + + describe("when querying", () => { + it("should handle Snowflake 
requests", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: ` + with foo as ( + select 1 as c1 union all select 2 as c1 + ) + select c1 + from foo + where c1 = ?`, + params: [1], + }); + const res = new MockRes(); + + await index(req, res); + + const {data, schema} = res._getJSON(); + assert.deepEqual(data, [{C1: 1}]); + assert.deepEqual(schema, { + type: "array", + items: {type: "object", properties: {C1: {type: ["null", "integer"]}}}, + }); + }); + + it("should handle Snowflake errors", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: "SELECT * FROM gibberish", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal( + error.message, + "SQL compilation error:\nObject 'GIBBERISH' does not exist or not authorized." + ); + } + }); + + it("should handle Snowflake empty query", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: "", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal(error.message, "Bad request"); + } + }); + + it("should handle Snowflake empty results", async () => { + const req = new MockReq({method: "POST", url: "/query"}).end({ + sql: `SELECT 1 AS c1 LIMIT 0`, + }); + const res = new MockRes(); + + await index(req, res); + + const {data, schema} = res._getJSON(); + assert.deepEqual(data, []); + assert.deepEqual(schema, { + type: "array", + items: {type: "object", properties: {C1: {type: ["null", "integer"]}}}, + }); + }); + }); + + describe("when streaming", () => { + it("should handle Snowflake stream requests", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: ` + with foo as ( + select 1 as c1 union all select 2 as c1 + ) + select c1 + from foo + where c1 = ?`, + params: [1], + }); + const res = new MockRes(); + + await index(req, res); + const response = res._getString(); + + assert.equal( + response, + JSON.stringify({ + type: "array", + items: { + type: "object", + properties: {C1: {type: ["null", "integer"]}}, + }, + }) + + "\n" + + JSON.stringify({C1: 1}) + + "\n" + ); + }); + + it("should handle Snowflake stream errors", async () => { + const req = new MockReq({method: "POST", url: "/query-stream"}).end({ + sql: "SELECT * FROM users", + }); + const res = new MockRes(); + + try { + await index(req, res); + } catch (error) { + assert.equal(error.statusCode, 400); + assert.equal( + error.message, + "SQL compilation error:\nObject 'USERS' does not exist or not authorized." 
+        );
+      }
+    });
+
+    it("should handle Snowflake stream empty query", async () => {
+      const req = new MockReq({method: "POST", url: "/query-stream"}).end({
+        sql: "",
+      });
+      const res = new MockRes();
+
+      try {
+        await index(req, res);
+      } catch (error) {
+        assert.equal(error.statusCode, 400);
+        assert.equal(error.message, "Bad request");
+      }
+    });
+
+    it("should handle Snowflake stream empty results", async () => {
+      const req = new MockReq({method: "POST", url: "/query-stream"}).end({
+        sql: "SELECT 1 AS c1 LIMIT 0",
+      });
+      const res = new MockRes();
+
+      await index(req, res);
+      const response = res._getString();
+
+      assert.equal(
+        response,
+        JSON.stringify({
+          type: "array",
+          items: {
+            type: "object",
+            properties: {C1: {type: ["null", "integer"]}},
+          },
+        }) + "\n\n"
+      );
+    });
+  });
+
+  describe("when inferring the dataTypeSchema", () => {
+    it("should handle Snowflake simple types", async () => {
+      const req = new MockReq({method: "POST", url: "/query"}).end({
+        sql: `select
+          1 as c1,
+          3.14 as c2,
+          to_binary('DEADBEEF') as c3,
+          'hello' as c4,
+          TIMESTAMP '2019-01-01' as c5,
+          true as c6,
+          to_object(parse_json('{"a": 1}')) as c7
+        `,
+      });
+      const res = new MockRes();
+
+      await index(req, res);
+      const {data, schema} = res._getJSON();
+      assert.deepEqual(data, [
+        {
+          C1: 1,
+          C2: 3.14,
+          C3: {type: "Buffer", data: [222, 173, 190, 239]},
+          C4: "hello",
+          C5: "2019-01-01T00:00:00.000Z",
+          C6: true,
+          C7: {a: 1},
+        },
+      ]);
+      assert.deepEqual(schema, {
+        type: "array",
+        items: {
+          type: "object",
+          properties: {
+            C1: {type: ["null", "integer"]},
+            C2: {type: ["null", "number"]},
+            C3: {type: ["null", "object"], buffer: true},
+            C4: {type: ["null", "string"]},
+            C5: {type: ["null", "string"], date: true},
+            C6: {type: ["null", "boolean"]},
+            C7: {type: ["null", "object"]},
+          },
+        },
+      });
+    });
+
+    it("should handle Snowflake date, time, time zones", async () => {
+      const req = new MockReq({method: "POST", url: "/query"}).end({
+        sql: `select
+          TO_DATE('2020-01-01') as date,
+          TO_TIMESTAMP_NTZ('2020-01-01 01:23:45') as datetime, -- timestamp_ntz
+          TO_TIME('01:23:45') as time,
+          TO_TIMESTAMP('2020-01-01 01:23:45') as timestamp, -- timestamp_ntz
+          TO_TIMESTAMP_LTZ('2020-01-01 01:23:45') as timestamp_ltz,
+          TO_TIMESTAMP_NTZ('2020-01-01 01:23:45') as timestamp_ntz,
+          TO_TIMESTAMP_TZ('2020-01-01 01:23:45') as timestamp_tz,
+          TO_DATE(null) as null_date
+        `,
+      });
+      const res = new MockRes();
+
+      await index(req, res);
+      const {data, schema} = res._getJSON();
+
+      assert.deepEqual(data, [
+        {
+          DATE: "2020-01-01T00:00:00.000Z",
+          DATETIME: "2020-01-01T01:23:45.000Z",
+          TIME: "01:23:45",
+          TIMESTAMP: "2020-01-01T01:23:45.000Z",
+          TIMESTAMP_LTZ: "2020-01-01T09:23:45.000Z",
+          TIMESTAMP_NTZ: "2020-01-01T01:23:45.000Z",
+          TIMESTAMP_TZ: "2020-01-01T09:23:45.000Z",
+          NULL_DATE: null,
+        },
+      ]);
+      assert.deepEqual(schema, {
+        type: "array",
+        items: {
+          type: "object",
+          properties: {
+            DATE: {type: ["null", "string"], date: true},
+            DATETIME: {type: ["null", "string"], date: true},
+            TIME: {type: ["null", "string"]},
+            TIMESTAMP: {type: ["null", "string"], date: true},
+            TIMESTAMP_LTZ: {type: ["null", "string"], date: true},
+            TIMESTAMP_NTZ: {type: ["null", "string"], date: true},
+            TIMESTAMP_TZ: {type: ["null", "string"], date: true},
+            NULL_DATE: {type: ["null", "string"], date: true},
+          },
+        },
+      });
+    });
+  });
+
+  describe("when connecting to Snowflake", () => {
+    it("shouldn't attempt concurrent connections", async () => {
+      // Ensure a cold connection state by evicting any cached pool for these
+      // credentials, so the two simultaneous requests below both need a connection.
+      pools.del(SNOWFLAKE_TEST_CREDENTIALS);
+
+      const req1 = new MockReq({method: "POST", url: "/query"}).end({
+        sql: "select 1",
+      });
+      const res1 = new MockRes();
+      const req2 = new MockReq({method: "POST", url: "/query"}).end({
+        sql: "select 2",
+      });
+      const res2 = new MockRes();
+
+      await Promise.all([index(req1, res1), index(req2, res2)]);
+
+      const {data: data1, schema: schema1} = res1._getJSON();
+      assert.deepEqual(data1, [{1: 1}]);
+      assert.deepEqual(schema1, {
+        type: "array",
+        items: {
+          type: "object",
+          properties: {
+            1: {type: ["null", "integer"]},
+          },
+        },
+      });
+
+      const {data: data2, schema: schema2} = res2._getJSON();
+      assert.deepEqual(data2, [{2: 2}]);
+      assert.deepEqual(schema2, {
+        type: "array",
+        items: {
+          type: "object",
+          properties: {
+            2: {type: ["null", "integer"]},
+          },
+        },
+      });
+    });
+
+    it("should recreate connection on connect error (slow)", async () => {
+      const badCredentials = snowflake("snowflake://hi@hi/hi");
+      const req = new MockReq({method: "POST", url: "/check"});
+      const res = new MockRes();
+
+      try {
+        await badCredentials(req, res);
+      } catch (error) {
+        assert.equal(
+          error.message,
+          "Request to Snowflake failed.",
+          "First failure"
+        );
+      }
+      try {
+        await badCredentials(req, res);
+      } catch (error) {
+        assert.equal(
+          error.message,
+          "Request to Snowflake failed.",
+          "Second failure is identical"
+        );
+      }
+    });
+  });
+});
diff --git a/yarn.lock b/yarn.lock
index a178433..287d777 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -195,7 +195,7 @@
     jsonwebtoken "^8.5.1"
     uuid "^8.3.0"
 
-"@azure/storage-blob@^12.5.0":
+"@azure/storage-blob@^12.11.0":
   version "12.12.0"
   resolved "https://registry.yarnpkg.com/@azure/storage-blob/-/storage-blob-12.12.0.tgz#25e277c885692d5adcd8c2a949789b2837a74c59"
   integrity sha512-o/Mf6lkyYG/eBW4/hXB9864RxVNmAkcKHjsGR6Inlp5hupa3exjSyH2KjO3tLO//YGA+tS+17hM2bxRl9Sn16g==
@@ -1280,6 +1280,41 @@
     picocolors "^1.0.0"
     tslib "^2.6.0"
 
+"@sentry/core@7.33.0":
+  version "7.33.0"
+  resolved "https://registry.yarnpkg.com/@sentry/core/-/core-7.33.0.tgz#7cba1670c041fae02794729b74e9fb9d1f519755"
+  integrity sha512-mrSgUnXjxHVi0cVea1lv7gC/Y66ya2a3atCHaPEij/+l+3APg5d0Ixt1zMx5YllMiZKf6wpxlZ0uwXcqdAAw+w==
+  dependencies:
+    "@sentry/types" "7.33.0"
+    "@sentry/utils" "7.33.0"
+    tslib "^1.9.3"
+
+"@sentry/node@^7.33.0":
+  version "7.33.0"
+  resolved "https://registry.yarnpkg.com/@sentry/node/-/node-7.33.0.tgz#d5c7c7094543dd9819422dfc69952ed40416bfab"
+  integrity sha512-isQVF9LLSG4EZLHiSJ3chgK6f3ZBdGxm8fX6YGm8HWz07CubJddes3yBPLPRNXrRLd7X3SK8pPcK5oc3LIKqAw==
+  dependencies:
+    "@sentry/core" "7.33.0"
+    "@sentry/types" "7.33.0"
+    "@sentry/utils" "7.33.0"
+    cookie "^0.4.1"
+    https-proxy-agent "^5.0.0"
+    lru_map "^0.3.3"
+    tslib "^1.9.3"
+
+"@sentry/types@7.33.0":
+  version "7.33.0"
+  resolved "https://registry.yarnpkg.com/@sentry/types/-/types-7.33.0.tgz#7d4893a783360a868382e5194b50dbf034ba23c0"
+  integrity sha512-5kkmYjtBWSbPxfYGiXdZFPS6xpFBNlXvDqeX4NpCFXz6/LiEDn6tZ61kuCSFb8MZlyqyCX5WsP3aiI2FJfpGIA==
+
+"@sentry/utils@7.33.0":
+  version "7.33.0"
+  resolved "https://registry.yarnpkg.com/@sentry/utils/-/utils-7.33.0.tgz#e6910139328b49b9cc21186521bdb10390dfd915"
+  integrity sha512-msp02GV1gOfaN5FjKjWxI00rtbYLXEE5cTGldhs/Dt9KI63dDk1nwPDkSLhg6joqRItAq0thlBh6un717HdWbg==
+  dependencies:
+    "@sentry/types" "7.33.0"
+    tslib "^1.9.3"
+
 "@sideway/address@^4.1.3":
   version "4.1.4"
   resolved "https://registry.yarnpkg.com/@sideway/address/-/address-4.1.4.tgz#03dccebc6ea47fdc226f7d3d1ad512955d4783f0"
@@ -1308,10 +1343,10 @@
     async "^3.2.1"
     simple-lru-cache "^0.0.2"
 
-"@tediousjs/connection-string@^0.4.1": - version "0.4.1" - resolved "https://registry.yarnpkg.com/@tediousjs/connection-string/-/connection-string-0.4.1.tgz#6c92dcde392ff2401d9e9de628c1cd8f4b2a735f" - integrity sha512-gr1mFN7KMOn+VviQKcrt+z1/7ttn7s9NSMFFyg5GrJylNH6JGrDDNRm7C5vE4PNwhW6hYT67QRUO44Ns2LQijg== +"@tediousjs/connection-string@^0.5.0": + version "0.5.0" + resolved "https://registry.yarnpkg.com/@tediousjs/connection-string/-/connection-string-0.5.0.tgz#9b3d858c040aac6bdf5584bf45370cef5b6522b4" + integrity sha512-7qSgZbincDDDFyRweCIEvZULFAw5iz/DeunhvuxpL31nfntX3P4Yd4HkHBRg9H8CdqY1e5WFN1PZIz/REL9MVQ== "@tootallnate/once@1": version "1.1.2" @@ -2066,7 +2101,12 @@ combined-stream@^1.0.8: dependencies: delayed-stream "~1.0.0" -commander@^9.3.0, commander@^9.4.0: +commander@^11.0.0: + version "11.1.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-11.1.0.tgz#62fdce76006a68e5c1ab3314dc92e800eb83d906" + integrity sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ== + +commander@^9.3.0: version "9.4.1" resolved "https://registry.yarnpkg.com/commander/-/commander-9.4.1.tgz#d1dd8f2ce6faf93147295c0df13c7c21141cfbdd" integrity sha512-5EEkTNyHNGFPD2H+c/dXXfQZYa/scCKasxWcXJaWnNJ99pnQN9Vnmqow+p+PlFPE63Q6mThaZws1T+HxfpgtPw== @@ -2108,6 +2148,11 @@ convert-source-map@^1.7.0: resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== +cookie@^0.4.1: + version "0.4.2" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.2.tgz#0e41f24de5ecf317947c82fc789e06a884824432" + integrity sha512-aSWTXFzaKWkvHO1Ny/s+ePFpvKsPnjc551iI41v3ny/ow6tBG5Vd+FuqGNhh1LxOmVzOlGUriIlOaokOvhaStA== + copy-descriptor@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" @@ -2493,7 +2538,12 @@ eslint-scope@^7.2.2: esrecurse "^4.3.0" estraverse "^5.2.0" -eslint-visitor-keys@^3.3.0, eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4.3: +eslint-visitor-keys@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.3.0.tgz#f6480fa6b1f30efe2d1968aa8ac745b862469826" + integrity sha512-mQ+suqKJVyeuwGYHAdjMFqjCyfl8+Ldnxuyp3ldiMBFKkvytrXUZWaiPCEav8qDHKty44bD+qV1IP4T+w+xXRA== + +eslint-visitor-keys@^3.4.1, eslint-visitor-keys@^3.4.3: version "3.4.3" resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz#0cd72fe8550e3c2eae156a96a4dddcd1c8ac5800" integrity sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag== @@ -2698,9 +2748,9 @@ fast-deep-equal@^3.1.1, fast-deep-equal@^3.1.3: integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== fast-diff@^1.1.2: - version "1.3.0" - resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.3.0.tgz#ece407fa550a64d638536cd727e129c61616e0f0" - integrity sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw== + version "1.2.0" + resolved "https://registry.yarnpkg.com/fast-diff/-/fast-diff-1.2.0.tgz#73ee11982d86caaf7959828d519cfe927fac5f03" + integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w== fast-glob@^3.3.0: version "3.3.1" @@ -2794,12 +2844,11 @@ find-up@^3.0.0: locate-path "^3.0.0" 
 flat-cache@^3.0.4:
-  version "3.1.1"
-  resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.1.1.tgz#a02a15fdec25a8f844ff7cc658f03dd99eb4609b"
-  integrity sha512-/qM2b3LUIaIgviBQovTLvijfyOQXPtSRnRK26ksj2J7rzPIecePUIpJsZ4T02Qg+xiAEKIs5K8dsHEd+VaKa/Q==
+  version "3.0.4"
+  resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-3.0.4.tgz#61b0338302b2fe9f957dcc32fc2a87f1c3048b11"
+  integrity sha512-dm9s5Pw7Jc0GvMYbshN6zchCA9RgQlzzEZX3vylR9IqFfS8XciblUXOKfW6SiuJ0e13eDYZoZV5wdrev7P3Nwg==
   dependencies:
-    flatted "^3.2.9"
-    keyv "^4.5.3"
+    flatted "^3.1.0"
     rimraf "^3.0.2"
 
 flat@^5.0.2:
@@ -2807,10 +2856,10 @@
   resolved "https://registry.yarnpkg.com/flat/-/flat-5.0.2.tgz#8ca6fe332069ffa9d324c327198c598259ceb241"
   integrity sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==
 
-flatted@^3.2.9:
-  version "3.2.9"
-  resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.9.tgz#7eb4c67ca1ba34232ca9d2d93e9886e611ad7daf"
-  integrity sha512-36yxDn5H7OFZQla0/jFJmbIKTdZAQHngCedGxiMmpNfEZM0sdEeT+WczLQrjK6D7o2aiyLYDnkw0R3JK0Qv1RQ==
+flatted@^3.1.0:
+  version "3.2.7"
+  resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787"
+  integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==
 
 fn.name@1.x.x:
   version "1.1.0"
@@ -3058,9 +3107,9 @@ globals@^11.1.0:
   integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==
 
 globals@^13.19.0:
-  version "13.23.0"
-  resolved "https://registry.yarnpkg.com/globals/-/globals-13.23.0.tgz#ef31673c926a0976e1f61dab4dca57e0c0a8af02"
-  integrity sha512-XAmF0RjlrjY23MA51q3HltdlGxUpXPvg0GioKiD9X6HD28iMjo2dKC8Vqwm7lne4GNr78+RHTfliktR6ZH09wA==
+  version "13.19.0"
+  resolved "https://registry.yarnpkg.com/globals/-/globals-13.19.0.tgz#7a42de8e6ad4f7242fbcca27ea5b23aca367b5c8"
+  integrity sha512-dkQ957uSRWHw7CFXLUtUHQI3g3aWApYhfNR2O6jn/907riyTYKVBmxYVROkBcY614FSSeSJh7Xm7SrUWCxvJMQ==
   dependencies:
     type-fest "^0.20.2"
 
@@ -3765,11 +3814,6 @@ jsesc@~0.5.0:
   resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d"
   integrity sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==
 
-json-buffer@3.0.1:
-  version "3.0.1"
-  resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13"
-  integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==
-
 json-schema-traverse@^0.4.1:
   version "0.4.1"
   resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660"
@@ -3852,13 +3896,6 @@ jws@^4.0.0:
     jwa "^2.0.0"
     safe-buffer "^5.0.1"
 
-keyv@^4.5.3:
-  version "4.5.4"
-  resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.4.tgz#a879a99e29452f942439f2a405e3af8b31d4de93"
-  integrity sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==
-  dependencies:
-    json-buffer "3.0.1"
-
 kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0:
   version "3.2.2"
   resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64"
@@ -4032,11 +4069,23 @@ lru-cache@^5.1.1:
   dependencies:
     yallist "^3.0.2"
 
+lru-cache@^6.0.0:
+  version "6.0.0"
+  resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94"
+  integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==
+  dependencies:
+    yallist "^4.0.0"
+
 lru-cache@^7.14.1:
   version "7.14.1"
   resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-7.14.1.tgz#8da8d2f5f59827edb388e63e459ac23d6d408fea"
   integrity sha512-ysxwsnTKdAx96aTRdhDOCQfDgbHnt8SK0KY8SEjO0wHinhWOFTESbjVCMPbU1uGXg/ch4lifqx0wfjOawU2+WA==
 
+lru_map@^0.3.3:
+  version "0.3.3"
+  resolved "https://registry.yarnpkg.com/lru_map/-/lru_map-0.3.3.tgz#b5c8351b9464cbd750335a79650a0ec0e56118dd"
+  integrity sha512-Pn9cox5CsMYngeDbmChANltQl+5pi6XmTrraMSzhPmMBbmgcxmqWry0U3PGapCU1yB4/LqCcom7qhHZiF/jGfQ==
+
 make-dir@^1.0.0:
   version "1.3.0"
   resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.3.0.tgz#79c1033b80515bd6d24ec9933e860ca75ee27f0c"
@@ -4233,7 +4282,7 @@ moment-timezone@^0.5.15:
   dependencies:
     moment ">= 2.9.0"
 
-"moment@>= 2.9.0", moment@^2.29.3:
+"moment@>= 2.9.0", moment@^2.29.4:
   version "2.29.4"
   resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108"
   integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w==
@@ -4254,12 +4303,12 @@ ms@2.1.3, ms@^2.0.0, ms@^2.1.1:
   integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
 
 mssql@^9.1.1:
-  version "9.1.1"
-  resolved "https://registry.yarnpkg.com/mssql/-/mssql-9.1.1.tgz#fcec595834db1ff6e446710794f54082fc74ef02"
-  integrity sha512-m0yTx9xzUtTvJpWJHqknUXUDPRnJXZYOOFNygnNIXn1PBkLsC/rkXQdquObd+M0ZPlBhGC00Jg28zG0wCl7VWg==
+  version "9.3.2"
+  resolved "https://registry.yarnpkg.com/mssql/-/mssql-9.3.2.tgz#c74777e1f1691543649cda16462dee61db5834b4"
+  integrity sha512-XI5GOGCCSSNL8K2SSXg9HMyugJoCjLmrhiZfcZrJrJ2r3NfTcnz3Cegeg4m+xPkNVd0o3owsSL/NsDCFYfjOlw==
   dependencies:
-    "@tediousjs/connection-string" "^0.4.1"
-    commander "^9.4.0"
+    "@tediousjs/connection-string" "^0.5.0"
+    commander "^11.0.0"
     debug "^4.3.3"
     rfdc "^1.3.0"
     tarn "^3.0.2"
@@ -4758,7 +4807,7 @@ pg-types@^2.1.0:
     postgres-date "~1.0.4"
    postgres-interval "^1.1.0"
 
-pg@^8.7.1:
+pg@^8.7.3:
   version "8.8.0"
   resolved "https://registry.yarnpkg.com/pg/-/pg-8.8.0.tgz#a77f41f9d9ede7009abfca54667c775a240da686"
   integrity sha512-UXYN0ziKj+AeNNP7VDMwrehpACThH7LUl/p8TDFpEUuSejCUIwGSfxpHsPvtM6/WXFy6SU4E5RG4IJV/TZAGjw==
@@ -5421,12 +5470,12 @@ snapdragon@^0.8.1:
     source-map-resolve "^0.5.0"
     use "^3.1.0"
 
-snowflake-sdk@^1.5.0:
-  version "1.6.14"
-  resolved "https://registry.yarnpkg.com/snowflake-sdk/-/snowflake-sdk-1.6.14.tgz#7e1fd52df738c3cdb74277b7b2b35a40394c1941"
-  integrity sha512-sKg17Yz1/aydKxlA4unlprH+uw9ZsvRezdUmamLjNlvsXQsw+pok4PoMeCKtWs2OSVFnX0VO3eSacCPglQrAQA==
+snowflake-sdk@^1.6.11:
+  version "1.6.17"
+  resolved "https://registry.yarnpkg.com/snowflake-sdk/-/snowflake-sdk-1.6.17.tgz#dc002b44b6545d6a036a2ce3c1f34dd4f4353c42"
+  integrity sha512-ui2zRPbXIK3qaW8zuQCy+mHTieN6U5yeoJmKkyhdnh/8qKOqfR9Csj5nv+bXm8Y64kG24sybtIfCzf8C9abUMQ==
   dependencies:
-    "@azure/storage-blob" "^12.5.0"
+    "@azure/storage-blob" "^12.11.0"
     "@techteamer/ocsp" "1.0.0"
     agent-base "^6.0.2"
     asn1.js-rfc2560 "^5.0.0"
@@ -5446,7 +5495,7 @@ snowflake-sdk@^1.5.0:
     mime-types "^2.1.29"
     mkdirp "^1.0.3"
     mock-require "^3.0.3"
-    moment "^2.29.3"
+    moment "^2.29.4"
     moment-timezone "^0.5.15"
     open "^7.3.1"
     python-struct "^1.1.3"
@@ -5874,6 +5923,11 @@ triple-beam@^1.3.0:
   resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.3.0.tgz#a595214c7298db8339eeeee083e4d10bd8cb8dd9"
   integrity sha512-XrHUvV5HpdLmIj4uVMxHggLbFSZYIn7HEWsqePZcI50pco+MPqJ50wMGY794X7AOOhxOBAjbkqfAbEe/QMp2Lw==
 
+tslib@^1.9.3:
+  version "1.14.1"
+  resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
+  integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==
+
 tslib@^2.0.1, tslib@^2.1.0, tslib@^2.2.0:
   version "2.4.0"
   resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3"
@@ -6374,6 +6428,11 @@ yallist@^3.0.2:
   resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd"
   integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==
 
+yallist@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72"
+  integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==
+
 yargs-parser@20.2.4:
   version "20.2.4"
   resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.4.tgz#b42890f14566796f85ae8e3a25290d205f154a54"