From 80db2de0d1e4d7478e0a7bb950c9b63fefd2840c Mon Sep 17 00:00:00 2001 From: Yi-Lin Juang Date: Fri, 2 May 2025 21:52:59 +0800 Subject: [PATCH 01/42] Prioritize current schema for pg type generation --- src/server/templates/typescript.ts | 116 ++++++++++++++++++++++------- 1 file changed, 90 insertions(+), 26 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 460887b5..6e3fc750 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -84,7 +84,7 @@ export type Database = { ${[ ...columnsByTableId[table.id].map( (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType(column.format, { + `${JSON.stringify(column.name)}: ${pgTypeToTsType(schema, column.format, { types, schemas, tables, @@ -97,7 +97,12 @@ export type Database = { const type = types.find(({ id }) => id === fn.return_type_id) let tsType = 'unknown' if (type) { - tsType = pgTypeToTsType(type.name, { types, schemas, tables, views }) + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) } return `${JSON.stringify(fn.name)}: ${tsType} | null` }), @@ -121,7 +126,12 @@ export type Database = { output += ':' } - output += pgTypeToTsType(column.format, { types, schemas, tables, views }) + output += pgTypeToTsType(schema, column.format, { + types, + schemas, + tables, + views, + }) if (column.is_nullable) { output += '| null' @@ -138,7 +148,12 @@ export type Database = { return `${output}?: never` } - output += `?: ${pgTypeToTsType(column.format, { types, schemas, tables, views })}` + output += `?: ${pgTypeToTsType(schema, column.format, { + types, + schemas, + tables, + views, + })}` if (column.is_nullable) { output += '| null' @@ -189,7 +204,7 @@ export type Database = { Row: { ${columnsByTableId[view.id].map( (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType(column.format, { + `${JSON.stringify(column.name)}: ${pgTypeToTsType(schema, column.format, { types, schemas, tables, @@ -207,7 +222,12 @@ export type Database = { return `${output}?: never` } - output += `?: ${pgTypeToTsType(column.format, { types, schemas, tables, views })} | null` + output += `?: ${pgTypeToTsType(schema, column.format, { + types, + schemas, + tables, + views, + })} | null` return output })} @@ -220,7 +240,12 @@ export type Database = { return `${output}?: never` } - output += `?: ${pgTypeToTsType(column.format, { types, schemas, tables, views })} | null` + output += `?: ${pgTypeToTsType(schema, column.format, { + types, + schemas, + tables, + views, + })} | null` return output })} @@ -290,7 +315,12 @@ export type Database = { const type = types.find(({ id }) => id === type_id) let tsType = 'unknown' if (type) { - tsType = pgTypeToTsType(type.name, { types, schemas, tables, views }) + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) } return { name, type: tsType, has_default } }) @@ -307,7 +337,12 @@ export type Database = { const type = types.find(({ id }) => id === type_id) let tsType = 'unknown' if (type) { - tsType = pgTypeToTsType(type.name, { types, schemas, tables, views }) + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) } return { name, type: tsType } }) @@ -327,12 +362,16 @@ export type Database = { return `{ ${columnsByTableId[relation.id].map( (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType(column.format, { - types, - schemas, - tables, - views, - })} ${column.is_nullable ? 
'| null' : ''}` + `${JSON.stringify(column.name)}: ${pgTypeToTsType( + schema, + column.format, + { + types, + schemas, + tables, + views, + } + )} ${column.is_nullable ? '| null' : ''}` )} }` } @@ -340,7 +379,12 @@ export type Database = { // Case 3: returns base/array/composite/enum type. const type = types.find(({ id }) => id === fns[0].return_type_id) if (type) { - return pgTypeToTsType(type.name, { types, schemas, tables, views }) + return pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) } return 'unknown' @@ -372,7 +416,12 @@ export type Database = { const type = types.find(({ id }) => id === type_id) let tsType = 'unknown' if (type) { - tsType = `${pgTypeToTsType(type.name, { types, schemas, tables, views })} | null` + tsType = `${pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + })} | null` } return `${JSON.stringify(name)}: ${tsType}` })} @@ -519,6 +568,7 @@ export const Constants = { // TODO: Make this more robust. Currently doesn't handle range types - returns them as unknown. const pgTypeToTsType = ( + schema: PostgresSchema, pgType: string, { types, @@ -560,10 +610,16 @@ const pgTypeToTsType = ( } else if (pgType === 'record') { return 'Record' } else if (pgType.startsWith('_')) { - return `(${pgTypeToTsType(pgType.substring(1), { types, schemas, tables, views })})[]` + return `(${pgTypeToTsType(schema, pgType.substring(1), { + types, + schemas, + tables, + views, + })})[]` } else { - const enumType = types.find((type) => type.name === pgType && type.enums.length > 0) - if (enumType) { + const enumTypes = types.filter((type) => type.name === pgType && type.enums.length > 0) + if (enumTypes.length > 0) { + const enumType = enumTypes.find((type) => type.schema === schema.name) || enumTypes[0] if (schemas.some(({ name }) => name === enumType.schema)) { return `Database[${JSON.stringify(enumType.schema)}]['Enums'][${JSON.stringify( enumType.name @@ -572,8 +628,12 @@ const pgTypeToTsType = ( return enumType.enums.map((variant) => JSON.stringify(variant)).join('|') } - const compositeType = types.find((type) => type.name === pgType && type.attributes.length > 0) - if (compositeType) { + const compositeTypes = types.filter( + (type) => type.name === pgType && type.attributes.length > 0 + ) + if (compositeTypes.length > 0) { + const compositeType = + compositeTypes.find((type) => type.schema === schema.name) || compositeTypes[0] if (schemas.some(({ name }) => name === compositeType.schema)) { return `Database[${JSON.stringify( compositeType.schema @@ -582,8 +642,10 @@ const pgTypeToTsType = ( return 'unknown' } - const tableRowType = tables.find((table) => table.name === pgType) - if (tableRowType) { + const tableRowTypes = tables.filter((table) => table.name === pgType) + if (tableRowTypes.length > 0) { + const tableRowType = + tableRowTypes.find((type) => type.schema === schema.name) || tableRowTypes[0] if (schemas.some(({ name }) => name === tableRowType.schema)) { return `Database[${JSON.stringify(tableRowType.schema)}]['Tables'][${JSON.stringify( tableRowType.name @@ -592,8 +654,10 @@ const pgTypeToTsType = ( return 'unknown' } - const viewRowType = views.find((view) => view.name === pgType) - if (viewRowType) { + const viewRowTypes = views.filter((view) => view.name === pgType) + if (viewRowTypes.length > 0) { + const viewRowType = + viewRowTypes.find((type) => type.schema === schema.name) || viewRowTypes[0] if (schemas.some(({ name }) => name === viewRowType.schema)) { return 
`Database[${JSON.stringify(viewRowType.schema)}]['Views'][${JSON.stringify( viewRowType.name From fbdc28c9e8c1f71599931335107d8b61a2733856 Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 13 May 2025 15:13:43 +0200 Subject: [PATCH 02/42] chore: ignore sentryclirc --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 8d050113..7a26dfc4 100644 --- a/.gitignore +++ b/.gitignore @@ -73,6 +73,9 @@ typings/ .env .env.test +# sentry cli config +.sentryclirc + # parcel-bundler cache (https://parceljs.org/) .cache From f58f5071e8ba2f47e72a23e15434ff490bad7374 Mon Sep 17 00:00:00 2001 From: avallete Date: Mon, 19 May 2025 15:50:21 +0200 Subject: [PATCH 03/42] fix(typescript): prefer current schema typescript typegen See: https://github.com/supabase/postgres-meta/commit/80db2de0d1e4d7478e0a7bb950c9b63fefd2840c Trigger new release for this fix From 54347546ac3502989193cef5cb84668ad78b37b6 Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 20 May 2025 13:11:29 +0200 Subject: [PATCH 04/42] fix(query): ensure that open connection are killed after timeout Without statement_timeout set, the query_timeout wont always kill the underlying database query connection leading to possible connections exhaustions --- package.json | 4 ++-- src/server/constants.ts | 3 +++ test/index.test.ts | 1 + test/server/query-timeout.ts | 33 +++++++++++++++++++++++++++++++++ 4 files changed, 39 insertions(+), 2 deletions(-) create mode 100644 test/server/query-timeout.ts diff --git a/package.json b/package.json index e521801c..570ada54 100644 --- a/package.json +++ b/package.json @@ -30,8 +30,8 @@ "test": "run-s db:clean db:run test:run db:clean", "db:clean": "cd test/db && docker compose down", "db:run": "cd test/db && docker compose up --detach --wait", - "test:run": "PG_META_MAX_RESULT_SIZE_MB=20 vitest run --coverage", - "test:update": "run-s db:clean db:run && PG_META_MAX_RESULT_SIZE_MB=20 vitest run --update && run-s db:clean" + "test:run": "PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=3 PG_CONN_TIMEOUT_SECS=30 vitest run --coverage", + "test:update": "run-s db:clean db:run && PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=3 PG_CONN_TIMEOUT_SECS=30 vitest run --update && run-s db:clean" }, "engines": { "node": ">=20", diff --git a/src/server/constants.ts b/src/server/constants.ts index 4d1965f9..731ca117 100644 --- a/src/server/constants.ts +++ b/src/server/constants.ts @@ -59,6 +59,9 @@ export const PG_META_MAX_RESULT_SIZE = process.env.PG_META_MAX_RESULT_SIZE_MB export const DEFAULT_POOL_CONFIG: PoolConfig = { max: 1, connectionTimeoutMillis: PG_CONN_TIMEOUT_SECS * 1000, + // node-postgrest need a statement_timeout to kill the connection when timeout is reached + // otherwise the query will keep running on the database even if query timeout was reached + statement_timeout: (PG_QUERY_TIMEOUT_SECS + 1) * 1000, query_timeout: PG_QUERY_TIMEOUT_SECS * 1000, ssl: PG_META_DB_SSL_ROOT_CERT ? 
{ ca: PG_META_DB_SSL_ROOT_CERT } : undefined, application_name: `postgres-meta ${pkg.version}`, diff --git a/test/index.test.ts b/test/index.test.ts index 9a315921..6ca2b87e 100644 --- a/test/index.test.ts +++ b/test/index.test.ts @@ -23,3 +23,4 @@ import './server/ssl' import './server/table-privileges' import './server/typegen' import './server/result-size-limit' +import './server/query-timeout' diff --git a/test/server/query-timeout.ts b/test/server/query-timeout.ts new file mode 100644 index 00000000..c9064d00 --- /dev/null +++ b/test/server/query-timeout.ts @@ -0,0 +1,33 @@ +import { expect, test, describe } from 'vitest' +import { app } from './utils' +import { pgMeta } from '../lib/utils' + +describe('test query timeout', () => { + test('query timeout after 3s and connection cleanup', async () => { + const query = `SELECT pg_sleep(10);` + // Execute a query that will sleep for 10 seconds + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { + query, + }, + }) + + // Check that we get the proper timeout error response + expect(res.statusCode).toBe(408) // Request Timeout + expect(res.json()).toMatchObject({ + error: expect.stringContaining('Query read timeout'), + }) + // wait one second for the statement timeout to take effect + await new Promise((resolve) => setTimeout(resolve, 1000)) + + // Verify that the connection has been cleaned up by checking active connections + const connectionsRes = await pgMeta.query(` + SELECT * FROM pg_stat_activity where application_name = 'postgres-meta 0.0.0-automated' and query ILIKE '%${query}%'; + `) + + // Should have no active connections except for our current query + expect(connectionsRes.data).toHaveLength(0) + }, 5000) +}) From e41138a9c8843f9422a2eb2b926fb804cf4d1a82 Mon Sep 17 00:00:00 2001 From: avallete Date: Wed, 21 May 2025 14:55:40 +0200 Subject: [PATCH 05/42] fix: exclude pooler from statement_timeout --- src/lib/db.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/lib/db.ts b/src/lib/db.ts index 7ac18783..1b42e538 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -80,6 +80,11 @@ export const init: (config: PoolConfig) => { u.searchParams.delete('sslrootcert') config.connectionString = u.toString() + // For pooler connections like pgbouncer, statement_timeout isn't supported + if (u.port !== '5432') { + config.statement_timeout = undefined + } + // sslmode: null, 'disable', 'prefer', 'require', 'verify-ca', 'verify-full', 'no-verify' // config.ssl: true, false, {} if (sslmode === null) { From 2fa2011dbb3c690388d763abf3fdb16168fef44b Mon Sep 17 00:00:00 2001 From: avallete Date: Thu, 22 May 2025 09:21:51 +0200 Subject: [PATCH 06/42] fix: set the statement_timeout at query level --- src/lib/db.ts | 17 +++++++++-------- src/server/constants.ts | 4 +--- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/src/lib/db.ts b/src/lib/db.ts index 1b42e538..f09b0bcb 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -2,6 +2,7 @@ import pg from 'pg' import * as Sentry from '@sentry/node' import { parse as parseArray } from 'postgres-array' import { PostgresMetaResult, PoolConfig } from './types.js' +import { PG_STATEMENT_TIMEOUT_SECS } from '../server/constants.js' pg.types.setTypeParser(pg.types.builtins.INT8, (x) => { const asNumber = Number(x) @@ -80,11 +81,6 @@ export const init: (config: PoolConfig) => { u.searchParams.delete('sslrootcert') config.connectionString = u.toString() - // For pooler connections like pgbouncer, statement_timeout isn't supported - if (u.port !== '5432') { - 
config.statement_timeout = undefined - } - // sslmode: null, 'disable', 'prefer', 'require', 'verify-ca', 'verify-full', 'no-verify' // config.ssl: true, false, {} if (sslmode === null) { @@ -117,10 +113,15 @@ export const init: (config: PoolConfig) => { attributes: { sql: trackQueryInSentry ? sql : 'custom' }, }, async () => { + // node-postgres need a statement_timeout to kill the connection when timeout is reached + // otherwise the query will keep running on the database even if query timeout was reached + // This need to be added at query and not connection level because poolers (pgbouncer) doesn't + // allow to set this parameter at connection time + const sqlWithStatementTimeout = `SET statement_timeout='${PG_STATEMENT_TIMEOUT_SECS}s';\n${sql}` try { if (!pool) { const pool = new pg.Pool(config) - let res = await poolerQueryHandleError(pool, sql) + let res = await poolerQueryHandleError(pool, sqlWithStatementTimeout) if (Array.isArray(res)) { res = res.reverse().find((x) => x.rows.length !== 0) ?? { rows: [] } } @@ -128,7 +129,7 @@ export const init: (config: PoolConfig) => { return { data: res.rows, error: null } } - let res = await poolerQueryHandleError(pool, sql) + let res = await poolerQueryHandleError(pool, sqlWithStatementTimeout) if (Array.isArray(res)) { res = res.reverse().find((x) => x.rows.length !== 0) ?? { rows: [] } } @@ -158,7 +159,7 @@ export const init: (config: PoolConfig) => { let lineNumber = 0 let lineOffset = 0 - const lines = sql.split('\n') + const lines = sqlWithStatementTimeout.split('\n') let currentOffset = 0 for (let i = 0; i < lines.length; i++) { if (currentOffset + lines[i].length > position) { diff --git a/src/server/constants.ts b/src/server/constants.ts index 731ca117..759aa8e0 100644 --- a/src/server/constants.ts +++ b/src/server/constants.ts @@ -17,6 +17,7 @@ const PG_META_DB_SSL_MODE = process.env.PG_META_DB_SSL_MODE || 'disable' const PG_CONN_TIMEOUT_SECS = Number(process.env.PG_CONN_TIMEOUT_SECS || 15) const PG_QUERY_TIMEOUT_SECS = Number(process.env.PG_QUERY_TIMEOUT_SECS || 55) +export const PG_STATEMENT_TIMEOUT_SECS = PG_QUERY_TIMEOUT_SECS + 1 export let PG_CONNECTION = process.env.PG_META_DB_URL if (!PG_CONNECTION) { @@ -59,9 +60,6 @@ export const PG_META_MAX_RESULT_SIZE = process.env.PG_META_MAX_RESULT_SIZE_MB export const DEFAULT_POOL_CONFIG: PoolConfig = { max: 1, connectionTimeoutMillis: PG_CONN_TIMEOUT_SECS * 1000, - // node-postgrest need a statement_timeout to kill the connection when timeout is reached - // otherwise the query will keep running on the database even if query timeout was reached - statement_timeout: (PG_QUERY_TIMEOUT_SECS + 1) * 1000, query_timeout: PG_QUERY_TIMEOUT_SECS * 1000, ssl: PG_META_DB_SSL_ROOT_CERT ? 
{ ca: PG_META_DB_SSL_ROOT_CERT } : undefined, application_name: `postgres-meta ${pkg.version}`, From 18d61e21bcf813701c1acd41c6016e687962f3b3 Mon Sep 17 00:00:00 2001 From: avallete Date: Thu, 22 May 2025 10:54:35 +0200 Subject: [PATCH 07/42] fix: use query level statement timeout --- package.json | 4 +-- src/lib/db.ts | 12 ++++++-- test/server/query-timeout.ts | 50 ++++++++++++++++++-------------- test/server/result-size-limit.ts | 16 ++++++---- 4 files changed, 50 insertions(+), 32 deletions(-) diff --git a/package.json b/package.json index 570ada54..62315e9f 100644 --- a/package.json +++ b/package.json @@ -30,8 +30,8 @@ "test": "run-s db:clean db:run test:run db:clean", "db:clean": "cd test/db && docker compose down", "db:run": "cd test/db && docker compose up --detach --wait", - "test:run": "PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=3 PG_CONN_TIMEOUT_SECS=30 vitest run --coverage", - "test:update": "run-s db:clean db:run && PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=3 PG_CONN_TIMEOUT_SECS=30 vitest run --update && run-s db:clean" + "test:run": "PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=5 PG_CONN_TIMEOUT_SECS=30 vitest run --coverage", + "test:update": "run-s db:clean db:run && PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=5 PG_CONN_TIMEOUT_SECS=30 vitest run --update && run-s db:clean" }, "engines": { "node": ">=20", diff --git a/src/lib/db.ts b/src/lib/db.ts index f09b0bcb..a1fe3591 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -4,6 +4,8 @@ import { parse as parseArray } from 'postgres-array' import { PostgresMetaResult, PoolConfig } from './types.js' import { PG_STATEMENT_TIMEOUT_SECS } from '../server/constants.js' +const STATEMENT_TIMEOUT_QUERY_PREFIX = `SET statement_timeout='${PG_STATEMENT_TIMEOUT_SECS}s';` + pg.types.setTypeParser(pg.types.builtins.INT8, (x) => { const asNumber = Number(x) if (Number.isSafeInteger(asNumber)) { @@ -117,7 +119,7 @@ export const init: (config: PoolConfig) => { // otherwise the query will keep running on the database even if query timeout was reached // This need to be added at query and not connection level because poolers (pgbouncer) doesn't // allow to set this parameter at connection time - const sqlWithStatementTimeout = `SET statement_timeout='${PG_STATEMENT_TIMEOUT_SECS}s';\n${sql}` + const sqlWithStatementTimeout = `${STATEMENT_TIMEOUT_QUERY_PREFIX}${sql}` try { if (!pool) { const pool = new pg.Pool(config) @@ -153,13 +155,17 @@ export const init: (config: PoolConfig) => { formattedError += '\n' if (error.position) { // error.position is 1-based - const position = Number(error.position) - 1 + // we also remove our `SET statement_timeout = 'XXs';\n` from the position + const position = + Number(error.position) - 1 - STATEMENT_TIMEOUT_QUERY_PREFIX.length + // we set the new error position + error.position = `${position + 1}` let line = '' let lineNumber = 0 let lineOffset = 0 - const lines = sqlWithStatementTimeout.split('\n') + const lines = sql.split('\n') let currentOffset = 0 for (let i = 0; i < lines.length; i++) { if (currentOffset + lines[i].length > position) { diff --git a/test/server/query-timeout.ts b/test/server/query-timeout.ts index c9064d00..3dc8010d 100644 --- a/test/server/query-timeout.ts +++ b/test/server/query-timeout.ts @@ -2,32 +2,38 @@ import { expect, test, describe } from 'vitest' import { app } from './utils' import { pgMeta } from '../lib/utils' +const TIMEOUT = (Number(process.env.PG_QUERY_TIMEOUT_SECS) ?? 
10) + 2 + describe('test query timeout', () => { - test('query timeout after 3s and connection cleanup', async () => { - const query = `SELECT pg_sleep(10);` - // Execute a query that will sleep for 10 seconds - const res = await app.inject({ - method: 'POST', - path: '/query', - payload: { - query, - }, - }) + test( + `query timeout after ${TIMEOUT}s and connection cleanup`, + async () => { + const query = `SELECT pg_sleep(${TIMEOUT});` + // Execute a query that will sleep for 10 seconds + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { + query, + }, + }) - // Check that we get the proper timeout error response - expect(res.statusCode).toBe(408) // Request Timeout - expect(res.json()).toMatchObject({ - error: expect.stringContaining('Query read timeout'), - }) - // wait one second for the statement timeout to take effect - await new Promise((resolve) => setTimeout(resolve, 1000)) + // Check that we get the proper timeout error response + expect(res.statusCode).toBe(408) // Request Timeout + expect(res.json()).toMatchObject({ + error: expect.stringContaining('Query read timeout'), + }) + // wait one second for the statement timeout to take effect + await new Promise((resolve) => setTimeout(resolve, 1000)) - // Verify that the connection has been cleaned up by checking active connections - const connectionsRes = await pgMeta.query(` + // Verify that the connection has been cleaned up by checking active connections + const connectionsRes = await pgMeta.query(` SELECT * FROM pg_stat_activity where application_name = 'postgres-meta 0.0.0-automated' and query ILIKE '%${query}%'; `) - // Should have no active connections except for our current query - expect(connectionsRes.data).toHaveLength(0) - }, 5000) + // Should have no active connections except for our current query + expect(connectionsRes.data).toHaveLength(0) + }, + TIMEOUT * 1000 + ) }) diff --git a/test/server/result-size-limit.ts b/test/server/result-size-limit.ts index 15543d67..7dab1834 100644 --- a/test/server/result-size-limit.ts +++ b/test/server/result-size-limit.ts @@ -72,23 +72,29 @@ describe('test js parser error max result', () => { // Create a table with large data for testing beforeAll(async () => { // Create a table with a large text column - await pgMeta.query(` + await pgMeta.query( + ` CREATE TABLE very_large_data ( id SERIAL PRIMARY KEY, data TEXT ); - `) + `, + false + ) // Insert data that will exceed our limit in tests it's set around ~20MB - await pgMeta.query(` + await pgMeta.query( + ` INSERT INTO very_large_data (data) VALUES (repeat('x', 710 * 1024 * 1024)) -- 700+MB string will raise a JS exception at parse time - `) + `, + false + ) }) afterAll(async () => { // Clean up the test table - await pgMeta.query('DROP TABLE very_large_data;') + await pgMeta.query('DROP TABLE very_large_data;', false) }) test( From 4150b26d15f34073178b2362f3b47ff6a2c02cea Mon Sep 17 00:00:00 2001 From: avallete Date: Thu, 22 May 2025 11:04:15 +0200 Subject: [PATCH 08/42] chore: revert result-size test --- test/server/result-size-limit.ts | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/test/server/result-size-limit.ts b/test/server/result-size-limit.ts index 7dab1834..15543d67 100644 --- a/test/server/result-size-limit.ts +++ b/test/server/result-size-limit.ts @@ -72,29 +72,23 @@ describe('test js parser error max result', () => { // Create a table with large data for testing beforeAll(async () => { // Create a table with a large text column - await pgMeta.query( - ` 
+ await pgMeta.query(` CREATE TABLE very_large_data ( id SERIAL PRIMARY KEY, data TEXT ); - `, - false - ) + `) // Insert data that will exceed our limit in tests it's set around ~20MB - await pgMeta.query( - ` + await pgMeta.query(` INSERT INTO very_large_data (data) VALUES (repeat('x', 710 * 1024 * 1024)) -- 700+MB string will raise a JS exception at parse time - `, - false - ) + `) }) afterAll(async () => { // Clean up the test table - await pgMeta.query('DROP TABLE very_large_data;', false) + await pgMeta.query('DROP TABLE very_large_data;') }) test( From 3d9fea49fdc8eb26815a7cf15b250336e8fd6c3e Mon Sep 17 00:00:00 2001 From: avallete Date: Thu, 22 May 2025 11:40:38 +0200 Subject: [PATCH 09/42] chore: fix secret module mock --- src/lib/secrets.ts | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/lib/secrets.ts b/src/lib/secrets.ts index 03df7639..c44578ec 100644 --- a/src/lib/secrets.ts +++ b/src/lib/secrets.ts @@ -1,6 +1,3 @@ -// Use dynamic import to support module mock -const fs = await import('node:fs/promises') - export const getSecret = async (key: string) => { if (!key) { return '' @@ -15,6 +12,8 @@ export const getSecret = async (key: string) => { if (!file) { return '' } + // Use dynamic import to support module mock + const fs = await import('node:fs/promises') return await fs.readFile(file, { encoding: 'utf8' }).catch((e) => { if (e.code == 'ENOENT') { From bf91a7e7c306723462a09349bbad8b566762ba93 Mon Sep 17 00:00:00 2001 From: avallete Date: Thu, 22 May 2025 16:01:26 +0200 Subject: [PATCH 10/42] feat(typegen): add postgrest_version parameter to typegen --- src/server/routes/generators/typescript.ts | 3 + src/server/templates/typescript.ts | 26 + test/server/typegen.ts | 653 +++++++++++++++++++++ 3 files changed, 682 insertions(+) diff --git a/src/server/routes/generators/typescript.ts b/src/server/routes/generators/typescript.ts index 3e615b32..259cd141 100644 --- a/src/server/routes/generators/typescript.ts +++ b/src/server/routes/generators/typescript.ts @@ -11,6 +11,7 @@ export default async (fastify: FastifyInstance) => { excluded_schemas?: string included_schemas?: string detect_one_to_one_relationships?: string + postgrest_version?: string } }>('/', async (request, reply) => { const config = createConnectionConfig(request) @@ -19,6 +20,7 @@ export default async (fastify: FastifyInstance) => { const includedSchemas = request.query.included_schemas?.split(',').map((schema) => schema.trim()) ?? 
[] const detectOneToOneRelationships = request.query.detect_one_to_one_relationships === 'true' + const postgrestVersion = request.query.postgrest_version const pgMeta: PostgresMeta = new PostgresMeta(config) const { data: generatorMeta, error: generatorMetaError } = await getGeneratorMetadata(pgMeta, { @@ -34,6 +36,7 @@ export default async (fastify: FastifyInstance) => { return applyTypescriptTemplate({ ...generatorMeta, detectOneToOneRelationships, + postgrestVersion, }) }) } diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 6e3fc750..5ebe9dc8 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -21,8 +21,10 @@ export const apply = async ({ functions, types, detectOneToOneRelationships, + postgrestVersion, }: GeneratorMetadata & { detectOneToOneRelationships: boolean + postgrestVersion?: string }): Promise => { const columnsByTableId = Object.fromEntries( [...tables, ...foreignTables, ...views, ...materializedViews].map((t) => [t.id, []]) @@ -32,6 +34,29 @@ export const apply = async ({ .sort(({ name: a }, { name: b }) => a.localeCompare(b)) .forEach((c) => columnsByTableId[c.table_id].push(c)) + const internal_supabase_schema = postgrestVersion + ? `// Allows to automatically instanciate createClient with right options + // instead of createClient(URL, KEY) + __internal_supabase: { + postgrestVersion: '${postgrestVersion}' + Tables: { + [_ in never]: never + } + Views: { + [_ in never]: never + } + Functions: { + [_ in never]: never + } + Enums: { + [_ in never]: never + } + CompositeTypes: { + [_ in never]: never + } + }` + : '' + let output = ` export type Json = string | number | boolean | null | { [key: string]: Json | undefined } | Json[] @@ -431,6 +456,7 @@ export type Database = { } }` })} + ${internal_supabase_schema} } type DefaultSchema = Database[Extract] diff --git a/test/server/typegen.ts b/test/server/typegen.ts index c0851ef1..9e534d78 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -1875,6 +1875,659 @@ test('typegen: typescript w/ one-to-one relationships', async () => { ) }) +test('typegen: typescript w/ postgrestVersion', async () => { + const { body } = await app.inject({ + method: 'GET', + path: '/generators/typescript', + query: { detect_one_to_one_relationships: 'true', postgrest_version: '13' }, + }) + expect(body).toMatchInlineSnapshot( + ` + "export type Json = + | string + | number + | boolean + | null + | { [key: string]: Json | undefined } + | Json[] + + export type Database = { + public: { + Tables: { + category: { + Row: { + id: number + name: string + } + Insert: { + id?: number + name: string + } + Update: { + id?: number + name?: string + } + Relationships: [] + } + empty: { + Row: {} + Insert: {} + Update: {} + Relationships: [] + } + foreign_table: { + Row: { + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + Insert: { + id: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Update: { + id?: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + memes: { + Row: { + category: number | null + created_at: string + id: number + metadata: Json | null + name: string + status: Database["public"]["Enums"]["meme_status"] | null + } + Insert: { + category?: number | null + created_at: string + id?: number + metadata?: Json | null + name: string + status?: Database["public"]["Enums"]["meme_status"] | null 
+ } + Update: { + category?: number | null + created_at?: string + id?: number + metadata?: Json | null + name?: string + status?: Database["public"]["Enums"]["meme_status"] | null + } + Relationships: [ + { + foreignKeyName: "memes_category_fkey" + columns: ["category"] + isOneToOne: false + referencedRelation: "category" + referencedColumns: ["id"] + }, + ] + } + table_with_other_tables_row_type: { + Row: { + col1: Database["public"]["Tables"]["user_details"]["Row"] | null + col2: Database["public"]["Views"]["a_view"]["Row"] | null + } + Insert: { + col1?: Database["public"]["Tables"]["user_details"]["Row"] | null + col2?: Database["public"]["Views"]["a_view"]["Row"] | null + } + Update: { + col1?: Database["public"]["Tables"]["user_details"]["Row"] | null + col2?: Database["public"]["Views"]["a_view"]["Row"] | null + } + Relationships: [] + } + table_with_primary_key_other_than_id: { + Row: { + name: string | null + other_id: number + } + Insert: { + name?: string | null + other_id?: number + } + Update: { + name?: string | null + other_id?: number + } + Relationships: [] + } + todos: { + Row: { + details: string | null + id: number + "user-id": number + blurb: string | null + blurb_varchar: string | null + details_is_long: boolean | null + details_length: number | null + details_words: string[] | null + } + Insert: { + details?: string | null + id?: number + "user-id": number + } + Update: { + details?: string | null + id?: number + "user-id"?: number + } + Relationships: [ + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + user_details: { + Row: { + details: string | null + user_id: number + } + Insert: { + details?: string | null + user_id: number + } + Update: { + details?: string | null + user_id?: number + } + Relationships: [ + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + users: { + Row: { + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + 
Insert: { + id?: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Update: { + id?: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + users_audit: { + Row: { + created_at: string | null + id: number + previous_value: Json | null + user_id: number | null + } + Insert: { + created_at?: string | null + id?: number + previous_value?: Json | null + user_id?: number | null + } + Update: { + created_at?: string | null + id?: number + previous_value?: Json | null + user_id?: number | null + } + Relationships: [] + } + } + Views: { + a_view: { + Row: { + id: number | null + } + Insert: { + id?: number | null + } + Update: { + id?: number | null + } + Relationships: [] + } + todos_matview: { + Row: { + details: string | null + id: number | null + "user-id": number | null + } + Relationships: [ + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + todos_view: { + Row: { + details: string | null + id: number | null + "user-id": number | null + } + Insert: { + details?: string | null + id?: number | null + "user-id"?: number | null + } + Update: { + details?: string | null + id?: number | null + "user-id"?: number | null + } + Relationships: [ + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + users_view: { + Row: { + id: number | null + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + Insert: { + id?: number | null + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Update: { + id?: number | null + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + users_view_with_multiple_refs_to_users: { + Row: { + initial_id: number | null + initial_name: string | null + second_id: number | null + second_name: string | null + } + Relationships: [] + } + } + Functions: { + blurb: { + Args: { "": 
Database["public"]["Tables"]["todos"]["Row"] } + Returns: string + } + blurb_varchar: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: string + } + details_is_long: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: boolean + } + details_length: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: number + } + details_words: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: string[] + } + function_returning_row: { + Args: Record + Returns: { + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + } + function_returning_set_of_rows: { + Args: Record + Returns: { + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + }[] + } + function_returning_table: { + Args: Record + Returns: { + id: number + name: string + }[] + } + get_todos_setof_rows: { + Args: + | { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + | { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + } + get_user_audit_setof_single_row: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + created_at: string | null + id: number + previous_value: Json | null + user_id: number | null + }[] + } + polymorphic_function: { + Args: { "": boolean } | { "": string } + Returns: undefined + } + postgres_fdw_disconnect: { + Args: { "": string } + Returns: boolean + } + postgres_fdw_disconnect_all: { + Args: Record + Returns: boolean + } + postgres_fdw_get_connections: { + Args: Record + Returns: Record[] + } + postgres_fdw_handler: { + Args: Record + Returns: unknown + } + test_internal_query: { + Args: Record + Returns: undefined + } + } + Enums: { + meme_status: "new" | "old" | "retired" + user_status: "ACTIVE" | "INACTIVE" + } + CompositeTypes: { + composite_type_with_array_attribute: { + my_text_array: string[] | null + } + composite_type_with_record_attribute: { + todo: Database["public"]["Tables"]["todos"]["Row"] | null + } + } + } + // Allows to automatically instanciate createClient with right options + // instead of createClient(URL, KEY) + __internal_supabase: { + postgrestVersion: "13" + Tables: { + [_ in never]: never + } + Views: { + [_ in never]: never + } + Functions: { + [_ in never]: never + } + Enums: { + [_ in never]: never + } + CompositeTypes: { + [_ in never]: never + } + } + } + + type DefaultSchema = Database[Extract] + + export type Tables< + DefaultSchemaTableNameOrOptions extends + | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) + | { schema: keyof Database }, + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof Database + } + ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + : never = never, + > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } + ? (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + Row: infer R + } + ? R + : never + : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] & + DefaultSchema["Views"]) + ? (DefaultSchema["Tables"] & + DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends { + Row: infer R + } + ? 
R + : never + : never + + export type TablesInsert< + DefaultSchemaTableNameOrOptions extends + | keyof DefaultSchema["Tables"] + | { schema: keyof Database }, + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof Database + } + ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + : never = never, + > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } + ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + Insert: infer I + } + ? I + : never + : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { + Insert: infer I + } + ? I + : never + : never + + export type TablesUpdate< + DefaultSchemaTableNameOrOptions extends + | keyof DefaultSchema["Tables"] + | { schema: keyof Database }, + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof Database + } + ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + : never = never, + > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } + ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + Update: infer U + } + ? U + : never + : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { + Update: infer U + } + ? U + : never + : never + + export type Enums< + DefaultSchemaEnumNameOrOptions extends + | keyof DefaultSchema["Enums"] + | { schema: keyof Database }, + EnumName extends DefaultSchemaEnumNameOrOptions extends { + schema: keyof Database + } + ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + : never = never, + > = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } + ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] + ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] + : never + + export type CompositeTypes< + PublicCompositeTypeNameOrOptions extends + | keyof DefaultSchema["CompositeTypes"] + | { schema: keyof Database }, + CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { + schema: keyof Database + } + ? keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + : never = never, + > = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } + ? Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] + ? 
DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] + : never + + export const Constants = { + public: { + Enums: { + meme_status: ["new", "old", "retired"], + user_status: ["ACTIVE", "INACTIVE"], + }, + }, + } as const + " + ` + ) +}) + test('typegen: go', async () => { const { body } = await app.inject({ method: 'GET', path: '/generators/go' }) expect(body).toMatchInlineSnapshot(` From b45bb4431cb91fe51872af82d22ae93384ec9241 Mon Sep 17 00:00:00 2001 From: avallete Date: Thu, 22 May 2025 16:08:07 +0200 Subject: [PATCH 11/42] chore: add env parameter for cli --- src/server/constants.ts | 1 + src/server/server.ts | 2 ++ 2 files changed, 3 insertions(+) diff --git a/src/server/constants.ts b/src/server/constants.ts index 731ca117..8e6a1508 100644 --- a/src/server/constants.ts +++ b/src/server/constants.ts @@ -45,6 +45,7 @@ export const GENERATE_TYPES_DEFAULT_SCHEMA = process.env.PG_META_GENERATE_TYPES_DEFAULT_SCHEMA || 'public' export const GENERATE_TYPES_DETECT_ONE_TO_ONE_RELATIONSHIPS = process.env.PG_META_GENERATE_TYPES_DETECT_ONE_TO_ONE_RELATIONSHIPS === 'true' +export const POSTGREST_VERSION = process.env.PG_META_POSTGREST_VERSION export const GENERATE_TYPES_SWIFT_ACCESS_CONTROL = process.env .PG_META_GENERATE_TYPES_SWIFT_ACCESS_CONTROL ? (process.env.PG_META_GENERATE_TYPES_SWIFT_ACCESS_CONTROL as AccessControl) diff --git a/src/server/server.ts b/src/server/server.ts index 5396f9e0..9ac3152e 100644 --- a/src/server/server.ts +++ b/src/server/server.ts @@ -13,6 +13,7 @@ import { PG_CONNECTION, PG_META_HOST, PG_META_PORT, + POSTGREST_VERSION, } from './constants.js' import { apply as applyTypescriptTemplate } from './templates/typescript.js' import { apply as applyGoTemplate } from './templates/go.js' @@ -129,6 +130,7 @@ async function getTypeOutput(): Promise { ), types: types!, detectOneToOneRelationships: GENERATE_TYPES_DETECT_ONE_TO_ONE_RELATIONSHIPS, + postgresVersion: POSTGREST_VERSION, } switch (GENERATE_TYPES?.toLowerCase()) { From 6ca485028cc5cced9c131c5c2e108b18313f2327 Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 27 May 2025 18:18:35 +0200 Subject: [PATCH 12/42] chore: set statement_timeout as /query params --- src/lib/PostgresMeta.ts | 5 ++++- src/lib/db.ts | 21 +++++++++++++-------- src/server/routes/query.ts | 9 ++++++++- test/server/query-timeout.ts | 33 +++++++++++++++++++++++++++++++++ 4 files changed, 58 insertions(+), 10 deletions(-) diff --git a/src/lib/PostgresMeta.ts b/src/lib/PostgresMeta.ts index 379fbb23..91050383 100644 --- a/src/lib/PostgresMeta.ts +++ b/src/lib/PostgresMeta.ts @@ -22,7 +22,10 @@ import { init } from './db.js' import { PostgresMetaResult, PoolConfig } from './types.js' export default class PostgresMeta { - query: (sql: string, trackQueryInSentry?: boolean) => Promise> + query: ( + sql: string, + opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean } + ) => Promise> end: () => Promise columnPrivileges: PostgresMetaColumnPrivileges columns: PostgresMetaColumns diff --git a/src/lib/db.ts b/src/lib/db.ts index a1fe3591..6f7e906a 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -2,9 +2,6 @@ import pg from 'pg' import * as Sentry from '@sentry/node' import { parse as parseArray } from 'postgres-array' import { PostgresMetaResult, PoolConfig } from './types.js' -import { PG_STATEMENT_TIMEOUT_SECS } from '../server/constants.js' - -const STATEMENT_TIMEOUT_QUERY_PREFIX = `SET statement_timeout='${PG_STATEMENT_TIMEOUT_SECS}s';` pg.types.setTypeParser(pg.types.builtins.INT8, (x) => { const asNumber = Number(x) @@ 
-65,7 +62,10 @@ const poolerQueryHandleError = (pgpool: pg.Pool, sql: string): Promise { - query: (sql: string, trackQueryInSentry?: boolean) => Promise> + query: ( + sql: string, + opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean } + ) => Promise> end: () => Promise } = (config) => { return Sentry.startSpan({ op: 'db', name: 'db.init' }, () => { @@ -106,7 +106,10 @@ export const init: (config: PoolConfig) => { let pool: pg.Pool | null = new pg.Pool(config) return { - async query(sql, trackQueryInSentry = true) { + async query( + sql, + { statementQueryTimeout, trackQueryInSentry } = { trackQueryInSentry: true } + ) { return Sentry.startSpan( // For metrics purposes, log the query that will be run if it's not an user provided query (with possibly sentitives infos) { @@ -115,11 +118,14 @@ export const init: (config: PoolConfig) => { attributes: { sql: trackQueryInSentry ? sql : 'custom' }, }, async () => { + const statementTimeoutQueryPrefix = statementQueryTimeout + ? `SET statement_timeout='${statementQueryTimeout}s';` + : '' // node-postgres need a statement_timeout to kill the connection when timeout is reached // otherwise the query will keep running on the database even if query timeout was reached // This need to be added at query and not connection level because poolers (pgbouncer) doesn't // allow to set this parameter at connection time - const sqlWithStatementTimeout = `${STATEMENT_TIMEOUT_QUERY_PREFIX}${sql}` + const sqlWithStatementTimeout = `${statementTimeoutQueryPrefix}${sql}` try { if (!pool) { const pool = new pg.Pool(config) @@ -156,8 +162,7 @@ export const init: (config: PoolConfig) => { if (error.position) { // error.position is 1-based // we also remove our `SET statement_timeout = 'XXs';\n` from the position - const position = - Number(error.position) - 1 - STATEMENT_TIMEOUT_QUERY_PREFIX.length + const position = Number(error.position) - 1 - statementTimeoutQueryPrefix.length // we set the new error position error.position = `${position + 1}` diff --git a/src/server/routes/query.ts b/src/server/routes/query.ts index 21788ce8..c8f23bc9 100644 --- a/src/server/routes/query.ts +++ b/src/server/routes/query.ts @@ -19,11 +19,18 @@ export default async (fastify: FastifyInstance) => { Body: { query: string } + Querystring: { + statementTimeoutSecs?: number + } }>('/', async (request, reply) => { + const statementTimeoutSecs = request.query.statementTimeoutSecs errorOnEmptyQuery(request) const config = createConnectionConfig(request) const pgMeta = new PostgresMeta(config) - const { data, error } = await pgMeta.query(request.body.query, false) + const { data, error } = await pgMeta.query(request.body.query, { + trackQueryInSentry: true, + statementQueryTimeout: statementTimeoutSecs, + }) await pgMeta.end() if (error) { request.log.error({ error, request: extractRequestForLogging(request) }) diff --git a/test/server/query-timeout.ts b/test/server/query-timeout.ts index 3dc8010d..e41894fc 100644 --- a/test/server/query-timeout.ts +++ b/test/server/query-timeout.ts @@ -13,6 +13,7 @@ describe('test query timeout', () => { const res = await app.inject({ method: 'POST', path: '/query', + query: `statementTimeoutSecs=${TIMEOUT - 2}`, payload: { query, }, @@ -36,4 +37,36 @@ describe('test query timeout', () => { }, TIMEOUT * 1000 ) + + test( + 'query without timeout parameter should not have timeout', + async () => { + const query = `SELECT pg_sleep(${TIMEOUT});` + // Execute a query that will sleep for 10 seconds without specifying timeout + const res = await 
app.inject({ + method: 'POST', + path: '/query', + payload: { + query, + }, + }) + + // Check that we get the proper timeout error response + expect(res.statusCode).toBe(408) // Request Timeout + expect(res.json()).toMatchObject({ + error: expect.stringContaining('Query read timeout'), + }) + // wait one second + await new Promise((resolve) => setTimeout(resolve, 1000)) + + // Verify that the connection has not been cleaned up sinice there is no statementTimetout + const connectionsRes = await pgMeta.query(` + SELECT * FROM pg_stat_activity where application_name = 'postgres-meta 0.0.0-automated' and query ILIKE '%${query}%'; + `) + + // Should have no active connections except for our current query + expect(connectionsRes.data).toHaveLength(1) + }, + TIMEOUT * 1000 + ) }) From 9f728fef86288d5351f2bfa014ee71c5dbc4388e Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 27 May 2025 18:36:37 +0200 Subject: [PATCH 13/42] chore: add query timeout params test --- test/server/query-timeout.ts | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/test/server/query-timeout.ts b/test/server/query-timeout.ts index e41894fc..47554afc 100644 --- a/test/server/query-timeout.ts +++ b/test/server/query-timeout.ts @@ -3,17 +3,18 @@ import { app } from './utils' import { pgMeta } from '../lib/utils' const TIMEOUT = (Number(process.env.PG_QUERY_TIMEOUT_SECS) ?? 10) + 2 +const STATEMENT_TIMEOUT = (Number(process.env.PG_QUERY_TIMEOUT_SECS) ?? 10) + 1 describe('test query timeout', () => { test( `query timeout after ${TIMEOUT}s and connection cleanup`, async () => { - const query = `SELECT pg_sleep(${TIMEOUT});` + const query = `SELECT pg_sleep(${TIMEOUT + 10});` // Execute a query that will sleep for 10 seconds const res = await app.inject({ method: 'POST', path: '/query', - query: `statementTimeoutSecs=${TIMEOUT - 2}`, + query: `statementTimeoutSecs=${STATEMENT_TIMEOUT}`, payload: { query, }, @@ -41,7 +42,7 @@ describe('test query timeout', () => { test( 'query without timeout parameter should not have timeout', async () => { - const query = `SELECT pg_sleep(${TIMEOUT});` + const query = `SELECT pg_sleep(${TIMEOUT + 10});` // Execute a query that will sleep for 10 seconds without specifying timeout const res = await app.inject({ method: 'POST', From f183588ba5bbd516716d3ee417ba9f4a738d230e Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 27 May 2025 18:41:58 +0200 Subject: [PATCH 14/42] chore: cleanup --- src/server/constants.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/server/constants.ts b/src/server/constants.ts index 759aa8e0..4d1965f9 100644 --- a/src/server/constants.ts +++ b/src/server/constants.ts @@ -17,7 +17,6 @@ const PG_META_DB_SSL_MODE = process.env.PG_META_DB_SSL_MODE || 'disable' const PG_CONN_TIMEOUT_SECS = Number(process.env.PG_CONN_TIMEOUT_SECS || 15) const PG_QUERY_TIMEOUT_SECS = Number(process.env.PG_QUERY_TIMEOUT_SECS || 55) -export const PG_STATEMENT_TIMEOUT_SECS = PG_QUERY_TIMEOUT_SECS + 1 export let PG_CONNECTION = process.env.PG_META_DB_URL if (!PG_CONNECTION) { From ed47fd38bd1afecc4c5083bbd0b51ff70f7d5a55 Mon Sep 17 00:00:00 2001 From: avallete Date: Wed, 18 Jun 2025 11:55:27 +0200 Subject: [PATCH 15/42] chore: fix typo --- src/server/server.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/server/server.ts b/src/server/server.ts index 9ac3152e..8b7c1c10 100644 --- a/src/server/server.ts +++ b/src/server/server.ts @@ -130,7 +130,7 @@ async function getTypeOutput(): Promise { ), types: types!, detectOneToOneRelationships: 
GENERATE_TYPES_DETECT_ONE_TO_ONE_RELATIONSHIPS, - postgresVersion: POSTGREST_VERSION, + postgrestVersion: POSTGREST_VERSION, } switch (GENERATE_TYPES?.toLowerCase()) { From 2d4d29e2c7acfd38f3b4b67e0542dfca6b9b1d72 Mon Sep 17 00:00:00 2001 From: Copple <10214025+kiwicopple@users.noreply.github.com> Date: Mon, 30 Jun 2025 04:56:51 +0200 Subject: [PATCH 16/42] chore: remove sponsorship ask (#954) --- README.md | 6 ------ 1 file changed, 6 deletions(-) diff --git a/README.md b/README.md index da8c787d..dd73028a 100644 --- a/README.md +++ b/README.md @@ -116,9 +116,3 @@ To use your own database connection string instead of the provided test database Apache 2.0 -## Sponsors - -We are building the features of Firebase using enterprise-grade, open source products. We support existing communities wherever possible, and if the products don’t exist we build them and open source them ourselves. - -[![New Sponsor](https://user-images.githubusercontent.com/10214025/90518111-e74bbb00-e198-11ea-8f88-c9e3c1aa4b5b.png)](https://github.com/sponsors/supabase) - From eb600b8d1b022da1aecace50753a8e7ef370d0e0 Mon Sep 17 00:00:00 2001 From: avallete Date: Wed, 2 Jul 2025 15:31:41 +0200 Subject: [PATCH 17/42] chore: use CamelCasing convention --- src/server/templates/typescript.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 5ebe9dc8..e0aff414 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -36,9 +36,9 @@ export const apply = async ({ const internal_supabase_schema = postgrestVersion ? `// Allows to automatically instanciate createClient with right options - // instead of createClient(URL, KEY) - __internal_supabase: { - postgrestVersion: '${postgrestVersion}' + // instead of createClient(URL, KEY) + __InternalSupabase: { + PostgrestVersion: '${postgrestVersion}' Tables: { [_ in never]: never } From c01b4b84bc235ff998fe58d71bfc996650c0beab Mon Sep 17 00:00:00 2001 From: avallete Date: Wed, 2 Jul 2025 15:54:40 +0200 Subject: [PATCH 18/42] chore: use CamelCase --- test/server/typegen.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 9e534d78..8c2ef629 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -2389,9 +2389,9 @@ test('typegen: typescript w/ postgrestVersion', async () => { } } // Allows to automatically instanciate createClient with right options - // instead of createClient(URL, KEY) - __internal_supabase: { - postgrestVersion: "13" + // instead of createClient(URL, KEY) + __InternalSupabase: { + PostgrestVersion: "13" Tables: { [_ in never]: never } From 26be3a2d374902960ad99469e340ab62fa00b645 Mon Sep 17 00:00:00 2001 From: Bobbie Soedirgo Date: Fri, 27 Jun 2025 18:04:39 +0800 Subject: [PATCH 19/42] chore: remove extra props from __internal_supabase --- src/server/templates/typescript.ts | 127 +++++++++++++---------------- 1 file changed, 56 insertions(+), 71 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index e0aff414..29c34c1a 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -39,21 +39,6 @@ export const apply = async ({ // instead of createClient(URL, KEY) __InternalSupabase: { PostgrestVersion: '${postgrestVersion}' - Tables: { - [_ in never]: never - } - Views: { - [_ in never]: never - } - Functions: { - [_ in never]: never - } - Enums: { - [_ in never]: never - } - 
CompositeTypes: { - [_ in never]: never - } }` : '' @@ -61,6 +46,7 @@ export const apply = async ({ export type Json = string | number | boolean | null | { [key: string]: Json | undefined } | Json[] export type Database = { + ${internal_supabase_schema} ${schemas .sort(({ name: a }, { name: b }) => a.localeCompare(b)) .map((schema) => { @@ -456,113 +442,112 @@ export type Database = { } }` })} - ${internal_supabase_schema} } -type DefaultSchema = Database[Extract] +type DatabaseWithoutInternals = Omit + +type DefaultSchema = DatabaseWithoutInternals[Extract] export type Tables< DefaultSchemaTableNameOrOptions extends | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) - : never = never, -> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + ? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + : never = never +> = DefaultSchemaTableNameOrOptions extends { schema: keyof DatabaseWithoutInternals } + ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { Row: infer R } ? R : never - : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] & - DefaultSchema["Views"]) - ? (DefaultSchema["Tables"] & - DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends { - Row: infer R - } - ? R - : never + : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) + ? (DefaultSchema["Tables"] & DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends { + Row: infer R + } + ? R : never + : never export type TablesInsert< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] - : never = never, -> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + : never = never +> = DefaultSchemaTableNameOrOptions extends { schema: keyof DatabaseWithoutInternals } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Insert: infer I } ? I : never : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] - ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { - Insert: infer I - } - ? I - : never + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { + Insert: infer I + } + ? 
I : never + : never export type TablesUpdate< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] - : never = never, -> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + : never = never +> = DefaultSchemaTableNameOrOptions extends { schema: keyof DatabaseWithoutInternals } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Update: infer U } ? U : never : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] - ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { - Update: infer U - } - ? U - : never + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { + Update: infer U + } + ? U : never + : never export type Enums< DefaultSchemaEnumNameOrOptions extends | keyof DefaultSchema["Enums"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, EnumName extends DefaultSchemaEnumNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] - : never = never, -> = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + : never = never +> = DefaultSchemaEnumNameOrOptions extends { schema: keyof DatabaseWithoutInternals } + ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] - ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] - : never + ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] + : never export type CompositeTypes< PublicCompositeTypeNameOrOptions extends | keyof DefaultSchema["CompositeTypes"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] - : never = never, -> = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } - ? Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + : never = never +> = PublicCompositeTypeNameOrOptions extends { schema: keyof DatabaseWithoutInternals } + ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] - ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] - : never + ? 
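    // Minimal usage sketch (illustrative, not part of the patch): DatabaseWithoutInternals
    // strips the __InternalSupabase marker so it is never treated as a schema, and the
    // Tables / TablesInsert / TablesUpdate / Enums / CompositeTypes helpers in this hunk
    // resolve against the remaining schemas exactly as before, e.g.
    //   type UserRow    = Tables<'users'>        // Row type of public.users
    //   type MemeInsert = TablesInsert<'memes'>  // Insert type of public.memes
    // with 'users' and 'memes' being tables from the test schema used elsewhere in this
    // series.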
DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] + : never export const Constants = { ${schemas From be5a28ddfba748e6ec9323efd058a0234c73321a Mon Sep 17 00:00:00 2001 From: avallete Date: Wed, 2 Jul 2025 16:10:27 +0200 Subject: [PATCH 20/42] chore: update test snapshot --- test/server/typegen.ts | 297 +++++++++++++++++++++++------------------ 1 file changed, 165 insertions(+), 132 deletions(-) diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 8c2ef629..fa47cbec 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -491,21 +491,25 @@ test('typegen: typescript', async () => { } } - type DefaultSchema = Database[Extract] + type DatabaseWithoutInternals = Omit + + type DefaultSchema = DatabaseWithoutInternals[Extract] export type Tables< DefaultSchemaTableNameOrOptions extends | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + ? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { Row: infer R } ? R @@ -523,14 +527,16 @@ test('typegen: typescript', async () => { export type TablesInsert< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Insert: infer I } ? I @@ -546,14 +552,16 @@ test('typegen: typescript', async () => { export type TablesUpdate< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? 
Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Update: infer U } ? U @@ -569,14 +577,16 @@ test('typegen: typescript', async () => { export type Enums< DefaultSchemaEnumNameOrOptions extends | keyof DefaultSchema["Enums"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, EnumName extends DefaultSchemaEnumNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] : never = never, - > = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + > = DefaultSchemaEnumNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] : never @@ -584,14 +594,16 @@ test('typegen: typescript', async () => { export type CompositeTypes< PublicCompositeTypeNameOrOptions extends | keyof DefaultSchema["CompositeTypes"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] : never = never, - > = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } - ? Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + > = PublicCompositeTypeNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] : never @@ -1124,21 +1136,25 @@ test('typegen w/ one-to-one relationships', async () => { } } - type DefaultSchema = Database[Extract] + type DatabaseWithoutInternals = Omit + + type DefaultSchema = DatabaseWithoutInternals[Extract] export type Tables< DefaultSchemaTableNameOrOptions extends | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + ? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? 
(Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { Row: infer R } ? R @@ -1156,14 +1172,16 @@ test('typegen w/ one-to-one relationships', async () => { export type TablesInsert< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Insert: infer I } ? I @@ -1179,14 +1197,16 @@ test('typegen w/ one-to-one relationships', async () => { export type TablesUpdate< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Update: infer U } ? U @@ -1202,14 +1222,16 @@ test('typegen w/ one-to-one relationships', async () => { export type Enums< DefaultSchemaEnumNameOrOptions extends | keyof DefaultSchema["Enums"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, EnumName extends DefaultSchemaEnumNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] : never = never, - > = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + > = DefaultSchemaEnumNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] ? 
DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] : never @@ -1217,14 +1239,16 @@ test('typegen w/ one-to-one relationships', async () => { export type CompositeTypes< PublicCompositeTypeNameOrOptions extends | keyof DefaultSchema["CompositeTypes"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] : never = never, - > = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } - ? Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + > = PublicCompositeTypeNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] : never @@ -1757,21 +1781,25 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } } - type DefaultSchema = Database[Extract] + type DatabaseWithoutInternals = Omit + + type DefaultSchema = DatabaseWithoutInternals[Extract] export type Tables< DefaultSchemaTableNameOrOptions extends | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + ? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { Row: infer R } ? R @@ -1789,14 +1817,16 @@ test('typegen: typescript w/ one-to-one relationships', async () => { export type TablesInsert< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? 
DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Insert: infer I } ? I @@ -1812,14 +1842,16 @@ test('typegen: typescript w/ one-to-one relationships', async () => { export type TablesUpdate< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Update: infer U } ? U @@ -1835,14 +1867,16 @@ test('typegen: typescript w/ one-to-one relationships', async () => { export type Enums< DefaultSchemaEnumNameOrOptions extends | keyof DefaultSchema["Enums"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, EnumName extends DefaultSchemaEnumNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] : never = never, - > = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + > = DefaultSchemaEnumNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] : never @@ -1850,14 +1884,16 @@ test('typegen: typescript w/ one-to-one relationships', async () => { export type CompositeTypes< PublicCompositeTypeNameOrOptions extends | keyof DefaultSchema["CompositeTypes"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] : never = never, - > = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } - ? Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + > = PublicCompositeTypeNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] ? 
DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] : never @@ -1892,6 +1928,11 @@ test('typegen: typescript w/ postgrestVersion', async () => { | Json[] export type Database = { + // Allows to automatically instanciate createClient with right options + // instead of createClient(URL, KEY) + __InternalSupabase: { + PostgrestVersion: "13" + } public: { Tables: { category: { @@ -2388,43 +2429,27 @@ test('typegen: typescript w/ postgrestVersion', async () => { } } } - // Allows to automatically instanciate createClient with right options - // instead of createClient(URL, KEY) - __InternalSupabase: { - PostgrestVersion: "13" - Tables: { - [_ in never]: never - } - Views: { - [_ in never]: never - } - Functions: { - [_ in never]: never - } - Enums: { - [_ in never]: never - } - CompositeTypes: { - [_ in never]: never - } - } } - type DefaultSchema = Database[Extract] + type DatabaseWithoutInternals = Omit + + type DefaultSchema = DatabaseWithoutInternals[Extract] export type Tables< DefaultSchemaTableNameOrOptions extends | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + ? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { Row: infer R } ? R @@ -2442,14 +2467,16 @@ test('typegen: typescript w/ postgrestVersion', async () => { export type TablesInsert< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Insert: infer I } ? I @@ -2465,14 +2492,16 @@ test('typegen: typescript w/ postgrestVersion', async () => { export type TablesUpdate< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? 
keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Update: infer U } ? U @@ -2488,14 +2517,16 @@ test('typegen: typescript w/ postgrestVersion', async () => { export type Enums< DefaultSchemaEnumNameOrOptions extends | keyof DefaultSchema["Enums"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, EnumName extends DefaultSchemaEnumNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] : never = never, - > = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + > = DefaultSchemaEnumNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] : never @@ -2503,14 +2534,16 @@ test('typegen: typescript w/ postgrestVersion', async () => { export type CompositeTypes< PublicCompositeTypeNameOrOptions extends | keyof DefaultSchema["CompositeTypes"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] : never = never, - > = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } - ? Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + > = PublicCompositeTypeNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] : never From 5744206a7aa0bd70001100ab6cc2376cca5d32c7 Mon Sep 17 00:00:00 2001 From: avallete Date: Wed, 2 Jul 2025 17:22:32 +0200 Subject: [PATCH 21/42] feat(query): add idle_session_timeout for idle session auto-close --- src/lib/db.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/lib/db.ts b/src/lib/db.ts index 6f7e906a..263be4d8 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -118,8 +118,10 @@ export const init: (config: PoolConfig) => { attributes: { sql: trackQueryInSentry ? sql : 'custom' }, }, async () => { + // Use statement_timeout AND idle_session_timeout to ensure the connection will be killed even if idle after + // timeout time. const statementTimeoutQueryPrefix = statementQueryTimeout - ? `SET statement_timeout='${statementQueryTimeout}s';` + ? 
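          // Illustrative note (not part of the patch): statement_timeout aborts a query
          // that runs longer than the limit, while idle_session_timeout (available since
          // PostgreSQL 14) closes a session that stays idle past the limit, so a
          // connection cannot linger after its query has finished. With
          // statementQueryTimeout set to 10, the prefix below evaluates to:
          //   SET statement_timeout='10s'; SET idle_session_timeout='10s';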
`SET statement_timeout='${statementQueryTimeout}s'; SET idle_session_timeout='${statementQueryTimeout}s';` : '' // node-postgres need a statement_timeout to kill the connection when timeout is reached // otherwise the query will keep running on the database even if query timeout was reached From 003391e58afdb4afeebcf82d3d886d99ec50240a Mon Sep 17 00:00:00 2001 From: "Siddharth M. Bhatia" Date: Fri, 11 Jul 2025 20:05:48 -0700 Subject: [PATCH 22/42] fix: Add 'case' to list of Swift keywords (#956) --- src/server/templates/swift.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/server/templates/swift.ts b/src/server/templates/swift.ts index fee24297..e596610e 100644 --- a/src/server/templates/swift.ts +++ b/src/server/templates/swift.ts @@ -392,7 +392,7 @@ function formatForSwiftTypeName(name: string): string { ) } -const SWIFT_KEYWORDS = ['in', 'default'] +const SWIFT_KEYWORDS = ['in', 'default', 'case'] /** * Converts a Postgres name to pascalCase. From dd3e9adb08a68ddbcae2e87e181866e4bd839dc8 Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 22 Jul 2025 12:11:34 +0200 Subject: [PATCH 23/42] fix(server): bump bodyLimit to 3MB default allow parameterize --- src/server/app.ts | 9 +++++++-- src/server/constants.ts | 5 +++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/server/app.ts b/src/server/app.ts index 8efa733c..9df05341 100644 --- a/src/server/app.ts +++ b/src/server/app.ts @@ -3,14 +3,19 @@ import * as Sentry from '@sentry/node' import cors from '@fastify/cors' import swagger from '@fastify/swagger' import { fastify, FastifyInstance, FastifyServerOptions } from 'fastify' -import { PG_META_REQ_HEADER } from './constants.js' +import { PG_META_REQ_HEADER, MAX_BODY_LIMIT } from './constants.js' import routes from './routes/index.js' import { extractRequestForLogging } from './utils.js' // Pseudo package declared only for this module import pkg from '#package.json' with { type: 'json' } export const build = (opts: FastifyServerOptions = {}): FastifyInstance => { - const app = fastify({ disableRequestLogging: true, requestIdHeader: PG_META_REQ_HEADER, ...opts }) + const app = fastify({ + disableRequestLogging: true, + requestIdHeader: PG_META_REQ_HEADER, + bodyLimit: MAX_BODY_LIMIT, + ...opts, + }) Sentry.setupFastifyErrorHandler(app) app.setErrorHandler((error, request, reply) => { diff --git a/src/server/constants.ts b/src/server/constants.ts index 8bf66417..9354c59f 100644 --- a/src/server/constants.ts +++ b/src/server/constants.ts @@ -57,6 +57,11 @@ export const PG_META_MAX_RESULT_SIZE = process.env.PG_META_MAX_RESULT_SIZE_MB parseInt(process.env.PG_META_MAX_RESULT_SIZE_MB, 10) * 1024 * 1024 : 2 * 1024 * 1024 * 1024 // default to 2GB max query size result +export const MAX_BODY_LIMIT = process.env.PG_META_MAX_BODY_LIMIT_MB + ? 
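    // Worked example (illustrative, not part of the patch): PG_META_MAX_BODY_LIMIT_MB=10
    // resolves to 10 * 1024 * 1024 = 10_485_760 bytes; when the variable is unset the
    // limit falls back to 3 * 1024 * 1024 bytes (3 MB). The app.ts change above passes
    // this value to fastify as bodyLimit, capping the request body size the server accepts.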
// Fastify server max body size allowed, is in bytes, convert from MB to Bytes + parseInt(process.env.PG_META_MAX_BODY_LIMIT_MB, 10) * 1024 * 1024 + : 3 * 1024 * 1024 + export const DEFAULT_POOL_CONFIG: PoolConfig = { max: 1, connectionTimeoutMillis: PG_CONN_TIMEOUT_SECS * 1000, From e583d438a70a99d11c91d71fcf633a8721a3fe9a Mon Sep 17 00:00:00 2001 From: Josh O'Steen <4296435+im-jersh@users.noreply.github.com> Date: Mon, 28 Jul 2025 07:47:25 -0700 Subject: [PATCH 24/42] fix(typegen): Map postgres numeric type to Swift Decimal type (#960) --- src/server/templates/swift.ts | 2 + test/db/00-init.sql | 3 +- test/lib/tables.ts | 18 ++ test/server/query.ts | 2 + test/server/typegen.ts | 436 +++++++++++++++++++--------------- 5 files changed, 268 insertions(+), 193 deletions(-) diff --git a/src/server/templates/swift.ts b/src/server/templates/swift.ts index e596610e..7bb41207 100644 --- a/src/server/templates/swift.ts +++ b/src/server/templates/swift.ts @@ -309,6 +309,8 @@ const pgTypeToSwiftType = ( swiftType = 'Float' } else if (pgType === 'float8') { swiftType = 'Double' + } else if (['numeric', 'decimal'].includes(pgType)) { + swiftType = 'Decimal' } else if (pgType === 'uuid') { swiftType = 'UUID' } else if ( diff --git a/test/db/00-init.sql b/test/db/00-init.sql index 00c6a472..3551a4e7 100644 --- a/test/db/00-init.sql +++ b/test/db/00-init.sql @@ -8,7 +8,8 @@ CREATE TYPE composite_type_with_array_attribute AS (my_text_array text[]); CREATE TABLE public.users ( id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, name text, - status user_status DEFAULT 'ACTIVE' + status user_status DEFAULT 'ACTIVE', + decimal numeric ); INSERT INTO public.users (name) diff --git a/test/lib/tables.ts b/test/lib/tables.ts index c4c934e7..c35546b8 100644 --- a/test/lib/tables.ts +++ b/test/lib/tables.ts @@ -78,6 +78,24 @@ test('list', async () => { "schema": "public", "table": "users", }, + { + "check": null, + "comment": null, + "data_type": "numeric", + "default_value": null, + "enums": [], + "format": "numeric", + "identity_generation": null, + "is_generated": false, + "is_identity": false, + "is_nullable": true, + "is_unique": false, + "is_updatable": true, + "name": "decimal", + "ordinal_position": 4, + "schema": "public", + "table": "users", + }, { "check": null, "comment": null, diff --git a/test/server/query.ts b/test/server/query.ts index 2b4bc2ba..8a9d6076 100644 --- a/test/server/query.ts +++ b/test/server/query.ts @@ -10,11 +10,13 @@ test('query', async () => { expect(res.json()).toMatchInlineSnapshot(` [ { + "decimal": null, "id": 1, "name": "Joe Bloggs", "status": "ACTIVE", }, { + "decimal": null, "id": 2, "name": "Jane Doe", "status": "ACTIVE", diff --git a/test/server/typegen.ts b/test/server/typegen.ts index fa47cbec..87996416 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -221,16 +221,19 @@ test('typegen: typescript', async () => { } users: { Row: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -362,16 +365,19 @@ test('typegen: typescript', async () => { } users_view: { Row: { + decimal: number | null id: number | null name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number 
| null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -412,6 +418,7 @@ test('typegen: typescript', async () => { function_returning_row: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -420,6 +427,7 @@ test('typegen: typescript', async () => { function_returning_set_of_rows: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -856,16 +864,19 @@ test('typegen w/ one-to-one relationships', async () => { } users: { Row: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -1007,16 +1018,19 @@ test('typegen w/ one-to-one relationships', async () => { } users_view: { Row: { + decimal: number | null id: number | null name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -1057,6 +1071,7 @@ test('typegen w/ one-to-one relationships', async () => { function_returning_row: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -1065,6 +1080,7 @@ test('typegen w/ one-to-one relationships', async () => { function_returning_set_of_rows: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -1501,16 +1517,19 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } users: { Row: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -1652,16 +1671,19 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } users_view: { Row: { + decimal: number | null id: number | null name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -1702,6 +1724,7 @@ test('typegen: typescript w/ one-to-one relationships', async () => { function_returning_row: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -1710,6 +1733,7 @@ test('typegen: typescript w/ one-to-one relationships', async () => { function_returning_set_of_rows: { Args: Record Returns: 
{ + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -2151,16 +2175,19 @@ test('typegen: typescript w/ postgrestVersion', async () => { } users: { Row: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -2302,16 +2329,19 @@ test('typegen: typescript w/ postgrestVersion', async () => { } users_view: { Row: { + decimal: number | null id: number | null name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -2352,6 +2382,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { function_returning_row: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -2360,6 +2391,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { function_returning_set_of_rows: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -2566,198 +2598,202 @@ test('typegen: go', async () => { expect(body).toMatchInlineSnapshot(` "package database -type PublicUsersSelect struct { - Id int64 \`json:"id"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicUsersInsert struct { - Id *int64 \`json:"id"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicUsersUpdate struct { - Id *int64 \`json:"id"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicTodosSelect struct { - Details *string \`json:"details"\` - Id int64 \`json:"id"\` - UserId int64 \`json:"user-id"\` -} - -type PublicTodosInsert struct { - Details *string \`json:"details"\` - Id *int64 \`json:"id"\` - UserId int64 \`json:"user-id"\` -} - -type PublicTodosUpdate struct { - Details *string \`json:"details"\` - Id *int64 \`json:"id"\` - UserId *int64 \`json:"user-id"\` -} - -type PublicUsersAuditSelect struct { - CreatedAt *string \`json:"created_at"\` - Id int64 \`json:"id"\` - PreviousValue interface{} \`json:"previous_value"\` - UserId *int64 \`json:"user_id"\` -} - -type PublicUsersAuditInsert struct { - CreatedAt *string \`json:"created_at"\` - Id *int64 \`json:"id"\` - PreviousValue interface{} \`json:"previous_value"\` - UserId *int64 \`json:"user_id"\` -} - -type PublicUsersAuditUpdate struct { - CreatedAt *string \`json:"created_at"\` - Id *int64 \`json:"id"\` - PreviousValue interface{} \`json:"previous_value"\` - UserId *int64 \`json:"user_id"\` -} - -type PublicUserDetailsSelect struct { - Details *string \`json:"details"\` - UserId int64 \`json:"user_id"\` -} - -type PublicUserDetailsInsert struct { - Details *string \`json:"details"\` - UserId int64 \`json:"user_id"\` -} - -type PublicUserDetailsUpdate struct { - Details *string \`json:"details"\` - UserId *int64 \`json:"user_id"\` -} - -type PublicEmptySelect struct { - -} - -type PublicEmptyInsert struct { - -} - -type 
PublicEmptyUpdate struct { - -} - -type PublicTableWithOtherTablesRowTypeSelect struct { - Col1 interface{} \`json:"col1"\` - Col2 interface{} \`json:"col2"\` -} - -type PublicTableWithOtherTablesRowTypeInsert struct { - Col1 interface{} \`json:"col1"\` - Col2 interface{} \`json:"col2"\` -} - -type PublicTableWithOtherTablesRowTypeUpdate struct { - Col1 interface{} \`json:"col1"\` - Col2 interface{} \`json:"col2"\` -} - -type PublicTableWithPrimaryKeyOtherThanIdSelect struct { - Name *string \`json:"name"\` - OtherId int64 \`json:"other_id"\` -} - -type PublicTableWithPrimaryKeyOtherThanIdInsert struct { - Name *string \`json:"name"\` - OtherId *int64 \`json:"other_id"\` -} - -type PublicTableWithPrimaryKeyOtherThanIdUpdate struct { - Name *string \`json:"name"\` - OtherId *int64 \`json:"other_id"\` -} - -type PublicCategorySelect struct { - Id int32 \`json:"id"\` - Name string \`json:"name"\` -} - -type PublicCategoryInsert struct { - Id *int32 \`json:"id"\` - Name string \`json:"name"\` -} - -type PublicCategoryUpdate struct { - Id *int32 \`json:"id"\` - Name *string \`json:"name"\` -} - -type PublicMemesSelect struct { - Category *int32 \`json:"category"\` - CreatedAt string \`json:"created_at"\` - Id int32 \`json:"id"\` - Metadata interface{} \`json:"metadata"\` - Name string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicMemesInsert struct { - Category *int32 \`json:"category"\` - CreatedAt string \`json:"created_at"\` - Id *int32 \`json:"id"\` - Metadata interface{} \`json:"metadata"\` - Name string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicMemesUpdate struct { - Category *int32 \`json:"category"\` - CreatedAt *string \`json:"created_at"\` - Id *int32 \`json:"id"\` - Metadata interface{} \`json:"metadata"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicTodosViewSelect struct { - Details *string \`json:"details"\` - Id *int64 \`json:"id"\` - UserId *int64 \`json:"user-id"\` -} - -type PublicUsersViewSelect struct { - Id *int64 \`json:"id"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicAViewSelect struct { - Id *int64 \`json:"id"\` -} - -type PublicUsersViewWithMultipleRefsToUsersSelect struct { - InitialId *int64 \`json:"initial_id"\` - InitialName *string \`json:"initial_name"\` - SecondId *int64 \`json:"second_id"\` - SecondName *string \`json:"second_name"\` -} - -type PublicTodosMatviewSelect struct { - Details *string \`json:"details"\` - Id *int64 \`json:"id"\` - UserId *int64 \`json:"user-id"\` -} - -type PublicCompositeTypeWithArrayAttribute struct { - MyTextArray interface{} \`json:"my_text_array"\` -} - -type PublicCompositeTypeWithRecordAttribute struct { - Todo interface{} \`json:"todo"\` -}" + type PublicUsersSelect struct { + Decimal *float64 \`json:"decimal"\` + Id int64 \`json:"id"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicUsersInsert struct { + Decimal *float64 \`json:"decimal"\` + Id *int64 \`json:"id"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicUsersUpdate struct { + Decimal *float64 \`json:"decimal"\` + Id *int64 \`json:"id"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicTodosSelect struct { + Details *string \`json:"details"\` + Id int64 \`json:"id"\` + UserId int64 \`json:"user-id"\` + } + + type PublicTodosInsert struct { + Details *string \`json:"details"\` + Id *int64 \`json:"id"\` + UserId int64 \`json:"user-id"\` 
+ } + + type PublicTodosUpdate struct { + Details *string \`json:"details"\` + Id *int64 \`json:"id"\` + UserId *int64 \`json:"user-id"\` + } + + type PublicUsersAuditSelect struct { + CreatedAt *string \`json:"created_at"\` + Id int64 \`json:"id"\` + PreviousValue interface{} \`json:"previous_value"\` + UserId *int64 \`json:"user_id"\` + } + + type PublicUsersAuditInsert struct { + CreatedAt *string \`json:"created_at"\` + Id *int64 \`json:"id"\` + PreviousValue interface{} \`json:"previous_value"\` + UserId *int64 \`json:"user_id"\` + } + + type PublicUsersAuditUpdate struct { + CreatedAt *string \`json:"created_at"\` + Id *int64 \`json:"id"\` + PreviousValue interface{} \`json:"previous_value"\` + UserId *int64 \`json:"user_id"\` + } + + type PublicUserDetailsSelect struct { + Details *string \`json:"details"\` + UserId int64 \`json:"user_id"\` + } + + type PublicUserDetailsInsert struct { + Details *string \`json:"details"\` + UserId int64 \`json:"user_id"\` + } + + type PublicUserDetailsUpdate struct { + Details *string \`json:"details"\` + UserId *int64 \`json:"user_id"\` + } + + type PublicEmptySelect struct { + + } + + type PublicEmptyInsert struct { + + } + + type PublicEmptyUpdate struct { + + } + + type PublicTableWithOtherTablesRowTypeSelect struct { + Col1 interface{} \`json:"col1"\` + Col2 interface{} \`json:"col2"\` + } + + type PublicTableWithOtherTablesRowTypeInsert struct { + Col1 interface{} \`json:"col1"\` + Col2 interface{} \`json:"col2"\` + } + + type PublicTableWithOtherTablesRowTypeUpdate struct { + Col1 interface{} \`json:"col1"\` + Col2 interface{} \`json:"col2"\` + } + + type PublicTableWithPrimaryKeyOtherThanIdSelect struct { + Name *string \`json:"name"\` + OtherId int64 \`json:"other_id"\` + } + + type PublicTableWithPrimaryKeyOtherThanIdInsert struct { + Name *string \`json:"name"\` + OtherId *int64 \`json:"other_id"\` + } + + type PublicTableWithPrimaryKeyOtherThanIdUpdate struct { + Name *string \`json:"name"\` + OtherId *int64 \`json:"other_id"\` + } + + type PublicCategorySelect struct { + Id int32 \`json:"id"\` + Name string \`json:"name"\` + } + + type PublicCategoryInsert struct { + Id *int32 \`json:"id"\` + Name string \`json:"name"\` + } + + type PublicCategoryUpdate struct { + Id *int32 \`json:"id"\` + Name *string \`json:"name"\` + } + + type PublicMemesSelect struct { + Category *int32 \`json:"category"\` + CreatedAt string \`json:"created_at"\` + Id int32 \`json:"id"\` + Metadata interface{} \`json:"metadata"\` + Name string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicMemesInsert struct { + Category *int32 \`json:"category"\` + CreatedAt string \`json:"created_at"\` + Id *int32 \`json:"id"\` + Metadata interface{} \`json:"metadata"\` + Name string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicMemesUpdate struct { + Category *int32 \`json:"category"\` + CreatedAt *string \`json:"created_at"\` + Id *int32 \`json:"id"\` + Metadata interface{} \`json:"metadata"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicTodosViewSelect struct { + Details *string \`json:"details"\` + Id *int64 \`json:"id"\` + UserId *int64 \`json:"user-id"\` + } + + type PublicUsersViewSelect struct { + Decimal *float64 \`json:"decimal"\` + Id *int64 \`json:"id"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicAViewSelect struct { + Id *int64 \`json:"id"\` + } + + type PublicUsersViewWithMultipleRefsToUsersSelect struct { + InitialId *int64 
\`json:"initial_id"\` + InitialName *string \`json:"initial_name"\` + SecondId *int64 \`json:"second_id"\` + SecondName *string \`json:"second_name"\` + } + + type PublicTodosMatviewSelect struct { + Details *string \`json:"details"\` + Id *int64 \`json:"id"\` + UserId *int64 \`json:"user-id"\` + } + + type PublicCompositeTypeWithArrayAttribute struct { + MyTextArray interface{} \`json:"my_text_array"\` + } + + type PublicCompositeTypeWithRecordAttribute struct { + Todo interface{} \`json:"todo"\` + }" `) }) @@ -2991,30 +3027,36 @@ test('typegen: swift', async () => { } } internal struct UsersSelect: Codable, Hashable, Sendable, Identifiable { + internal let decimal: Decimal? internal let id: Int64 internal let name: String? internal let status: UserStatus? internal enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" } } internal struct UsersInsert: Codable, Hashable, Sendable, Identifiable { + internal let decimal: Decimal? internal let id: Int64? internal let name: String? internal let status: UserStatus? internal enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" } } internal struct UsersUpdate: Codable, Hashable, Sendable, Identifiable { + internal let decimal: Decimal? internal let id: Int64? internal let name: String? internal let status: UserStatus? internal enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" @@ -3083,10 +3125,12 @@ test('typegen: swift', async () => { } } internal struct UsersViewSelect: Codable, Hashable, Sendable { + internal let decimal: Decimal? internal let id: Int64? internal let name: String? internal let status: UserStatus? internal enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" @@ -3354,30 +3398,36 @@ test('typegen: swift w/ public access control', async () => { } } public struct UsersSelect: Codable, Hashable, Sendable, Identifiable { + public let decimal: Decimal? public let id: Int64 public let name: String? public let status: UserStatus? public enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" } } public struct UsersInsert: Codable, Hashable, Sendable, Identifiable { + public let decimal: Decimal? public let id: Int64? public let name: String? public let status: UserStatus? public enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" } } public struct UsersUpdate: Codable, Hashable, Sendable, Identifiable { + public let decimal: Decimal? public let id: Int64? public let name: String? public let status: UserStatus? public enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" @@ -3446,10 +3496,12 @@ test('typegen: swift w/ public access control', async () => { } } public struct UsersViewSelect: Codable, Hashable, Sendable { + public let decimal: Decimal? public let id: Int64? public let name: String? public let status: UserStatus? 
public enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" From c626519d3a38b6adcea96e7941f4dda00e41be91 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Wed, 30 Jul 2025 18:15:10 +0200 Subject: [PATCH 25/42] fix(typegen): improve ts typegen consistency between generation (#964) Fixes: #959 --- src/server/templates/typescript.ts | 4 +- test/server/typegen.ts | 920 +++++++++-------------------- 2 files changed, 270 insertions(+), 654 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 29c34c1a..f8e6e7ca 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -316,7 +316,9 @@ export type Database = { `${JSON.stringify(fnName)}: { Args: ${fns .map(({ args }) => { - const inArgs = args.filter(({ mode }) => mode === 'in') + const inArgs = args + .toSorted((a, b) => a.name.localeCompare(b.name)) + .filter(({ mode }) => mode === 'in') if (inArgs.length === 0) { return 'Record' diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 87996416..9a03ea9e 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -629,659 +629,6 @@ test('typegen: typescript', async () => { ) }) -test('typegen w/ one-to-one relationships', async () => { - const { body } = await app.inject({ - method: 'GET', - path: '/generators/typescript', - query: { detect_one_to_one_relationships: 'true' }, - }) - expect(body).toMatchInlineSnapshot( - ` - "export type Json = - | string - | number - | boolean - | null - | { [key: string]: Json | undefined } - | Json[] - - export type Database = { - public: { - Tables: { - category: { - Row: { - id: number - name: string - } - Insert: { - id?: number - name: string - } - Update: { - id?: number - name?: string - } - Relationships: [] - } - empty: { - Row: {} - Insert: {} - Update: {} - Relationships: [] - } - foreign_table: { - Row: { - id: number - name: string | null - status: Database["public"]["Enums"]["user_status"] | null - } - Insert: { - id: number - name?: string | null - status?: Database["public"]["Enums"]["user_status"] | null - } - Update: { - id?: number - name?: string | null - status?: Database["public"]["Enums"]["user_status"] | null - } - Relationships: [] - } - memes: { - Row: { - category: number | null - created_at: string - id: number - metadata: Json | null - name: string - status: Database["public"]["Enums"]["meme_status"] | null - } - Insert: { - category?: number | null - created_at: string - id?: number - metadata?: Json | null - name: string - status?: Database["public"]["Enums"]["meme_status"] | null - } - Update: { - category?: number | null - created_at?: string - id?: number - metadata?: Json | null - name?: string - status?: Database["public"]["Enums"]["meme_status"] | null - } - Relationships: [ - { - foreignKeyName: "memes_category_fkey" - columns: ["category"] - isOneToOne: false - referencedRelation: "category" - referencedColumns: ["id"] - }, - ] - } - table_with_other_tables_row_type: { - Row: { - col1: Database["public"]["Tables"]["user_details"]["Row"] | null - col2: Database["public"]["Views"]["a_view"]["Row"] | null - } - Insert: { - col1?: Database["public"]["Tables"]["user_details"]["Row"] | null - col2?: Database["public"]["Views"]["a_view"]["Row"] | null - } - Update: { - col1?: Database["public"]["Tables"]["user_details"]["Row"] | null - col2?: Database["public"]["Views"]["a_view"]["Row"] | null - } - Relationships: [] - } - 
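          // Illustrative note on the toSorted change above (not part of the patch):
          // Array.prototype.toSorted (ES2023) returns a new, sorted copy and leaves the
          // original args array untouched, so sorting by name before filtering makes the
          // emitted Args keys come out in the same order on every generation run, e.g.
          //   [{ name: 'b' }, { name: 'a' }].toSorted((x, y) => x.name.localeCompare(y.name))
          //   // -> [{ name: 'a' }, { name: 'b' }]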
table_with_primary_key_other_than_id: { - Row: { - name: string | null - other_id: number - } - Insert: { - name?: string | null - other_id?: number - } - Update: { - name?: string | null - other_id?: number - } - Relationships: [] - } - todos: { - Row: { - details: string | null - id: number - "user-id": number - blurb: string | null - blurb_varchar: string | null - details_is_long: boolean | null - details_length: number | null - details_words: string[] | null - } - Insert: { - details?: string | null - id?: number - "user-id": number - } - Update: { - details?: string | null - id?: number - "user-id"?: number - } - Relationships: [ - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "a_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["initial_id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["second_id"] - }, - ] - } - user_details: { - Row: { - details: string | null - user_id: number - } - Insert: { - details?: string | null - user_id: number - } - Update: { - details?: string | null - user_id?: number - } - Relationships: [ - { - foreignKeyName: "user_details_user_id_fkey" - columns: ["user_id"] - isOneToOne: true - referencedRelation: "a_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "user_details_user_id_fkey" - columns: ["user_id"] - isOneToOne: true - referencedRelation: "users" - referencedColumns: ["id"] - }, - { - foreignKeyName: "user_details_user_id_fkey" - columns: ["user_id"] - isOneToOne: true - referencedRelation: "users_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "user_details_user_id_fkey" - columns: ["user_id"] - isOneToOne: true - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["initial_id"] - }, - { - foreignKeyName: "user_details_user_id_fkey" - columns: ["user_id"] - isOneToOne: true - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["second_id"] - }, - ] - } - users: { - Row: { - decimal: number | null - id: number - name: string | null - status: Database["public"]["Enums"]["user_status"] | null - } - Insert: { - decimal?: number | null - id?: number - name?: string | null - status?: Database["public"]["Enums"]["user_status"] | null - } - Update: { - decimal?: number | null - id?: number - name?: string | null - status?: Database["public"]["Enums"]["user_status"] | null - } - Relationships: [] - } - users_audit: { - Row: { - created_at: string | null - id: number - previous_value: Json | null - user_id: number | null - } - Insert: { - created_at?: string | null - id?: number - previous_value?: Json | null - user_id?: number | null - } - Update: { - created_at?: string | null - id?: number - previous_value?: Json | null - user_id?: number | null - } - Relationships: [] - } - } - Views: { - a_view: { - Row: { - id: number | null - } - Insert: { - id?: number | null - } - Update: { - id?: number | null - } - Relationships: [] - } 
- todos_matview: { - Row: { - details: string | null - id: number | null - "user-id": number | null - } - Relationships: [ - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "a_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["initial_id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["second_id"] - }, - ] - } - todos_view: { - Row: { - details: string | null - id: number | null - "user-id": number | null - } - Insert: { - details?: string | null - id?: number | null - "user-id"?: number | null - } - Update: { - details?: string | null - id?: number | null - "user-id"?: number | null - } - Relationships: [ - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "a_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["initial_id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["second_id"] - }, - ] - } - users_view: { - Row: { - decimal: number | null - id: number | null - name: string | null - status: Database["public"]["Enums"]["user_status"] | null - } - Insert: { - decimal?: number | null - id?: number | null - name?: string | null - status?: Database["public"]["Enums"]["user_status"] | null - } - Update: { - decimal?: number | null - id?: number | null - name?: string | null - status?: Database["public"]["Enums"]["user_status"] | null - } - Relationships: [] - } - users_view_with_multiple_refs_to_users: { - Row: { - initial_id: number | null - initial_name: string | null - second_id: number | null - second_name: string | null - } - Relationships: [] - } - } - Functions: { - blurb: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string - } - blurb_varchar: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string - } - details_is_long: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: boolean - } - details_length: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: number - } - details_words: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string[] - } - function_returning_row: { - Args: Record - Returns: { - decimal: number | null - id: number - name: string | null - status: Database["public"]["Enums"]["user_status"] | null - } - } - function_returning_set_of_rows: { - Args: Record - Returns: { - 
decimal: number | null - id: number - name: string | null - status: Database["public"]["Enums"]["user_status"] | null - }[] - } - function_returning_table: { - Args: Record - Returns: { - id: number - name: string - }[] - } - get_todos_setof_rows: { - Args: - | { todo_row: Database["public"]["Tables"]["todos"]["Row"] } - | { user_row: Database["public"]["Tables"]["users"]["Row"] } - Returns: { - details: string | null - id: number - "user-id": number - }[] - } - get_user_audit_setof_single_row: { - Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } - Returns: { - created_at: string | null - id: number - previous_value: Json | null - user_id: number | null - }[] - } - polymorphic_function: { - Args: { "": boolean } | { "": string } - Returns: undefined - } - postgres_fdw_disconnect: { - Args: { "": string } - Returns: boolean - } - postgres_fdw_disconnect_all: { - Args: Record - Returns: boolean - } - postgres_fdw_get_connections: { - Args: Record - Returns: Record[] - } - postgres_fdw_handler: { - Args: Record - Returns: unknown - } - test_internal_query: { - Args: Record - Returns: undefined - } - } - Enums: { - meme_status: "new" | "old" | "retired" - user_status: "ACTIVE" | "INACTIVE" - } - CompositeTypes: { - composite_type_with_array_attribute: { - my_text_array: string[] | null - } - composite_type_with_record_attribute: { - todo: Database["public"]["Tables"]["todos"]["Row"] | null - } - } - } - } - - type DatabaseWithoutInternals = Omit - - type DefaultSchema = DatabaseWithoutInternals[Extract] - - export type Tables< - DefaultSchemaTableNameOrOptions extends - | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) - | { schema: keyof DatabaseWithoutInternals }, - TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) - : never = never, - > = DefaultSchemaTableNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { - Row: infer R - } - ? R - : never - : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] & - DefaultSchema["Views"]) - ? (DefaultSchema["Tables"] & - DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends { - Row: infer R - } - ? R - : never - : never - - export type TablesInsert< - DefaultSchemaTableNameOrOptions extends - | keyof DefaultSchema["Tables"] - | { schema: keyof DatabaseWithoutInternals }, - TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] - : never = never, - > = DefaultSchemaTableNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { - Insert: infer I - } - ? I - : never - : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] - ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { - Insert: infer I - } - ? 
I - : never - : never - - export type TablesUpdate< - DefaultSchemaTableNameOrOptions extends - | keyof DefaultSchema["Tables"] - | { schema: keyof DatabaseWithoutInternals }, - TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] - : never = never, - > = DefaultSchemaTableNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { - Update: infer U - } - ? U - : never - : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] - ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { - Update: infer U - } - ? U - : never - : never - - export type Enums< - DefaultSchemaEnumNameOrOptions extends - | keyof DefaultSchema["Enums"] - | { schema: keyof DatabaseWithoutInternals }, - EnumName extends DefaultSchemaEnumNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] - : never = never, - > = DefaultSchemaEnumNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] - : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] - ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] - : never - - export type CompositeTypes< - PublicCompositeTypeNameOrOptions extends - | keyof DefaultSchema["CompositeTypes"] - | { schema: keyof DatabaseWithoutInternals }, - CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] - : never = never, - > = PublicCompositeTypeNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] - : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] - ? 
DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] - : never - - export const Constants = { - public: { - Enums: { - meme_status: ["new", "old", "retired"], - user_status: ["ACTIVE", "INACTIVE"], - }, - }, - } as const - " - ` - ) -}) - test('typegen: typescript w/ one-to-one relationships', async () => { const { body } = await app.inject({ method: 'GET', @@ -2593,6 +1940,273 @@ test('typegen: typescript w/ postgrestVersion', async () => { ) }) +test('typegen: typescript consistent types definitions orders', async () => { + // Helper function to clean up test entities + const cleanupTestEntities = async () => { + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + -- Drop materialized views first (depend on views/tables) + DROP MATERIALIZED VIEW IF EXISTS test_matview_alpha CASCADE; + DROP MATERIALIZED VIEW IF EXISTS test_matview_beta CASCADE; + DROP MATERIALIZED VIEW IF EXISTS test_matview_gamma CASCADE; + + -- Drop views (may depend on tables) + DROP VIEW IF EXISTS test_view_alpha CASCADE; + DROP VIEW IF EXISTS test_view_beta CASCADE; + DROP VIEW IF EXISTS test_view_gamma CASCADE; + + -- Drop functions + DROP FUNCTION IF EXISTS test_func_alpha(integer, text, boolean) CASCADE; + DROP FUNCTION IF EXISTS test_func_beta(integer, text, boolean) CASCADE; + DROP FUNCTION IF EXISTS test_func_gamma(integer, text, boolean) CASCADE; + + -- Alternative signatures for functions (different parameter orders) + DROP FUNCTION IF EXISTS test_func_alpha(text, boolean, integer) CASCADE; + DROP FUNCTION IF EXISTS test_func_beta(boolean, integer, text) CASCADE; + DROP FUNCTION IF EXISTS test_func_gamma(boolean, text, integer) CASCADE; + + -- Drop tables + DROP TABLE IF EXISTS test_table_alpha CASCADE; + DROP TABLE IF EXISTS test_table_beta CASCADE; + DROP TABLE IF EXISTS test_table_gamma CASCADE; + + -- Drop types + DROP TYPE IF EXISTS test_enum_alpha CASCADE; + DROP TYPE IF EXISTS test_enum_beta CASCADE; + `, + }, + }) + } + + // Clean up any existing test entities + await cleanupTestEntities() + + // === FIRST ROUND: Create entities in order A->B->G with property order 1 === + + // Create custom types first + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE TYPE test_enum_alpha AS ENUM ('active', 'inactive', 'pending'); + CREATE TYPE test_enum_beta AS ENUM ('high', 'medium', 'low'); + `, + }, + }) + + // Create tables in order: alpha, beta, gamma with specific column orders + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE TABLE test_table_alpha ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL, + status test_enum_alpha DEFAULT 'active', + created_at TIMESTAMP DEFAULT NOW() + ); + + CREATE TABLE test_table_beta ( + id SERIAL PRIMARY KEY, + priority test_enum_beta DEFAULT 'medium', + description TEXT, + alpha_id INTEGER REFERENCES test_table_alpha(id) + ); + + CREATE TABLE test_table_gamma ( + id SERIAL PRIMARY KEY, + beta_id INTEGER REFERENCES test_table_beta(id), + value NUMERIC(10,2), + is_active BOOLEAN DEFAULT true + ); + `, + }, + }) + + // Create functions in order: alpha, beta, gamma with specific parameter orders + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE FUNCTION test_func_alpha(param_a integer, param_b text, param_c boolean) + RETURNS integer AS 'SELECT param_a + 1' LANGUAGE sql IMMUTABLE; + + CREATE FUNCTION test_func_beta(param_a integer, param_b text, param_c boolean) + RETURNS text AS 'SELECT param_b || ''_processed''' LANGUAGE 
sql IMMUTABLE; + + CREATE FUNCTION test_func_gamma(param_a integer, param_b text, param_c boolean) + RETURNS boolean AS 'SELECT NOT param_c' LANGUAGE sql IMMUTABLE; + `, + }, + }) + + // Create views in order: alpha, beta, gamma + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE VIEW test_view_alpha AS + SELECT id, name, status, created_at FROM test_table_alpha; + + CREATE VIEW test_view_beta AS + SELECT id, priority, description, alpha_id FROM test_table_beta; + + CREATE VIEW test_view_gamma AS + SELECT id, beta_id, value, is_active FROM test_table_gamma; + `, + }, + }) + + // Create materialized views in order: alpha, beta, gamma + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE MATERIALIZED VIEW test_matview_alpha AS + SELECT id, name, status FROM test_table_alpha; + + CREATE MATERIALIZED VIEW test_matview_beta AS + SELECT id, priority, description FROM test_table_beta; + + CREATE MATERIALIZED VIEW test_matview_gamma AS + SELECT id, value, is_active FROM test_table_gamma; + `, + }, + }) + + // Generate types for first configuration + const { body: firstCall } = await app.inject({ + method: 'GET', + path: '/generators/typescript', + query: { detect_one_to_one_relationships: 'true', postgrest_version: '13' }, + }) + + // === SECOND ROUND: Drop and recreate in reverse order G->B->A with different property orders === + + // Clean up all test entities + await cleanupTestEntities() + + // Create custom types in reverse order but keep the enum internal ordering (typegen is rightfully dependent on the enum order) + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE TYPE test_enum_beta AS ENUM ('high', 'medium', 'low'); + CREATE TYPE test_enum_alpha AS ENUM ('active', 'inactive', 'pending'); + `, + }, + }) + + // Create tables in reverse order: gamma, beta, alpha with different column orders + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE TABLE test_table_alpha ( + created_at TIMESTAMP DEFAULT NOW(), + status test_enum_alpha DEFAULT 'active', + name TEXT NOT NULL, + id SERIAL PRIMARY KEY + ); + + CREATE TABLE test_table_beta ( + alpha_id INTEGER REFERENCES test_table_alpha(id), + description TEXT, + priority test_enum_beta DEFAULT 'medium', + id SERIAL PRIMARY KEY + ); + + CREATE TABLE test_table_gamma ( + is_active BOOLEAN DEFAULT true, + value NUMERIC(10,2), + beta_id INTEGER REFERENCES test_table_beta(id), + id SERIAL PRIMARY KEY + ); + `, + }, + }) + + // Create functions in reverse order: gamma, beta, alpha with different parameter orders + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE FUNCTION test_func_gamma(param_c boolean, param_a integer, param_b text) + RETURNS boolean AS 'SELECT NOT param_c' LANGUAGE sql IMMUTABLE; + + CREATE FUNCTION test_func_beta(param_b text, param_c boolean, param_a integer) + RETURNS text AS 'SELECT param_b || ''_processed''' LANGUAGE sql IMMUTABLE; + + CREATE FUNCTION test_func_alpha(param_c boolean, param_b text, param_a integer) + RETURNS integer AS 'SELECT param_a + 1' LANGUAGE sql IMMUTABLE; + `, + }, + }) + + // Create views in reverse order: gamma, beta, alpha + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE VIEW test_view_gamma AS + SELECT is_active, value, beta_id, id FROM test_table_gamma; + + CREATE VIEW test_view_beta AS + SELECT alpha_id, description, priority, id FROM test_table_beta; + + CREATE VIEW 
test_view_alpha AS + SELECT created_at, status, name, id FROM test_table_alpha; + `, + }, + }) + + // Create materialized views in reverse order: gamma, beta, alpha + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE MATERIALIZED VIEW test_matview_gamma AS + SELECT is_active, value, id FROM test_table_gamma; + + CREATE MATERIALIZED VIEW test_matview_beta AS + SELECT description, priority, id FROM test_table_beta; + + CREATE MATERIALIZED VIEW test_matview_alpha AS + SELECT status, name, id FROM test_table_alpha; + `, + }, + }) + + // Generate types for second configuration + const { body: secondCall } = await app.inject({ + method: 'GET', + path: '/generators/typescript', + query: { detect_one_to_one_relationships: 'true', postgrest_version: '13' }, + }) + + // Clean up test entities + await cleanupTestEntities() + + // The generated types should be identical regardless of: + // 1. Entity creation order (alpha->beta->gamma vs gamma->beta->alpha) + // 2. Property declaration order (columns, function parameters) + // 3. Enum value order + expect(firstCall).toEqual(secondCall) +}) + test('typegen: go', async () => { const { body } = await app.inject({ method: 'GET', path: '/generators/go' }) expect(body).toMatchInlineSnapshot(` From a142d7334b9e0f6326c1573b6ba264c4c7d4f56b Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Wed, 30 Jul 2025 19:24:40 -0400 Subject: [PATCH 26/42] chore(ts-template): correct "instanciate" to "instantiate" misspelling (#965) Fix spelling mistake in TypeScript generation template that was propagating to all generated database type files. --- src/server/templates/typescript.ts | 2 +- test/server/typegen.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index f8e6e7ca..c3cae645 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -35,7 +35,7 @@ export const apply = async ({ .forEach((c) => columnsByTableId[c.table_id].push(c)) const internal_supabase_schema = postgrestVersion - ? `// Allows to automatically instanciate createClient with right options + ? 
`// Allows to automatically instantiate createClient with right options // instead of createClient(URL, KEY) __InternalSupabase: { PostgrestVersion: '${postgrestVersion}' diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 9a03ea9e..76ac6218 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -1299,7 +1299,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { | Json[] export type Database = { - // Allows to automatically instanciate createClient with right options + // Allows to automatically instantiate createClient with right options // instead of createClient(URL, KEY) __InternalSupabase: { PostgrestVersion: "13" From 60397be39f3aabdb7fa0c253da986498a0eed308 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Mon, 4 Aug 2025 12:44:42 +0200 Subject: [PATCH 27/42] fix(typegen): ensure determinism in functions returns properties (#970) Fixes #959 --- src/server/templates/typescript.ts | 34 ++++++++++++++++-------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index c3cae645..4f9cac03 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -361,9 +361,9 @@ export type Database = { }) return `{ - ${argsNameAndType.map( - ({ name, type }) => `${JSON.stringify(name)}: ${type}` - )} + ${argsNameAndType + .toSorted((a, b) => a.name.localeCompare(b.name)) + .map(({ name, type }) => `${JSON.stringify(name)}: ${type}`)} }` } @@ -373,19 +373,21 @@ export type Database = { ) if (relation) { return `{ - ${columnsByTableId[relation.id].map( - (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType( - schema, - column.format, - { - types, - schemas, - tables, - views, - } - )} ${column.is_nullable ? '| null' : ''}` - )} + ${columnsByTableId[relation.id] + .toSorted((a, b) => a.name.localeCompare(b.name)) + .map( + (column) => + `${JSON.stringify(column.name)}: ${pgTypeToTsType( + schema, + column.format, + { + types, + schemas, + tables, + views, + } + )} ${column.is_nullable ? '| null' : ''}` + )} }` } From 4205b26dd47998172b4d6f4b3af7394b8d8beb86 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 06:36:23 +0000 Subject: [PATCH 28/42] chore(deps): bump actions/checkout from 4 to 5 Bumps [actions/checkout](https://github.com/actions/checkout) from 4 to 5. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/checkout dependency-version: '5' dependency-type: direct:production update-type: version-update:semver-major ... 
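A note on the determinism fix above: `Array.prototype.toSorted` is an ES2023 addition (Node 20+),
which matches the node:20 images used elsewhere in this series. Where an older runtime has to be
supported, a copy-then-sort gives the same non-mutating behaviour; a minimal sketch, with the
column shape reduced to the one field that matters here:

interface NamedEntry {
  name: string
}

// Sort a copy so the original metadata array keeps its database ordering.
const sortByName = <T extends NamedEntry>(entries: T[]): T[] =>
  [...entries].sort((a, b) => a.name.localeCompare(b.name))

sortByName([{ name: 'status' }, { name: 'id' }]) // => [{ name: 'id' }, { name: 'status' }]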
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 6 +++--- .github/workflows/docs.yml | 2 +- .github/workflows/publish-deps.yml | 2 +- .github/workflows/release.yml | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ac0d7539..c531213f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,7 +20,7 @@ jobs: name: Test runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-node@v4 with: @@ -40,7 +40,7 @@ jobs: name: Prettier check runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Setup node uses: actions/setup-node@v4 @@ -64,7 +64,7 @@ jobs: contents: read packages: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 name: Checkout Repo - uses: docker/setup-buildx-action@v3 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index d3645502..3630904a 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -20,7 +20,7 @@ jobs: name: Publish docs runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-node@v4 with: diff --git a/.github/workflows/publish-deps.yml b/.github/workflows/publish-deps.yml index 7e50ecf5..693a3edd 100644 --- a/.github/workflows/publish-deps.yml +++ b/.github/workflows/publish-deps.yml @@ -13,7 +13,7 @@ jobs: # Must match glibc verison in node:20 runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: repository: 'pyramation/libpg-query-node' ref: 'v15' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 686752cd..5afcd6ee 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -19,7 +19,7 @@ jobs: permissions: contents: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-node@v4 with: From 28cd3e6780353c1ba39da774c94ea91b9b3477bb Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Wed, 3 Sep 2025 08:52:07 +0200 Subject: [PATCH 29/42] perf: suboptimal tables query schema filter happens late (#980) * wip: optimize queries * wip: refactor queries for root filtering * perf: add root level filtering to all queries * feat: add functions args retrieval * chore: fix functions * fix(table): use or for table relationships * test(functions): add tests for retrival argument based --- src/lib/PostgresMetaColumnPrivileges.ts | 37 ++------- src/lib/PostgresMetaColumns.ts | 38 +++------- src/lib/PostgresMetaConfig.ts | 10 +-- src/lib/PostgresMetaExtensions.ts | 14 ++-- src/lib/PostgresMetaForeignTables.ts | 53 +++++++------ src/lib/PostgresMetaFunctions.ts | 76 ++++--------------- src/lib/PostgresMetaIndexes.ts | 33 ++------ src/lib/PostgresMetaMaterializedViews.ts | 67 ++++++++-------- src/lib/PostgresMetaPolicies.ts | 28 +++---- src/lib/PostgresMetaPublications.ts | 21 +++-- src/lib/PostgresMetaRelationships.ts | 40 ++++++---- src/lib/PostgresMetaRoles.ts | 36 ++------- src/lib/PostgresMetaSchemas.ts | 33 ++++---- src/lib/PostgresMetaTablePrivileges.ts | 53 +++---------- src/lib/PostgresMetaTables.ts | 53 ++++++++----- src/lib/PostgresMetaTriggers.ts | 37 +++------ src/lib/PostgresMetaTypes.ts | 38 +--------- src/lib/PostgresMetaVersion.ts | 4 +- src/lib/PostgresMetaViews.ts | 67 ++++++++-------- src/lib/generators.ts | 26 +++++-- src/lib/helpers.ts | 13 +++- ...rivileges.sql => column_privileges.sql.ts} | 20 ++++- 
src/lib/sql/{columns.sql => columns.sql.ts} | 18 +++++ src/lib/sql/common.ts | 17 +++++ src/lib/sql/{config.sql => config.sql.ts} | 6 ++ src/lib/sql/extensions.sql | 10 --- src/lib/sql/extensions.sql.ts | 19 +++++ src/lib/sql/foreign_tables.sql | 10 --- src/lib/sql/foreign_tables.sql.ts | 25 ++++++ .../sql/{functions.sql => functions.sql.ts} | 46 ++++++++++- src/lib/sql/index.ts | 34 --------- src/lib/sql/{indexes.sql => indexes.sql.ts} | 11 ++- src/lib/sql/materialized_views.sql | 11 --- src/lib/sql/materialized_views.sql.ts | 24 ++++++ src/lib/sql/{policies.sql => policies.sql.ts} | 12 +++ .../{publications.sql => publications.sql.ts} | 11 +++ src/lib/sql/{roles.sql => roles.sql.ts} | 17 +++++ src/lib/sql/schemas.sql | 17 ----- src/lib/sql/schemas.sql.ts | 27 +++++++ src/lib/sql/{tables.sql => table.sql.ts} | 15 ++++ ...privileges.sql => table_privileges.sql.ts} | 15 +++- ...onships.sql => table_relationships.sql.ts} | 7 ++ src/lib/sql/{triggers.sql => triggers.sql.ts} | 22 +++++- src/lib/sql/types.sql | 35 --------- src/lib/sql/types.sql.ts | 72 ++++++++++++++++++ src/lib/sql/{version.sql => version.sql.ts} | 2 + src/lib/sql/views.sql | 12 --- src/lib/sql/views.sql.ts | 25 ++++++ ...cies.sql => views_key_dependencies.sql.ts} | 42 +++++----- src/server/templates/typescript.ts | 4 +- test/lib/functions.ts | 49 ++++++++++++ test/lib/tables.ts | 30 ++++---- 52 files changed, 788 insertions(+), 654 deletions(-) rename src/lib/sql/{column_privileges.sql => column_privileges.sql.ts} (88%) rename src/lib/sql/{columns.sql => columns.sql.ts} (80%) create mode 100644 src/lib/sql/common.ts rename src/lib/sql/{config.sql => config.sql.ts} (57%) delete mode 100644 src/lib/sql/extensions.sql create mode 100644 src/lib/sql/extensions.sql.ts delete mode 100644 src/lib/sql/foreign_tables.sql create mode 100644 src/lib/sql/foreign_tables.sql.ts rename src/lib/sql/{functions.sql => functions.sql.ts} (70%) delete mode 100644 src/lib/sql/index.ts rename src/lib/sql/{indexes.sql => indexes.sql.ts} (79%) delete mode 100644 src/lib/sql/materialized_views.sql create mode 100644 src/lib/sql/materialized_views.sql.ts rename src/lib/sql/{policies.sql => policies.sql.ts} (66%) rename src/lib/sql/{publications.sql => publications.sql.ts} (67%) rename src/lib/sql/{roles.sql => roles.sql.ts} (52%) delete mode 100644 src/lib/sql/schemas.sql create mode 100644 src/lib/sql/schemas.sql.ts rename src/lib/sql/{tables.sql => table.sql.ts} (73%) rename src/lib/sql/{table_privileges.sql => table_privileges.sql.ts} (74%) rename src/lib/sql/{table_relationships.sql => table_relationships.sql.ts} (80%) rename src/lib/sql/{triggers.sql => triggers.sql.ts} (62%) delete mode 100644 src/lib/sql/types.sql create mode 100644 src/lib/sql/types.sql.ts rename src/lib/sql/{version.sql => version.sql.ts} (84%) delete mode 100644 src/lib/sql/views.sql create mode 100644 src/lib/sql/views.sql.ts rename src/lib/sql/{views_key_dependencies.sql => views_key_dependencies.sql.ts} (78%) diff --git a/src/lib/PostgresMetaColumnPrivileges.ts b/src/lib/PostgresMetaColumnPrivileges.ts index 4df0d39a..b2a0b6fe 100644 --- a/src/lib/PostgresMetaColumnPrivileges.ts +++ b/src/lib/PostgresMetaColumnPrivileges.ts @@ -1,7 +1,7 @@ import { ident, literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { filterByList } from './helpers.js' -import { columnPrivilegesSql } from './sql/index.js' +import { filterByValue, filterByList } from './helpers.js' +import { COLUMN_PRIVILEGES_SQL } from './sql/column_privileges.sql.js' import { 
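// Illustrative sketch of the pattern this refactor introduces (names and SQL below are
// assumptions, not the actual implementation): each former .sql file becomes a .sql.ts module
// exporting a function that splices optional filters into the query itself, so schema filtering
// happens at the root of the statement instead of over the fully materialized result.
export const EXAMPLE_ENTITIES_SQL = ({
  schemaFilter,
  limit,
  offset,
}: {
  schemaFilter?: string // e.g. a pre-rendered `in ('public')` fragment
  limit?: number
  offset?: number
}) => `
select n.oid as id, n.nspname as schema
from pg_namespace n
where true
  ${schemaFilter ? `and n.nspname ${schemaFilter}` : ''}
${limit ? `limit ${limit}` : ''}
${offset ? `offset ${offset}` : ''}
`
// Call sites then mirror what follows below, e.g. EXAMPLE_ENTITIES_SQL({ schemaFilter, limit, offset }).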
PostgresMetaResult, PostgresColumnPrivileges, @@ -29,25 +29,12 @@ export default class PostgresMetaColumnPrivileges { limit?: number offset?: number } = {}): Promise> { - let sql = ` -with column_privileges as (${columnPrivilegesSql}) -select * -from column_privileges -` - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` where relation_schema ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const sql = COLUMN_PRIVILEGES_SQL({ schemaFilter, limit, offset }) return await this.query(sql) } @@ -86,12 +73,8 @@ end $$; // Return the updated column privileges for modified columns. const columnIds = [...new Set(grants.map(({ column_id }) => column_id))] - sql = ` -with column_privileges as (${columnPrivilegesSql}) -select * -from column_privileges -where column_id in (${columnIds.map(literal).join(',')}) -` + const columnIdsFilter = filterByValue(columnIds) + sql = COLUMN_PRIVILEGES_SQL({ columnIdsFilter }) return await this.query(sql) } @@ -130,12 +113,8 @@ end $$; // Return the updated column privileges for modified columns. const columnIds = [...new Set(revokes.map(({ column_id }) => column_id))] - sql = ` -with column_privileges as (${columnPrivilegesSql}) -select * -from column_privileges -where column_id in (${columnIds.map(literal).join(',')}) -` + const columnIdsFilter = filterByValue(columnIds) + sql = COLUMN_PRIVILEGES_SQL({ columnIdsFilter }) return await this.query(sql) } } diff --git a/src/lib/PostgresMetaColumns.ts b/src/lib/PostgresMetaColumns.ts index 15e56507..613c8ea2 100644 --- a/src/lib/PostgresMetaColumns.ts +++ b/src/lib/PostgresMetaColumns.ts @@ -1,9 +1,9 @@ import { ident, literal } from 'pg-format' import PostgresMetaTables from './PostgresMetaTables.js' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { columnsSql } from './sql/index.js' import { PostgresMetaResult, PostgresColumn } from './types.js' -import { filterByList } from './helpers.js' +import { filterByValue, filterByList } from './helpers.js' +import { COLUMNS_SQL } from './sql/columns.sql.js' export default class PostgresMetaColumns { query: (sql: string) => Promise> @@ -29,32 +29,13 @@ export default class PostgresMetaColumns { limit?: number offset?: number } = {}): Promise> { - let sql = ` -WITH - columns AS (${columnsSql}) -SELECT - * -FROM - columns -WHERE - true` - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` AND schema ${filter}` - } - if (tableId !== undefined) { - sql += ` AND table_id = ${literal(tableId)}` - } - if (limit) { - sql += ` LIMIT ${limit}` - } - if (offset) { - sql += ` OFFSET ${offset}` - } + const tableIdFilter = tableId ? filterByValue([`${tableId}`]) : undefined + const sql = COLUMNS_SQL({ schemaFilter, tableIdFilter, limit, offset }) return await this.query(sql) } @@ -79,6 +60,7 @@ WHERE table?: string schema?: string }): Promise> { + const schemaFilter = schema ? 
filterByList([schema], []) : undefined if (id) { const regexp = /^(\d+)\.(\d+)$/ if (!regexp.test(id)) { @@ -86,7 +68,8 @@ WHERE } const matches = id.match(regexp) as RegExpMatchArray const [tableId, ordinalPos] = matches.slice(1).map(Number) - const sql = `${columnsSql} AND c.oid = ${tableId} AND a.attnum = ${ordinalPos};` + const idsFilter = filterByValue([`${tableId}.${ordinalPos}`]) + const sql = COLUMNS_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -96,9 +79,8 @@ WHERE return { data: data[0], error } } } else if (name && table) { - const sql = `${columnsSql} AND a.attname = ${literal(name)} AND c.relname = ${literal( - table - )} AND nc.nspname = ${literal(schema)};` + const columnNameFilter = filterByValue([`${table}.${name}`]) + const sql = `${COLUMNS_SQL({ schemaFilter, columnNameFilter })};` const { data, error } = await this.query(sql) if (error) { return { data, error } diff --git a/src/lib/PostgresMetaConfig.ts b/src/lib/PostgresMetaConfig.ts index d362641b..35b194d8 100644 --- a/src/lib/PostgresMetaConfig.ts +++ b/src/lib/PostgresMetaConfig.ts @@ -1,4 +1,4 @@ -import { configSql } from './sql/index.js' +import { CONFIG_SQL } from './sql/config.sql.js' import { PostgresMetaResult, PostgresConfig } from './types.js' export default class PostgresMetaConfig { @@ -15,13 +15,7 @@ export default class PostgresMetaConfig { limit?: number offset?: number } = {}): Promise> { - let sql = configSql - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + const sql = CONFIG_SQL({ limit, offset }) return await this.query(sql) } } diff --git a/src/lib/PostgresMetaExtensions.ts b/src/lib/PostgresMetaExtensions.ts index 4589057f..9543dc2c 100644 --- a/src/lib/PostgresMetaExtensions.ts +++ b/src/lib/PostgresMetaExtensions.ts @@ -1,6 +1,7 @@ import { ident, literal } from 'pg-format' -import { extensionsSql } from './sql/index.js' import { PostgresMetaResult, PostgresExtension } from './types.js' +import { EXTENSIONS_SQL } from './sql/extensions.sql.js' +import { filterByValue } from './helpers.js' export default class PostgresMetaExtensions { query: (sql: string) => Promise> @@ -16,18 +17,13 @@ export default class PostgresMetaExtensions { limit?: number offset?: number } = {}): Promise> { - let sql = extensionsSql - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + const sql = EXTENSIONS_SQL({ limit, offset }) return await this.query(sql) } async retrieve({ name }: { name: string }): Promise> { - const sql = `${extensionsSql} WHERE name = ${literal(name)};` + const nameFilter = filterByValue([name]) + const sql = EXTENSIONS_SQL({ nameFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } diff --git a/src/lib/PostgresMetaForeignTables.ts b/src/lib/PostgresMetaForeignTables.ts index 40ed859f..e565da43 100644 --- a/src/lib/PostgresMetaForeignTables.ts +++ b/src/lib/PostgresMetaForeignTables.ts @@ -1,7 +1,7 @@ -import { literal } from 'pg-format' -import { coalesceRowsToArray, filterByList } from './helpers.js' -import { columnsSql, foreignTablesSql } from './sql/index.js' +import { coalesceRowsToArray, filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresForeignTable } from './types.js' +import { FOREIGN_TABLES_SQL } from './sql/foreign_tables.sql.js' +import { COLUMNS_SQL } from './sql/columns.sql.js' export default class PostgresMetaForeignTables { query: (sql: 
string) => Promise> @@ -37,17 +37,8 @@ export default class PostgresMetaForeignTables { offset?: number includeColumns?: boolean } = {}): Promise> { - let sql = generateEnrichedForeignTablesSql({ includeColumns }) - const filter = filterByList(includedSchemas, excludedSchemas) - if (filter) { - sql += ` where schema ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const schemaFilter = filterByList(includedSchemas, excludedSchemas) + const sql = generateEnrichedForeignTablesSql({ includeColumns, schemaFilter, limit, offset }) return await this.query(sql) } @@ -69,9 +60,11 @@ export default class PostgresMetaForeignTables { schema?: string }): Promise> { if (id) { - const sql = `${generateEnrichedForeignTablesSql({ + const idsFilter = filterByValue([`${id}`]) + const sql = generateEnrichedForeignTablesSql({ includeColumns: true, - })} where foreign_tables.id = ${literal(id)};` + idsFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -81,11 +74,11 @@ export default class PostgresMetaForeignTables { return { data: data[0], error } } } else if (name) { - const sql = `${generateEnrichedForeignTablesSql({ + const nameFilter = filterByValue([`${schema}.${name}`]) + const sql = generateEnrichedForeignTablesSql({ includeColumns: true, - })} where foreign_tables.name = ${literal(name)} and foreign_tables.schema = ${literal( - schema - )};` + tableIdentifierFilter: nameFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -103,9 +96,23 @@ export default class PostgresMetaForeignTables { } } -const generateEnrichedForeignTablesSql = ({ includeColumns }: { includeColumns: boolean }) => ` -with foreign_tables as (${foreignTablesSql}) - ${includeColumns ? `, columns as (${columnsSql})` : ''} +const generateEnrichedForeignTablesSql = ({ + includeColumns, + schemaFilter, + idsFilter, + tableIdentifierFilter, + limit, + offset, +}: { + includeColumns: boolean + schemaFilter?: string + idsFilter?: string + tableIdentifierFilter?: string + limit?: number + offset?: number +}) => ` +with foreign_tables as (${FOREIGN_TABLES_SQL({ schemaFilter, tableIdentifierFilter, limit, offset })}) + ${includeColumns ? `, columns as (${COLUMNS_SQL({ schemaFilter, tableIdentifierFilter, tableIdFilter: idsFilter })})` : ''} select * ${ diff --git a/src/lib/PostgresMetaFunctions.ts b/src/lib/PostgresMetaFunctions.ts index b50e6761..b6e2a39c 100644 --- a/src/lib/PostgresMetaFunctions.ts +++ b/src/lib/PostgresMetaFunctions.ts @@ -1,8 +1,8 @@ import { ident, literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { filterByList } from './helpers.js' -import { functionsSql } from './sql/index.js' +import { filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresFunction, PostgresFunctionCreate } from './types.js' +import { FUNCTIONS_SQL } from './sql/functions.sql.js' export default class PostgresMetaFunctions { query: (sql: string) => Promise> @@ -24,21 +24,12 @@ export default class PostgresMetaFunctions { limit?: number offset?: number } = {}): Promise> { - let sql = enrichedFunctionsSql - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? 
DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` WHERE schema ${filter}` - } - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + const sql = FUNCTIONS_SQL({ schemaFilter, limit, offset }) return await this.query(sql) } @@ -63,8 +54,10 @@ export default class PostgresMetaFunctions { schema?: string args?: string[] }): Promise> { + const schemaFilter = schema ? filterByList([schema], []) : undefined if (id) { - const sql = `${enrichedFunctionsSql} WHERE id = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = FUNCTIONS_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -74,7 +67,8 @@ export default class PostgresMetaFunctions { return { data: data[0], error } } } else if (name && schema && args) { - const sql = this.generateRetrieveFunctionSql({ name, schema, args }) + const nameFilter = filterByValue([name]) + const sql = FUNCTIONS_SQL({ schemaFilter, nameFilter, args: args.map(literal) }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -169,6 +163,11 @@ export default class PostgresMetaFunctions { )}(${identityArgs}) SET SCHEMA ${ident(schema)};` : '' + const currentSchemaFilter = currentFunc!.schema + ? filterByList([currentFunc!.schema], []) + : undefined + const currentNameFilter = currentFunc!.name ? filterByValue([currentFunc!.name]) : undefined + const sql = ` DO LANGUAGE plpgsql $$ BEGIN @@ -177,7 +176,7 @@ export default class PostgresMetaFunctions { IF ( SELECT id - FROM (${functionsSql}) AS f + FROM (${FUNCTIONS_SQL({ schemaFilter: currentSchemaFilter, nameFilter: currentNameFilter })}) AS f WHERE f.schema = ${literal(currentFunc!.schema)} AND f.name = ${literal(currentFunc!.name)} AND f.identity_argument_types = ${literal(identityArgs)} @@ -262,49 +261,4 @@ export default class PostgresMetaFunctions { }; ` } - - private generateRetrieveFunctionSql({ - schema, - name, - args, - }: { - schema: string - name: string - args: string[] - }): string { - return `${enrichedFunctionsSql} JOIN pg_proc AS p ON id = p.oid WHERE schema = ${literal( - schema - )} AND name = ${literal(name)} AND p.proargtypes::text = ${ - args.length - ? 
`( - SELECT STRING_AGG(type_oid::text, ' ') FROM ( - SELECT ( - split_args.arr[ - array_length( - split_args.arr, - 1 - ) - ]::regtype::oid - ) AS type_oid FROM ( - SELECT STRING_TO_ARRAY( - UNNEST( - ARRAY[${args.map(literal)}] - ), - ' ' - ) AS arr - ) AS split_args - ) args - )` - : literal('') - }` - } } - -const enrichedFunctionsSql = ` - WITH f AS ( - ${functionsSql} - ) - SELECT - f.* - FROM f -` diff --git a/src/lib/PostgresMetaIndexes.ts b/src/lib/PostgresMetaIndexes.ts index 14ffbba7..84f7f100 100644 --- a/src/lib/PostgresMetaIndexes.ts +++ b/src/lib/PostgresMetaIndexes.ts @@ -1,10 +1,9 @@ -import { ident, literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { filterByList } from './helpers.js' -import { indexesSql } from './sql/index.js' +import { filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresIndex } from './types.js' +import { INDEXES_SQL } from './sql/indexes.sql.js' -export default class PostgresMetaFunctions { +export default class PostgresMetaIndexes { query: (sql: string) => Promise> constructor(query: (sql: string) => Promise>) { @@ -24,21 +23,12 @@ export default class PostgresMetaFunctions { limit?: number offset?: number } = {}): Promise> { - let sql = enrichedSql - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` WHERE schema ${filter}` - } - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + const sql = INDEXES_SQL({ schemaFilter, limit, offset }) return await this.query(sql) } @@ -54,13 +44,13 @@ export default class PostgresMetaFunctions { }): Promise> async retrieve({ id, - args = [], }: { id?: number args?: string[] }): Promise> { if (id) { - const sql = `${enrichedSql} WHERE id = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = INDEXES_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -74,12 +64,3 @@ export default class PostgresMetaFunctions { } } } - -const enrichedSql = ` - WITH x AS ( - ${indexesSql} - ) - SELECT - x.* - FROM x -` diff --git a/src/lib/PostgresMetaMaterializedViews.ts b/src/lib/PostgresMetaMaterializedViews.ts index 7f1efac5..0a32793a 100644 --- a/src/lib/PostgresMetaMaterializedViews.ts +++ b/src/lib/PostgresMetaMaterializedViews.ts @@ -1,7 +1,7 @@ -import { literal } from 'pg-format' -import { coalesceRowsToArray, filterByList } from './helpers.js' -import { columnsSql, materializedViewsSql } from './sql/index.js' +import { filterByList, coalesceRowsToArray, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresMaterializedView } from './types.js' +import { MATERIALIZED_VIEWS_SQL } from './sql/materialized_views.sql.js' +import { COLUMNS_SQL } from './sql/columns.sql.js' export default class PostgresMetaMaterializedViews { query: (sql: string) => Promise> @@ -10,20 +10,6 @@ export default class PostgresMetaMaterializedViews { this.query = query } - async list(options: { - includedSchemas?: string[] - excludedSchemas?: string[] - limit?: number - offset?: number - includeColumns: true - }): Promise> - async list(options?: { - includedSchemas?: string[] - excludedSchemas?: string[] - limit?: number - offset?: number - includeColumns?: boolean - }): Promise> async list({ includedSchemas, excludedSchemas, @@ -37,17 +23,8 @@ export default class PostgresMetaMaterializedViews { offset?: 
number includeColumns?: boolean } = {}): Promise> { - let sql = generateEnrichedMaterializedViewsSql({ includeColumns }) - const filter = filterByList(includedSchemas, excludedSchemas, undefined) - if (filter) { - sql += ` where schema ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const schemaFilter = filterByList(includedSchemas, excludedSchemas, undefined) + let sql = generateEnrichedMaterializedViewsSql({ includeColumns, schemaFilter, limit, offset }) return await this.query(sql) } @@ -69,9 +46,11 @@ export default class PostgresMetaMaterializedViews { schema?: string }): Promise> { if (id) { - const sql = `${generateEnrichedMaterializedViewsSql({ + const idsFilter = filterByValue([id]) + const sql = generateEnrichedMaterializedViewsSql({ includeColumns: true, - })} where materialized_views.id = ${literal(id)};` + idsFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -81,11 +60,11 @@ export default class PostgresMetaMaterializedViews { return { data: data[0], error } } } else if (name) { - const sql = `${generateEnrichedMaterializedViewsSql({ + const materializedViewIdentifierFilter = filterByValue([`${schema}.${name}`]) + const sql = generateEnrichedMaterializedViewsSql({ includeColumns: true, - })} where materialized_views.name = ${literal( - name - )} and materialized_views.schema = ${literal(schema)};` + materializedViewIdentifierFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -103,9 +82,23 @@ export default class PostgresMetaMaterializedViews { } } -const generateEnrichedMaterializedViewsSql = ({ includeColumns }: { includeColumns: boolean }) => ` -with materialized_views as (${materializedViewsSql}) - ${includeColumns ? `, columns as (${columnsSql})` : ''} +const generateEnrichedMaterializedViewsSql = ({ + includeColumns, + schemaFilter, + materializedViewIdentifierFilter, + idsFilter, + limit, + offset, +}: { + includeColumns: boolean + schemaFilter?: string + materializedViewIdentifierFilter?: string + idsFilter?: string + limit?: number + offset?: number +}) => ` +with materialized_views as (${MATERIALIZED_VIEWS_SQL({ schemaFilter, limit, offset, materializedViewIdentifierFilter, idsFilter })}) + ${includeColumns ? `, columns as (${COLUMNS_SQL({ schemaFilter, limit, offset, tableIdentifierFilter: materializedViewIdentifierFilter, tableIdFilter: idsFilter })})` : ''} select * ${ diff --git a/src/lib/PostgresMetaPolicies.ts b/src/lib/PostgresMetaPolicies.ts index fa476c12..72d3157b 100644 --- a/src/lib/PostgresMetaPolicies.ts +++ b/src/lib/PostgresMetaPolicies.ts @@ -1,8 +1,8 @@ -import { ident, literal } from 'pg-format' +import { ident } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { filterByList } from './helpers.js' -import { policiesSql } from './sql/index.js' +import { filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresPolicy } from './types.js' +import { POLICIES_SQL } from './sql/policies.sql.js' export default class PostgresMetaPolicies { query: (sql: string) => Promise> @@ -24,21 +24,12 @@ export default class PostgresMetaPolicies { limit?: number offset?: number } = {}): Promise> { - let sql = policiesSql - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? 
DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` WHERE n.nspname ${filter}` - } - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + let sql = POLICIES_SQL({ schemaFilter, limit, offset }) return await this.query(sql) } @@ -63,8 +54,10 @@ export default class PostgresMetaPolicies { table?: string schema?: string }): Promise> { + const schemaFilter = schema ? filterByList([schema], []) : undefined if (id) { - const sql = `${policiesSql} WHERE pol.oid = ${literal(id)};` + const idsFilter = filterByValue([`${id}`]) + const sql = POLICIES_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -74,9 +67,8 @@ export default class PostgresMetaPolicies { return { data: data[0], error } } } else if (name && table) { - const sql = `${policiesSql} WHERE pol.polname = ${literal(name)} AND n.nspname = ${literal( - schema - )} AND c.relname = ${literal(table)};` + const functionNameIdentifierFilter = filterByValue([`${table}.${name}`]) + const sql = POLICIES_SQL({ schemaFilter, functionNameIdentifierFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } diff --git a/src/lib/PostgresMetaPublications.ts b/src/lib/PostgresMetaPublications.ts index 63c1bafe..f3fdc549 100644 --- a/src/lib/PostgresMetaPublications.ts +++ b/src/lib/PostgresMetaPublications.ts @@ -1,6 +1,7 @@ import { ident, literal } from 'pg-format' -import { publicationsSql } from './sql/index.js' -import { PostgresMetaResult, PostgresPublication, PostgresTable } from './types.js' +import { PostgresMetaResult, PostgresPublication } from './types.js' +import { PUBLICATIONS_SQL } from './sql/publications.sql.js' +import { filterByValue } from './helpers.js' export default class PostgresMetaPublications { query: (sql: string) => Promise> @@ -16,13 +17,7 @@ export default class PostgresMetaPublications { limit?: number offset?: number }): Promise> { - let sql = publicationsSql - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + let sql = PUBLICATIONS_SQL({ limit, offset }) return await this.query(sql) } @@ -36,7 +31,8 @@ export default class PostgresMetaPublications { name?: string }): Promise> { if (id) { - const sql = `${publicationsSql} WHERE p.oid = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = PUBLICATIONS_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -46,7 +42,8 @@ export default class PostgresMetaPublications { return { data: data[0], error } } } else if (name) { - const sql = `${publicationsSql} WHERE p.pubname = ${literal(name)};` + const nameFilter = filterByValue([name]) + const sql = PUBLICATIONS_SQL({ nameFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -223,7 +220,7 @@ begin create temp table pg_meta_publication_tmp (name) on commit drop as values (coalesce(new_name, old.pubname)); end $$; -with publications as (${publicationsSql}) select * from publications where name = (select name from pg_meta_publication_tmp); +with publications as (${PUBLICATIONS_SQL({})}) select * from publications where name = (select name from pg_meta_publication_tmp); ` const { data, error } = await this.query(sql) if (error) { diff --git a/src/lib/PostgresMetaRelationships.ts b/src/lib/PostgresMetaRelationships.ts index 059762c3..e4e47d60 100644 --- a/src/lib/PostgresMetaRelationships.ts +++ 
b/src/lib/PostgresMetaRelationships.ts @@ -1,23 +1,37 @@ -import { literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { tableRelationshipsSql, viewsKeyDependenciesSql } from './sql/index.js' -import { PostgresMetaResult, PostgresRelationship } from './types.js' +import { filterByList } from './helpers.js' +import type { PostgresMetaResult, PostgresRelationship } from './types.js' +import { TABLE_RELATIONSHIPS_SQL } from './sql/table_relationships.sql.js' +import { VIEWS_KEY_DEPENDENCIES_SQL } from './sql/views_key_dependencies.sql.js' /* * Only used for generating types at the moment. Will need some cleanups before * using it for other things, e.g. /relationships endpoint. */ export default class PostgresMetaRelationships { - query: (sql: string) => Promise> + query: (sql: string) => Promise> - constructor(query: (sql: string) => Promise>) { + constructor(query: (sql: string) => Promise>) { this.query = query } - async list(): Promise> { + async list({ + includeSystemSchemas = false, + includedSchemas, + excludedSchemas, + }: { + includeSystemSchemas?: boolean + includedSchemas?: string[] + excludedSchemas?: string[] + } = {}): Promise> { + const schemaFilter = filterByList( + includedSchemas, + excludedSchemas, + !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined + ) let allTableM2oAndO2oRelationships: PostgresRelationship[] { - let sql = tableRelationshipsSql + const sql = TABLE_RELATIONSHIPS_SQL({ schemaFilter }) const { data, error } = (await this.query(sql)) as PostgresMetaResult if (error) { return { data: null, error } @@ -45,8 +59,9 @@ export default class PostgresMetaRelationships { column_dependencies: ColDep[] } + const viewsKeyDependenciesSql = VIEWS_KEY_DEPENDENCIES_SQL({ schemaFilter }) const { data: viewsKeyDependencies, error } = (await this.query( - allViewsKeyDependenciesSql + viewsKeyDependenciesSql )) as PostgresMetaResult if (error) { return { data: null, error } @@ -62,8 +77,8 @@ export default class PostgresMetaRelationships { return allEntries.reduce( (results, entries) => results - .map((result) => entries.map((entry) => [...result, entry])) - .reduce((subResults, result) => [...subResults, ...result], []), + .map((result) => entries.map((entry) => result.concat(entry))) + .reduce((subResults, result) => subResults.concat(result), []), [[]] ) } @@ -147,8 +162,3 @@ export default class PostgresMetaRelationships { } } } - -const allViewsKeyDependenciesSql = viewsKeyDependenciesSql.replaceAll( - '__EXCLUDED_SCHEMAS', - literal(DEFAULT_SYSTEM_SCHEMAS) -) diff --git a/src/lib/PostgresMetaRoles.ts b/src/lib/PostgresMetaRoles.ts index f55fb4a9..537b0622 100644 --- a/src/lib/PostgresMetaRoles.ts +++ b/src/lib/PostgresMetaRoles.ts @@ -1,11 +1,12 @@ import { ident, literal } from 'pg-format' -import { rolesSql } from './sql/index.js' +import { ROLES_SQL } from './sql/roles.sql.js' import { PostgresMetaResult, PostgresRole, PostgresRoleCreate, PostgresRoleUpdate, } from './types.js' +import { filterByValue } from './helpers.js' export function changeRoleConfig2Object(config: string[]) { if (!config) { return null @@ -32,32 +33,7 @@ export default class PostgresMetaRoles { limit?: number offset?: number } = {}): Promise> { - let sql = ` -WITH - roles AS (${rolesSql}) -SELECT - * -FROM - roles -WHERE - true` - if (!includeDefaultRoles) { - // All default/predefined roles start with pg_: https://www.postgresql.org/docs/15/predefined-roles.html - // The pg_ prefix is also reserved: - // - // ``` - // postgres=# create role pg_mytmp; 
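// Illustration (an assumption about ROLES_SQL, not its actual body): the same reserved-prefix
// rule described in the comment here can be applied inside the query itself whenever default
// roles are excluded.
const ROLES_FILTER_SKETCH = (includeDefaultRoles: boolean) => `
select oid as id, rolname as name
from pg_roles
where true
  ${includeDefaultRoles ? '' : `and not pg_catalog.starts_with(rolname, 'pg_')`}
`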
- // ERROR: role name "pg_mytmp" is reserved - // DETAIL: Role names starting with "pg_" are reserved. - // ``` - sql += ` AND NOT pg_catalog.starts_with(name, 'pg_')` - } - if (limit) { - sql += ` LIMIT ${limit}` - } - if (offset) { - sql += ` OFFSET ${offset}` - } + const sql = ROLES_SQL({ limit, offset, includeDefaultRoles }) const result = await this.query(sql) if (result.data) { result.data = result.data.map((role: any) => { @@ -78,7 +54,8 @@ WHERE name?: string }): Promise> { if (id) { - const sql = `${rolesSql} WHERE oid = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = ROLES_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { @@ -90,7 +67,8 @@ WHERE return { data: data[0], error } } } else if (name) { - const sql = `${rolesSql} WHERE rolname = ${literal(name)};` + const nameFilter = filterByValue([name]) + const sql = ROLES_SQL({ nameFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } diff --git a/src/lib/PostgresMetaSchemas.ts b/src/lib/PostgresMetaSchemas.ts index b84a64cc..aa17bcfd 100644 --- a/src/lib/PostgresMetaSchemas.ts +++ b/src/lib/PostgresMetaSchemas.ts @@ -1,12 +1,13 @@ -import { ident, literal } from 'pg-format' -import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { schemasSql } from './sql/index.js' +import { ident } from 'pg-format' +import { SCHEMAS_SQL } from './sql/schemas.sql.js' import { PostgresMetaResult, PostgresSchema, PostgresSchemaCreate, PostgresSchemaUpdate, } from './types.js' +import { filterByList, filterByValue } from './helpers.js' +import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' export default class PostgresMetaSchemas { query: (sql: string) => Promise> @@ -16,24 +17,24 @@ export default class PostgresMetaSchemas { } async list({ + includedSchemas, + excludedSchemas, includeSystemSchemas = false, limit, offset, }: { + includedSchemas?: string[] + excludedSchemas?: string[] includeSystemSchemas?: boolean limit?: number offset?: number } = {}): Promise> { - let sql = schemasSql - if (!includeSystemSchemas) { - sql = `${sql} AND NOT (n.nspname IN (${DEFAULT_SYSTEM_SCHEMAS.map(literal).join(',')}))` - } - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + const schemaFilter = filterByList( + includedSchemas, + excludedSchemas, + !includeSystemSchemas ? 
DEFAULT_SYSTEM_SCHEMAS : undefined + ) + const sql = SCHEMAS_SQL({ limit, offset, includeSystemSchemas, nameFilter: schemaFilter }) return await this.query(sql) } @@ -47,7 +48,8 @@ export default class PostgresMetaSchemas { name?: string }): Promise> { if (id) { - const sql = `${schemasSql} AND n.oid = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = SCHEMAS_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -57,7 +59,8 @@ export default class PostgresMetaSchemas { return { data: data[0], error } } } else if (name) { - const sql = `${schemasSql} AND n.nspname = ${literal(name)};` + const nameFilter = filterByValue([name]) + const sql = SCHEMAS_SQL({ nameFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } diff --git a/src/lib/PostgresMetaTablePrivileges.ts b/src/lib/PostgresMetaTablePrivileges.ts index 9edb32e9..e0e79a05 100644 --- a/src/lib/PostgresMetaTablePrivileges.ts +++ b/src/lib/PostgresMetaTablePrivileges.ts @@ -1,13 +1,13 @@ -import { ident, literal } from 'pg-format' +import { ident } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { filterByList } from './helpers.js' -import { tablePrivilegesSql } from './sql/index.js' +import { filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresTablePrivileges, PostgresTablePrivilegesGrant, PostgresTablePrivilegesRevoke, } from './types.js' +import { TABLE_PRIVILEGES_SQL } from './sql/table_privileges.sql.js' export default class PostgresMetaTablePrivileges { query: (sql: string) => Promise> @@ -29,25 +29,12 @@ export default class PostgresMetaTablePrivileges { limit?: number offset?: number } = {}): Promise> { - let sql = ` -with table_privileges as (${tablePrivilegesSql}) -select * -from table_privileges -` - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` where schema ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const sql = TABLE_PRIVILEGES_SQL({ schemaFilter, limit, offset }) return await this.query(sql) } @@ -69,11 +56,8 @@ from table_privileges schema?: string }): Promise> { if (id) { - const sql = ` -with table_privileges as (${tablePrivilegesSql}) -select * -from table_privileges -where table_privileges.relation_id = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = TABLE_PRIVILEGES_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -83,13 +67,8 @@ where table_privileges.relation_id = ${literal(id)};` return { data: data[0], error } } } else if (name) { - const sql = ` -with table_privileges as (${tablePrivilegesSql}) -select * -from table_privileges -where table_privileges.schema = ${literal(schema)} - and table_privileges.name = ${literal(name)} -` + const nameIdentifierFilter = filterByValue([`${schema}.${name}`]) + const sql = TABLE_PRIVILEGES_SQL({ nameIdentifierFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -129,12 +108,7 @@ end $$; // Return the updated table privileges for modified relations. 
const relationIds = [...new Set(grants.map(({ relation_id }) => relation_id))] - sql = ` -with table_privileges as (${tablePrivilegesSql}) -select * -from table_privileges -where relation_id in (${relationIds.map(literal).join(',')}) -` + sql = TABLE_PRIVILEGES_SQL({ idsFilter: filterByList(relationIds) }) return await this.query(sql) } @@ -159,12 +133,7 @@ end $$; // Return the updated table privileges for modified relations. const relationIds = [...new Set(revokes.map(({ relation_id }) => relation_id))] - sql = ` -with table_privileges as (${tablePrivilegesSql}) -select * -from table_privileges -where relation_id in (${relationIds.map(literal).join(',')}) -` + sql = TABLE_PRIVILEGES_SQL({ idsFilter: filterByList(relationIds) }) return await this.query(sql) } } diff --git a/src/lib/PostgresMetaTables.ts b/src/lib/PostgresMetaTables.ts index 5b97c253..8d3d9a47 100644 --- a/src/lib/PostgresMetaTables.ts +++ b/src/lib/PostgresMetaTables.ts @@ -1,13 +1,14 @@ import { ident, literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { coalesceRowsToArray, filterByList } from './helpers.js' -import { columnsSql, tablesSql } from './sql/index.js' +import { coalesceRowsToArray, filterByValue, filterByList } from './helpers.js' import { PostgresMetaResult, PostgresTable, PostgresTableCreate, PostgresTableUpdate, } from './types.js' +import { TABLES_SQL } from './sql/table.sql.js' +import { COLUMNS_SQL } from './sql/columns.sql.js' export default class PostgresMetaTables { query: (sql: string) => Promise> @@ -47,21 +48,12 @@ export default class PostgresMetaTables { offset?: number includeColumns?: boolean } = {}): Promise> { - let sql = generateEnrichedTablesSql({ includeColumns }) - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` where schema ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const sql = generateEnrichedTablesSql({ includeColumns, schemaFilter, limit, offset }) return await this.query(sql) } @@ -82,10 +74,14 @@ export default class PostgresMetaTables { name?: string schema?: string }): Promise> { + const schemaFilter = schema ? filterByList([schema], []) : undefined if (id) { - const sql = `${generateEnrichedTablesSql({ + const idsFilter = filterByValue([id]) + const sql = generateEnrichedTablesSql({ + schemaFilter, includeColumns: true, - })} where tables.id = ${literal(id)};` + idsFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -95,9 +91,12 @@ export default class PostgresMetaTables { return { data: data[0], error } } } else if (name) { - const sql = `${generateEnrichedTablesSql({ + const tableIdentifierFilter = filterByValue([`${schema}.${name}`]) + const sql = generateEnrichedTablesSql({ + schemaFilter, includeColumns: true, - })} where tables.name = ${literal(name)} and tables.schema = ${literal(schema)};` + tableIdentifierFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -247,9 +246,23 @@ COMMIT;` } } -const generateEnrichedTablesSql = ({ includeColumns }: { includeColumns: boolean }) => ` -with tables as (${tablesSql}) - ${includeColumns ? 
`, columns as (${columnsSql})` : ''} +const generateEnrichedTablesSql = ({ + includeColumns, + schemaFilter, + tableIdentifierFilter, + idsFilter, + limit, + offset, +}: { + includeColumns: boolean + schemaFilter?: string + tableIdentifierFilter?: string + idsFilter?: string + limit?: number + offset?: number +}) => ` +with tables as (${TABLES_SQL({ schemaFilter, tableIdentifierFilter, idsFilter, limit, offset })}) + ${includeColumns ? `, columns as (${COLUMNS_SQL({ schemaFilter, tableIdFilter: idsFilter, tableIdentifierFilter: tableIdentifierFilter })})` : ''} select * ${includeColumns ? `, ${coalesceRowsToArray('columns', 'columns.table_id = tables.id')}` : ''} diff --git a/src/lib/PostgresMetaTriggers.ts b/src/lib/PostgresMetaTriggers.ts index 5ce05f76..f7dfbc95 100644 --- a/src/lib/PostgresMetaTriggers.ts +++ b/src/lib/PostgresMetaTriggers.ts @@ -1,8 +1,8 @@ import { ident, literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { filterByList } from './helpers.js' -import { triggersSql } from './sql/index.js' +import { filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresTrigger } from './types.js' +import { TRIGGERS_SQL } from './sql/triggers.sql.js' export default class PostgresMetaTriggers { query: (sql: string) => Promise> @@ -24,21 +24,12 @@ export default class PostgresMetaTriggers { limit?: number offset?: number } = {}): Promise> { - let sql = enrichedTriggersSql - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` WHERE schema ${filter}` - } - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + let sql = TRIGGERS_SQL({ schemaFilter, limit, offset }) return await this.query(sql) } @@ -63,8 +54,10 @@ export default class PostgresMetaTriggers { schema?: string table?: string }): Promise> { + const schemaFilter = schema ? 
filterByList([schema], []) : undefined if (id) { - const sql = `${enrichedTriggersSql} WHERE id = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = TRIGGERS_SQL({ idsFilter }) const { data, error } = await this.query(sql) @@ -82,9 +75,9 @@ export default class PostgresMetaTriggers { } if (name && schema && table) { - const sql = `${enrichedTriggersSql} WHERE name = ${literal(name)} AND schema = ${literal( - schema - )} AND triggers.table = ${literal(table)};` + const nameFilter = filterByValue([name]) + const tableNameFilter = filterByValue([table]) + const sql = TRIGGERS_SQL({ schemaFilter, nameFilter, tableNameFilter }) const { data, error } = await this.query(sql) @@ -168,7 +161,6 @@ export default class PostgresMetaTriggers { if (error) { return { data: null, error } } - return await this.retrieve({ name, table, @@ -254,12 +246,3 @@ export default class PostgresMetaTriggers { return { data: triggerRecord!, error: null } } } - -const enrichedTriggersSql = ` - WITH triggers AS ( - ${triggersSql} - ) - SELECT - * - FROM triggers -` diff --git a/src/lib/PostgresMetaTypes.ts b/src/lib/PostgresMetaTypes.ts index 35371d55..990c94e3 100644 --- a/src/lib/PostgresMetaTypes.ts +++ b/src/lib/PostgresMetaTypes.ts @@ -1,7 +1,7 @@ import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' import { filterByList } from './helpers.js' -import { typesSql } from './sql/index.js' import { PostgresMetaResult, PostgresType } from './types.js' +import { TYPES_SQL } from './sql/types.sql.js' export default class PostgresMetaTypes { query: (sql: string) => Promise> @@ -27,44 +27,12 @@ export default class PostgresMetaTypes { limit?: number offset?: number } = {}): Promise> { - let sql = `${typesSql} - where - ( - t.typrelid = 0 - or ( - select - c.relkind ${includeTableTypes ? `in ('c', 'r')` : `= 'c'`} - from - pg_class c - where - c.oid = t.typrelid - ) - ) - ` - if (!includeArrayTypes) { - sql += ` and not exists ( - select - from - pg_type el - where - el.oid = t.typelem - and el.typarray = t.oid - )` - } - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? 
DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` and n.nspname ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const sql = TYPES_SQL({ schemaFilter, limit, offset, includeTableTypes, includeArrayTypes }) return await this.query(sql) } } diff --git a/src/lib/PostgresMetaVersion.ts b/src/lib/PostgresMetaVersion.ts index 38e0299f..5ea23f37 100644 --- a/src/lib/PostgresMetaVersion.ts +++ b/src/lib/PostgresMetaVersion.ts @@ -1,4 +1,4 @@ -import { versionSql } from './sql/index.js' +import { VERSION_SQL } from './sql/version.sql.js' import { PostgresMetaResult, PostgresVersion } from './types.js' export default class PostgresMetaVersion { @@ -9,7 +9,7 @@ export default class PostgresMetaVersion { } async retrieve(): Promise> { - const { data, error } = await this.query(versionSql) + const { data, error } = await this.query(VERSION_SQL()) if (error) { return { data, error } } diff --git a/src/lib/PostgresMetaViews.ts b/src/lib/PostgresMetaViews.ts index 0f6ad09c..a9e7b0ce 100644 --- a/src/lib/PostgresMetaViews.ts +++ b/src/lib/PostgresMetaViews.ts @@ -1,8 +1,8 @@ -import { literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { coalesceRowsToArray, filterByList } from './helpers.js' -import { columnsSql, viewsSql } from './sql/index.js' +import { coalesceRowsToArray, filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresView } from './types.js' +import { VIEWS_SQL } from './sql/views.sql.js' +import { COLUMNS_SQL } from './sql/columns.sql.js' export default class PostgresMetaViews { query: (sql: string) => Promise> @@ -11,22 +11,6 @@ export default class PostgresMetaViews { this.query = query } - async list(options: { - includeSystemSchemas?: boolean - includedSchemas?: string[] - excludedSchemas?: string[] - limit?: number - offset?: number - includeColumns: false - }): Promise> - async list(options?: { - includeSystemSchemas?: boolean - includedSchemas?: string[] - excludedSchemas?: string[] - limit?: number - offset?: number - includeColumns?: boolean - }): Promise> async list({ includeSystemSchemas = false, includedSchemas, @@ -42,21 +26,12 @@ export default class PostgresMetaViews { offset?: number includeColumns?: boolean } = {}): Promise> { - let sql = generateEnrichedViewsSql({ includeColumns }) - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? 
DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` where schema ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const sql = generateEnrichedViewsSql({ includeColumns, schemaFilter, limit, offset }) return await this.query(sql) } @@ -78,9 +53,11 @@ export default class PostgresMetaViews { schema?: string }): Promise> { if (id) { - const sql = `${generateEnrichedViewsSql({ + const idsFilter = filterByValue([id]) + const sql = generateEnrichedViewsSql({ includeColumns: true, - })} where views.id = ${literal(id)};` + idsFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -90,9 +67,11 @@ export default class PostgresMetaViews { return { data: data[0], error } } } else if (name) { - const sql = `${generateEnrichedViewsSql({ + const viewIdentifierFilter = filterByValue([`${schema}.${name}`]) + const sql = generateEnrichedViewsSql({ includeColumns: true, - })} where views.name = ${literal(name)} and views.schema = ${literal(schema)};` + viewIdentifierFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -110,9 +89,23 @@ export default class PostgresMetaViews { } } -const generateEnrichedViewsSql = ({ includeColumns }: { includeColumns: boolean }) => ` -with views as (${viewsSql}) - ${includeColumns ? `, columns as (${columnsSql})` : ''} +const generateEnrichedViewsSql = ({ + includeColumns, + schemaFilter, + idsFilter, + viewIdentifierFilter, + limit, + offset, +}: { + includeColumns: boolean + schemaFilter?: string + idsFilter?: string + viewIdentifierFilter?: string + limit?: number + offset?: number +}) => ` +with views as (${VIEWS_SQL({ schemaFilter, limit, offset, viewIdentifierFilter, idsFilter })}) + ${includeColumns ? `, columns as (${COLUMNS_SQL({ schemaFilter, tableIdentifierFilter: viewIdentifierFilter, tableIdFilter: idsFilter })})` : ''} select * ${includeColumns ? `, ${coalesceRowsToArray('columns', 'columns.table_id = views.id')}` : ''} diff --git a/src/lib/generators.ts b/src/lib/generators.ts index c916a44c..6b5f55e5 100644 --- a/src/lib/generators.ts +++ b/src/lib/generators.ts @@ -34,14 +34,18 @@ export async function getGeneratorMetadata( const includedSchemas = filters.includedSchemas ?? [] const excludedSchemas = filters.excludedSchemas ?? [] - const { data: schemas, error: schemasError } = await pgMeta.schemas.list() + const { data: schemas, error: schemasError } = await pgMeta.schemas.list({ + includeSystemSchemas: false, + includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, + }) if (schemasError) { return { data: null, error: schemasError } } const { data: tables, error: tablesError } = await pgMeta.tables.list({ includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, - excludedSchemas, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, includeColumns: false, }) if (tablesError) { @@ -50,7 +54,7 @@ export async function getGeneratorMetadata( const { data: foreignTables, error: foreignTablesError } = await pgMeta.foreignTables.list({ includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, - excludedSchemas, + excludedSchemas: excludedSchemas.length > 0 ? 
excludedSchemas : undefined, includeColumns: false, }) if (foreignTablesError) { @@ -59,7 +63,7 @@ export async function getGeneratorMetadata( const { data: views, error: viewsError } = await pgMeta.views.list({ includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, - excludedSchemas, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, includeColumns: false, }) if (viewsError) { @@ -69,7 +73,7 @@ export async function getGeneratorMetadata( const { data: materializedViews, error: materializedViewsError } = await pgMeta.materializedViews.list({ includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, - excludedSchemas, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, includeColumns: false, }) if (materializedViewsError) { @@ -78,20 +82,26 @@ export async function getGeneratorMetadata( const { data: columns, error: columnsError } = await pgMeta.columns.list({ includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, - excludedSchemas, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, + includeSystemSchemas: false, }) if (columnsError) { return { data: null, error: columnsError } } - const { data: relationships, error: relationshipsError } = await pgMeta.relationships.list() + const { data: relationships, error: relationshipsError } = await pgMeta.relationships.list({ + includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, + includeSystemSchemas: false, + }) if (relationshipsError) { return { data: null, error: relationshipsError } } const { data: functions, error: functionsError } = await pgMeta.functions.list({ includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, - excludedSchemas, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, + includeSystemSchemas: false, }) if (functionsError) { return { data: null, error: functionsError } diff --git a/src/lib/helpers.ts b/src/lib/helpers.ts index 7145bb40..4fca3124 100644 --- a/src/lib/helpers.ts +++ b/src/lib/helpers.ts @@ -13,7 +13,11 @@ COALESCE( ) AS ${source}` } -export const filterByList = (include?: string[], exclude?: string[], defaultExclude?: string[]) => { +export const filterByList = ( + include?: (string | number)[], + exclude?: (string | number)[], + defaultExclude?: (string | number)[] +) => { if (defaultExclude) { exclude = defaultExclude.concat(exclude ?? 
[]) } @@ -24,3 +28,10 @@ export const filterByList = (include?: string[], exclude?: string[], defaultExcl } return '' } + +export const filterByValue = (ids?: (string | number)[]) => { + if (ids?.length) { + return `IN (${ids.map(literal).join(',')})` + } + return '' +} diff --git a/src/lib/sql/column_privileges.sql b/src/lib/sql/column_privileges.sql.ts similarity index 88% rename from src/lib/sql/column_privileges.sql rename to src/lib/sql/column_privileges.sql.ts index 8540c583..f60101dc 100644 --- a/src/lib/sql/column_privileges.sql +++ b/src/lib/sql/column_privileges.sql.ts @@ -1,3 +1,10 @@ +import type { SQLQueryPropsWithSchemaFilter } from './common.js' + +export const COLUMN_PRIVILEGES_SQL = ( + props: SQLQueryPropsWithSchemaFilter & { + columnIdsFilter?: string + } +) => /* SQL */ ` -- Lists each column's privileges in the form of: -- -- [ @@ -28,8 +35,8 @@ -- - we include column privileges for materialized views -- (reason for exclusion in information_schema.column_privileges: -- https://www.postgresql.org/message-id/9136.1502740844%40sss.pgh.pa.us) --- - we query a.attrelid and a.attnum to generate `column_id` --- - `table_catalog` is omitted +-- - we query a.attrelid and a.attnum to generate \`column_id\` +-- - \`table_catalog\` is omitted -- - table_schema -> relation_schema, table_name -> relation_name -- -- Column privileges are intertwined with table privileges in that table @@ -37,12 +44,12 @@ -- -- grant all on mytable to myrole; -- --- Then `myrole` is granted privileges for ALL columns. Likewise, if we do: +-- Then \`myrole\` is granted privileges for ALL columns. Likewise, if we do: -- -- grant all (id) on mytable to myrole; -- revoke all on mytable from myrole; -- --- Then the grant on the `id` column is revoked. +-- Then the grant on the \`id\` column is revoked. -- -- This is unlike how grants for schemas and tables interact, where you need -- privileges for BOTH the schema the table is in AND the table itself in order @@ -130,6 +137,8 @@ from union all select (0)::oid as oid, 'PUBLIC') grantee(oid, rolname) where ((x.relnamespace = nc.oid) + ${props.schemaFilter ? `and nc.nspname ${props.schemaFilter}` : ''} + ${props.columnIdsFilter ? `and (x.attrelid || '.' || x.attnum) ${props.columnIdsFilter}` : ''} and (x.grantee = grantee.oid) and (x.grantor = u_grantor.oid) and (x.prtype = any (ARRAY['INSERT', @@ -143,3 +152,6 @@ group by column_id, nc.nspname, x.relname, x.attname +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/columns.sql b/src/lib/sql/columns.sql.ts similarity index 80% rename from src/lib/sql/columns.sql rename to src/lib/sql/columns.sql.ts index ad01e22a..d19c968c 100644 --- a/src/lib/sql/columns.sql +++ b/src/lib/sql/columns.sql.ts @@ -1,3 +1,13 @@ +import type { SQLQueryPropsWithSchemaFilter } from './common.js' + +export const COLUMNS_SQL = ( + props: SQLQueryPropsWithSchemaFilter & { + tableIdFilter?: string + tableIdentifierFilter?: string + columnNameFilter?: string + idsFilter?: string + } +) => /* SQL */ ` -- Adapted from information_schema.columns SELECT @@ -97,6 +107,11 @@ FROM ORDER BY table_id, ordinal_position, oid asc ) AS check_constraints ON check_constraints.table_id = c.oid AND check_constraints.ordinal_position = a.attnum WHERE + ${props.schemaFilter ? `nc.nspname ${props.schemaFilter} AND` : ''} + ${props.idsFilter ? `(c.oid || '.' || a.attnum) ${props.idsFilter} AND` : ''} + ${props.columnNameFilter ? `(c.relname || '.' 
|| a.attname) ${props.columnNameFilter} AND` : ''} + ${props.tableIdFilter ? `c.oid ${props.tableIdFilter} AND` : ''} + ${props.tableIdentifierFilter ? `nc.nspname || '.' || c.relname ${props.tableIdentifierFilter} AND` : ''} NOT pg_is_other_temp_schema(nc.oid) AND a.attnum > 0 AND NOT a.attisdropped @@ -109,3 +124,6 @@ WHERE 'SELECT, INSERT, UPDATE, REFERENCES' ) ) +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/common.ts b/src/lib/sql/common.ts new file mode 100644 index 00000000..b9c37ec9 --- /dev/null +++ b/src/lib/sql/common.ts @@ -0,0 +1,17 @@ +export type SQLQueryProps = { + limit?: number + offset?: number +} + +export type SQLQueryPropsWithSchemaFilter = SQLQueryProps & { + schemaFilter?: string +} + +export type SQLQueryPropsWithIdsFilter = SQLQueryProps & { + idsFilter?: string +} + +export type SQLQueryPropsWithSchemaFilterAndIdsFilter = SQLQueryProps & { + schemaFilter?: string + idsFilter?: string +} diff --git a/src/lib/sql/config.sql b/src/lib/sql/config.sql.ts similarity index 57% rename from src/lib/sql/config.sql rename to src/lib/sql/config.sql.ts index 553e4426..f33305d5 100644 --- a/src/lib/sql/config.sql +++ b/src/lib/sql/config.sql.ts @@ -1,3 +1,6 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const CONFIG_SQL = (props: SQLQueryPropsWithSchemaFilterAndIdsFilter) => /* SQL */ ` SELECT name, setting, @@ -23,3 +26,6 @@ FROM ORDER BY category, name +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/extensions.sql b/src/lib/sql/extensions.sql deleted file mode 100644 index 9a8700f8..00000000 --- a/src/lib/sql/extensions.sql +++ /dev/null @@ -1,10 +0,0 @@ -SELECT - e.name, - n.nspname AS schema, - e.default_version, - x.extversion AS installed_version, - e.comment -FROM - pg_available_extensions() e(name, default_version, comment) - LEFT JOIN pg_extension x ON e.name = x.extname - LEFT JOIN pg_namespace n ON x.extnamespace = n.oid diff --git a/src/lib/sql/extensions.sql.ts b/src/lib/sql/extensions.sql.ts new file mode 100644 index 00000000..fe65b0c2 --- /dev/null +++ b/src/lib/sql/extensions.sql.ts @@ -0,0 +1,19 @@ +import type { SQLQueryProps } from './common.js' + +export const EXTENSIONS_SQL = (props: SQLQueryProps & { nameFilter?: string }) => /* SQL */ ` +SELECT + e.name, + n.nspname AS schema, + e.default_version, + x.extversion AS installed_version, + e.comment +FROM + pg_available_extensions() e(name, default_version, comment) + LEFT JOIN pg_extension x ON e.name = x.extname + LEFT JOIN pg_namespace n ON x.extnamespace = n.oid +WHERE + true + ${props.nameFilter ? `AND e.name ${props.nameFilter}` : ''} +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/foreign_tables.sql b/src/lib/sql/foreign_tables.sql deleted file mode 100644 index e3e5e14f..00000000 --- a/src/lib/sql/foreign_tables.sql +++ /dev/null @@ -1,10 +0,0 @@ -SELECT - c.oid :: int8 AS id, - n.nspname AS schema, - c.relname AS name, - obj_description(c.oid) AS comment -FROM - pg_class c - JOIN pg_namespace n ON n.oid = c.relnamespace -WHERE - c.relkind = 'f' diff --git a/src/lib/sql/foreign_tables.sql.ts b/src/lib/sql/foreign_tables.sql.ts new file mode 100644 index 00000000..00541f0f --- /dev/null +++ b/src/lib/sql/foreign_tables.sql.ts @@ -0,0 +1,25 @@ +import type { SQLQueryProps } from './common.js' + +export const FOREIGN_TABLES_SQL = ( + props: SQLQueryProps & { + schemaFilter?: string + idsFilter?: string + tableIdentifierFilter?: string + } +) => /* SQL */ ` +SELECT + c.oid :: int8 AS id, + n.nspname AS schema, + c.relname AS name, + obj_description(c.oid) AS comment +FROM + pg_class c + JOIN pg_namespace n ON n.oid = c.relnamespace +WHERE + ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} + ${props.idsFilter ? `c.oid ${props.idsFilter} AND` : ''} + ${props.tableIdentifierFilter ? `(n.nspname || '.' || c.relname) ${props.tableIdentifierFilter} AND` : ''} + c.relkind = 'f' +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/functions.sql b/src/lib/sql/functions.sql.ts similarity index 70% rename from src/lib/sql/functions.sql rename to src/lib/sql/functions.sql.ts index d2258402..92715b95 100644 --- a/src/lib/sql/functions.sql +++ b/src/lib/sql/functions.sql.ts @@ -1,9 +1,17 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const FUNCTIONS_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { + nameFilter?: string + args?: string[] + } +) => /* SQL */ ` -- CTE with sane arg_modes, arg_names, and arg_types. -- All three are always of the same length. -- All three include all args, including OUT and TABLE args. with functions as ( select - *, + p.*, -- proargmodes is null when all arg modes are IN coalesce( p.proargmodes, @@ -21,7 +29,40 @@ with functions as ( array_fill(true, array[pronargdefaults])) as arg_has_defaults from pg_proc as p + ${props.schemaFilter ? `join pg_namespace n on p.pronamespace = n.oid` : ''} where + ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} + ${props.idsFilter ? `p.oid ${props.idsFilter} AND` : ''} + ${props.nameFilter ? `p.proname ${props.nameFilter} AND` : ''} + ${ + props.args === undefined + ? '' + : props.args.length > 0 + ? `p.proargtypes::text = ${ + props.args.length + ? `( + SELECT STRING_AGG(type_oid::text, ' ') FROM ( + SELECT ( + split_args.arr[ + array_length( + split_args.arr, + 1 + ) + ]::regtype::oid + ) AS type_oid FROM ( + SELECT STRING_TO_ARRAY( + UNNEST( + ARRAY[${props.args}] + ), + ' ' + ) AS arr + ) AS split_args + ) args + )` + : "''" + } AND` + : '' + } p.prokind = 'f' ) select @@ -105,3 +146,6 @@ from group by t1.oid ) f_args on f_args.oid = f.oid +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/index.ts b/src/lib/sql/index.ts deleted file mode 100644 index 64be3aa8..00000000 --- a/src/lib/sql/index.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { readFile } from 'node:fs/promises' -import { dirname, join } from 'node:path' -import { fileURLToPath } from 'node:url' - -const __dirname = dirname(fileURLToPath(import.meta.url)) -export const columnPrivilegesSql = await readFile(join(__dirname, 'column_privileges.sql'), 'utf-8') -export const columnsSql = await readFile(join(__dirname, 'columns.sql'), 'utf-8') -export const configSql = await readFile(join(__dirname, 'config.sql'), 'utf-8') -export const extensionsSql = await readFile(join(__dirname, 'extensions.sql'), 'utf-8') -export const foreignTablesSql = await readFile(join(__dirname, 'foreign_tables.sql'), 'utf-8') -export const functionsSql = await readFile(join(__dirname, 'functions.sql'), 'utf-8') -export const indexesSql = await readFile(join(__dirname, 'indexes.sql'), 'utf-8') -export const materializedViewsSql = await readFile( - join(__dirname, 'materialized_views.sql'), - 'utf-8' -) -export const policiesSql = await readFile(join(__dirname, 'policies.sql'), 'utf-8') -export const publicationsSql = await readFile(join(__dirname, 'publications.sql'), 'utf-8') -export const tableRelationshipsSql = await readFile( - join(__dirname, 'table_relationships.sql'), - 'utf-8' -) -export const rolesSql = await readFile(join(__dirname, 'roles.sql'), 'utf-8') -export const schemasSql = await readFile(join(__dirname, 'schemas.sql'), 'utf-8') -export const tablePrivilegesSql = await readFile(join(__dirname, 'table_privileges.sql'), 'utf-8') -export const tablesSql = await readFile(join(__dirname, 'tables.sql'), 'utf-8') -export const triggersSql = await readFile(join(__dirname, 'triggers.sql'), 'utf-8') -export const typesSql = await readFile(join(__dirname, 'types.sql'), 'utf-8') -export const versionSql = await readFile(join(__dirname, 'version.sql'), 'utf-8') -export const viewsKeyDependenciesSql = await readFile( - join(__dirname, 'views_key_dependencies.sql'), - 'utf-8' -) -export const viewsSql = await readFile(join(__dirname, 'views.sql'), 'utf-8') diff --git a/src/lib/sql/indexes.sql b/src/lib/sql/indexes.sql.ts similarity index 79% rename from src/lib/sql/indexes.sql rename to src/lib/sql/indexes.sql.ts index ff0c8f36..5f893a8f 100644 --- a/src/lib/sql/indexes.sql +++ b/src/lib/sql/indexes.sql.ts @@ -1,3 +1,6 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const INDEXES_SQL = (props: SQLQueryPropsWithSchemaFilterAndIdsFilter) => /* SQL */ ` SELECT idx.indexrelid::int8 AS id, idx.indrelid::int8 AS table_id, @@ -37,5 +40,11 @@ SELECT JOIN pg_am am ON c.relam = am.oid JOIN pg_attribute a ON a.attrelid = c.oid AND a.attnum = ANY(idx.indkey) JOIN pg_indexes ix ON c.relname = ix.indexname + WHERE + ${props.schemaFilter ? `n.nspname ${props.schemaFilter}` : 'true'} + ${props.idsFilter ? 
`AND idx.indexrelid ${props.idsFilter}` : ''} GROUP BY - idx.indexrelid, idx.indrelid, n.nspname, idx.indnatts, idx.indnkeyatts, idx.indisunique, idx.indisprimary, idx.indisexclusion, idx.indimmediate, idx.indisclustered, idx.indisvalid, idx.indcheckxmin, idx.indisready, idx.indislive, idx.indisreplident, idx.indkey, idx.indcollation, idx.indclass, idx.indoption, idx.indexprs, idx.indpred, ix.indexdef, am.amname \ No newline at end of file + idx.indexrelid, idx.indrelid, n.nspname, idx.indnatts, idx.indnkeyatts, idx.indisunique, idx.indisprimary, idx.indisexclusion, idx.indimmediate, idx.indisclustered, idx.indisvalid, idx.indcheckxmin, idx.indisready, idx.indislive, idx.indisreplident, idx.indkey, idx.indcollation, idx.indclass, idx.indoption, idx.indexprs, idx.indpred, ix.indexdef, am.amname +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/materialized_views.sql b/src/lib/sql/materialized_views.sql deleted file mode 100644 index 5281f7da..00000000 --- a/src/lib/sql/materialized_views.sql +++ /dev/null @@ -1,11 +0,0 @@ -select - c.oid::int8 as id, - n.nspname as schema, - c.relname as name, - c.relispopulated as is_populated, - obj_description(c.oid) as comment -from - pg_class c - join pg_namespace n on n.oid = c.relnamespace -where - c.relkind = 'm' diff --git a/src/lib/sql/materialized_views.sql.ts b/src/lib/sql/materialized_views.sql.ts new file mode 100644 index 00000000..aae179e8 --- /dev/null +++ b/src/lib/sql/materialized_views.sql.ts @@ -0,0 +1,24 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const MATERIALIZED_VIEWS_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { + materializedViewIdentifierFilter?: string + } +) => /* SQL */ ` +select + c.oid::int8 as id, + n.nspname as schema, + c.relname as name, + c.relispopulated as is_populated, + obj_description(c.oid) as comment +from + pg_class c + join pg_namespace n on n.oid = c.relnamespace +where + ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} + ${props.idsFilter ? `c.oid ${props.idsFilter} AND` : ''} + ${props.materializedViewIdentifierFilter ? `(n.nspname || '.' || c.relname) ${props.materializedViewIdentifierFilter} AND` : ''} + c.relkind = 'm' +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/policies.sql b/src/lib/sql/policies.sql.ts similarity index 66% rename from src/lib/sql/policies.sql rename to src/lib/sql/policies.sql.ts index 20a09327..9e354931 100644 --- a/src/lib/sql/policies.sql +++ b/src/lib/sql/policies.sql.ts @@ -1,3 +1,8 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const POLICIES_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { functionNameIdentifierFilter?: string } +) => /* SQL */ ` SELECT pol.oid :: int8 AS id, n.nspname AS schema, @@ -40,3 +45,10 @@ FROM pg_policy pol JOIN pg_class c ON c.oid = pol.polrelid LEFT JOIN pg_namespace n ON n.oid = c.relnamespace +WHERE + ${props.schemaFilter ? `n.nspname ${props.schemaFilter}` : 'true'} + ${props.idsFilter ? `AND pol.oid ${props.idsFilter}` : ''} + ${props.functionNameIdentifierFilter ? `AND (c.relname || '.' || pol.polname) ${props.functionNameIdentifierFilter}` : ''} +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/publications.sql b/src/lib/sql/publications.sql.ts similarity index 67% rename from src/lib/sql/publications.sql rename to src/lib/sql/publications.sql.ts index ed0a2e20..cd04e05b 100644 --- a/src/lib/sql/publications.sql +++ b/src/lib/sql/publications.sql.ts @@ -1,3 +1,8 @@ +import type { SQLQueryPropsWithIdsFilter } from './common.js' + +export const PUBLICATIONS_SQL = ( + props: SQLQueryPropsWithIdsFilter & { nameFilter?: string } +) => /* SQL */ ` SELECT p.oid :: int8 AS id, p.pubname AS name, @@ -34,3 +39,9 @@ FROM WHERE pr.prpubid = p.oid ) AS pr ON 1 = 1 +WHERE + ${props.idsFilter ? `p.oid ${props.idsFilter}` : 'true'} + ${props.nameFilter ? `AND p.pubname ${props.nameFilter}` : ''} +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/roles.sql b/src/lib/sql/roles.sql.ts similarity index 52% rename from src/lib/sql/roles.sql rename to src/lib/sql/roles.sql.ts index a0c79d6f..b3d29358 100644 --- a/src/lib/sql/roles.sql +++ b/src/lib/sql/roles.sql.ts @@ -1,3 +1,11 @@ +import type { SQLQueryPropsWithIdsFilter } from './common.js' + +export const ROLES_SQL = ( + props: SQLQueryPropsWithIdsFilter & { + includeDefaultRoles?: boolean + nameFilter?: string + } +) => /* SQL */ ` -- TODO: Consider using pg_authid vs. pg_roles for unencrypted password field SELECT oid :: int8 AS id, @@ -25,3 +33,12 @@ SELECT rolconfig AS config FROM pg_roles +WHERE + ${props.idsFilter ? `oid ${props.idsFilter}` : 'true'} + -- All default/predefined roles start with pg_: https://www.postgresql.org/docs/15/predefined-roles.html + -- The pg_ prefix is also reserved. + ${!props.includeDefaultRoles ? `AND NOT pg_catalog.starts_with(rolname, 'pg_')` : ''} + ${props.nameFilter ? `AND rolname ${props.nameFilter}` : ''} +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/schemas.sql b/src/lib/sql/schemas.sql deleted file mode 100644 index a4859fff..00000000 --- a/src/lib/sql/schemas.sql +++ /dev/null @@ -1,17 +0,0 @@ --- Adapted from information_schema.schemata - -select - n.oid::int8 as id, - n.nspname as name, - u.rolname as owner -from - pg_namespace n, - pg_roles u -where - n.nspowner = u.oid - and ( - pg_has_role(n.nspowner, 'USAGE') - or has_schema_privilege(n.oid, 'CREATE, USAGE') - ) - and not pg_catalog.starts_with(n.nspname, 'pg_temp_') - and not pg_catalog.starts_with(n.nspname, 'pg_toast_temp_') diff --git a/src/lib/sql/schemas.sql.ts b/src/lib/sql/schemas.sql.ts new file mode 100644 index 00000000..a9e5d85b --- /dev/null +++ b/src/lib/sql/schemas.sql.ts @@ -0,0 +1,27 @@ +import type { SQLQueryProps } from './common.js' + +export const SCHEMAS_SQL = ( + props: SQLQueryProps & { nameFilter?: string; idsFilter?: string; includeSystemSchemas?: boolean } +) => /* SQL */ ` +-- Adapted from information_schema.schemata +select + n.oid::int8 as id, + n.nspname as name, + u.rolname as owner +from + pg_namespace n, + pg_roles u +where + n.nspowner = u.oid + ${props.idsFilter ? `and n.oid ${props.idsFilter}` : ''} + ${props.nameFilter ? `and n.nspname ${props.nameFilter}` : ''} + ${!props.includeSystemSchemas ? `and not pg_catalog.starts_with(n.nspname, 'pg_')` : ''} + and ( + pg_has_role(n.nspowner, 'USAGE') + or has_schema_privilege(n.oid, 'CREATE, USAGE') + ) + and not pg_catalog.starts_with(n.nspname, 'pg_temp_') + and not pg_catalog.starts_with(n.nspname, 'pg_toast_temp_') +${props.limit ? 
`limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/tables.sql b/src/lib/sql/table.sql.ts similarity index 73% rename from src/lib/sql/tables.sql rename to src/lib/sql/table.sql.ts index d0bb9df3..d7f70331 100644 --- a/src/lib/sql/tables.sql +++ b/src/lib/sql/table.sql.ts @@ -1,3 +1,8 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const TABLES_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { tableIdentifierFilter?: string } +) => /* SQL */ ` SELECT c.oid :: int8 AS id, nc.nspname AS schema, @@ -41,6 +46,8 @@ FROM pg_attribute a, pg_namespace n where + ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} + ${props.tableIdentifierFilter ? `n.nspname || '.' || c.relname ${props.tableIdentifierFilter} AND` : ''} i.indrelid = c.oid and c.relnamespace = n.oid and a.attrelid = c.oid @@ -73,11 +80,16 @@ FROM join pg_namespace nta on cta.relnamespace = nta.oid ) on ta.attrelid = c.confrelid and ta.attnum = any (c.confkey) where + ${props.schemaFilter ? `nsa.nspname ${props.schemaFilter} OR nta.nspname ${props.schemaFilter} AND` : ''} + ${props.tableIdentifierFilter ? `(nsa.nspname || '.' || csa.relname) ${props.tableIdentifierFilter} OR (nta.nspname || '.' || cta.relname) ${props.tableIdentifierFilter} AND` : ''} c.contype = 'f' ) as relationships on (relationships.source_schema = nc.nspname and relationships.source_table_name = c.relname) or (relationships.target_table_schema = nc.nspname and relationships.target_table_name = c.relname) WHERE + ${props.schemaFilter ? `nc.nspname ${props.schemaFilter} AND` : ''} + ${props.idsFilter ? `c.oid ${props.idsFilter} AND` : ''} + ${props.tableIdentifierFilter ? `nc.nspname || '.' || c.relname ${props.tableIdentifierFilter} AND` : ''} c.relkind IN ('r', 'p') AND NOT pg_is_other_temp_schema(nc.oid) AND ( @@ -96,3 +108,6 @@ group by c.relreplident, nc.nspname, pk.primary_keys +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/table_privileges.sql b/src/lib/sql/table_privileges.sql.ts similarity index 74% rename from src/lib/sql/table_privileges.sql rename to src/lib/sql/table_privileges.sql.ts index 435409dc..ca4ea122 100644 --- a/src/lib/sql/table_privileges.sql +++ b/src/lib/sql/table_privileges.sql.ts @@ -1,4 +1,11 @@ --- Despite the name `table_privileges`, this includes other kinds of relations: +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const TABLE_PRIVILEGES_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { + nameIdentifierFilter?: string + } +) => /* SQL */ ` +-- Despite the name \`table_privileges\`, this includes other kinds of relations: -- views, matviews, etc. "Relation privileges" just doesn't roll off the tongue. -- -- For each relation, get its relacl in a jsonb format, @@ -59,6 +66,9 @@ left join ( ) as grantee (oid, rolname) on grantee.oid = _priv.grantee where c.relkind in ('r', 'v', 'm', 'f', 'p') + ${props.schemaFilter ? `and nc.nspname ${props.schemaFilter}` : ''} + ${props.idsFilter ? `and c.oid ${props.idsFilter}` : ''} + ${props.nameIdentifierFilter ? `and (nc.nspname || '.' || c.relname) ${props.nameIdentifierFilter}` : ''} and not pg_is_other_temp_schema(c.relnamespace) and ( pg_has_role(c.relowner, 'USAGE') @@ -73,3 +83,6 @@ group by nc.nspname, c.relname, c.relkind +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/table_relationships.sql b/src/lib/sql/table_relationships.sql.ts similarity index 80% rename from src/lib/sql/table_relationships.sql rename to src/lib/sql/table_relationships.sql.ts index 53b80ded..d74c12a4 100644 --- a/src/lib/sql/table_relationships.sql +++ b/src/lib/sql/table_relationships.sql.ts @@ -1,3 +1,6 @@ +import type { SQLQueryPropsWithSchemaFilter } from './common.js' + +export const TABLE_RELATIONSHIPS_SQL = (props: SQLQueryPropsWithSchemaFilter) => /* SQL */ ` -- Adapted from -- https://github.com/PostgREST/postgrest/blob/f9f0f79fa914ac00c11fbf7f4c558e14821e67e2/src/PostgREST/SchemaCache.hs#L722 WITH @@ -15,6 +18,7 @@ pks_uniques_cols AS ( WHERE contype IN ('p', 'u') and connamespace::regnamespace::text <> 'pg_catalog' + ${props.schemaFilter ? `and connamespace::regnamespace::text ${props.schemaFilter}` : ''} GROUP BY connamespace, conrelid ) SELECT @@ -34,6 +38,7 @@ JOIN LATERAL ( FROM unnest(traint.conkey, traint.confkey) WITH ORDINALITY AS _(col, ref, ord) JOIN pg_attribute cols ON cols.attrelid = traint.conrelid AND cols.attnum = col JOIN pg_attribute refs ON refs.attrelid = traint.confrelid AND refs.attnum = ref + WHERE ${props.schemaFilter ? `traint.connamespace::regnamespace::text ${props.schemaFilter}` : 'true'} ) AS column_info ON TRUE JOIN pg_namespace ns1 ON ns1.oid = traint.connamespace JOIN pg_class tab ON tab.oid = traint.conrelid @@ -42,3 +47,5 @@ JOIN pg_namespace ns2 ON ns2.oid = other.relnamespace LEFT JOIN pks_uniques_cols pks_uqs ON pks_uqs.connamespace = traint.connamespace AND pks_uqs.conrelid = traint.conrelid WHERE traint.contype = 'f' AND traint.conparentid = 0 +${props.schemaFilter ? `and ns1.nspname ${props.schemaFilter}` : ''} +` diff --git a/src/lib/sql/triggers.sql b/src/lib/sql/triggers.sql.ts similarity index 62% rename from src/lib/sql/triggers.sql rename to src/lib/sql/triggers.sql.ts index 09fcef14..5580373e 100644 --- a/src/lib/sql/triggers.sql +++ b/src/lib/sql/triggers.sql.ts @@ -1,3 +1,11 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const TRIGGERS_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { + tableNameFilter?: string + nameFilter?: string + } +) => /* SQL */ ` SELECT pg_t.oid AS id, pg_t.tgrelid AS table_id, @@ -6,10 +14,10 @@ SELECT WHEN pg_t.tgenabled = 'O' THEN 'ORIGIN' WHEN pg_t.tgenabled = 'R' THEN 'REPLICA' WHEN pg_t.tgenabled = 'A' THEN 'ALWAYS' - END AS enabled_mode, + END AS enabled_mode, ( STRING_TO_ARRAY( - ENCODE(pg_t.tgargs, 'escape'), '\000' + ENCODE(pg_t.tgargs, 'escape'), '\\000' ) )[:pg_t.tgnargs] AS function_args, is_t.trigger_name AS name, @@ -26,6 +34,8 @@ FROM JOIN pg_class AS pg_c ON pg_t.tgrelid = pg_c.oid +JOIN pg_namespace AS table_ns +ON pg_c.relnamespace = table_ns.oid JOIN information_schema.triggers AS is_t ON is_t.trigger_name = pg_t.tgname AND pg_c.relname = is_t.event_object_table @@ -34,6 +44,11 @@ JOIN pg_proc AS pg_p ON pg_t.tgfoid = pg_p.oid JOIN pg_namespace AS pg_n ON pg_p.pronamespace = pg_n.oid +WHERE + ${props.schemaFilter ? `table_ns.nspname ${props.schemaFilter}` : 'true'} + ${props.tableNameFilter ? `AND pg_c.relname ${props.tableNameFilter}` : ''} + ${props.nameFilter ? `AND is_t.trigger_name ${props.nameFilter}` : ''} + ${props.idsFilter ? `AND pg_t.oid ${props.idsFilter}` : ''} GROUP BY pg_t.oid, pg_t.tgrelid, @@ -48,3 +63,6 @@ GROUP BY is_t.action_timing, pg_p.proname, pg_n.nspname +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/types.sql b/src/lib/sql/types.sql deleted file mode 100644 index 7a628ed1..00000000 --- a/src/lib/sql/types.sql +++ /dev/null @@ -1,35 +0,0 @@ -select - t.oid::int8 as id, - t.typname as name, - n.nspname as schema, - format_type (t.oid, null) as format, - coalesce(t_enums.enums, '[]') as enums, - coalesce(t_attributes.attributes, '[]') as attributes, - obj_description (t.oid, 'pg_type') as comment -from - pg_type t - left join pg_namespace n on n.oid = t.typnamespace - left join ( - select - enumtypid, - jsonb_agg(enumlabel order by enumsortorder) as enums - from - pg_enum - group by - enumtypid - ) as t_enums on t_enums.enumtypid = t.oid - left join ( - select - oid, - jsonb_agg( - jsonb_build_object('name', a.attname, 'type_id', a.atttypid::int8) - order by a.attnum asc - ) as attributes - from - pg_class c - join pg_attribute a on a.attrelid = c.oid - where - c.relkind = 'c' and not a.attisdropped - group by - c.oid - ) as t_attributes on t_attributes.oid = t.typrelid diff --git a/src/lib/sql/types.sql.ts b/src/lib/sql/types.sql.ts new file mode 100644 index 00000000..990fa22f --- /dev/null +++ b/src/lib/sql/types.sql.ts @@ -0,0 +1,72 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const TYPES_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { + includeTableTypes?: boolean + includeArrayTypes?: boolean + } +) => /* SQL */ ` +select + t.oid::int8 as id, + t.typname as name, + n.nspname as schema, + format_type (t.oid, null) as format, + coalesce(t_enums.enums, '[]') as enums, + coalesce(t_attributes.attributes, '[]') as attributes, + obj_description (t.oid, 'pg_type') as comment +from + pg_type t + left join pg_namespace n on n.oid = t.typnamespace + left join ( + select + enumtypid, + jsonb_agg(enumlabel order by enumsortorder) as enums + from + pg_enum + group by + enumtypid + ) as t_enums on t_enums.enumtypid = t.oid + left join ( + select + oid, + jsonb_agg( + jsonb_build_object('name', a.attname, 'type_id', a.atttypid::int8) + order by a.attnum asc + ) as attributes + from + pg_class c + join pg_attribute a on a.attrelid = c.oid + where + c.relkind = 'c' and not a.attisdropped + group by + c.oid + ) as t_attributes on t_attributes.oid = t.typrelid + where + ( + t.typrelid = 0 + or ( + select + c.relkind ${props.includeTableTypes ? `in ('c', 'r')` : `= 'c'`} + from + pg_class c + where + c.oid = t.typrelid + ) + ) + ${ + !props.includeArrayTypes + ? `and not exists ( + select + from + pg_type el + where + el.oid = t.typelem + and el.typarray = t.oid + )` + : '' + } + ${props.schemaFilter ? `and n.nspname ${props.schemaFilter}` : ''} + ${props.idsFilter ? `and t.oid ${props.idsFilter}` : ''} +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/version.sql b/src/lib/sql/version.sql.ts similarity index 84% rename from src/lib/sql/version.sql rename to src/lib/sql/version.sql.ts index ed7fab7e..f959c5fd 100644 --- a/src/lib/sql/version.sql +++ b/src/lib/sql/version.sql.ts @@ -1,3 +1,4 @@ +export const VERSION_SQL = () => /* SQL */ ` SELECT version(), current_setting('server_version_num') :: int8 AS version_number, @@ -8,3 +9,4 @@ SELECT pg_stat_activity ) AS active_connections, current_setting('max_connections') :: int8 AS max_connections +` diff --git a/src/lib/sql/views.sql b/src/lib/sql/views.sql deleted file mode 100644 index bd60da2b..00000000 --- a/src/lib/sql/views.sql +++ /dev/null @@ -1,12 +0,0 @@ -SELECT - c.oid :: int8 AS id, - n.nspname AS schema, - c.relname AS name, - -- See definition of information_schema.views - (pg_relation_is_updatable(c.oid, false) & 20) = 20 AS is_updatable, - obj_description(c.oid) AS comment -FROM - pg_class c - JOIN pg_namespace n ON n.oid = c.relnamespace -WHERE - c.relkind = 'v' diff --git a/src/lib/sql/views.sql.ts b/src/lib/sql/views.sql.ts new file mode 100644 index 00000000..95a707e2 --- /dev/null +++ b/src/lib/sql/views.sql.ts @@ -0,0 +1,25 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const VIEWS_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { + viewIdentifierFilter?: string + } +) => /* SQL */ ` +SELECT + c.oid :: int8 AS id, + n.nspname AS schema, + c.relname AS name, + -- See definition of information_schema.views + (pg_relation_is_updatable(c.oid, false) & 20) = 20 AS is_updatable, + obj_description(c.oid) AS comment +FROM + pg_class c + JOIN pg_namespace n ON n.oid = c.relnamespace +WHERE + ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} + ${props.idsFilter ? `c.oid ${props.idsFilter} AND` : ''} + ${props.viewIdentifierFilter ? `(n.nspname || '.' || c.relname) ${props.viewIdentifierFilter} AND` : ''} + c.relkind = 'v' +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/views_key_dependencies.sql b/src/lib/sql/views_key_dependencies.sql.ts similarity index 78% rename from src/lib/sql/views_key_dependencies.sql rename to src/lib/sql/views_key_dependencies.sql.ts index c8534486..31035012 100644 --- a/src/lib/sql/views_key_dependencies.sql +++ b/src/lib/sql/views_key_dependencies.sql.ts @@ -1,3 +1,6 @@ +import type { SQLQueryPropsWithSchemaFilter } from './common.js' + +export const VIEWS_KEY_DEPENDENCIES_SQL = (props: SQLQueryPropsWithSchemaFilter) => /* SQL */ ` -- Adapted from -- https://github.com/PostgREST/postgrest/blob/f9f0f79fa914ac00c11fbf7f4c558e14821e67e2/src/PostgREST/SchemaCache.hs#L820 with recursive @@ -25,6 +28,7 @@ pks_fks as ( from pg_constraint left join lateral unnest(confkey) with ordinality as _(col, ord) on true where contype='f' + ${props.schemaFilter ? `and connamespace::regnamespace::text ${props.schemaFilter}` : ''} ), views as ( select @@ -35,7 +39,8 @@ views as ( from pg_class c join pg_namespace n on n.oid = c.relnamespace join pg_rewrite r on r.ev_class = c.oid - where c.relkind in ('v', 'm') and n.nspname not in (__EXCLUDED_SCHEMAS) + where c.relkind in ('v', 'm') + ${props.schemaFilter ? 
`and n.nspname ${props.schemaFilter}` : ''} ), transform_json as ( select @@ -71,48 +76,48 @@ transform_json as ( -- ----------------------------------------------- -- pattern | replacement | flags -- ----------------------------------------------- - -- `<>` in pg_node_tree is the same as `null` in JSON, but due to very poor performance of json_typeof + -- <> in pg_node_tree is the same as null in JSON, but due to very poor performance of json_typeof -- we need to make this an empty array here to prevent json_array_elements from throwing an error -- when the targetList is null. -- We'll need to put it first, to make the node protection below work for node lists that start with - -- null: `(<> ...`, too. This is the case for coldefexprs, when the first column does not have a default value. + -- null: (<> ..., too. This is the case for coldefexprs, when the first column does not have a default value. '<>' , '()' - -- `,` is not part of the pg_node_tree format, but used in the regex. - -- This removes all `,` that might be part of column names. + -- , is not part of the pg_node_tree format, but used in the regex. + -- This removes all , that might be part of column names. ), ',' , '' - -- The same applies for `{` and `}`, although those are used a lot in pg_node_tree. + -- The same applies for { and }, although those are used a lot in pg_node_tree. -- We remove the escaped ones, which might be part of column names again. - ), E'\\{' , '' - ), E'\\}' , '' + ), E'\\\\{' , '' + ), E'\\\\}' , '' -- The fields we need are formatted as json manually to protect them from the regex. ), ' :targetList ' , ',"targetList":' ), ' :resno ' , ',"resno":' ), ' :resorigtbl ' , ',"resorigtbl":' ), ' :resorigcol ' , ',"resorigcol":' - -- Make the regex also match the node type, e.g. `{QUERY ...`, to remove it in one pass. + -- Make the regex also match the node type, e.g. \`{QUERY ...\`, to remove it in one pass. ), '{' , '{ :' - -- Protect node lists, which start with `({` or `((` from the greedy regex. - -- The extra `{` is removed again later. + -- Protect node lists, which start with \`({\` or \`((\` from the greedy regex. + -- The extra \`{\` is removed again later. ), '((' , '{((' ), '({' , '{({' -- This regex removes all unused fields to avoid the need to format all of them correctly. -- This leads to a smaller json result as well. - -- Removal stops at `,` for used fields (see above) and `}` for the end of the current node. - -- Nesting can't be parsed correctly with a regex, so we stop at `{` as well and + -- Removal stops at \`,\` for used fields (see above) and \`}\` for the end of the current node. + -- Nesting can't be parsed correctly with a regex, so we stop at \`{\` as well and -- add an empty key for the followig node. ), ' :[^}{,]+' , ',"":' , 'g' - -- For performance, the regex also added those empty keys when hitting a `,` or `}`. + -- For performance, the regex also added those empty keys when hitting a \`,\` or \`}\`. -- Those are removed next. ), ',"":}' , '}' ), ',"":,' , ',' -- This reverses the "node list protection" from above. ), '{(' , '(' - -- Every key above has been added with a `,` so far. The first key in an object doesn't need it. + -- Every key above has been added with a \`,\` so far. The first key in an object doesn't need it. 
), '{,' , '{' - -- pg_node_tree has `()` around lists, but JSON uses `[]` + -- pg_node_tree has \`()\` around lists, but JSON uses \`[]\` ), '(' , '[' ), ')' , ']' - -- pg_node_tree has ` ` between list items, but JSON uses `,` + -- pg_node_tree has \` \` between list items, but JSON uses \`,\` ), ' ' , ',' )::json as view_definition from views @@ -139,7 +144,7 @@ recursion(view_id, view_schema, view_name, view_column, resorigtbl, resorigcol, false, ARRAY[resorigtbl] from results r - where view_schema not in (__EXCLUDED_SCHEMAS) + where ${props.schemaFilter ? `view_schema ${props.schemaFilter}` : 'true'} union all select view.view_id, @@ -189,3 +194,4 @@ join pg_namespace sch on sch.oid = tbl.relnamespace group by sch.nspname, tbl.relname, rep.view_schema, rep.view_name, pks_fks.conname, pks_fks.contype, pks_fks.ncol -- make sure we only return key for which all columns are referenced in the view - no partial PKs or FKs having ncol = array_length(array_agg(row(col.attname, view_columns) order by pks_fks.ord), 1) +` diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 4f9cac03..03b407d4 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -32,7 +32,9 @@ export const apply = async ({ columns .filter((c) => c.table_id in columnsByTableId) .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - .forEach((c) => columnsByTableId[c.table_id].push(c)) + .forEach((c) => { + columnsByTableId[c.table_id].push(c) + }) const internal_supabase_schema = postgrestVersion ? `// Allows to automatically instantiate createClient with right options diff --git a/test/lib/functions.ts b/test/lib/functions.ts index 05de3244..fb2c4692 100644 --- a/test/lib/functions.ts +++ b/test/lib/functions.ts @@ -354,3 +354,52 @@ test('retrieve set-returning function', async () => { ` ) }) + +test('retrieve function by args filter - polymorphic function with text argument', async () => { + const res = await pgMeta.functions.retrieve({ + schema: 'public', + name: 'polymorphic_function', + args: ['text'], + }) + expect(res.data).toMatchObject({ + name: 'polymorphic_function', + schema: 'public', + argument_types: 'text', + args: [ + { type_id: 25, mode: 'in' }, // text type_id is 25 + ], + }) + expect(res.error).toBeNull() +}) + +test('retrieve function by args filter - polymorphic function with boolean argument', async () => { + const res = await pgMeta.functions.retrieve({ + schema: 'public', + name: 'polymorphic_function', + args: ['boolean'], + }) + expect(res.data).toMatchObject({ + name: 'polymorphic_function', + schema: 'public', + argument_types: 'boolean', + args: [ + { type_id: 16, mode: 'in' }, // boolean type_id is 16 + ], + }) + expect(res.error).toBeNull() +}) + +test('retrieve function by args filter - function with no arguments', async () => { + const res = await pgMeta.functions.retrieve({ + schema: 'public', + name: 'function_returning_set_of_rows', + args: [], + }) + expect(res.data).toMatchObject({ + name: 'function_returning_set_of_rows', + schema: 'public', + argument_types: '', + args: [], + }) + expect(res.error).toBeNull() +}) diff --git a/test/lib/tables.ts b/test/lib/tables.ts index c35546b8..00230ab4 100644 --- a/test/lib/tables.ts +++ b/test/lib/tables.ts @@ -81,39 +81,39 @@ test('list', async () => { { "check": null, "comment": null, - "data_type": "numeric", - "default_value": null, - "enums": [], - "format": "numeric", + "data_type": "USER-DEFINED", + "default_value": "'ACTIVE'::user_status", + "enums": [ + "ACTIVE", + 
"INACTIVE", + ], + "format": "user_status", "identity_generation": null, "is_generated": false, "is_identity": false, "is_nullable": true, "is_unique": false, "is_updatable": true, - "name": "decimal", - "ordinal_position": 4, + "name": "status", + "ordinal_position": 3, "schema": "public", "table": "users", }, { "check": null, "comment": null, - "data_type": "USER-DEFINED", - "default_value": "'ACTIVE'::user_status", - "enums": [ - "ACTIVE", - "INACTIVE", - ], - "format": "user_status", + "data_type": "numeric", + "default_value": null, + "enums": [], + "format": "numeric", "identity_generation": null, "is_generated": false, "is_identity": false, "is_nullable": true, "is_unique": false, "is_updatable": true, - "name": "status", - "ordinal_position": 3, + "name": "decimal", + "ordinal_position": 4, "schema": "public", "table": "users", }, From 4e899128e4944f9ffffb107525cc463c3aa4447f Mon Sep 17 00:00:00 2001 From: georgRusanov Date: Fri, 5 Sep 2025 10:13:02 +0200 Subject: [PATCH 30/42] chore: added tests (#979) * chore: added tests * chore: added more tests * chore: try to fix test on ci --------- Co-authored-by: georgiy.rusanov --- test/admin-app.test.ts | 17 ++++ test/app.test.ts | 31 ++++++ test/config.test.ts | 127 +++++++++++++++++++++++ test/extensions.test.ts | 144 ++++++++++++++++++++++++++ test/functions.test.ts | 205 ++++++++++++++++++++++++++++++++++++++ test/index.test.ts | 12 +++ test/lib/utils.ts | 4 +- test/publications.test.ts | 187 ++++++++++++++++++++++++++++++++++ test/roles.test.ts | 181 +++++++++++++++++++++++++++++++++ test/schemas.test.ts | 137 +++++++++++++++++++++++++ test/triggers.test.ts | 186 ++++++++++++++++++++++++++++++++++ test/types.test.ts | 46 +++++++++ test/utils.test.ts | 105 +++++++++++++++++++ test/views.test.ts | 51 ++++++++++ 14 files changed, 1432 insertions(+), 1 deletion(-) create mode 100644 test/admin-app.test.ts create mode 100644 test/app.test.ts create mode 100644 test/config.test.ts create mode 100644 test/extensions.test.ts create mode 100644 test/functions.test.ts create mode 100644 test/publications.test.ts create mode 100644 test/roles.test.ts create mode 100644 test/schemas.test.ts create mode 100644 test/triggers.test.ts create mode 100644 test/types.test.ts create mode 100644 test/utils.test.ts create mode 100644 test/views.test.ts diff --git a/test/admin-app.test.ts b/test/admin-app.test.ts new file mode 100644 index 00000000..0bc93e2e --- /dev/null +++ b/test/admin-app.test.ts @@ -0,0 +1,17 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/admin-app.js' + +describe('admin-app', () => { + test('should register metrics endpoint', async () => { + const app = build() + + // Test that the app can be started (this will trigger plugin registration) + await app.ready() + + // Verify that metrics endpoint is available + const routes = app.printRoutes() + expect(routes).toContain('metrics') + + await app.close() + }) +}) diff --git a/test/app.test.ts b/test/app.test.ts new file mode 100644 index 00000000..c705dd9b --- /dev/null +++ b/test/app.test.ts @@ -0,0 +1,31 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' + +describe('server/app', () => { + test('should handle root endpoint', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/', + }) + expect(response.statusCode).toBe(200) + const data = JSON.parse(response.body) + expect(data).toHaveProperty('status') + expect(data).toHaveProperty('name') + 
expect(data).toHaveProperty('version') + expect(data).toHaveProperty('documentation') + await app.close() + }) + + test('should handle health endpoint', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/health', + }) + expect(response.statusCode).toBe(200) + const data = JSON.parse(response.body) + expect(data).toHaveProperty('date') + await app.close() + }) +}) diff --git a/test/config.test.ts b/test/config.test.ts new file mode 100644 index 00000000..2736eb57 --- /dev/null +++ b/test/config.test.ts @@ -0,0 +1,127 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/config', () => { + test('should list config with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/config?limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(response.json()).toMatchInlineSnapshot(` + [ + { + "boot_val": "on", + "category": "Autovacuum", + "context": "sighup", + "enumvals": null, + "extra_desc": null, + "group": "Autovacuum", + "max_val": null, + "min_val": null, + "name": "autovacuum", + "pending_restart": false, + "reset_val": "on", + "setting": "on", + "short_desc": "Starts the autovacuum subprocess.", + "source": "default", + "sourcefile": null, + "sourceline": null, + "subgroup": "", + "unit": null, + "vartype": "bool", + }, + { + "boot_val": "0.1", + "category": "Autovacuum", + "context": "sighup", + "enumvals": null, + "extra_desc": null, + "group": "Autovacuum", + "max_val": "100", + "min_val": "0", + "name": "autovacuum_analyze_scale_factor", + "pending_restart": false, + "reset_val": "0.1", + "setting": "0.1", + "short_desc": "Number of tuple inserts, updates, or deletes prior to analyze as a fraction of reltuples.", + "source": "default", + "sourcefile": null, + "sourceline": null, + "subgroup": "", + "unit": null, + "vartype": "real", + }, + { + "boot_val": "50", + "category": "Autovacuum", + "context": "sighup", + "enumvals": null, + "extra_desc": null, + "group": "Autovacuum", + "max_val": "2147483647", + "min_val": "0", + "name": "autovacuum_analyze_threshold", + "pending_restart": false, + "reset_val": "50", + "setting": "50", + "short_desc": "Minimum number of tuple inserts, updates, or deletes prior to analyze.", + "source": "default", + "sourcefile": null, + "sourceline": null, + "subgroup": "", + "unit": null, + "vartype": "integer", + }, + { + "boot_val": "200000000", + "category": "Autovacuum", + "context": "postmaster", + "enumvals": null, + "extra_desc": null, + "group": "Autovacuum", + "max_val": "2000000000", + "min_val": "100000", + "name": "autovacuum_freeze_max_age", + "pending_restart": false, + "reset_val": "200000000", + "setting": "200000000", + "short_desc": "Age at which to autovacuum a table to prevent transaction ID wraparound.", + "source": "default", + "sourcefile": null, + "sourceline": null, + "subgroup": "", + "unit": null, + "vartype": "integer", + }, + { + "boot_val": "3", + "category": "Autovacuum", + "context": "postmaster", + "enumvals": null, + "extra_desc": null, + "group": "Autovacuum", + "max_val": "262143", + "min_val": "1", + "name": "autovacuum_max_workers", + "pending_restart": false, + "reset_val": "3", + "setting": "3", + "short_desc": "Sets the maximum number of simultaneously running autovacuum worker processes.", + "source": "default", + 
"sourcefile": null, + "sourceline": null, + "subgroup": "", + "unit": null, + "vartype": "integer", + }, + ] + `) + await app.close() + }) +}) diff --git a/test/extensions.test.ts b/test/extensions.test.ts new file mode 100644 index 00000000..f6966475 --- /dev/null +++ b/test/extensions.test.ts @@ -0,0 +1,144 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/extensions', () => { + test('should list extensions', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/extensions', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list extensions with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/extensions?limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent extension', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/extensions/non-existent-extension', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) + + test('should create extension, retrieve, update, delete', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/extensions', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { name: 'pgcrypto', version: '1.3' }, + }) + expect(response.statusCode).toBe(200) + expect(response.json()).toMatchInlineSnapshot(` + { + "comment": "cryptographic functions", + "default_version": "1.3", + "installed_version": "1.3", + "name": "pgcrypto", + "schema": "public", + } + `) + + const retrieveResponse = await app.inject({ + method: 'GET', + url: '/extensions/pgcrypto', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(retrieveResponse.statusCode).toBe(200) + expect(retrieveResponse.json()).toMatchInlineSnapshot(` + { + "comment": "cryptographic functions", + "default_version": "1.3", + "installed_version": "1.3", + "name": "pgcrypto", + "schema": "public", + } + `) + + const updateResponse = await app.inject({ + method: 'PATCH', + url: '/extensions/pgcrypto', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { schema: 'public' }, + }) + expect(updateResponse.statusCode).toBe(200) + expect(updateResponse.json()).toMatchInlineSnapshot(` + { + "comment": "cryptographic functions", + "default_version": "1.3", + "installed_version": "1.3", + "name": "pgcrypto", + "schema": "public", + } + `) + + const deleteResponse = await app.inject({ + method: 'DELETE', + url: '/extensions/pgcrypto', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(deleteResponse.statusCode).toBe(200) + expect(deleteResponse.json()).toMatchInlineSnapshot(` + { + "comment": "cryptographic functions", + "default_version": "1.3", + "installed_version": "1.3", + "name": "pgcrypto", + "schema": "public", + } + `) + + await app.close() + }) + + test('should return 400 for invalid extension name', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/extensions', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: 
{ name: 'invalid-extension', version: '1.3' }, + }) + expect(response.statusCode).toBe(400) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "could not open extension control file "/usr/share/postgresql/14/extension/invalid-extension.control": No such file or directory", + } + `) + await app.close() + }) +}) diff --git a/test/functions.test.ts b/test/functions.test.ts new file mode 100644 index 00000000..f37e850e --- /dev/null +++ b/test/functions.test.ts @@ -0,0 +1,205 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/functions', () => { + test('should list functions', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/functions', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list functions with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/functions?include_system_schemas=true&limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent function', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/functions/non-existent-function', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) + + test('should create function, retrieve, update, delete', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/functions', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_function', + schema: 'public', + language: 'plpgsql', + definition: 'BEGIN RETURN 42; END;', + return_type: 'integer', + }, + }) + expect(response.statusCode).toBe(200) + const responseData = response.json() + expect(responseData).toMatchObject({ + args: [], + argument_types: '', + behavior: 'VOLATILE', + complete_statement: expect.stringContaining( + 'CREATE OR REPLACE FUNCTION public.test_function()' + ), + config_params: null, + definition: 'BEGIN RETURN 42; END;', + id: expect.any(Number), + identity_argument_types: '', + is_set_returning_function: false, + language: 'plpgsql', + name: 'test_function', + return_type: 'integer', + return_type_id: 23, + return_type_relation_id: null, + schema: 'public', + security_definer: false, + }) + + const { id } = responseData + + const retrieveResponse = await app.inject({ + method: 'GET', + url: `/functions/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(retrieveResponse.statusCode).toBe(200) + const retrieveData = retrieveResponse.json() + expect(retrieveData).toMatchObject({ + args: [], + argument_types: '', + behavior: 'VOLATILE', + complete_statement: expect.stringContaining( + 'CREATE OR REPLACE FUNCTION public.test_function()' + ), + config_params: null, + definition: 'BEGIN RETURN 42; END;', + id: expect.any(Number), + identity_argument_types: '', + is_set_returning_function: false, + language: 'plpgsql', + name: 'test_function', + return_type: 'integer', + return_type_id: 23, + return_type_relation_id: null, + schema: 'public', + security_definer: false, + }) + + const updateResponse = 
await app.inject({ + method: 'PATCH', + url: `/functions/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_function', + schema: 'public', + language: 'plpgsql', + definition: 'BEGIN RETURN 50; END;', + return_type: 'integer', + }, + }) + expect(updateResponse.statusCode).toBe(200) + const updateData = updateResponse.json() + expect(updateData).toMatchObject({ + args: [], + argument_types: '', + behavior: 'VOLATILE', + complete_statement: expect.stringContaining( + 'CREATE OR REPLACE FUNCTION public.test_function()' + ), + config_params: null, + definition: 'BEGIN RETURN 50; END;', + id: expect.any(Number), + identity_argument_types: '', + is_set_returning_function: false, + language: 'plpgsql', + name: 'test_function', + return_type: 'integer', + return_type_id: 23, + return_type_relation_id: null, + schema: 'public', + security_definer: false, + }) + + const deleteResponse = await app.inject({ + method: 'DELETE', + url: `/functions/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(deleteResponse.statusCode).toBe(200) + const deleteData = deleteResponse.json() + expect(deleteData).toMatchObject({ + args: [], + argument_types: '', + behavior: 'VOLATILE', + complete_statement: expect.stringContaining( + 'CREATE OR REPLACE FUNCTION public.test_function()' + ), + config_params: null, + definition: 'BEGIN RETURN 50; END;', + id: expect.any(Number), + identity_argument_types: '', + is_set_returning_function: false, + language: 'plpgsql', + name: 'test_function', + return_type: 'integer', + return_type_id: 23, + return_type_relation_id: null, + schema: 'public', + security_definer: false, + }) + }) + + test('should return 400 for invalid payload', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/functions', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_function12', + }, + }) + expect(response.statusCode).toBe(400) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "syntax error at or near "NULL"", + } + `) + }) +}) diff --git a/test/index.test.ts b/test/index.test.ts index 6ca2b87e..d879d232 100644 --- a/test/index.test.ts +++ b/test/index.test.ts @@ -24,3 +24,15 @@ import './server/table-privileges' import './server/typegen' import './server/result-size-limit' import './server/query-timeout' +// New tests for increased coverage - commented out to avoid import issues +// import './server/app' +// import './server/utils' +// import './server/functions' +// import './server/config' +// import './server/extensions' +// import './server/publications' +// import './server/schemas' +// import './server/roles' +// import './server/triggers' +// import './server/types' +// import './server/views' diff --git a/test/lib/utils.ts b/test/lib/utils.ts index e4d48fe7..a88391ed 100644 --- a/test/lib/utils.ts +++ b/test/lib/utils.ts @@ -1,9 +1,11 @@ import { afterAll } from 'vitest' import { PostgresMeta } from '../../src/lib' +export const TEST_CONNECTION_STRING = 'postgresql://postgres:postgres@localhost:5432' + export const pgMeta = new PostgresMeta({ max: 1, - connectionString: 'postgresql://postgres:postgres@localhost:5432/postgres', + connectionString: TEST_CONNECTION_STRING, }) afterAll(() => pgMeta.end()) diff --git a/test/publications.test.ts b/test/publications.test.ts new file mode 100644 index 00000000..0687c9dd --- /dev/null +++ b/test/publications.test.ts @@ -0,0 +1,187 @@ +import { expect, test, describe } from 'vitest' +import { build } from 
'../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/publications', () => { + test('should list publications', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/publications', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list publications with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/publications?limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent publication', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/publications/non-existent-publication', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) + + test('should create publication, retrieve, update, delete', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/publications', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_publication', + publish_insert: true, + publish_update: true, + publish_delete: true, + publish_truncate: false, + tables: ['users'], + }, + }) + expect(response.statusCode).toBe(200) + const responseData = response.json() + expect(responseData).toMatchObject({ + id: expect.any(Number), + name: 'test_publication', + owner: 'postgres', + publish_delete: true, + publish_insert: true, + publish_truncate: false, + publish_update: true, + tables: [ + { + id: expect.any(Number), + name: 'users', + schema: 'public', + }, + ], + }) + + const { id } = responseData + + const retrieveResponse = await app.inject({ + method: 'GET', + url: `/publications/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(retrieveResponse.statusCode).toBe(200) + const retrieveData = retrieveResponse.json() + expect(retrieveData).toMatchObject({ + id: expect.any(Number), + name: 'test_publication', + owner: 'postgres', + publish_delete: true, + publish_insert: true, + publish_truncate: false, + publish_update: true, + tables: [ + { + id: expect.any(Number), + name: 'users', + schema: 'public', + }, + ], + }) + + const updateResponse = await app.inject({ + method: 'PATCH', + url: `/publications/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + publish_delete: false, + }, + }) + expect(updateResponse.statusCode).toBe(200) + const updateData = updateResponse.json() + expect(updateData).toMatchObject({ + id: expect.any(Number), + name: 'test_publication', + owner: 'postgres', + publish_delete: false, + publish_insert: true, + publish_truncate: false, + publish_update: true, + tables: [ + { + id: expect.any(Number), + name: 'users', + schema: 'public', + }, + ], + }) + + const deleteResponse = await app.inject({ + method: 'DELETE', + url: `/publications/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(deleteResponse.statusCode).toBe(200) + const deleteData = deleteResponse.json() + expect(deleteData).toMatchObject({ + id: expect.any(Number), + name: 'test_publication', + owner: 'postgres', + publish_delete: false, + publish_insert: true, + publish_truncate: false, + publish_update: true, + 
tables: [ + { + id: expect.any(Number), + name: 'users', + schema: 'public', + }, + ], + }) + }) + + test('should return 400 for invalid payload', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/publications', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_publication', + tables: ['non_existent_table'], + }, + }) + expect(response.statusCode).toBe(400) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "relation "non_existent_table" does not exist", + } + `) + }) +}) diff --git a/test/roles.test.ts b/test/roles.test.ts new file mode 100644 index 00000000..77b98c06 --- /dev/null +++ b/test/roles.test.ts @@ -0,0 +1,181 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/roles', () => { + test('should list roles', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/roles', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list roles with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/roles?limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent role', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/roles/non-existent-role', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) + + test('should create role, retrieve, update, delete', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/roles', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_role', + }, + }) + expect(response.statusCode).toBe(200) + const responseData = response.json() + expect(responseData).toMatchObject({ + active_connections: 0, + can_bypass_rls: false, + can_create_db: false, + can_create_role: false, + can_login: false, + config: null, + connection_limit: 100, + id: expect.any(Number), + inherit_role: true, + is_replication_role: false, + is_superuser: false, + name: 'test_role', + password: '********', + valid_until: null, + }) + + const { id } = responseData + + const retrieveResponse = await app.inject({ + method: 'GET', + url: `/roles/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(retrieveResponse.statusCode).toBe(200) + const retrieveData = retrieveResponse.json() + expect(retrieveData).toMatchObject({ + active_connections: 0, + can_bypass_rls: false, + can_create_db: false, + can_create_role: false, + can_login: false, + config: null, + connection_limit: 100, + id: expect.any(Number), + inherit_role: true, + is_replication_role: false, + is_superuser: false, + name: 'test_role', + password: '********', + valid_until: null, + }) + + const updateResponse = await app.inject({ + method: 'PATCH', + url: `/roles/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_role_updated', + }, + }) + expect(updateResponse.statusCode).toBe(200) + const updateData = updateResponse.json() + expect(updateData).toMatchObject({ + 
active_connections: 0, + can_bypass_rls: false, + can_create_db: false, + can_create_role: false, + can_login: false, + config: null, + connection_limit: 100, + id: expect.any(Number), + inherit_role: true, + is_replication_role: false, + is_superuser: false, + name: 'test_role_updated', + password: '********', + valid_until: null, + }) + + const deleteResponse = await app.inject({ + method: 'DELETE', + url: `/roles/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(deleteResponse.statusCode).toBe(200) + const deleteData = deleteResponse.json() + expect(deleteData).toMatchObject({ + active_connections: 0, + can_bypass_rls: false, + can_create_db: false, + can_create_role: false, + can_login: false, + config: null, + connection_limit: 100, + id: expect.any(Number), + inherit_role: true, + is_replication_role: false, + is_superuser: false, + name: 'test_role_updated', + password: '********', + valid_until: null, + }) + }) + + test('should return 400 for invalid payload', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/roles', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'pg_', + }, + }) + expect(response.statusCode).toBe(400) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "role name "pg_" is reserved", + } + `) + }) +}) diff --git a/test/schemas.test.ts b/test/schemas.test.ts new file mode 100644 index 00000000..73ed73c1 --- /dev/null +++ b/test/schemas.test.ts @@ -0,0 +1,137 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/schemas', () => { + test('should list schemas', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/schemas', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list schemas with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/schemas?include_system_schemas=true&limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent schema', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/schemas/non-existent-schema', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) + + test('should create schema, retrieve, update, delete', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/schemas', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_schema', + }, + }) + expect(response.statusCode).toBe(200) + const responseData = response.json() + expect(responseData).toMatchObject({ + id: expect.any(Number), + name: 'test_schema', + owner: 'postgres', + }) + + const { id } = responseData + + const retrieveResponse = await app.inject({ + method: 'GET', + url: `/schemas/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(retrieveResponse.statusCode).toBe(200) + const retrieveData = retrieveResponse.json() + expect(retrieveData).toMatchObject({ + id: expect.any(Number), + name: 'test_schema', + owner: 
'postgres', + }) + + const updateResponse = await app.inject({ + method: 'PATCH', + url: `/schemas/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_schema_updated', + }, + }) + expect(updateResponse.statusCode).toBe(200) + const updateData = updateResponse.json() + expect(updateData).toMatchObject({ + id: expect.any(Number), + name: 'test_schema_updated', + owner: 'postgres', + }) + + const deleteResponse = await app.inject({ + method: 'DELETE', + url: `/schemas/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(deleteResponse.statusCode).toBe(200) + const deleteData = deleteResponse.json() + expect(deleteData).toMatchObject({ + id: expect.any(Number), + name: 'test_schema_updated', + owner: 'postgres', + }) + }) + + test('should return 400 for invalid payload', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/schemas', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'pg_', + }, + }) + expect(response.statusCode).toBe(400) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "unacceptable schema name "pg_"", + } + `) + }) +}) diff --git a/test/triggers.test.ts b/test/triggers.test.ts new file mode 100644 index 00000000..c537093c --- /dev/null +++ b/test/triggers.test.ts @@ -0,0 +1,186 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/triggers', () => { + test('should list triggers', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/triggers', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list triggers with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/triggers?include_system_schemas=true&limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent trigger', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/triggers/non-existent-trigger', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) + + test('should create trigger, retrieve, update, delete', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/triggers', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_trigger1', + table: 'users_audit', + function_name: 'audit_action', + activation: 'AFTER', + events: ['UPDATE'], + }, + }) + expect(response.statusCode).toBe(200) + const responseData = response.json() + expect(responseData).toMatchObject({ + activation: 'AFTER', + condition: null, + enabled_mode: 'ORIGIN', + events: ['UPDATE'], + function_args: [], + function_name: 'audit_action', + function_schema: 'public', + id: expect.any(Number), + name: 'test_trigger1', + orientation: 'STATEMENT', + schema: 'public', + table: 'users_audit', + table_id: expect.any(Number), + }) + + const { id } = responseData + + const retrieveResponse = await app.inject({ + method: 'GET', + url: `/triggers/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + 
}) + expect(retrieveResponse.statusCode).toBe(200) + const retrieveData = retrieveResponse.json() + expect(retrieveData).toMatchObject({ + activation: 'AFTER', + condition: null, + enabled_mode: 'ORIGIN', + events: ['UPDATE'], + function_args: [], + function_name: 'audit_action', + function_schema: 'public', + id: expect.any(Number), + name: 'test_trigger1', + orientation: 'STATEMENT', + schema: 'public', + table: 'users_audit', + table_id: expect.any(Number), + }) + + const updateResponse = await app.inject({ + method: 'PATCH', + url: `/triggers/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_trigger1_updated', + enabled_mode: 'DISABLED', + }, + }) + expect(updateResponse.statusCode).toBe(200) + const updateData = updateResponse.json() + expect(updateData).toMatchObject({ + activation: 'AFTER', + condition: null, + enabled_mode: 'DISABLED', + events: ['UPDATE'], + function_args: [], + function_name: 'audit_action', + function_schema: 'public', + id: expect.any(Number), + name: 'test_trigger1_updated', + orientation: 'STATEMENT', + schema: 'public', + table: 'users_audit', + table_id: expect.any(Number), + }) + + const deleteResponse = await app.inject({ + method: 'DELETE', + url: `/triggers/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(deleteResponse.statusCode).toBe(200) + const deleteData = deleteResponse.json() + expect(deleteData).toMatchObject({ + activation: 'AFTER', + condition: null, + enabled_mode: 'DISABLED', + events: ['UPDATE'], + function_args: [], + function_name: 'audit_action', + function_schema: 'public', + id: expect.any(Number), + name: 'test_trigger1_updated', + orientation: 'STATEMENT', + schema: 'public', + table: 'users_audit', + table_id: expect.any(Number), + }) + }) + + test('should return 400 for invalid payload', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/triggers', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_trigger_invalid', + table: 'non_existent_table', + function_name: 'audit_action', + activation: 'AFTER', + events: ['UPDATE'], + }, + }) + expect(response.statusCode).toBe(400) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "relation "public.non_existent_table" does not exist", + } + `) + }) +}) diff --git a/test/types.test.ts b/test/types.test.ts new file mode 100644 index 00000000..df2af697 --- /dev/null +++ b/test/types.test.ts @@ -0,0 +1,46 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/types', () => { + test('should list types', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/types', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list types with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/types?include_system_schemas=true&limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent type', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: 
'/types/non-existent-type', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) +}) diff --git a/test/utils.test.ts b/test/utils.test.ts new file mode 100644 index 00000000..3d70b1a5 --- /dev/null +++ b/test/utils.test.ts @@ -0,0 +1,105 @@ +import { expect, test, describe } from 'vitest' +import { FastifyRequest } from 'fastify' +import { + extractRequestForLogging, + createConnectionConfig, + translateErrorToResponseCode, +} from '../src/server/utils.js' + +describe('server/utils', () => { + describe('extractRequestForLogging', () => { + test('should extract request information for logging', () => { + const mockRequest = { + method: 'GET', + url: '/test', + headers: { + 'user-agent': 'test-agent', + 'x-supabase-info': 'test-info', + }, + query: { param: 'value' }, + } as FastifyRequest + + const result = extractRequestForLogging(mockRequest) + expect(result).toHaveProperty('method') + expect(result).toHaveProperty('url') + expect(result).toHaveProperty('pg') + expect(result).toHaveProperty('opt') + }) + + test('should handle request with minimal properties', () => { + const mockRequest = { + method: 'POST', + url: '/api/test', + headers: {}, + } as FastifyRequest + + const result = extractRequestForLogging(mockRequest) + expect(result.method).toBe('POST') + expect(result.url).toBe('/api/test') + expect(result.pg).toBe('unknown') + }) + }) + + describe('createConnectionConfig', () => { + test('should create connection config from request headers', () => { + const mockRequest = { + headers: { + pg: 'postgresql://user:pass@localhost:5432/db', + 'x-pg-application-name': 'test-app', + }, + } as FastifyRequest + + const result = createConnectionConfig(mockRequest) + expect(result).toHaveProperty('connectionString') + expect(result).toHaveProperty('application_name') + expect(result.connectionString).toBe('postgresql://user:pass@localhost:5432/db') + expect(result.application_name).toBe('test-app') + }) + + test('should handle request without application name', () => { + const mockRequest = { + headers: { + pg: 'postgresql://user:pass@localhost:5432/db', + }, + } as FastifyRequest + + const result = createConnectionConfig(mockRequest) + expect(result).toHaveProperty('connectionString') + expect(result.connectionString).toBe('postgresql://user:pass@localhost:5432/db') + // application_name should have default value if not provided + expect(result.application_name).toBe('postgres-meta 0.0.0-automated') + }) + }) + + describe('translateErrorToResponseCode', () => { + test('should return 504 for connection timeout errors', () => { + const error = { message: 'Connection terminated due to connection timeout' } + const result = translateErrorToResponseCode(error) + expect(result).toBe(504) + }) + + test('should return 503 for too many clients errors', () => { + const error = { message: 'sorry, too many clients already' } + const result = translateErrorToResponseCode(error) + expect(result).toBe(503) + }) + + test('should return 408 for query timeout errors', () => { + const error = { message: 'Query read timeout' } + const result = translateErrorToResponseCode(error) + expect(result).toBe(408) + }) + + test('should return default 400 for other errors', () => { + const error = { message: 'database connection failed' } + const result = translateErrorToResponseCode(error) + expect(result).toBe(400) + }) + + test('should return custom default for other errors', () => { + const error = { message: 'some other error' } + const result = 
translateErrorToResponseCode(error, 500) + expect(result).toBe(500) + }) + }) +}) diff --git a/test/views.test.ts b/test/views.test.ts new file mode 100644 index 00000000..d713e919 --- /dev/null +++ b/test/views.test.ts @@ -0,0 +1,51 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/views', () => { + test('should list views', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/views', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list views with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/views?include_system_schemas=true&limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent view', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/views/1', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "Cannot find a view with ID 1", + } + `) + await app.close() + }) +}) From 479aa1cff51816f7d5d60ab7efd49cf65294e719 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Sep 2025 01:37:50 +0000 Subject: [PATCH 31/42] chore(deps): bump vite from 6.3.4 to 6.3.6 (#986) Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 6.3.4 to 6.3.6. - [Release notes](https://github.com/vitejs/vite/releases) - [Changelog](https://github.com/vitejs/vite/blob/v6.3.6/packages/vite/CHANGELOG.md) - [Commits](https://github.com/vitejs/vite/commits/v6.3.6/packages/vite) --- updated-dependencies: - dependency-name: vite dependency-version: 6.3.6 dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 752051a3..44c9457e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8175,9 +8175,9 @@ } }, "node_modules/vite": { - "version": "6.3.4", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.4.tgz", - "integrity": "sha512-BiReIiMS2fyFqbqNT/Qqt4CVITDU9M9vE+DKcVAsB+ZV0wvTKd+3hMbkpxz1b+NmEDMegpVbisKiAZOnvO92Sw==", + "version": "6.3.6", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.6.tgz", + "integrity": "sha512-0msEVHJEScQbhkbVTb/4iHZdJ6SXp/AvxL2sjwYQFfBqleHtnCqv1J3sa9zbWz/6kW1m9Tfzn92vW+kZ1WV6QA==", "dev": true, "license": "MIT", "dependencies": { From 0300c491f5549f36960d7830236ab0cf0cfaf48e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Sep 2025 01:41:34 +0000 Subject: [PATCH 32/42] chore(deps): bump brace-expansion (#987) Bumps and [brace-expansion](https://github.com/juliangruber/brace-expansion). These dependencies needed to be updated together. 
Updates `brace-expansion` from 1.1.11 to 1.1.12 - [Release notes](https://github.com/juliangruber/brace-expansion/releases) - [Commits](https://github.com/juliangruber/brace-expansion/compare/1.1.11...v1.1.12) Updates `brace-expansion` from 2.0.1 to 2.0.2 - [Release notes](https://github.com/juliangruber/brace-expansion/releases) - [Commits](https://github.com/juliangruber/brace-expansion/compare/1.1.11...v1.1.12) --- updated-dependencies: - dependency-name: brace-expansion dependency-version: 1.1.12 dependency-type: indirect - dependency-name: brace-expansion dependency-version: 2.0.2 dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/package-lock.json b/package-lock.json index 44c9457e..75d2da17 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2601,9 +2601,9 @@ "license": "MIT" }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", @@ -6861,9 +6861,9 @@ } }, "node_modules/rimraf/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -7717,9 +7717,9 @@ } }, "node_modules/test-exclude/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { From 8b5b74f61bc4f1b7f82c02d1f374ce7744795b64 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Tue, 16 Sep 2025 17:52:57 +0200 Subject: [PATCH 33/42] ci: canary deploy process (#988) * ci: canary deploy process * chore: upgrade a dep * chore: reword canary comment --- .github/workflows/canary-comment.yml | 129 +++++++++++++++++++++ .github/workflows/canary-deploy.yml | 123 ++++++++++++++++++++ CONTRIBUTING.md | 25 ++++- package-lock.json | 161 +++++---------------------- package.json | 2 +- 5 files changed, 302 insertions(+), 138 deletions(-) create mode 100644 .github/workflows/canary-comment.yml create mode 100644 .github/workflows/canary-deploy.yml diff --git a/.github/workflows/canary-comment.yml b/.github/workflows/canary-comment.yml new file mode 100644 index 00000000..aae0a7dc --- /dev/null +++ 
b/.github/workflows/canary-comment.yml @@ -0,0 +1,129 @@ +name: Update Canary PR Comment + +permissions: + pull-requests: write + actions: read + +on: + workflow_run: + workflows: ['Canary Deploy'] + types: [completed] + +jobs: + update-comment: + # Only run on the correct repository + if: github.repository == 'supabase/postgres-meta' + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + # Get PR number from the workflow run + - name: Get PR info + id: pr-info + uses: actions/github-script@v7 + with: + script: | + // Get the workflow run details + const workflowRun = context.payload.workflow_run; + + // Find associated PR + const prs = await github.rest.pulls.list({ + owner: context.repo.owner, + repo: context.repo.repo, + state: 'open', + head: `${workflowRun.head_repository.owner.login}:${workflowRun.head_branch}` + }); + + if (prs.data.length > 0) { + const pr = prs.data[0]; + core.setOutput('pr_number', pr.number); + core.setOutput('found', 'true'); + console.log(`Found PR #${pr.number}`); + } else { + core.setOutput('found', 'false'); + console.log('No associated PR found'); + } + + # Only continue if we found a PR and the workflow succeeded + - name: Download canary info + if: ${{ steps.pr-info.outputs.found == 'true' && github.event.workflow_run.conclusion == 'success' }} + uses: actions/download-artifact@v4 + with: + name: canary-info + path: canary-info/ + run-id: ${{ github.event.workflow_run.id }} + continue-on-error: true + + - name: Read canary info + if: ${{ steps.pr-info.outputs.found == 'true' && github.event.workflow_run.conclusion == 'success' }} + id: canary-info + run: | + if [ -f "canary-info/canary-tags.txt" ]; then + # Read the first tag (DockerHub) from the tags + FIRST_TAG=$(head -n1 canary-info/canary-tags.txt) + echo "tag=$FIRST_TAG" >> $GITHUB_OUTPUT + echo "found=true" >> $GITHUB_OUTPUT + echo "commit-sha=$(cat canary-info/commit-sha.txt)" >> $GITHUB_OUTPUT + else + echo "found=false" >> $GITHUB_OUTPUT + fi + continue-on-error: true + + # Find existing comment + - name: Find existing comment + if: ${{ steps.pr-info.outputs.found == 'true' }} + uses: peter-evans/find-comment@v3 + id: find-comment + with: + issue-number: ${{ steps.pr-info.outputs.pr_number }} + comment-author: 'github-actions[bot]' + body-includes: '' + + # Create or update comment based on workflow status + - name: Create or update canary comment + if: ${{ steps.pr-info.outputs.found == 'true' }} + uses: peter-evans/create-or-update-comment@v4 + with: + comment-id: ${{ steps.find-comment.outputs.comment-id }} + issue-number: ${{ steps.pr-info.outputs.pr_number }} + body: | + + ## 🚀 Canary Deployment Status + + ${{ github.event.workflow_run.conclusion == 'success' && steps.canary-info.outputs.found == 'true' && format('✅ **Canary image deployed successfully!** + + 🐳 **Docker Image:** `{0}` + 📝 **Commit:** `{1}` + + You can test this canary deployment by pulling the image: + ```bash + docker pull {0} + ``` + + You can also set the version in a supabase local project by running: + ```bash + echo "{0}" > supabase/.temp/pgmeta-version + ``` + + Or use it in your docker-compose.yml: + ```yaml + services: + postgres-meta: + image: {0} + # ... 
other configuration + ``` + + The canary image is available on: + - 🐳 [Docker Hub](https://hub.docker.com/r/supabase/postgres-meta) + - 📦 [GitHub Container Registry](https://ghcr.io/supabase/postgres-meta) + - ☁️ [AWS ECR Public](https://gallery.ecr.aws/supabase/postgres-meta) + ', steps.canary-info.outputs.tag, steps.canary-info.outputs.commit-sha) || '' }} + + ${{ github.event.workflow_run.conclusion == 'failure' && '❌ **Canary deployment failed** + + Please check the [workflow logs](' }}${{ github.event.workflow_run.conclusion == 'failure' && github.event.workflow_run.html_url || '' }}${{ github.event.workflow_run.conclusion == 'failure' && ') for more details. + + Make sure your PR has the `deploy-canary` label and targets the `master` branch.' || '' }} + + --- + Last updated: ${{ github.event.workflow_run.updated_at }} + edit-mode: replace diff --git a/.github/workflows/canary-deploy.yml b/.github/workflows/canary-deploy.yml new file mode 100644 index 00000000..872fcc41 --- /dev/null +++ b/.github/workflows/canary-deploy.yml @@ -0,0 +1,123 @@ +name: Canary Deploy + +permissions: + contents: read + pull-requests: read + packages: write + id-token: write + +on: + pull_request: + types: [opened, synchronize, labeled] + paths: + - 'src/**' + - 'package.json' + - 'package-lock.json' + - 'tsconfig.json' + - 'Dockerfile' + +jobs: + build-canary: + # Only run if PR has the 'deploy-canary' label, is on the correct repository, and targets master branch + if: | + github.repository == 'supabase/postgres-meta' && + github.event.pull_request.base.ref == 'master' && + contains(github.event.pull_request.labels.*.name, 'deploy-canary') + runs-on: ubuntu-22.04 + timeout-minutes: 30 + outputs: + canary-tag: ${{ steps.meta.outputs.tags }} + pr-number: ${{ github.event.pull_request.number }} + steps: + # Checkout fork code - safe because no secrets are available for building + - name: Checkout code + uses: actions/checkout@v5 + + # Log PR author for auditing + - name: Log PR author + run: | + echo "Canary deploy triggered by: ${{ github.event.pull_request.user.login }}" + echo "PR #${{ github.event.pull_request.number }} from fork: ${{ github.event.pull_request.head.repo.full_name }}" + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version-file: '.nvmrc' + cache: 'npm' + + - name: Install dependencies and build + run: | + npm clean-install + npm run build + + # Generate canary tag + - id: meta + uses: docker/metadata-action@v5 + with: + images: | + supabase/postgres-meta + public.ecr.aws/supabase/postgres-meta + ghcr.io/supabase/postgres-meta + tags: | + type=raw,value=canary-pr-${{ github.event.pull_request.number }}-${{ github.event.pull_request.head.sha }} + type=raw,value=canary-pr-${{ github.event.pull_request.number }} + + - uses: docker/setup-qemu-action@v3 + with: + platforms: amd64,arm64 + - uses: docker/setup-buildx-action@v3 + + - name: Login to DockerHub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: configure aws credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.PROD_AWS_ROLE }} + aws-region: us-east-1 + + - name: Login to ECR + uses: docker/login-action@v3 + with: + registry: public.ecr.aws + + - name: Login to GHCR + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push canary image + uses: docker/build-push-action@v6 + with: + 
context: . + push: true + platforms: linux/amd64,linux/arm64 + tags: ${{ steps.meta.outputs.tags }} + labels: | + org.opencontainers.image.title=postgres-meta-canary + org.opencontainers.image.description=Canary build for PR #${{ github.event.pull_request.number }} + org.opencontainers.image.source=${{ github.event.pull_request.head.repo.html_url }} + org.opencontainers.image.revision=${{ github.event.pull_request.head.sha }} + canary.pr.number=${{ github.event.pull_request.number }} + canary.pr.author=${{ github.event.pull_request.user.login }} + + # Save canary info for the comment workflow + - name: Save canary info + run: | + mkdir -p canary-info + echo "${{ steps.meta.outputs.tags }}" > canary-info/canary-tags.txt + echo "${{ github.event.pull_request.number }}" > canary-info/pr-number.txt + echo "${{ github.event.pull_request.head.sha }}" > canary-info/commit-sha.txt + echo "postgres-meta" > canary-info/package-name.txt + + - name: Upload canary info + uses: actions/upload-artifact@v4 + with: + name: canary-info + path: canary-info/ + retention-days: 7 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c7f56a71..c6d17b80 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -11,4 +11,27 @@ 2. Run the tests: `npm run test:run` 3. Make changes in code (`/src`) and tests (`/test/lib` and `/test/server`) 4. Run the tests again: `npm run test:run` -5. Commit + PR \ No newline at end of file +5. Commit + PR + +### Canary Deployments + +For testing your changes when they impact other things (like type generation and postgrest-js), you can deploy a canary version of postgres-meta: + +1. **Create a Pull Request** targeting the `master` branch +2. **Add the `deploy-canary` label** to your PR +3. **Wait for the canary build** - GitHub Actions will automatically build and push a canary Docker image +4. **Use the canary image** - The bot will comment on your PR with the exact image tag and usage instructions + +The canary image will be tagged as: + +- `supabase/postgres-meta:canary-pr-{PR_NUMBER}-{COMMIT_SHA}` +- `supabase/postgres-meta:canary-pr-{PR_NUMBER}` + +Example usage: + +```bash +docker pull supabase/postgres-meta:canary-pr-123-abc1234 +echo "canary-pr-123-abc1234" > supabase/.temp/pgmeta-version +``` + +**Note:** Only maintainers can add the `deploy-canary` label for security reasons. The canary deployment requires access to production Docker registries. 
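Once a canary container is running, a quick way to confirm it behaves like the server exercised by the tests in this patch series is to hit a couple of routes over HTTP. The sketch below is illustrative only: the base URL, the port (8080), and the connection string are assumptions you should adjust for your environment, while the `/health` and `/schemas` routes and the `pg` header follow the conventions shown in the test files above.

```typescript
// smoke-test-canary.ts — minimal sanity check against a locally running canary
// container. The port (8080) and the Postgres credentials below are
// placeholders for this sketch, not values mandated by the project.
const BASE_URL = process.env.PG_META_URL ?? 'http://localhost:8080'
const PG =
  process.env.PG_CONNECTION ?? 'postgresql://postgres:postgres@localhost:5432/postgres'

async function main(): Promise<void> {
  // /health needs no connection string and should return a payload with a `date` field.
  const health = await fetch(`${BASE_URL}/health`)
  console.log('health:', health.status, await health.json())

  // Routed endpoints read the connection string from the `pg` header,
  // mirroring the test suite in this patch.
  const schemas = await fetch(`${BASE_URL}/schemas`, { headers: { pg: PG } })
  const body = await schemas.json()
  console.log('schemas:', schemas.status, Array.isArray(body) ? `${body.length} rows` : body)
}

main().catch((err) => {
  console.error(err)
  process.exit(1)
})
```

Run it with ts-node (already a dev dependency) or any other TypeScript runner; a non-200 response or an empty list usually points at the `pg` connection string rather than at the canary image itself.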
diff --git a/package-lock.json b/package-lock.json index 75d2da17..cd6f7b63 100644 --- a/package-lock.json +++ b/package-lock.json @@ -38,7 +38,7 @@ "cpy-cli": "^5.0.0", "nodemon": "^3.1.7", "npm-run-all": "^4.1.5", - "pino-pretty": "^12.0.0", + "pino-pretty": "^13.1.1", "rimraf": "^6.0.1", "ts-node": "^10.9.1", "typescript": "^5.6.3", @@ -2230,19 +2230,6 @@ "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", "license": "ISC" }, - "node_modules/abort-controller": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", - "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", - "dev": true, - "license": "MIT", - "dependencies": { - "event-target-shim": "^5.0.0" - }, - "engines": { - "node": ">=6.5" - } - }, "node_modules/abstract-logging": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/abstract-logging/-/abstract-logging-2.0.1.tgz", @@ -2551,27 +2538,6 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "license": "MIT" }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, "node_modules/big-integer": { "version": "1.6.52", "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.52.tgz", @@ -2623,31 +2589,6 @@ "node": ">=8" } }, - "node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, "node_modules/cac": { "version": "6.7.14", "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", @@ -3500,26 +3441,6 @@ "@types/estree": "^1.0.0" } }, - "node_modules/event-target-shim": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", - "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/events": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", - "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8.x" - } - }, "node_modules/expect-type": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.0.tgz", @@ -4277,27 +4198,6 @@ "node": ">=0.10.0" } }, - "node_modules/ieee754": { - "version": "1.2.1", - "resolved": 
"https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "BSD-3-Clause" - }, "node_modules/ignore": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", @@ -6329,9 +6229,9 @@ } }, "node_modules/pino-pretty": { - "version": "12.1.0", - "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-12.1.0.tgz", - "integrity": "sha512-Z7JdCPqggoRyo0saJyCe1BN8At5qE+ZBElNbyx+znCaCVN+ohOqlWb+/WSYnamzfi2e6P6pXq/3H66KwFQHXWg==", + "version": "13.1.1", + "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.1.1.tgz", + "integrity": "sha512-TNNEOg0eA0u+/WuqH0MH0Xui7uqVk9D74ESOpjtebSQYbNWJk/dIxCXIxFsNfeN53JmtWqYHP2OrIZjT/CBEnA==", "dev": true, "license": "MIT", "dependencies": { @@ -6345,31 +6245,30 @@ "on-exit-leak-free": "^2.1.0", "pino-abstract-transport": "^2.0.0", "pump": "^3.0.0", - "readable-stream": "^4.0.0", - "secure-json-parse": "^2.4.0", + "secure-json-parse": "^4.0.0", "sonic-boom": "^4.0.1", - "strip-json-comments": "^3.1.1" + "strip-json-comments": "^5.0.2" }, "bin": { "pino-pretty": "bin.js" } }, - "node_modules/pino-pretty/node_modules/readable-stream": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", - "integrity": "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==", + "node_modules/pino-pretty/node_modules/secure-json-parse": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-4.0.0.tgz", + "integrity": "sha512-dxtLJO6sc35jWidmLxo7ij+Eg48PM/kleBsxpC8QJE0qJICe+KawkDQmvCMZUr9u7WKVHgMW6vy3fQ7zMiFZMA==", "dev": true, - "license": "MIT", - "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" }, "node_modules/pino-std-serializers": { "version": "7.0.0", @@ -6513,16 +6412,6 @@ "prettier": "^3.0.3" } }, - "node_modules/process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6.0" - } - }, "node_modules/process-warning": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-3.0.0.tgz", @@ -7629,13 +7518,13 @@ } }, "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-5.0.3.tgz", + "integrity": 
"sha512-1tB5mhVo7U+ETBKNf92xT4hrQa3pm0MZ0PQvuDnWgAAGHDsfp4lPSpiS6psrSiet87wyGPh9ft6wmhOMQ0hDiw==", "dev": true, "license": "MIT", "engines": { - "node": ">=8" + "node": ">=14.16" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" diff --git a/package.json b/package.json index 62315e9f..941df965 100644 --- a/package.json +++ b/package.json @@ -67,7 +67,7 @@ "cpy-cli": "^5.0.0", "nodemon": "^3.1.7", "npm-run-all": "^4.1.5", - "pino-pretty": "^12.0.0", + "pino-pretty": "^13.1.1", "rimraf": "^6.0.1", "ts-node": "^10.9.1", "typescript": "^5.6.3", From dcb8e9ba9a70f05aab4865144743233ed285db59 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Tue, 16 Sep 2025 23:36:28 +0200 Subject: [PATCH 34/42] ci: fix canary-comment dependency (#989) * ci: fix canary-comment dependency * chore: change dependency for trigger * chore: only comment on canary label * Revert "chore: change dependency for trigger" This reverts commit ea759f017e55d7499bdc0a6001e696f88fd4ceaa. --- .github/workflows/canary-comment.yml | 64 ++++++++++++++++------------ .github/workflows/canary-deploy.yml | 16 ------- 2 files changed, 37 insertions(+), 43 deletions(-) diff --git a/.github/workflows/canary-comment.yml b/.github/workflows/canary-comment.yml index aae0a7dc..69cebc03 100644 --- a/.github/workflows/canary-comment.yml +++ b/.github/workflows/canary-comment.yml @@ -35,42 +35,52 @@ jobs: if (prs.data.length > 0) { const pr = prs.data[0]; - core.setOutput('pr_number', pr.number); - core.setOutput('found', 'true'); - console.log(`Found PR #${pr.number}`); + + // Check if PR has the deploy-canary label + const labels = pr.labels.map(label => label.name); + const hasCanaryLabel = labels.includes('deploy-canary'); + + if (hasCanaryLabel) { + core.setOutput('pr_number', pr.number); + core.setOutput('found', 'true'); + core.setOutput('has_canary_label', 'true'); + console.log(`Found PR #${pr.number} with deploy-canary label`); + } else { + core.setOutput('found', 'false'); + core.setOutput('has_canary_label', 'false'); + console.log(`Found PR #${pr.number} but it doesn't have deploy-canary label`); + } } else { core.setOutput('found', 'false'); + core.setOutput('has_canary_label', 'false'); console.log('No associated PR found'); } - # Only continue if we found a PR and the workflow succeeded - - name: Download canary info - if: ${{ steps.pr-info.outputs.found == 'true' && github.event.workflow_run.conclusion == 'success' }} - uses: actions/download-artifact@v4 + # Extract canary info from the workflow run + - name: Extract canary info + if: ${{ steps.pr-info.outputs.found == 'true' && steps.pr-info.outputs.has_canary_label == 'true' && github.event.workflow_run.conclusion == 'success' }} + id: canary-info + uses: actions/github-script@v7 with: - name: canary-info - path: canary-info/ - run-id: ${{ github.event.workflow_run.id }} - continue-on-error: true + script: | + const workflowRun = context.payload.workflow_run; - - name: Read canary info - if: ${{ steps.pr-info.outputs.found == 'true' && github.event.workflow_run.conclusion == 'success' }} - id: canary-info - run: | - if [ -f "canary-info/canary-tags.txt" ]; then - # Read the first tag (DockerHub) from the tags - FIRST_TAG=$(head -n1 canary-info/canary-tags.txt) - echo "tag=$FIRST_TAG" >> $GITHUB_OUTPUT - echo "found=true" >> $GITHUB_OUTPUT - echo "commit-sha=$(cat canary-info/commit-sha.txt)" >> $GITHUB_OUTPUT - else - echo "found=false" >> $GITHUB_OUTPUT - fi - continue-on-error: true + // Extract PR number from the branch name or workflow run + const 
prNumber = '${{ steps.pr-info.outputs.pr_number }}'; + const commitSha = workflowRun.head_sha; + + // Generate the canary tag based on the pattern used in canary-deploy.yml + const canaryTag = `supabase/postgres-meta:canary-pr-${prNumber}-${commitSha}`; + + core.setOutput('tag', canaryTag); + core.setOutput('found', 'true'); + core.setOutput('commit-sha', commitSha); + + console.log(`Generated canary tag: ${canaryTag}`); # Find existing comment - name: Find existing comment - if: ${{ steps.pr-info.outputs.found == 'true' }} + if: ${{ steps.pr-info.outputs.found == 'true' && steps.pr-info.outputs.has_canary_label == 'true' }} uses: peter-evans/find-comment@v3 id: find-comment with: @@ -80,7 +90,7 @@ jobs: # Create or update comment based on workflow status - name: Create or update canary comment - if: ${{ steps.pr-info.outputs.found == 'true' }} + if: ${{ steps.pr-info.outputs.found == 'true' && steps.pr-info.outputs.has_canary_label == 'true' }} uses: peter-evans/create-or-update-comment@v4 with: comment-id: ${{ steps.find-comment.outputs.comment-id }} diff --git a/.github/workflows/canary-deploy.yml b/.github/workflows/canary-deploy.yml index 872fcc41..f40f7a0f 100644 --- a/.github/workflows/canary-deploy.yml +++ b/.github/workflows/canary-deploy.yml @@ -105,19 +105,3 @@ jobs: org.opencontainers.image.revision=${{ github.event.pull_request.head.sha }} canary.pr.number=${{ github.event.pull_request.number }} canary.pr.author=${{ github.event.pull_request.user.login }} - - # Save canary info for the comment workflow - - name: Save canary info - run: | - mkdir -p canary-info - echo "${{ steps.meta.outputs.tags }}" > canary-info/canary-tags.txt - echo "${{ github.event.pull_request.number }}" > canary-info/pr-number.txt - echo "${{ github.event.pull_request.head.sha }}" > canary-info/commit-sha.txt - echo "postgres-meta" > canary-info/package-name.txt - - - name: Upload canary info - uses: actions/upload-artifact@v4 - with: - name: canary-info - path: canary-info/ - retention-days: 7 From 2b82470a2e4b22946c3ee7ddfe9e725d5c2a80fa Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 23 Sep 2025 20:00:45 +0200 Subject: [PATCH 35/42] chore: refactor typegen to reduce loops --- src/server/constants.ts | 4 + src/server/templates/typescript.ts | 186 +++++++++++++++++++---------- 2 files changed, 125 insertions(+), 65 deletions(-) diff --git a/src/server/constants.ts b/src/server/constants.ts index 9354c59f..c64b45e6 100644 --- a/src/server/constants.ts +++ b/src/server/constants.ts @@ -51,6 +51,10 @@ export const GENERATE_TYPES_SWIFT_ACCESS_CONTROL = process.env ? (process.env.PG_META_GENERATE_TYPES_SWIFT_ACCESS_CONTROL as AccessControl) : 'internal' +// json/jsonb/text types +export const VALID_UNNAMED_FUNCTION_ARG_TYPES = new Set([114, 3802, 25]) +export const VALID_FUNCTION_ARGS_MODE = new Set(['in', 'inout', 'variadic']) + export const PG_META_MAX_RESULT_SIZE = process.env.PG_META_MAX_RESULT_SIZE_MB ? 
// Node-postgres get a maximum size in bytes make the conversion from the env variable // from MB to Bytes diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 03b407d4..1c262fae 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -8,7 +8,7 @@ import type { PostgresView, } from '../../lib/index.js' import type { GeneratorMetadata } from '../../lib/generators.js' -import { GENERATE_TYPES_DEFAULT_SCHEMA } from '../constants.js' +import { GENERATE_TYPES_DEFAULT_SCHEMA, VALID_FUNCTION_ARGS_MODE } from '../constants.js' export const apply = async ({ schemas, @@ -26,15 +26,99 @@ export const apply = async ({ detectOneToOneRelationships: boolean postgrestVersion?: string }): Promise => { + schemas.sort((a, b) => a.name.localeCompare(b.name)) + const columnsByTableId = Object.fromEntries( [...tables, ...foreignTables, ...views, ...materializedViews].map((t) => [t.id, []]) ) - columns - .filter((c) => c.table_id in columnsByTableId) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - .forEach((c) => { - columnsByTableId[c.table_id].push(c) - }) + for (const column of columns) { + if (column.table_id in columnsByTableId) { + columnsByTableId[column.table_id].push(column) + } + } + for (const tableId in columnsByTableId) { + columnsByTableId[tableId].sort((a, b) => a.name.localeCompare(b.name)) + } + + const introspectionBySchema = Object.fromEntries<{ + tables: Pick[] + views: PostgresView[] + functions: { fn: PostgresFunction; inArgs: PostgresFunction['args'] }[] + enums: PostgresType[] + compositeTypes: PostgresType[] + }>( + schemas.map((s) => [ + s.name, + { tables: [], views: [], functions: [], enums: [], compositeTypes: [] }, + ]) + ) + for (const table of tables) { + if (table.schema in introspectionBySchema) { + introspectionBySchema[table.schema].tables.push(table) + } + } + for (const table of foreignTables) { + if (table.schema in introspectionBySchema) { + introspectionBySchema[table.schema].tables.push(table) + } + } + for (const view of views) { + if (view.schema in introspectionBySchema) { + introspectionBySchema[view.schema].views.push(view) + } + } + for (const materializedView of materializedViews) { + if (materializedView.schema in introspectionBySchema) { + introspectionBySchema[materializedView.schema].views.push({ + ...materializedView, + is_updatable: false, + }) + } + } + for (const func of functions) { + if (func.schema in introspectionBySchema) { + func.args.sort((a, b) => a.name.localeCompare(b.name)) + // Either: + // 1. All input args are be named, or + // 2. 
There is only one input arg which is unnamed + const inArgs = func.args.filter(({ mode }) => VALID_FUNCTION_ARGS_MODE.has(mode)) + + if ( + // Case 1: Function has a single parameter + inArgs.length === 1 || + // Case 2: All input args are named + !inArgs.some(({ name }) => name === '') + ) { + introspectionBySchema[func.schema].functions.push({ fn: func, inArgs }) + } + } + } + for (const type of types) { + if (type.schema in introspectionBySchema) { + if (type.enums.length > 0) { + introspectionBySchema[type.schema].enums.push(type) + } + if (type.attributes.length > 0) { + introspectionBySchema[type.schema].compositeTypes.push(type) + } + } + } + for (const schema in introspectionBySchema) { + introspectionBySchema[schema].tables.sort((a, b) => a.name.localeCompare(b.name)) + introspectionBySchema[schema].views.sort((a, b) => a.name.localeCompare(b.name)) + introspectionBySchema[schema].functions.sort((a, b) => a.fn.name.localeCompare(b.fn.name)) + introspectionBySchema[schema].enums.sort((a, b) => a.name.localeCompare(b.name)) + introspectionBySchema[schema].compositeTypes.sort((a, b) => a.name.localeCompare(b.name)) + } + + // group types by id for quicker lookup + const typesById = types.reduce( + (acc, type) => { + acc[type.id] = type + return acc + }, + {} as Record + ) const internal_supabase_schema = postgrestVersion ? `// Allows to automatically instantiate createClient with right options @@ -49,44 +133,15 @@ export type Json = string | number | boolean | null | { [key: string]: Json | un export type Database = { ${internal_supabase_schema} - ${schemas - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - .map((schema) => { - const schemaTables = [...tables, ...foreignTables] - .filter((table) => table.schema === schema.name) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - const schemaViews = [...views, ...materializedViews] - .filter((view) => view.schema === schema.name) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - const schemaFunctions = functions - .filter((func) => { - if (func.schema !== schema.name) { - return false - } - - // Either: - // 1. All input args are be named, or - // 2. There is only one input arg which is unnamed - const inArgs = func.args.filter(({ mode }) => ['in', 'inout', 'variadic'].includes(mode)) - - if (!inArgs.some(({ name }) => name === '')) { - return true - } - - if (inArgs.length === 1) { - return true - } - - return false - }) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - const schemaEnums = types - .filter((type) => type.schema === schema.name && type.enums.length > 0) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - const schemaCompositeTypes = types - .filter((type) => type.schema === schema.name && type.attributes.length > 0) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - return `${JSON.stringify(schema.name)}: { + ${schemas.map((schema) => { + const { + tables: schemaTables, + views: schemaViews, + functions: schemaFunctions, + enums: schemaEnums, + compositeTypes: schemaCompositeTypes, + } = introspectionBySchema[schema.name] + return `${JSON.stringify(schema.name)}: { Tables: { ${ schemaTables.length === 0 @@ -105,9 +160,9 @@ export type Database = { })} ${column.is_nullable ? 
'| null' : ''}` ), ...schemaFunctions - .filter((fn) => fn.argument_types === table.name) - .map((fn) => { - const type = types.find(({ id }) => id === fn.return_type_id) + .filter(({ fn }) => fn.argument_types === table.name) + .map(({ fn }) => { + const type = typesById[fn.return_type_id] let tsType = 'unknown' if (type) { tsType = pgTypeToTsType(schema, type.name, { @@ -226,7 +281,7 @@ export type Database = { )} } ${ - 'is_updatable' in view && view.is_updatable + view.is_updatable ? `Insert: { ${columnsByTableId[view.id].map((column) => { let output = JSON.stringify(column.name) @@ -306,28 +361,29 @@ export type Database = { const schemaFunctionsGroupedByName = schemaFunctions.reduce( (acc, curr) => { - acc[curr.name] ??= [] - acc[curr.name].push(curr) + acc[curr.fn.name] ??= [] + acc[curr.fn.name].push(curr) return acc }, - {} as Record + {} as Record ) + for (const fnName in schemaFunctionsGroupedByName) { + schemaFunctionsGroupedByName[fnName].sort((a, b) => + b.fn.definition.localeCompare(a.fn.definition) + ) + } return Object.entries(schemaFunctionsGroupedByName).map( ([fnName, fns]) => `${JSON.stringify(fnName)}: { Args: ${fns - .map(({ args }) => { - const inArgs = args - .toSorted((a, b) => a.name.localeCompare(b.name)) - .filter(({ mode }) => mode === 'in') - + .map(({ inArgs }) => { if (inArgs.length === 0) { return 'Record' } const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { - const type = types.find(({ id }) => id === type_id) + const type = typesById[type_id] let tsType = 'unknown' if (type) { tsType = pgTypeToTsType(schema, type.name, { @@ -346,10 +402,10 @@ export type Database = { .join(' | ')} Returns: ${(() => { // Case 1: `returns table`. - const tableArgs = fns[0].args.filter(({ mode }) => mode === 'table') + const tableArgs = fns[0].fn.args.filter(({ mode }) => mode === 'table') if (tableArgs.length > 0) { const argsNameAndType = tableArgs.map(({ name, type_id }) => { - const type = types.find(({ id }) => id === type_id) + const type = typesById[type_id] let tsType = 'unknown' if (type) { tsType = pgTypeToTsType(schema, type.name, { @@ -371,7 +427,7 @@ export type Database = { // Case 2: returns a relation's row type. const relation = [...tables, ...views].find( - ({ id }) => id === fns[0].return_type_relation_id + ({ id }) => id === fns[0].fn.return_type_relation_id ) if (relation) { return `{ @@ -394,7 +450,7 @@ export type Database = { } // Case 3: returns base/array/composite/enum type. - const type = types.find(({ id }) => id === fns[0].return_type_id) + const type = typesById[fns[0].fn.return_type_id] if (type) { return pgTypeToTsType(schema, type.name, { types, @@ -405,7 +461,7 @@ export type Database = { } return 'unknown' - })()}${fns[0].is_set_returning_function ? '[]' : ''} + })()}${fns[0].fn.is_set_returning_function ? 
'[]' : ''} }` ) })()} @@ -430,7 +486,7 @@ export type Database = { ({ name, attributes }) => `${JSON.stringify(name)}: { ${attributes.map(({ name, type_id }) => { - const type = types.find(({ id }) => id === type_id) + const type = typesById[type_id] let tsType = 'unknown' if (type) { tsType = `${pgTypeToTsType(schema, type.name, { @@ -447,7 +503,7 @@ export type Database = { } } }` - })} + })} } type DatabaseWithoutInternals = Omit From 09155b08b43992c349712c1841e8162ff2645d9f Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Wed, 1 Oct 2025 10:18:11 +0200 Subject: [PATCH 36/42] chore: dedup typescript typegen logic (#993) * chore: dedup typescript typegen logic * chore: reduce loops --- src/server/templates/typescript.ts | 568 ++++++++++++++--------------- 1 file changed, 282 insertions(+), 286 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 1c262fae..f0079874 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -1,4 +1,5 @@ import prettier from 'prettier' +import type { GeneratorMetadata } from '../../lib/generators.js' import type { PostgresColumn, PostgresFunction, @@ -7,9 +8,13 @@ import type { PostgresType, PostgresView, } from '../../lib/index.js' -import type { GeneratorMetadata } from '../../lib/generators.js' import { GENERATE_TYPES_DEFAULT_SCHEMA, VALID_FUNCTION_ARGS_MODE } from '../constants.js' +type TsRelationship = Pick< + GeneratorMetadata['relationships'][number], + 'foreign_key_name' | 'columns' | 'is_one_to_one' | 'referenced_relation' | 'referenced_columns' +> + export const apply = async ({ schemas, tables, @@ -27,10 +32,37 @@ export const apply = async ({ postgrestVersion?: string }): Promise => { schemas.sort((a, b) => a.name.localeCompare(b.name)) + relationships.sort( + (a, b) => + a.foreign_key_name.localeCompare(b.foreign_key_name) || + a.referenced_relation.localeCompare(b.referenced_relation) || + JSON.stringify(a.referenced_columns).localeCompare(JSON.stringify(b.referenced_columns)) + ) + const introspectionBySchema = Object.fromEntries<{ + tables: { + table: Pick + relationships: TsRelationship[] + }[] + views: { + view: PostgresView + relationships: TsRelationship[] + }[] + functions: { fn: PostgresFunction; inArgs: PostgresFunction['args'] }[] + enums: PostgresType[] + compositeTypes: PostgresType[] + }>( + schemas.map((s) => [ + s.name, + { tables: [], views: [], functions: [], enums: [], compositeTypes: [] }, + ]) + ) const columnsByTableId = Object.fromEntries( [...tables, ...foreignTables, ...views, ...materializedViews].map((t) => [t.id, []]) ) + // group types by id for quicker lookup + const typesById = new Map() + for (const column of columns) { if (column.table_id in columnsByTableId) { columnsByTableId[column.table_id].push(column) @@ -40,38 +72,74 @@ export const apply = async ({ columnsByTableId[tableId].sort((a, b) => a.name.localeCompare(b.name)) } - const introspectionBySchema = Object.fromEntries<{ - tables: Pick[] - views: PostgresView[] - functions: { fn: PostgresFunction; inArgs: PostgresFunction['args'] }[] - enums: PostgresType[] - compositeTypes: PostgresType[] - }>( - schemas.map((s) => [ - s.name, - { tables: [], views: [], functions: [], enums: [], compositeTypes: [] }, - ]) - ) + for (const type of types) { + typesById.set(type.id, type) + if (type.schema in introspectionBySchema) { + if (type.enums.length > 0) { + introspectionBySchema[type.schema].enums.push(type) + } + if (type.attributes.length > 0) { + 
introspectionBySchema[type.schema].compositeTypes.push(type) + } + } + } + + function getRelationships( + object: { schema: string; name: string }, + relationships: GeneratorMetadata['relationships'] + ): Pick< + GeneratorMetadata['relationships'][number], + 'foreign_key_name' | 'columns' | 'is_one_to_one' | 'referenced_relation' | 'referenced_columns' + >[] { + return relationships.filter( + (relationship) => + relationship.schema === object.schema && + relationship.referenced_schema === object.schema && + relationship.relation === object.name + ) + } + + function generateRelationshiptTsDefinition(relationship: TsRelationship): string { + return `{ + foreignKeyName: ${JSON.stringify(relationship.foreign_key_name)} + columns: ${JSON.stringify(relationship.columns)}${detectOneToOneRelationships ? `\nisOneToOne: ${relationship.is_one_to_one}` : ''} + referencedRelation: ${JSON.stringify(relationship.referenced_relation)} + referencedColumns: ${JSON.stringify(relationship.referenced_columns)} + }` + } + for (const table of tables) { if (table.schema in introspectionBySchema) { - introspectionBySchema[table.schema].tables.push(table) + introspectionBySchema[table.schema].tables.push({ + table, + relationships: getRelationships(table, relationships), + }) } } for (const table of foreignTables) { if (table.schema in introspectionBySchema) { - introspectionBySchema[table.schema].tables.push(table) + introspectionBySchema[table.schema].tables.push({ + table, + relationships: getRelationships(table, relationships), + }) } } for (const view of views) { if (view.schema in introspectionBySchema) { - introspectionBySchema[view.schema].views.push(view) + introspectionBySchema[view.schema].views.push({ + view, + relationships: getRelationships(view, relationships), + }) } } for (const materializedView of materializedViews) { if (materializedView.schema in introspectionBySchema) { introspectionBySchema[materializedView.schema].views.push({ - ...materializedView, - is_updatable: false, + view: { + ...materializedView, + is_updatable: false, + }, + relationships: getRelationships(materializedView, relationships), }) } } @@ -93,32 +161,105 @@ export const apply = async ({ } } } - for (const type of types) { - if (type.schema in introspectionBySchema) { - if (type.enums.length > 0) { - introspectionBySchema[type.schema].enums.push(type) - } - if (type.attributes.length > 0) { - introspectionBySchema[type.schema].compositeTypes.push(type) - } - } - } for (const schema in introspectionBySchema) { - introspectionBySchema[schema].tables.sort((a, b) => a.name.localeCompare(b.name)) - introspectionBySchema[schema].views.sort((a, b) => a.name.localeCompare(b.name)) + introspectionBySchema[schema].tables.sort((a, b) => a.table.name.localeCompare(b.table.name)) + introspectionBySchema[schema].views.sort((a, b) => a.view.name.localeCompare(b.view.name)) introspectionBySchema[schema].functions.sort((a, b) => a.fn.name.localeCompare(b.fn.name)) introspectionBySchema[schema].enums.sort((a, b) => a.name.localeCompare(b.name)) introspectionBySchema[schema].compositeTypes.sort((a, b) => a.name.localeCompare(b.name)) } - // group types by id for quicker lookup - const typesById = types.reduce( - (acc, type) => { - acc[type.id] = type - return acc - }, - {} as Record - ) + const getFunctionTsReturnType = (fn: PostgresFunction, returnType: string) => { + return `${returnType}${fn.is_set_returning_function ? 
'[]' : ''}` + } + + const getFunctionReturnType = (schema: PostgresSchema, fn: PostgresFunction): string => { + const tableArgs = fn.args.filter(({ mode }) => mode === 'table') + if (tableArgs.length > 0) { + const argsNameAndType = tableArgs.map(({ name, type_id }) => { + const type = typesById.get(type_id) + let tsType = 'unknown' + if (type) { + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) + } + return { name, type: tsType } + }) + + return `{ + ${argsNameAndType.map(({ name, type }) => `${JSON.stringify(name)}: ${type}`)} + }` + } + + // Case 2: returns a relation's row type. + const relation = + introspectionBySchema[schema.name]?.tables.find( + ({ table: { id } }) => id === fn.return_type_relation_id + )?.table || + introspectionBySchema[schema.name]?.views.find( + ({ view: { id } }) => id === fn.return_type_relation_id + )?.view + if (relation) { + return `{ + ${columnsByTableId[relation.id].map( + (column) => + `${JSON.stringify(column.name)}: ${pgTypeToTsType(schema, column.format, { + types, + schemas, + tables, + views, + })} ${column.is_nullable ? '| null' : ''}` + )} + }` + } + + // Case 3: returns base/array/composite/enum type. + const type = typesById.get(fn.return_type_id) + if (type) { + return pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) + } + + return 'unknown' + } + + const getFunctionSignatures = ( + schema: PostgresSchema, + fns: Array<{ fn: PostgresFunction; inArgs: PostgresFunction['args'] }> + ) => { + const args = fns + .map(({ inArgs }) => { + if (inArgs.length === 0) { + return 'Record' + } + const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { + const type = typesById.get(type_id) + let tsType = 'unknown' + if (type) { + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) + } + return { name, type: tsType, has_default } + }) + return `{ ${argsNameAndType.map(({ name, type, has_default }) => `${JSON.stringify(name)}${has_default ? '?' : ''}: ${type}`)} }` + }) + .toSorted() + // A function can have multiples definitions with differents args, but will always return the same type + .join(' | ') + return `{\nArgs: ${args}\n Returns: ${getFunctionTsReturnType(fns[0].fn, getFunctionReturnType(schema, fns[0].fn))}\n}` + } const internal_supabase_schema = postgrestVersion ? `// Allows to automatically instantiate createClient with right options @@ -128,6 +269,24 @@ export const apply = async ({ }` : '' + function generateColumnTsDefinition( + schema: PostgresSchema, + column: { + name: string + format: string + is_nullable: boolean + is_optional: boolean + }, + context: { + types: PostgresType[] + schemas: PostgresSchema[] + tables: PostgresTable[] + views: PostgresView[] + } + ) { + return `${JSON.stringify(column.name)}${column.is_optional ? '?' : ''}: ${pgTypeToTsType(schema, column.format, context)} ${column.is_nullable ? '| null' : ''}` + } + let output = ` export type Json = string | number | boolean | null | { [key: string]: Json | undefined } | Json[] @@ -147,117 +306,68 @@ export type Database = { schemaTables.length === 0 ? '[_ in never]: never' : schemaTables.map( - (table) => `${JSON.stringify(table.name)}: { + ({ table, relationships }) => `${JSON.stringify(table.name)}: { Row: { ${[ - ...columnsByTableId[table.id].map( - (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - })} ${column.is_nullable ? 
'| null' : ''}` + ...columnsByTableId[table.id].map((column) => + generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: column.is_nullable, + is_optional: false, + }, + { types, schemas, tables, views } + ) ), ...schemaFunctions .filter(({ fn }) => fn.argument_types === table.name) .map(({ fn }) => { - const type = typesById[fn.return_type_id] - let tsType = 'unknown' - if (type) { - tsType = pgTypeToTsType(schema, type.name, { - types, - schemas, - tables, - views, - }) - } - return `${JSON.stringify(fn.name)}: ${tsType} | null` + return `${JSON.stringify(fn.name)}: ${getFunctionReturnType(schema, fn)} | null` }), ]} } Insert: { ${columnsByTableId[table.id].map((column) => { - let output = JSON.stringify(column.name) - if (column.identity_generation === 'ALWAYS') { - return `${output}?: never` - } - - if ( - column.is_nullable || - column.is_identity || - column.default_value !== null - ) { - output += '?:' - } else { - output += ':' + return `${JSON.stringify(column.name)}?: never` } - - output += pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - }) - - if (column.is_nullable) { - output += '| null' - } - - return output + return generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: column.is_nullable, + is_optional: + column.is_nullable || + column.is_identity || + column.default_value !== null, + }, + { types, schemas, tables, views } + ) })} } Update: { ${columnsByTableId[table.id].map((column) => { - let output = JSON.stringify(column.name) - if (column.identity_generation === 'ALWAYS') { - return `${output}?: never` + return `${JSON.stringify(column.name)}?: never` } - output += `?: ${pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - })}` - - if (column.is_nullable) { - output += '| null' - } - - return output + return generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: column.is_nullable, + is_optional: true, + }, + { types, schemas, tables, views } + ) })} } Relationships: [ - ${relationships - .filter( - (relationship) => - relationship.schema === table.schema && - relationship.referenced_schema === table.schema && - relationship.relation === table.name - ) - .sort( - (a, b) => - a.foreign_key_name.localeCompare(b.foreign_key_name) || - a.referenced_relation.localeCompare(b.referenced_relation) || - JSON.stringify(a.referenced_columns).localeCompare( - JSON.stringify(b.referenced_columns) - ) - ) - .map( - (relationship) => `{ - foreignKeyName: ${JSON.stringify(relationship.foreign_key_name)} - columns: ${JSON.stringify(relationship.columns)} - ${ - detectOneToOneRelationships - ? `isOneToOne: ${relationship.is_one_to_one};` - : '' - }referencedRelation: ${JSON.stringify(relationship.referenced_relation)} - referencedColumns: ${JSON.stringify(relationship.referenced_columns)} - }` - )} + ${relationships.map(generateRelationshiptTsDefinition)} ] }` ) @@ -268,86 +378,61 @@ export type Database = { schemaViews.length === 0 ? '[_ in never]: never' : schemaViews.map( - (view) => `${JSON.stringify(view.name)}: { + ({ view, relationships }) => `${JSON.stringify(view.name)}: { Row: { - ${columnsByTableId[view.id].map( - (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - })} ${column.is_nullable ? 
'| null' : ''}` + ${columnsByTableId[view.id].map((column) => + generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: column.is_nullable, + is_optional: false, + }, + { types, schemas, tables, views } + ) )} } ${ view.is_updatable ? `Insert: { ${columnsByTableId[view.id].map((column) => { - let output = JSON.stringify(column.name) - if (!column.is_updatable) { - return `${output}?: never` + return `${JSON.stringify(column.name)}?: never` } - - output += `?: ${pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - })} | null` - - return output + return generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: true, + is_optional: true, + }, + { types, schemas, tables, views } + ) })} } Update: { ${columnsByTableId[view.id].map((column) => { - let output = JSON.stringify(column.name) - if (!column.is_updatable) { - return `${output}?: never` + return `${JSON.stringify(column.name)}?: never` } - - output += `?: ${pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - })} | null` - - return output + return generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: true, + is_optional: true, + }, + { types, schemas, tables, views } + ) })} } ` : '' }Relationships: [ - ${relationships - .filter( - (relationship) => - relationship.schema === view.schema && - relationship.referenced_schema === view.schema && - relationship.relation === view.name - ) - .sort( - (a, b) => - a.foreign_key_name.localeCompare(b.foreign_key_name) || - a.referenced_relation.localeCompare(b.referenced_relation) || - JSON.stringify(a.referenced_columns).localeCompare( - JSON.stringify(b.referenced_columns) - ) - ) - .map( - (relationship) => `{ - foreignKeyName: ${JSON.stringify(relationship.foreign_key_name)} - columns: ${JSON.stringify(relationship.columns)} - ${ - detectOneToOneRelationships - ? `isOneToOne: ${relationship.is_one_to_one};` - : '' - }referencedRelation: ${JSON.stringify(relationship.referenced_relation)} - referencedColumns: ${JSON.stringify(relationship.referenced_columns)} - }` - )} + ${relationships.map(generateRelationshiptTsDefinition)} ] }` ) @@ -373,97 +458,12 @@ export type Database = { ) } - return Object.entries(schemaFunctionsGroupedByName).map( - ([fnName, fns]) => - `${JSON.stringify(fnName)}: { - Args: ${fns - .map(({ inArgs }) => { - if (inArgs.length === 0) { - return 'Record' - } - - const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { - const type = typesById[type_id] - let tsType = 'unknown' - if (type) { - tsType = pgTypeToTsType(schema, type.name, { - types, - schemas, - tables, - views, - }) - } - return { name, type: tsType, has_default } - }) - return `{ ${argsNameAndType.map(({ name, type, has_default }) => `${JSON.stringify(name)}${has_default ? '?' : ''}: ${type}`)} }` - }) - .toSorted() - // A function can have multiples definitions with differents args, but will always return the same type - .join(' | ')} - Returns: ${(() => { - // Case 1: `returns table`. 
- const tableArgs = fns[0].fn.args.filter(({ mode }) => mode === 'table') - if (tableArgs.length > 0) { - const argsNameAndType = tableArgs.map(({ name, type_id }) => { - const type = typesById[type_id] - let tsType = 'unknown' - if (type) { - tsType = pgTypeToTsType(schema, type.name, { - types, - schemas, - tables, - views, - }) - } - return { name, type: tsType } - }) - - return `{ - ${argsNameAndType - .toSorted((a, b) => a.name.localeCompare(b.name)) - .map(({ name, type }) => `${JSON.stringify(name)}: ${type}`)} - }` - } - - // Case 2: returns a relation's row type. - const relation = [...tables, ...views].find( - ({ id }) => id === fns[0].fn.return_type_relation_id - ) - if (relation) { - return `{ - ${columnsByTableId[relation.id] - .toSorted((a, b) => a.name.localeCompare(b.name)) - .map( - (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType( - schema, - column.format, - { - types, - schemas, - tables, - views, - } - )} ${column.is_nullable ? '| null' : ''}` - )} - }` - } - - // Case 3: returns base/array/composite/enum type. - const type = typesById[fns[0].fn.return_type_id] - if (type) { - return pgTypeToTsType(schema, type.name, { - types, - schemas, - tables, - views, - }) - } - - return 'unknown' - })()}${fns[0].fn.is_set_returning_function ? '[]' : ''} - }` - ) + return Object.entries(schemaFunctionsGroupedByName) + .map(([fnName, fns]) => { + const functionSignatures = getFunctionSignatures(schema, fns) + return `${JSON.stringify(fnName)}:\n${functionSignatures}` + }) + .join(',\n') })()} } Enums: { @@ -486,7 +486,7 @@ export type Database = { ({ name, attributes }) => `${JSON.stringify(name)}: { ${attributes.map(({ name, type_id }) => { - const type = typesById[type_id] + const type = typesById.get(type_id) let tsType = 'unknown' if (type) { tsType = `${pgTypeToTsType(schema, type.name, { @@ -612,13 +612,9 @@ export type CompositeTypes< : never export const Constants = { - ${schemas - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - .map((schema) => { - const schemaEnums = types - .filter((type) => type.schema === schema.name && type.enums.length > 0) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - return `${JSON.stringify(schema.name)}: { + ${schemas.map((schema) => { + const schemaEnums = introspectionBySchema[schema.name].enums + return `${JSON.stringify(schema.name)}: { Enums: { ${schemaEnums.map( (enum_) => @@ -628,7 +624,7 @@ export const Constants = { )} } }` - })} + })} } as const ` From 0aeceb212acf4310103eca6fa8af5a23716fc021 Mon Sep 17 00:00:00 2001 From: Charis <26616127+charislam@users.noreply.github.com> Date: Wed, 8 Oct 2025 14:18:13 -0400 Subject: [PATCH 37/42] fix(tables): ensure order of composite pks preserved (#996) * fix(tables): ensure order of composite pks preserved * test: add test for preserving composite pk order --- src/lib/sql/table.sql.ts | 43 +++++++++++++++++++--------------------- test/lib/tables.ts | 17 ++++++++++++++++ 2 files changed, 37 insertions(+), 23 deletions(-) diff --git a/src/lib/sql/table.sql.ts b/src/lib/sql/table.sql.ts index d7f70331..446c1f40 100644 --- a/src/lib/sql/table.sql.ts +++ b/src/lib/sql/table.sql.ts @@ -32,29 +32,26 @@ FROM JOIN pg_class c ON nc.oid = c.relnamespace left join ( select - table_id, - jsonb_agg(_pk.*) as primary_keys - from ( - select - n.nspname as schema, - c.relname as table_name, - a.attname as name, - c.oid :: int8 as table_id - from - pg_index i, - pg_class c, - pg_attribute a, - pg_namespace n - where - ${props.schemaFilter ? 
`n.nspname ${props.schemaFilter} AND` : ''} - ${props.tableIdentifierFilter ? `n.nspname || '.' || c.relname ${props.tableIdentifierFilter} AND` : ''} - i.indrelid = c.oid - and c.relnamespace = n.oid - and a.attrelid = c.oid - and a.attnum = any (i.indkey) - and i.indisprimary - ) as _pk - group by table_id + c.oid::int8 as table_id, + jsonb_agg( + jsonb_build_object( + 'table_id', c.oid::int8, + 'schema', n.nspname, + 'table_name', c.relname, + 'name', a.attname + ) + order by array_position(i.indkey, a.attnum) + ) as primary_keys + from + pg_index i + join pg_class c on i.indrelid = c.oid + join pg_namespace n on c.relnamespace = n.oid + join pg_attribute a on a.attrelid = c.oid and a.attnum = any(i.indkey) + where + ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} + ${props.tableIdentifierFilter ? `n.nspname || '.' || c.relname ${props.tableIdentifierFilter} AND` : ''} + i.indisprimary + group by c.oid ) as pk on pk.table_id = c.oid left join ( diff --git a/test/lib/tables.ts b/test/lib/tables.ts index 00230ab4..677204fc 100644 --- a/test/lib/tables.ts +++ b/test/lib/tables.ts @@ -525,3 +525,20 @@ test('primary keys', async () => { ) await pgMeta.tables.remove(res.data!.id) }) + +test('composite primary keys preserve order', async () => { + let res = await pgMeta.tables.create({ name: 't_pk_order' }) + await pgMeta.columns.create({ table_id: res.data!.id, name: 'col_a', type: 'int8' }) + await pgMeta.columns.create({ table_id: res.data!.id, name: 'col_b', type: 'text' }) + await pgMeta.columns.create({ table_id: res.data!.id, name: 'col_c', type: 'int4' }) + + // Set primary keys in specific order: col_c, col_a, col_b + res = await pgMeta.tables.update(res.data!.id, { + primary_keys: [{ name: 'col_c' }, { name: 'col_a' }, { name: 'col_b' }], + }) + + // Verify the order is preserved + expect(res.data!.primary_keys.map((pk: any) => pk.name)).toEqual(['col_c', 'col_a', 'col_b']) + + await pgMeta.tables.remove(res.data!.id) +}) From 1d0dad13da8313f5c2d6b8d0a54ecad82095b945 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 10 Oct 2025 22:06:18 +0000 Subject: [PATCH 38/42] chore(deps-dev): bump pino-pretty from 13.1.1 to 13.1.2 (#998) Bumps [pino-pretty](https://github.com/pinojs/pino-pretty) from 13.1.1 to 13.1.2. - [Release notes](https://github.com/pinojs/pino-pretty/releases) - [Commits](https://github.com/pinojs/pino-pretty/compare/v13.1.1...v13.1.2) --- updated-dependencies: - dependency-name: pino-pretty dependency-version: 13.1.2 dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 package-lock.json | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index cd6f7b63..2787f0d6 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -6229,9 +6229,9 @@
       }
     },
     "node_modules/pino-pretty": {
-      "version": "13.1.1",
-      "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.1.1.tgz",
-      "integrity": "sha512-TNNEOg0eA0u+/WuqH0MH0Xui7uqVk9D74ESOpjtebSQYbNWJk/dIxCXIxFsNfeN53JmtWqYHP2OrIZjT/CBEnA==",
+      "version": "13.1.2",
+      "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.1.2.tgz",
+      "integrity": "sha512-3cN0tCakkT4f3zo9RXDIhy6GTvtYD6bK4CRBLN9j3E/ePqN1tugAXD5rGVfoChW6s0hiek+eyYlLNqc/BG7vBQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {

From d521264e6f99780cfea77d4fd00c111aebcba9d1 Mon Sep 17 00:00:00 2001
From: Andrew Valleteau
Date: Thu, 16 Oct 2025 16:11:31 +0200
Subject: [PATCH 39/42] feat(typegen): add functions setof type introspection (#971)

* feat(typegen): add setof function type introspection

- Introspect the setof function fields for functions
- Restore functions as unions of args + returns

* chore: update snapshots
* chore: unify sort and dedup loops
* chore: remove duplicate sort
* chore: include view in type
* fix: isOneToOne
* fix: tests
* chore: dedup typescript typegen logic
* chore: re-use generateColumn
* fix: retrieve prorows only
* chore: refactor typegen for prorows only
* fix: only get reltype in types
* chore: reuse relationTypeByIds
* chore: reduce functions changes to minimum
* chore: only single loop for types
* chore: single sort for relationships
* chore: reduce loops
* fix: relationtype setof functions generation
* chore: fix prettier
* chore: update snapshots
* chore: fix types test
* fix: test types
* fix: include materializedView types
* test: add search_todos_by_details function
* fix: add setof from * for all relation functions
* fix(typescript): union unknown null (#995)
* fix(typescript): unknown is already nullable

Fixes:
https://github.com/supabase/cli/issues/4234
https://github.com/supabase/cli/issues/577

* fix: also exclude any from null union
---
 Dockerfile                         |    2 +-
 package.json                       |    3 +-
 src/lib/sql/functions.sql.ts       |    4 +
 src/lib/sql/types.sql.ts           |    5 +-
 src/lib/types.ts                   |    2 +
 src/server/templates/typescript.ts |  357 +++-
 test/db/00-init.sql                |  247 +++
 test/lib/functions.ts              |  128 ++
 test/lib/types.ts                  |    5 +-
 test/server/typegen.ts             | 2733 ++++++++++++++++++++++++++--
 10 files changed, 3308 insertions(+), 178 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 8756b7ac..df79412a 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,4 +1,4 @@
-FROM node:20 as build
+FROM node:20 AS build
 WORKDIR /usr/src/app
 # Do `npm ci` separately so we can cache `node_modules`
 # https://nodejs.org/en/docs/guides/nodejs-docker-webapp/
diff --git a/package.json b/package.json
index 941df965..e903e455 100644
--- a/package.json
+++ b/package.json
@@ -26,7 +26,8 @@
     "gen:types:go": "PG_META_GENERATE_TYPES=go node --loader ts-node/esm src/server/server.ts",
     "gen:types:swift": "PG_META_GENERATE_TYPES=swift node --loader ts-node/esm src/server/server.ts",
     "start": "node dist/server/server.js",
-    "dev": "trap 'npm run db:clean' INT && run-s db:clean db:run && nodemon --exec node --loader ts-node/esm src/server/server.ts | pino-pretty --colorize",
+    "dev": "trap 'npm run db:clean' INT && run-s db:clean db:run && run-s dev:code",
+    "dev:code": "nodemon --exec node 
--loader ts-node/esm src/server/server.ts | pino-pretty --colorize", "test": "run-s db:clean db:run test:run db:clean", "db:clean": "cd test/db && docker compose down", "db:run": "cd test/db && docker compose up --detach --wait", diff --git a/src/lib/sql/functions.sql.ts b/src/lib/sql/functions.sql.ts index 92715b95..97dad2f3 100644 --- a/src/lib/sql/functions.sql.ts +++ b/src/lib/sql/functions.sql.ts @@ -85,6 +85,10 @@ select pg_get_function_result(f.oid) as return_type, nullif(rt.typrelid::int8, 0) as return_type_relation_id, f.proretset as is_set_returning_function, + case + when f.proretset then nullif(f.prorows, 0) + else null + end as prorows, case when f.provolatile = 'i' then 'IMMUTABLE' when f.provolatile = 's' then 'STABLE' diff --git a/src/lib/sql/types.sql.ts b/src/lib/sql/types.sql.ts index 990fa22f..c230f23f 100644 --- a/src/lib/sql/types.sql.ts +++ b/src/lib/sql/types.sql.ts @@ -13,7 +13,8 @@ select format_type (t.oid, null) as format, coalesce(t_enums.enums, '[]') as enums, coalesce(t_attributes.attributes, '[]') as attributes, - obj_description (t.oid, 'pg_type') as comment + obj_description (t.oid, 'pg_type') as comment, + nullif(t.typrelid::int8, 0) as type_relation_id from pg_type t left join pg_namespace n on n.oid = t.typnamespace @@ -46,7 +47,7 @@ from t.typrelid = 0 or ( select - c.relkind ${props.includeTableTypes ? `in ('c', 'r')` : `= 'c'`} + c.relkind ${props.includeTableTypes ? `in ('c', 'r', 'v', 'm')` : `= 'c'`} from pg_class c where diff --git a/src/lib/types.ts b/src/lib/types.ts index bfd60250..26b3bc78 100644 --- a/src/lib/types.ts +++ b/src/lib/types.ts @@ -156,6 +156,7 @@ const postgresFunctionSchema = Type.Object({ return_type: Type.String(), return_type_relation_id: Type.Union([Type.Integer(), Type.Null()]), is_set_returning_function: Type.Boolean(), + prorows: Type.Union([Type.Number(), Type.Null()]), behavior: Type.Union([ Type.Literal('IMMUTABLE'), Type.Literal('STABLE'), @@ -442,6 +443,7 @@ export const postgresTypeSchema = Type.Object({ enums: Type.Array(Type.String()), attributes: Type.Array(Type.Object({ name: Type.String(), type_id: Type.Integer() })), comment: Type.Union([Type.String(), Type.Null()]), + type_relation_id: Type.Union([Type.Integer(), Type.Null()]), }) export type PostgresType = Static diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index f0079874..1b527686 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -8,7 +8,11 @@ import type { PostgresType, PostgresView, } from '../../lib/index.js' -import { GENERATE_TYPES_DEFAULT_SCHEMA, VALID_FUNCTION_ARGS_MODE } from '../constants.js' +import { + GENERATE_TYPES_DEFAULT_SCHEMA, + VALID_FUNCTION_ARGS_MODE, + VALID_UNNAMED_FUNCTION_ARG_TYPES, +} from '../constants.js' type TsRelationship = Pick< GeneratorMetadata['relationships'][number], @@ -56,13 +60,17 @@ export const apply = async ({ { tables: [], views: [], functions: [], enums: [], compositeTypes: [] }, ]) ) - - const columnsByTableId = Object.fromEntries( - [...tables, ...foreignTables, ...views, ...materializedViews].map((t) => [t.id, []]) - ) + const columnsByTableId: Record = {} + const tablesNamesByTableId: Record = {} + const relationTypeByIds = new Map() // group types by id for quicker lookup const typesById = new Map() + const tablesLike = [...tables, ...foreignTables, ...views, ...materializedViews] + for (const tableLike of tablesLike) { + columnsByTableId[tableLike.id] = [] + tablesNamesByTableId[tableLike.id] = tableLike.name + } for (const 
column of columns) { if (column.table_id in columnsByTableId) { columnsByTableId[column.table_id].push(column) @@ -74,6 +82,10 @@ export const apply = async ({ for (const type of types) { typesById.set(type.id, type) + // Save all the types that are relation types for quicker lookup + if (type.type_relation_id) { + relationTypeByIds.set(type.id, type) + } if (type.schema in introspectionBySchema) { if (type.enums.length > 0) { introspectionBySchema[type.schema].enums.push(type) @@ -143,19 +155,49 @@ export const apply = async ({ }) } } + // Helper function to get table/view name from relation id + const getTableNameFromRelationId = ( + relationId: number | null, + returnTypeId: number | null + ): string | null => { + if (!relationId) return null + + if (tablesNamesByTableId[relationId]) return tablesNamesByTableId[relationId] + // if it's a composite type we use the type name as relation name to allow sub-selecting fields of the composite type + const reltype = returnTypeId ? relationTypeByIds.get(returnTypeId) : null + return reltype ? reltype.name : null + } + for (const func of functions) { if (func.schema in introspectionBySchema) { func.args.sort((a, b) => a.name.localeCompare(b.name)) - // Either: - // 1. All input args are be named, or - // 2. There is only one input arg which is unnamed + // Get all input args (in, inout, variadic modes) const inArgs = func.args.filter(({ mode }) => VALID_FUNCTION_ARGS_MODE.has(mode)) if ( - // Case 1: Function has a single parameter - inArgs.length === 1 || + // Case 1: Function has no parameters + inArgs.length === 0 || // Case 2: All input args are named - !inArgs.some(({ name }) => name === '') + !inArgs.some(({ name }) => name === '') || + // Case 3: All unnamed args have default values AND are valid types + inArgs.every((arg) => { + if (arg.name === '') { + return arg.has_default && VALID_UNNAMED_FUNCTION_ARG_TYPES.has(arg.type_id) + } + return true + }) || + // Case 4: Single unnamed parameter of valid type (json, jsonb, text) + // Exclude all functions definitions that have only one single argument unnamed argument that isn't + // a json/jsonb/text as it won't be considered by PostgREST + (inArgs.length === 1 && + inArgs[0].name === '' && + (VALID_UNNAMED_FUNCTION_ARG_TYPES.has(inArgs[0].type_id) || + // OR if the function have a single unnamed args which is another table (embeded function) + (relationTypeByIds.get(inArgs[0].type_id) && + getTableNameFromRelationId(func.return_type_relation_id, func.return_type_id)) || + // OR if the function takes a table row but doesn't qualify as embedded (for error reporting) + (relationTypeByIds.get(inArgs[0].type_id) && + !getTableNameFromRelationId(func.return_type_relation_id, func.return_type_id)))) ) { introspectionBySchema[func.schema].functions.push({ fn: func, inArgs }) } @@ -170,10 +212,62 @@ export const apply = async ({ } const getFunctionTsReturnType = (fn: PostgresFunction, returnType: string) => { - return `${returnType}${fn.is_set_returning_function ? 
'[]' : ''}` + // Determine if this function should have SetofOptions + let setofOptionsInfo = '' + + const returnTableName = getTableNameFromRelationId( + fn.return_type_relation_id, + fn.return_type_id + ) + const returnsSetOfTable = fn.is_set_returning_function && fn.return_type_relation_id !== null + const returnsMultipleRows = fn.prorows !== null && fn.prorows > 1 + // Case 1: if the function returns a table, we need to add SetofOptions to allow selecting sub fields of the table + // Those can be used in rpc to select sub fields of a table + if (returnTableName) { + setofOptionsInfo = `SetofOptions: { + from: "*" + to: ${JSON.stringify(returnTableName)} + isOneToOne: ${Boolean(!returnsMultipleRows)} + isSetofReturn: ${fn.is_set_returning_function} + }` + } + // Case 2: if the function has a single table argument, we need to add SetofOptions to allow selecting sub fields of the table + // and set the right "from" and "to" values to allow selecting from a table row + if (fn.args.length === 1) { + const relationType = relationTypeByIds.get(fn.args[0].type_id) + + // Only add SetofOptions for functions with table arguments (embedded functions) + // or specific functions that RETURNS table-name + if (relationType) { + const sourceTable = relationType.format + // Case 1: Standard embedded function with proper setof detection + if (returnsSetOfTable && returnTableName) { + setofOptionsInfo = `SetofOptions: { + from: ${JSON.stringify(sourceTable)} + to: ${JSON.stringify(returnTableName)} + isOneToOne: ${Boolean(!returnsMultipleRows)} + isSetofReturn: true + }` + } + // Case 2: Handle RETURNS table-name those are always a one to one relationship + else if (returnTableName && !returnsSetOfTable) { + const targetTable = returnTableName + setofOptionsInfo = `SetofOptions: { + from: ${JSON.stringify(sourceTable)} + to: ${JSON.stringify(targetTable)} + isOneToOne: true + isSetofReturn: false + }` + } + } + } + + return `${returnType}${fn.is_set_returning_function && returnsMultipleRows ? '[]' : ''} + ${setofOptionsInfo ? `${setofOptionsInfo}` : ''}` } const getFunctionReturnType = (schema: PostgresSchema, fn: PostgresFunction): string => { + // Case 1: `returns table`. const tableArgs = fn.args.filter(({ mode }) => mode === 'table') if (tableArgs.length > 0) { const argsNameAndType = tableArgs.map(({ name, type_id }) => { @@ -205,15 +299,25 @@ export const apply = async ({ )?.view if (relation) { return `{ - ${columnsByTableId[relation.id].map( - (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - })} ${column.is_nullable ? 
'| null' : ''}` - )} + ${columnsByTableId[relation.id] + .map((column) => + generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: column.is_nullable, + is_optional: false, + }, + { + types, + schemas, + tables, + views, + } + ) + ) + .join(',\n')} }` } @@ -230,35 +334,144 @@ export const apply = async ({ return 'unknown' } + // Special error case for functions that take table row but don't qualify as embedded functions + const hasTableRowError = (fn: PostgresFunction, inArgs: PostgresFunction['args']) => { + if ( + inArgs.length === 1 && + inArgs[0].name === '' && + relationTypeByIds.get(inArgs[0].type_id) && + !getTableNameFromRelationId(fn.return_type_relation_id, fn.return_type_id) + ) { + return true + } + return false + } + + // Check for generic conflict cases that need error reporting + const getConflictError = ( + schema: PostgresSchema, + fns: Array<{ fn: PostgresFunction; inArgs: PostgresFunction['args'] }>, + fn: PostgresFunction, + inArgs: PostgresFunction['args'] + ) => { + // If there is a single function definition, there is no conflict + if (fns.length <= 1) return null + + // Generic conflict detection patterns + // Pattern 1: No-args vs default-args conflicts + if (inArgs.length === 0) { + const conflictingFns = fns.filter(({ fn: otherFn, inArgs: otherInArgs }) => { + if (otherFn === fn) return false + return otherInArgs.length === 1 && otherInArgs[0].name === '' && otherInArgs[0].has_default + }) + + if (conflictingFns.length > 0) { + const conflictingFn = conflictingFns[0] + const returnTypeName = typesById.get(conflictingFn.fn.return_type_id)?.name || 'unknown' + return `Could not choose the best candidate function between: ${schema.name}.${fn.name}(), ${schema.name}.${fn.name}( => ${returnTypeName}). Try renaming the parameters or the function itself in the database so function overloading can be resolved` + } + } + + // Pattern 2: Same parameter name but different types (unresolvable overloads) + if (inArgs.length === 1 && inArgs[0].name !== '') { + const conflictingFns = fns.filter(({ fn: otherFn, inArgs: otherInArgs }) => { + if (otherFn === fn) return false + return ( + otherInArgs.length === 1 && + otherInArgs[0].name === inArgs[0].name && + otherInArgs[0].type_id !== inArgs[0].type_id + ) + }) + + if (conflictingFns.length > 0) { + const allConflictingFunctions = [{ fn, inArgs }, ...conflictingFns] + const conflictList = allConflictingFunctions + .sort((a, b) => { + const aArgs = a.inArgs + const bArgs = b.inArgs + return (aArgs[0]?.type_id || 0) - (bArgs[0]?.type_id || 0) + }) + .map((f) => { + const args = f.inArgs + return `${schema.name}.${fn.name}(${args.map((a) => `${a.name || ''} => ${typesById.get(a.type_id)?.name || 'unknown'}`).join(', ')})` + }) + .join(', ') + + return `Could not choose the best candidate function between: ${conflictList}. 
Try renaming the parameters or the function itself in the database so function overloading can be resolved` + } + } + + return null + } const getFunctionSignatures = ( schema: PostgresSchema, fns: Array<{ fn: PostgresFunction; inArgs: PostgresFunction['args'] }> ) => { - const args = fns - .map(({ inArgs }) => { - if (inArgs.length === 0) { - return 'Record' - } - const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { - const type = typesById.get(type_id) - let tsType = 'unknown' - if (type) { - tsType = pgTypeToTsType(schema, type.name, { - types, - schemas, - tables, - views, + return fns + .map(({ fn, inArgs }) => { + let argsType = 'never' + let returnType = getFunctionReturnType(schema, fn) + + // Check for specific error cases + const conflictError = getConflictError(schema, fns, fn, inArgs) + if (conflictError) { + if (inArgs.length > 0) { + const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { + const type = typesById.get(type_id) + let tsType = 'unknown' + if (type) { + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) + } + return { name, type: tsType, has_default } + }) + argsType = `{ ${argsNameAndType.map(({ name, type, has_default }) => `${JSON.stringify(name)}${has_default ? '?' : ''}: ${type}`)} }` + } + returnType = `{ error: true } & ${JSON.stringify(conflictError)}` + } else if (hasTableRowError(fn, inArgs)) { + // Special case for computed fields returning scalars functions + if (inArgs.length > 0) { + const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { + const type = typesById.get(type_id) + let tsType = 'unknown' + if (type) { + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) + } + return { name, type: tsType, has_default } }) + argsType = `{ ${argsNameAndType.map(({ name, type, has_default }) => `${JSON.stringify(name)}${has_default ? '?' : ''}: ${type}`)} }` } - return { name, type: tsType, has_default } - }) - return `{ ${argsNameAndType.map(({ name, type, has_default }) => `${JSON.stringify(name)}${has_default ? '?' : ''}: ${type}`)} }` + returnType = `{ error: true } & ${JSON.stringify(`the function ${schema.name}.${fn.name} with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache`)}` + } else if (inArgs.length > 0) { + const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { + const type = typesById.get(type_id) + let tsType = 'unknown' + if (type) { + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) + } + return { name, type: tsType, has_default } + }) + argsType = `{ ${argsNameAndType.map(({ name, type, has_default }) => `${JSON.stringify(name)}${has_default ? '?' 
: ''}: ${type}`)} }` + } + + return `{ Args: ${argsType}; Returns: ${getFunctionTsReturnType(fn, returnType)} }` }) - .toSorted() - // A function can have multiples definitions with differents args, but will always return the same type - .join(' | ') - return `{\nArgs: ${args}\n Returns: ${getFunctionTsReturnType(fns[0].fn, getFunctionReturnType(schema, fns[0].fn))}\n}` + .join(' |\n') } const internal_supabase_schema = postgrestVersion @@ -269,6 +482,14 @@ export const apply = async ({ }` : '' + function generateNullableUnionTsType(tsType: string, isNullable: boolean) { + // Only add the null union if the type is not unknown as unknown already includes null + if (tsType === 'unknown' || tsType === 'any' || !isNullable) { + return tsType + } + return `${tsType} | null` + } + function generateColumnTsDefinition( schema: PostgresSchema, column: { @@ -284,7 +505,7 @@ export const apply = async ({ views: PostgresView[] } ) { - return `${JSON.stringify(column.name)}${column.is_optional ? '?' : ''}: ${pgTypeToTsType(schema, column.format, context)} ${column.is_nullable ? '| null' : ''}` + return `${JSON.stringify(column.name)}${column.is_optional ? '?' : ''}: ${generateNullableUnionTsType(pgTypeToTsType(schema, column.format, context), column.is_nullable)}` } let output = ` @@ -324,7 +545,7 @@ export type Database = { ...schemaFunctions .filter(({ fn }) => fn.argument_types === table.name) .map(({ fn }) => { - return `${JSON.stringify(fn.name)}: ${getFunctionReturnType(schema, fn)} | null` + return `${JSON.stringify(fn.name)}: ${generateNullableUnionTsType(getFunctionReturnType(schema, fn), true)}` }), ]} } @@ -380,18 +601,26 @@ export type Database = { : schemaViews.map( ({ view, relationships }) => `${JSON.stringify(view.name)}: { Row: { - ${columnsByTableId[view.id].map((column) => - generateColumnTsDefinition( - schema, - { - name: column.name, - format: column.format, - is_nullable: column.is_nullable, - is_optional: false, - }, - { types, schemas, tables, views } - ) - )} + ${[ + ...columnsByTableId[view.id].map((column) => + generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: column.is_nullable, + is_optional: false, + }, + { types, schemas, tables, views } + ) + ), + ...schemaFunctions + .filter(({ fn }) => fn.argument_types === view.name) + .map( + ({ fn }) => + `${JSON.stringify(fn.name)}: ${generateNullableUnionTsType(getFunctionReturnType(schema, fn), true)}` + ), + ]} } ${ view.is_updatable @@ -443,7 +672,6 @@ export type Database = { if (schemaFunctions.length === 0) { return '[_ in never]: never' } - const schemaFunctionsGroupedByName = schemaFunctions.reduce( (acc, curr) => { acc[curr.fn.name] ??= [] @@ -489,12 +717,15 @@ export type Database = { const type = typesById.get(type_id) let tsType = 'unknown' if (type) { - tsType = `${pgTypeToTsType(schema, type.name, { - types, - schemas, - tables, - views, - })} | null` + tsType = `${generateNullableUnionTsType( + pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }), + true + )}` } return `${JSON.stringify(name)}: ${tsType}` })} diff --git a/test/db/00-init.sql b/test/db/00-init.sql index 3551a4e7..64107713 100644 --- a/test/db/00-init.sql +++ b/test/db/00-init.sql @@ -56,6 +56,17 @@ $$ language plpgsql; CREATE VIEW todos_view AS SELECT * FROM public.todos; -- For testing typegen on view-to-view relationships create view users_view as select * from public.users; +-- Create a more complex view for testing +CREATE VIEW user_todos_summary_view AS +SELECT + 
u.id as user_id, + u.name as user_name, + u.status as user_status, + COUNT(t.id) as todo_count, + array_agg(t.details) FILTER (WHERE t.details IS NOT NULL) as todo_details +FROM public.users u +LEFT JOIN public.todos t ON t."user-id" = u.id +GROUP BY u.id, u.name, u.status; create materialized view todos_matview as select * from public.todos; @@ -69,6 +80,11 @@ $$ select substring($1.details, 1, 3); $$ language sql stable; +create function public.blurb_varchar(public.todos_view) returns character varying as +$$ +select substring($1.details, 1, 3); +$$ language sql stable; + create function public.details_length(public.todos) returns integer as $$ select length($1.details); @@ -101,6 +117,15 @@ as $$ select * from public.users limit 1; $$; +create or replace function public.function_returning_single_row(todos public.todos) +returns public.users +language sql +stable +as $$ + select * from public.users limit 1; +$$; + + create or replace function public.function_returning_set_of_rows() returns setof public.users language sql @@ -117,6 +142,15 @@ as $$ select id, name from public.users; $$; +create or replace function public.function_returning_table_with_args(user_id int) +returns table (id int, name text) +language sql +stable +as $$ + select id, name from public.users WHERE id = user_id; +$$; + + create or replace function public.polymorphic_function(text) returns void language sql as ''; create or replace function public.polymorphic_function(bool) returns void language sql as ''; @@ -169,6 +203,20 @@ AS $$ SELECT * FROM public.users_audit WHERE user_id = user_row.id; $$; +CREATE OR REPLACE FUNCTION public.get_todos_by_matview(todos_matview) +RETURNS SETOF todos ROWS 1 +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM public.todos LIMIT 1; +$$; + +CREATE OR REPLACE FUNCTION public.search_todos_by_details(search_details text) +RETURNS SETOF todos +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM public.todos WHERE details ilike search_details; +$$; + CREATE OR REPLACE FUNCTION public.get_todos_setof_rows(user_row users) RETURNS SETOF todos LANGUAGE SQL STABLE @@ -182,3 +230,202 @@ LANGUAGE SQL STABLE AS $$ SELECT * FROM public.todos WHERE "user-id" = todo_row."user-id"; $$; + +-- SETOF composite_type - Returns multiple rows of a custom composite type +CREATE OR REPLACE FUNCTION public.get_composite_type_data() +RETURNS SETOF composite_type_with_array_attribute +LANGUAGE SQL STABLE +AS $$ + SELECT ROW(ARRAY['hello', 'world']::text[])::composite_type_with_array_attribute + UNION ALL + SELECT ROW(ARRAY['foo', 'bar']::text[])::composite_type_with_array_attribute; +$$; + +-- SETOF record - Returns multiple rows with structure defined in the function +CREATE OR REPLACE FUNCTION public.get_user_summary() +RETURNS SETOF record +LANGUAGE SQL STABLE +AS $$ + SELECT u.id, name, count(t.id) as todo_count + FROM public.users u + LEFT JOIN public.todos t ON t."user-id" = u.id + GROUP BY u.id, u.name; +$$; + +-- SETOF scalar_type - Returns multiple values of a basic type +CREATE OR REPLACE FUNCTION public.get_user_ids() +RETURNS SETOF bigint +LANGUAGE SQL STABLE +AS $$ + SELECT id FROM public.users; +$$; + + +-- Function returning view using scalar as input +CREATE OR REPLACE FUNCTION public.get_single_user_summary_from_view(search_user_id bigint) +RETURNS SETOF user_todos_summary_view +LANGUAGE SQL STABLE +ROWS 1 +AS $$ + SELECT * FROM user_todos_summary_view WHERE user_id = search_user_id; +$$; +-- Function returning view using table row as input +CREATE OR REPLACE FUNCTION 
public.get_single_user_summary_from_view(user_row users)
+RETURNS SETOF user_todos_summary_view
+LANGUAGE SQL STABLE
+ROWS 1
+AS $$
+  SELECT * FROM user_todos_summary_view WHERE user_id = user_row.id;
+$$;
+-- Function returning view using another view row as input
+CREATE OR REPLACE FUNCTION public.get_single_user_summary_from_view(userview_row users_view)
+RETURNS SETOF user_todos_summary_view
+LANGUAGE SQL STABLE
+ROWS 1
+AS $$
+  SELECT * FROM user_todos_summary_view WHERE user_id = userview_row.id;
+$$;
+
+
+-- Function returning table using scalar as input
+CREATE OR REPLACE FUNCTION public.get_todos_from_user(search_user_id bigint)
+RETURNS SETOF todos
+LANGUAGE SQL STABLE
+AS $$
+  SELECT * FROM todos WHERE "user-id" = search_user_id;
+$$;
+-- Function returning table using table row as input
+CREATE OR REPLACE FUNCTION public.get_todos_from_user(user_row users)
+RETURNS SETOF todos
+LANGUAGE SQL STABLE
+AS $$
+  SELECT * FROM todos WHERE "user-id" = user_row.id;
+$$;
+-- Function returning table using another view row as input
+CREATE OR REPLACE FUNCTION public.get_todos_from_user(userview_row users_view)
+RETURNS SETOF todos
+LANGUAGE SQL STABLE
+AS $$
+  SELECT * FROM todos WHERE "user-id" = userview_row.id;
+$$;
+
+-- Valid postgresql function override that produces an unresolvable postgrest function call
+create function postgrest_unresolvable_function() returns void language sql as '';
+create function postgrest_unresolvable_function(a text) returns int language sql as 'select 1';
+create function postgrest_unresolvable_function(a int) returns text language sql as $$
+  SELECT 'toto'
+$$;
+-- Valid postgresql function override with different return types depending on the arguments
+create function postgrest_resolvable_with_override_function() returns void language sql as '';
+create function postgrest_resolvable_with_override_function(a text) returns int language sql as 'select 1';
+create function postgrest_resolvable_with_override_function(b int) returns text language sql as $$
+  SELECT 'toto'
+$$;
+-- Function overrides returning setof tables
+create function postgrest_resolvable_with_override_function(user_id bigint) returns setof users language sql stable as $$
+  SELECT * FROM users WHERE id = user_id;
+$$;
+create function postgrest_resolvable_with_override_function(todo_id bigint, completed boolean) returns setof todos language sql stable as $$
+  SELECT * FROM todos WHERE id = todo_id AND completed = completed;
+$$;
+-- Function override taking a table as argument and returning a setof
+create function postgrest_resolvable_with_override_function(user_row users) returns setof todos language sql stable as $$
+  SELECT * FROM todos WHERE "user-id" = user_row.id;
+$$;
+
+create or replace function public.polymorphic_function_with_different_return(bool) returns int language sql as 'SELECT 1';
+create or replace function public.polymorphic_function_with_different_return(int) returns int language sql as 'SELECT 2';
+create or replace function public.polymorphic_function_with_different_return(text) returns text language sql as $$ SELECT 'foo' $$;
+
+create or replace function public.polymorphic_function_with_no_params_or_unnamed() returns int language sql as 'SELECT 1';
+create or replace function public.polymorphic_function_with_no_params_or_unnamed(bool) returns int language sql as 'SELECT 2';
+create or replace function public.polymorphic_function_with_no_params_or_unnamed(text) returns text language sql as $$ SELECT 'foo' $$;
+-- A function with a single unnamed param that isn't json/jsonb/text should never appear in the typegen as it won't be in the postgrest schema
+create or replace function public.polymorphic_function_with_unnamed_integer(int) returns int language sql as 'SELECT 1';
+create or replace function public.polymorphic_function_with_unnamed_json(json) returns int language sql as 'SELECT 1';
+create or replace function public.polymorphic_function_with_unnamed_jsonb(jsonb) returns int language sql as 'SELECT 1';
+create or replace function public.polymorphic_function_with_unnamed_text(text) returns int language sql as 'SELECT 1';
+
+-- Functions with unnamed parameters that have default values
+create or replace function public.polymorphic_function_with_unnamed_default() returns int language sql as 'SELECT 1';
+create or replace function public.polymorphic_function_with_unnamed_default(int default 42) returns int language sql as 'SELECT 2';
+create or replace function public.polymorphic_function_with_unnamed_default(text default 'default') returns text language sql as $$ SELECT 'foo' $$;
+
+-- Functions with unnamed parameters that have default values and multiple overloads
+create or replace function public.polymorphic_function_with_unnamed_default_overload() returns int language sql as 'SELECT 1';
+create or replace function public.polymorphic_function_with_unnamed_default_overload(int default 42) returns int language sql as 'SELECT 2';
+create or replace function public.polymorphic_function_with_unnamed_default_overload(text default 'default') returns text language sql as $$ SELECT 'foo' $$;
+create or replace function public.polymorphic_function_with_unnamed_default_overload(bool default true) returns int language sql as 'SELECT 3';
+
+-- Test function with unnamed row parameter returning setof
+CREATE OR REPLACE FUNCTION public.test_unnamed_row_setof(todos)
+RETURNS SETOF todos
+LANGUAGE SQL STABLE
+AS $$
+  SELECT * FROM public.todos WHERE "user-id" = $1."user-id";
+$$;
+
+CREATE OR REPLACE FUNCTION public.test_unnamed_row_setof(users)
+RETURNS SETOF todos
+LANGUAGE SQL STABLE
+AS $$
+  SELECT * FROM public.todos WHERE "user-id" = $1."id";
+$$;
+
+
+CREATE OR REPLACE FUNCTION public.test_unnamed_row_setof(user_id bigint)
+RETURNS SETOF todos
+LANGUAGE SQL STABLE
+AS $$
+  SELECT * FROM public.todos WHERE "user-id" = user_id;
+$$;
+
+-- Test function with unnamed row parameter returning scalar
+CREATE OR REPLACE FUNCTION public.test_unnamed_row_scalar(todos)
+RETURNS integer
+LANGUAGE SQL STABLE
+AS $$
+  SELECT COUNT(*) FROM public.todos WHERE "user-id" = $1."user-id";
+$$;
+
+-- Test function with unnamed view row parameter
+CREATE OR REPLACE FUNCTION public.test_unnamed_view_row(todos_view)
+RETURNS SETOF todos
+LANGUAGE SQL STABLE
+AS $$
+  SELECT * FROM public.todos WHERE "user-id" = $1."user-id";
+$$;
+
+-- Test function with multiple unnamed row parameters
+CREATE OR REPLACE FUNCTION public.test_unnamed_multiple_rows(users, todos)
+RETURNS SETOF todos
+LANGUAGE SQL STABLE
+AS $$
+  SELECT * FROM public.todos
+  WHERE "user-id" = $1.id
+  AND id = $2.id;
+$$;
+
+-- Test function with unnamed row parameter returning composite
+CREATE OR REPLACE FUNCTION public.test_unnamed_row_composite(users)
+RETURNS composite_type_with_array_attribute
+LANGUAGE SQL STABLE
+AS $$
+  SELECT ROW(ARRAY[$1.name])::composite_type_with_array_attribute;
+$$;
+
+-- Function that returns a single row
+CREATE OR REPLACE FUNCTION public.function_using_table_returns(user_row users)
+RETURNS todos
+LANGUAGE SQL STABLE
+AS $$
+  SELECT * FROM public.todos WHERE 
todos."user-id" = user_row.id LIMIT 1; +$$; + +CREATE OR REPLACE FUNCTION public.function_using_setof_rows_one(user_row users) +RETURNS SETOF todos +LANGUAGE SQL STABLE +ROWS 1 +AS $$ + SELECT * FROM public.todos WHERE todos."user-id" = user_row.id LIMIT 1; +$$; diff --git a/test/lib/functions.ts b/test/lib/functions.ts index fb2c4692..9d6088b6 100644 --- a/test/lib/functions.ts +++ b/test/lib/functions.ts @@ -36,6 +36,7 @@ test('list', async () => { "is_set_returning_function": false, "language": "sql", "name": "add", + "prorows": null, "return_type": "integer", "return_type_id": 23, "return_type_relation_id": null, @@ -46,6 +47,128 @@ test('list', async () => { ) }) +test('list set-returning function with single object limit', async () => { + const res = await pgMeta.functions.list() + expect(res.data?.filter(({ name }) => name === 'get_user_audit_setof_single_row')) + .toMatchInlineSnapshot(` + [ + { + "args": [ + { + "has_default": false, + "mode": "in", + "name": "user_row", + "type_id": 16395, + }, + ], + "argument_types": "user_row users", + "behavior": "STABLE", + "complete_statement": "CREATE OR REPLACE FUNCTION public.get_user_audit_setof_single_row(user_row users) + RETURNS SETOF users_audit + LANGUAGE sql + STABLE ROWS 1 + AS $function$ + SELECT * FROM public.users_audit WHERE user_id = user_row.id; + $function$ + ", + "config_params": null, + "definition": " + SELECT * FROM public.users_audit WHERE user_id = user_row.id; + ", + "id": 16506, + "identity_argument_types": "user_row users", + "is_set_returning_function": true, + "language": "sql", + "name": "get_user_audit_setof_single_row", + "prorows": 1, + "return_type": "SETOF users_audit", + "return_type_id": 16418, + "return_type_relation_id": 16416, + "schema": "public", + "security_definer": false, + }, + ] + `) +}) + +test('list set-returning function with multiples definitions', async () => { + const res = await pgMeta.functions.list() + expect(res.data?.filter(({ name }) => name === 'get_todos_setof_rows')).toMatchInlineSnapshot(` + [ + { + "args": [ + { + "has_default": false, + "mode": "in", + "name": "user_row", + "type_id": 16395, + }, + ], + "argument_types": "user_row users", + "behavior": "STABLE", + "complete_statement": "CREATE OR REPLACE FUNCTION public.get_todos_setof_rows(user_row users) + RETURNS SETOF todos + LANGUAGE sql + STABLE + AS $function$ + SELECT * FROM public.todos WHERE "user-id" = user_row.id; + $function$ + ", + "config_params": null, + "definition": " + SELECT * FROM public.todos WHERE "user-id" = user_row.id; + ", + "id": 16509, + "identity_argument_types": "user_row users", + "is_set_returning_function": true, + "language": "sql", + "name": "get_todos_setof_rows", + "prorows": 1000, + "return_type": "SETOF todos", + "return_type_id": 16404, + "return_type_relation_id": 16402, + "schema": "public", + "security_definer": false, + }, + { + "args": [ + { + "has_default": false, + "mode": "in", + "name": "todo_row", + "type_id": 16404, + }, + ], + "argument_types": "todo_row todos", + "behavior": "STABLE", + "complete_statement": "CREATE OR REPLACE FUNCTION public.get_todos_setof_rows(todo_row todos) + RETURNS SETOF todos + LANGUAGE sql + STABLE + AS $function$ + SELECT * FROM public.todos WHERE "user-id" = todo_row."user-id"; + $function$ + ", + "config_params": null, + "definition": " + SELECT * FROM public.todos WHERE "user-id" = todo_row."user-id"; + ", + "id": 16510, + "identity_argument_types": "todo_row todos", + "is_set_returning_function": true, + "language": "sql", + "name": 
"get_todos_setof_rows", + "prorows": 1000, + "return_type": "SETOF todos", + "return_type_id": 16404, + "return_type_relation_id": 16402, + "schema": "public", + "security_definer": false, + }, + ] + `) +}) + test('list functions with included schemas', async () => { let res = await pgMeta.functions.list({ includedSchemas: ['public'], @@ -136,6 +259,7 @@ test('retrieve, create, update, delete', async () => { "is_set_returning_function": false, "language": "sql", "name": "test_func", + "prorows": null, "return_type": "integer", "return_type_id": 23, "return_type_relation_id": null, @@ -186,6 +310,7 @@ test('retrieve, create, update, delete', async () => { "is_set_returning_function": false, "language": "sql", "name": "test_func", + "prorows": null, "return_type": "integer", "return_type_id": 23, "return_type_relation_id": null, @@ -240,6 +365,7 @@ test('retrieve, create, update, delete', async () => { "is_set_returning_function": false, "language": "sql", "name": "test_func_renamed", + "prorows": null, "return_type": "integer", "return_type_id": 23, "return_type_relation_id": null, @@ -290,6 +416,7 @@ test('retrieve, create, update, delete', async () => { "is_set_returning_function": false, "language": "sql", "name": "test_func_renamed", + "prorows": null, "return_type": "integer", "return_type_id": 23, "return_type_relation_id": null, @@ -345,6 +472,7 @@ test('retrieve set-returning function', async () => { "is_set_returning_function": true, "language": "sql", "name": "function_returning_set_of_rows", + "prorows": 1000, "return_type": "SETOF users", "return_type_id": Any, "return_type_relation_id": Any, diff --git a/test/lib/types.ts b/test/lib/types.ts index fb8c8f30..349a1b80 100644 --- a/test/lib/types.ts +++ b/test/lib/types.ts @@ -17,6 +17,7 @@ test('list', async () => { "id": Any, "name": "user_status", "schema": "public", + "type_relation_id": null, } ` ) @@ -73,6 +74,7 @@ test('list types with include Table Types', async () => { "id": Any, "name": "todos", "schema": "public", + "type_relation_id": 16402, } ` ) @@ -93,7 +95,7 @@ test('composite type attributes', async () => { const res = await pgMeta.types.list() expect(res.data?.find(({ name }) => name === 'test_composite')).toMatchInlineSnapshot( - { id: expect.any(Number) }, + { id: expect.any(Number), type_relation_id: expect.any(Number) }, ` { "attributes": [ @@ -112,6 +114,7 @@ test('composite type attributes', async () => { "id": Any, "name": "test_composite", "schema": "public", + "type_relation_id": Any, } ` ) diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 76ac6218..f101adde 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -129,6 +129,12 @@ test('typegen: typescript', async () => { details_is_long: boolean | null details_length: number | null details_words: string[] | null + test_unnamed_row_scalar: number | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null } Insert: { details?: string | null @@ -150,24 +156,1122 @@ test('typegen: typescript', async () => { { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + 
referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + user_details: { + Row: { + details: string | null + user_id: number + } + Insert: { + details?: string | null + user_id: number + } + Update: { + details?: string | null + user_id?: number + } + Relationships: [ + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + users: { + Row: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + test_unnamed_row_composite: + | Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null + } + Insert: { + decimal?: number | null + id?: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Update: { + decimal?: number | null + id?: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + users_audit: { + Row: { + created_at: string | null + id: number + previous_value: Json | null + user_id: number | null + } + Insert: { + created_at?: string | null + id?: number + previous_value?: Json | null + user_id?: number | null + } + Update: { + created_at?: string | null + id?: number + previous_value?: Json | null + user_id?: number | null + } + Relationships: [] + } + } + Views: { + a_view: { + Row: { + id: number | null + } + Insert: { + id?: number | null + } + Update: { + id?: number | null + } + Relationships: [] + } + todos_matview: { + Row: { + details: string | null + id: number | null + "user-id": number | null + get_todos_by_matview: { + details: string | null + id: number + "user-id": number + } | null + } + Relationships: [ + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: 
["initial_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + todos_view: { + Row: { + details: string | null + id: number | null + "user-id": number | null + blurb_varchar: string | null + test_unnamed_view_row: { + details: string | null + id: number + "user-id": number + } | null + } + Insert: { + details?: string | null + id?: number | null + "user-id"?: number | null + } + Update: { + details?: string | null + id?: number | null + "user-id"?: number | null + } + Relationships: [ + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + user_todos_summary_view: { + Row: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + users_view: { + Row: { + decimal: number | null + id: number | null + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + Insert: { + decimal?: number | null + id?: number | null + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Update: { + decimal?: number | null + id?: number | null + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + users_view_with_multiple_refs_to_users: { + Row: { + initial_id: number | null + initial_name: string | null + second_id: number | null + second_name: string | null + } + Relationships: [] + } + } + Functions: { + blurb: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.blurb with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + blurb_varchar: + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + | { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + details_is_long: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.details_is_long with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + details_length: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] 
} + Returns: { + error: true + } & "the function public.details_length with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + details_words: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.details_words with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + function_returning_row: { + Args: never + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "*" + to: "users" + isOneToOne: true + isSetofReturn: false + } + } + function_returning_set_of_rows: { + Args: never + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + function_returning_single_row: { + Args: { todos: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "todos" + to: "users" + isOneToOne: true + isSetofReturn: false + } + } + function_returning_table: { + Args: never + Returns: { + id: number + name: string + }[] + } + function_returning_table_with_args: { + Args: { user_id: number } + Returns: { + id: number + name: string + }[] + } + function_using_setof_rows_one: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: true + } + } + function_using_table_returns: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: false + } + } + get_composite_type_data: { + Args: never + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"][] + SetofOptions: { + from: "*" + to: "composite_type_with_array_attribute" + isOneToOne: false + isSetofReturn: true + } + } + get_single_user_summary_from_view: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users_view" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "*" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + get_todos_by_matview: { + Args: { 
"": unknown } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "todos_matview" + to: "todos" + isOneToOne: true + isSetofReturn: true + } + } + get_todos_from_user: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + get_todos_setof_rows: + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + get_user_audit_setof_single_row: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + created_at: string | null + id: number + previous_value: Json | null + user_id: number | null + } + SetofOptions: { + from: "users" + to: "users_audit" + isOneToOne: true + isSetofReturn: true + } + } + get_user_ids: { Args: never; Returns: number[] } + get_user_summary: { Args: never; Returns: Record[] } + polymorphic_function: { Args: { "": string }; Returns: undefined } + polymorphic_function_with_different_return: { + Args: { "": string } + Returns: string + } + polymorphic_function_with_no_params_or_unnamed: + | { Args: never; Returns: number } + | { Args: { "": string }; Returns: string } + polymorphic_function_with_unnamed_default: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default(), public.polymorphic_function_with_unnamed_default( => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_default_overload: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default_overload(), public.polymorphic_function_with_unnamed_default_overload( => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_json: { + Args: { "": Json } + Returns: number + } + polymorphic_function_with_unnamed_jsonb: { + Args: { "": Json } + Returns: number + } + polymorphic_function_with_unnamed_text: { + Args: { "": string } + Returns: number + } + postgres_fdw_disconnect: { Args: { "": string }; Returns: boolean } + postgres_fdw_disconnect_all: { Args: never; Returns: boolean } + postgres_fdw_get_connections: { + Args: never + Returns: Record[] + } + postgres_fdw_handler: { Args: never; Returns: unknown } + postgrest_resolvable_with_override_function: + | { Args: { a: string }; Returns: number } + | { + Args: { user_id: number } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { completed: boolean; todo_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { Args: { b: number }; Returns: string } + | { Args: never; Returns: undefined } + postgrest_unresolvable_function: + | { + Args: { a: string } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { + Args: { a: number } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: never; Returns: undefined } + search_todos_by_details: { + Args: { search_details: string } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_internal_query: { Args: never; Returns: undefined } + test_unnamed_row_composite: { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + SetofOptions: { + from: "users" + to: "composite_type_with_array_attribute" + isOneToOne: true + isSetofReturn: false + } + } + test_unnamed_row_scalar: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.test_unnamed_row_scalar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + test_unnamed_row_setof: + | { + Args: { user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_unnamed_view_row: { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + } + Enums: { + meme_status: "new" | "old" | "retired" + user_status: "ACTIVE" | "INACTIVE" + } + CompositeTypes: { + composite_type_with_array_attribute: { + my_text_array: string[] | null + } + composite_type_with_record_attribute: { + todo: Database["public"]["Tables"]["todos"]["Row"] | null + } + } + } + } + + type DatabaseWithoutInternals = Omit + + type DefaultSchema = DatabaseWithoutInternals[Extract] + + export type Tables< + DefaultSchemaTableNameOrOptions extends + | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) + | { schema: keyof DatabaseWithoutInternals }, + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + : never = never, + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + Row: infer R + } + ? R + : never + : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] & + DefaultSchema["Views"]) + ? (DefaultSchema["Tables"] & + DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends { + Row: infer R + } + ? 
R + : never + : never + + export type TablesInsert< + DefaultSchemaTableNameOrOptions extends + | keyof DefaultSchema["Tables"] + | { schema: keyof DatabaseWithoutInternals }, + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + : never = never, + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + Insert: infer I + } + ? I + : never + : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { + Insert: infer I + } + ? I + : never + : never + + export type TablesUpdate< + DefaultSchemaTableNameOrOptions extends + | keyof DefaultSchema["Tables"] + | { schema: keyof DatabaseWithoutInternals }, + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + : never = never, + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + Update: infer U + } + ? U + : never + : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { + Update: infer U + } + ? U + : never + : never + + export type Enums< + DefaultSchemaEnumNameOrOptions extends + | keyof DefaultSchema["Enums"] + | { schema: keyof DatabaseWithoutInternals }, + EnumName extends DefaultSchemaEnumNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + : never = never, + > = DefaultSchemaEnumNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] + ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] + : never + + export type CompositeTypes< + PublicCompositeTypeNameOrOptions extends + | keyof DefaultSchema["CompositeTypes"] + | { schema: keyof DatabaseWithoutInternals }, + CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + : never = never, + > = PublicCompositeTypeNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] + ? 
DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] + : never + + export const Constants = { + public: { + Enums: { + meme_status: ["new", "old", "retired"], + user_status: ["ACTIVE", "INACTIVE"], + }, + }, + } as const + " + ` + ) +}) + +test('typegen w/ one-to-one relationships', async () => { + const { body } = await app.inject({ + method: 'GET', + path: '/generators/typescript', + query: { detect_one_to_one_relationships: 'true' }, + }) + expect(body).toMatchInlineSnapshot( + ` + "export type Json = + | string + | number + | boolean + | null + | { [key: string]: Json | undefined } + | Json[] + + export type Database = { + public: { + Tables: { + category: { + Row: { + id: number + name: string + } + Insert: { + id?: number + name: string + } + Update: { + id?: number + name?: string + } + Relationships: [] + } + empty: { + Row: {} + Insert: {} + Update: {} + Relationships: [] + } + foreign_table: { + Row: { + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + Insert: { + id: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Update: { + id?: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + memes: { + Row: { + category: number | null + created_at: string + id: number + metadata: Json | null + name: string + status: Database["public"]["Enums"]["meme_status"] | null + } + Insert: { + category?: number | null + created_at: string + id?: number + metadata?: Json | null + name: string + status?: Database["public"]["Enums"]["meme_status"] | null + } + Update: { + category?: number | null + created_at?: string + id?: number + metadata?: Json | null + name?: string + status?: Database["public"]["Enums"]["meme_status"] | null + } + Relationships: [ + { + foreignKeyName: "memes_category_fkey" + columns: ["category"] + isOneToOne: false + referencedRelation: "category" + referencedColumns: ["id"] + }, + ] + } + table_with_other_tables_row_type: { + Row: { + col1: Database["public"]["Tables"]["user_details"]["Row"] | null + col2: Database["public"]["Views"]["a_view"]["Row"] | null + } + Insert: { + col1?: Database["public"]["Tables"]["user_details"]["Row"] | null + col2?: Database["public"]["Views"]["a_view"]["Row"] | null + } + Update: { + col1?: Database["public"]["Tables"]["user_details"]["Row"] | null + col2?: Database["public"]["Views"]["a_view"]["Row"] | null + } + Relationships: [] + } + table_with_primary_key_other_than_id: { + Row: { + name: string | null + other_id: number + } + Insert: { + name?: string | null + other_id?: number + } + Update: { + name?: string | null + other_id?: number + } + Relationships: [] + } + todos: { + Row: { + details: string | null + id: number + "user-id": number + blurb: string | null + blurb_varchar: string | null + details_is_long: boolean | null + details_length: number | null + details_words: string[] | null + test_unnamed_row_scalar: number | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null + } + Insert: { + details?: string | null + id?: number + "user-id": number + } + Update: { + details?: string | null + id?: number + "user-id"?: number + } + Relationships: [ + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: 
"user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false referencedRelation: "users" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["initial_id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["second_id"] }, @@ -190,30 +1294,42 @@ test('typegen: typescript', async () => { { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] + isOneToOne: true referencedRelation: "a_view" referencedColumns: ["id"] }, { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] + isOneToOne: true + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true referencedRelation: "users" referencedColumns: ["id"] }, { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] + isOneToOne: true referencedRelation: "users_view" referencedColumns: ["id"] }, { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] + isOneToOne: true referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["initial_id"] }, { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] + isOneToOne: true referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["second_id"] }, @@ -225,6 +1341,14 @@ test('typegen: typescript', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + test_unnamed_row_composite: + | Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null } Insert: { decimal?: number | null @@ -280,35 +1404,52 @@ test('typegen: typescript', async () => { details: string | null id: number | null "user-id": number | null + get_todos_by_matview: { + details: string | null + id: number + "user-id": number + } | null } Relationships: [ { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "a_view" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false referencedRelation: "users" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["initial_id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["second_id"] }, @@ -319,6 +1460,12 @@ test('typegen: typescript', async () => { details: string | null id: number | null "user-id": number | null + blurb_varchar: string | null + test_unnamed_view_row: { + details: string | 
null + id: number + "user-id": number + } | null } Insert: { details?: string | null @@ -334,35 +1481,57 @@ test('typegen: typescript', async () => { { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "a_view" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false referencedRelation: "users" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["initial_id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["second_id"] }, ] } + user_todos_summary_view: { + Row: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } users_view: { Row: { decimal: number | null @@ -397,59 +1566,277 @@ test('typegen: typescript', async () => { Functions: { blurb: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string - } - blurb_varchar: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string + Returns: { + error: true + } & "the function public.blurb with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + blurb_varchar: + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + | { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } details_is_long: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: boolean + Returns: { + error: true + } & "the function public.details_is_long with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } details_length: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: number + Returns: { + error: true + } & "the function public.details_length with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } details_words: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string[] + Returns: { + error: true + } & "the function public.details_words with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } function_returning_row: { - Args: Record + Args: never Returns: { decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } + SetofOptions: { + from: "*" + to: "users" + isOneToOne: true + isSetofReturn: false + } } function_returning_set_of_rows: { - Args: Record + Args: never Returns: { decimal: number | null id: number name: 
string | null status: Database["public"]["Enums"]["user_status"] | null }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + function_returning_single_row: { + Args: { todos: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "todos" + to: "users" + isOneToOne: true + isSetofReturn: false + } } function_returning_table: { - Args: Record + Args: never + Returns: { + id: number + name: string + }[] + } + function_returning_table_with_args: { + Args: { user_id: number } + Returns: { + id: number + name: string + }[] + } + function_using_setof_rows_one: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: true + } + } + function_using_table_returns: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { + details: string | null id: number - name: string - }[] + "user-id": number + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: false + } + } + get_composite_type_data: { + Args: never + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"][] + SetofOptions: { + from: "*" + to: "composite_type_with_array_attribute" + isOneToOne: false + isSetofReturn: true + } } - get_todos_setof_rows: { - Args: - | { todo_row: Database["public"]["Tables"]["todos"]["Row"] } - | { user_row: Database["public"]["Tables"]["users"]["Row"] } + get_single_user_summary_from_view: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users_view" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "*" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + get_todos_by_matview: { + Args: { "": unknown } Returns: { details: string | null id: number "user-id": number - }[] + } + SetofOptions: { + from: "todos_matview" + to: "todos" + isOneToOne: true + isSetofReturn: true + } } + get_todos_from_user: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { 
+ from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + get_todos_setof_rows: + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } get_user_audit_setof_single_row: { Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { @@ -457,31 +1844,207 @@ test('typegen: typescript', async () => { id: number previous_value: Json | null user_id: number | null - }[] - } - polymorphic_function: { - Args: { "": boolean } | { "": string } - Returns: undefined + } + SetofOptions: { + from: "users" + to: "users_audit" + isOneToOne: true + isSetofReturn: true + } } - postgres_fdw_disconnect: { + get_user_ids: { Args: never; Returns: number[] } + get_user_summary: { Args: never; Returns: Record[] } + polymorphic_function: { Args: { "": string }; Returns: undefined } + polymorphic_function_with_different_return: { Args: { "": string } - Returns: boolean + Returns: string } - postgres_fdw_disconnect_all: { - Args: Record - Returns: boolean + polymorphic_function_with_no_params_or_unnamed: + | { Args: never; Returns: number } + | { Args: { "": string }; Returns: string } + polymorphic_function_with_unnamed_default: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default(), public.polymorphic_function_with_unnamed_default( => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_default_overload: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default_overload(), public.polymorphic_function_with_unnamed_default_overload( => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_json: { + Args: { "": Json } + Returns: number + } + polymorphic_function_with_unnamed_jsonb: { + Args: { "": Json } + Returns: number + } + polymorphic_function_with_unnamed_text: { + Args: { "": string } + Returns: number } + postgres_fdw_disconnect: { Args: { "": string }; Returns: boolean } + postgres_fdw_disconnect_all: { Args: never; Returns: boolean } postgres_fdw_get_connections: { - Args: Record + Args: never Returns: Record[] } - postgres_fdw_handler: { - Args: Record - Returns: unknown + postgres_fdw_handler: { Args: never; Returns: unknown } + postgrest_resolvable_with_override_function: + | { Args: { a: string }; Returns: number } + | { + Args: { user_id: number } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { completed: boolean; todo_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { Args: { b: number }; Returns: string } + | { Args: never; Returns: undefined } + postgrest_unresolvable_function: + | { + Args: { a: string } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { + Args: { a: number } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: never; Returns: undefined } + search_todos_by_details: { + Args: { search_details: string } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_internal_query: { Args: never; Returns: undefined } + test_unnamed_row_composite: { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + SetofOptions: { + from: "users" + to: "composite_type_with_array_attribute" + isOneToOne: true + isSetofReturn: false + } + } + test_unnamed_row_scalar: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.test_unnamed_row_scalar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } - test_internal_query: { - Args: Record - Returns: undefined + test_unnamed_row_setof: + | { + Args: { user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_unnamed_view_row: { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } } } Enums: { @@ -762,6 +2325,12 @@ test('typegen: typescript w/ one-to-one relationships', async () => { details_is_long: boolean | null details_length: number | null details_words: string[] | null + test_unnamed_row_scalar: number | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null } Insert: { details?: string | null @@ -781,6 +2350,13 @@ test('typegen: typescript w/ one-to-one relationships', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] @@ -832,6 +2408,13 @@ test('typegen: typescript w/ one-to-one relationships', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] @@ -868,6 +2451,14 @@ test('typegen: typescript w/ one-to-one relationships', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + test_unnamed_row_composite: + | Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + | null + test_unnamed_row_setof: { 
+ details: string | null + id: number + "user-id": number + } | null } Insert: { decimal?: number | null @@ -923,6 +2514,11 @@ test('typegen: typescript w/ one-to-one relationships', async () => { details: string | null id: number | null "user-id": number | null + get_todos_by_matview: { + details: string | null + id: number + "user-id": number + } | null } Relationships: [ { @@ -932,6 +2528,13 @@ test('typegen: typescript w/ one-to-one relationships', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] @@ -967,6 +2570,12 @@ test('typegen: typescript w/ one-to-one relationships', async () => { details: string | null id: number | null "user-id": number | null + blurb_varchar: string | null + test_unnamed_view_row: { + details: string | null + id: number + "user-id": number + } | null } Insert: { details?: string | null @@ -986,6 +2595,13 @@ test('typegen: typescript w/ one-to-one relationships', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] @@ -1016,6 +2632,16 @@ test('typegen: typescript w/ one-to-one relationships', async () => { }, ] } + user_todos_summary_view: { + Row: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } users_view: { Row: { decimal: number | null @@ -1050,59 +2676,277 @@ test('typegen: typescript w/ one-to-one relationships', async () => { Functions: { blurb: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string - } - blurb_varchar: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string + Returns: { + error: true + } & "the function public.blurb with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + blurb_varchar: + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + | { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } details_is_long: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: boolean + Returns: { + error: true + } & "the function public.details_is_long with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } details_length: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: number + Returns: { + error: true + } & "the function public.details_length with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } details_words: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string[] + Returns: { + error: true + } & "the function public.details_words 
with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } function_returning_row: { - Args: Record + Args: never Returns: { decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } + SetofOptions: { + from: "*" + to: "users" + isOneToOne: true + isSetofReturn: false + } } function_returning_set_of_rows: { - Args: Record + Args: never Returns: { decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + function_returning_single_row: { + Args: { todos: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "todos" + to: "users" + isOneToOne: true + isSetofReturn: false + } } function_returning_table: { - Args: Record + Args: never + Returns: { + id: number + name: string + }[] + } + function_returning_table_with_args: { + Args: { user_id: number } Returns: { id: number name: string }[] } - get_todos_setof_rows: { - Args: - | { todo_row: Database["public"]["Tables"]["todos"]["Row"] } - | { user_row: Database["public"]["Tables"]["users"]["Row"] } + function_using_setof_rows_one: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { details: string | null id: number "user-id": number - }[] + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: true + } + } + function_using_table_returns: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: false + } + } + get_composite_type_data: { + Args: never + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"][] + SetofOptions: { + from: "*" + to: "composite_type_with_array_attribute" + isOneToOne: false + isSetofReturn: true + } + } + get_single_user_summary_from_view: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users_view" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "*" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + get_todos_by_matview: { + Args: { "": unknown } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "todos_matview" + to: "todos" + isOneToOne: true + 
isSetofReturn: true + } } + get_todos_from_user: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + get_todos_setof_rows: + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } get_user_audit_setof_single_row: { Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { @@ -1110,31 +2954,207 @@ test('typegen: typescript w/ one-to-one relationships', async () => { id: number previous_value: Json | null user_id: number | null - }[] - } - polymorphic_function: { - Args: { "": boolean } | { "": string } - Returns: undefined + } + SetofOptions: { + from: "users" + to: "users_audit" + isOneToOne: true + isSetofReturn: true + } } - postgres_fdw_disconnect: { + get_user_ids: { Args: never; Returns: number[] } + get_user_summary: { Args: never; Returns: Record[] } + polymorphic_function: { Args: { "": string }; Returns: undefined } + polymorphic_function_with_different_return: { Args: { "": string } - Returns: boolean + Returns: string + } + polymorphic_function_with_no_params_or_unnamed: + | { Args: never; Returns: number } + | { Args: { "": string }; Returns: string } + polymorphic_function_with_unnamed_default: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default(), public.polymorphic_function_with_unnamed_default( => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_default_overload: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default_overload(), public.polymorphic_function_with_unnamed_default_overload( => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_json: { + Args: { "": Json } + Returns: number + } + polymorphic_function_with_unnamed_jsonb: { + Args: { "": Json } + Returns: number } - postgres_fdw_disconnect_all: { - Args: Record - Returns: boolean + polymorphic_function_with_unnamed_text: { + Args: { "": string } + Returns: number } + postgres_fdw_disconnect: { Args: { "": string }; Returns: boolean } + postgres_fdw_disconnect_all: { Args: never; Returns: boolean } postgres_fdw_get_connections: { - Args: Record + Args: never Returns: Record[] } - postgres_fdw_handler: { - Args: Record - Returns: unknown + postgres_fdw_handler: { Args: never; Returns: unknown } + postgrest_resolvable_with_override_function: + | { Args: { a: string }; Returns: number } + | { + Args: { user_id: number } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { completed: boolean; todo_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { Args: { b: number }; Returns: string } + | { Args: never; Returns: undefined } + postgrest_unresolvable_function: + | { + Args: { a: string } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { + Args: { a: number } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: never; Returns: undefined } + search_todos_by_details: { + Args: { search_details: string } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_internal_query: { Args: never; Returns: undefined } + test_unnamed_row_composite: { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + SetofOptions: { + from: "users" + to: "composite_type_with_array_attribute" + isOneToOne: true + isSetofReturn: false + } + } + test_unnamed_row_scalar: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.test_unnamed_row_scalar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } - test_internal_query: { - Args: Record - Returns: undefined + test_unnamed_row_setof: + | { + Args: { user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_unnamed_view_row: { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } } } Enums: { @@ -1420,6 +3440,12 @@ test('typegen: typescript w/ postgrestVersion', async () => { details_is_long: boolean | null details_length: number | null details_words: string[] | null + test_unnamed_row_scalar: number | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null } Insert: { details?: string | null @@ -1439,6 +3465,13 @@ test('typegen: typescript w/ postgrestVersion', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] @@ -1490,6 +3523,13 @@ test('typegen: typescript w/ postgrestVersion', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] @@ -1526,6 +3566,14 @@ test('typegen: typescript w/ postgrestVersion', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + test_unnamed_row_composite: + | Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + | null + test_unnamed_row_setof: { + details: string | null + 
id: number + "user-id": number + } | null } Insert: { decimal?: number | null @@ -1581,6 +3629,11 @@ test('typegen: typescript w/ postgrestVersion', async () => { details: string | null id: number | null "user-id": number | null + get_todos_by_matview: { + details: string | null + id: number + "user-id": number + } | null } Relationships: [ { @@ -1590,6 +3643,13 @@ test('typegen: typescript w/ postgrestVersion', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] @@ -1625,6 +3685,12 @@ test('typegen: typescript w/ postgrestVersion', async () => { details: string | null id: number | null "user-id": number | null + blurb_varchar: string | null + test_unnamed_view_row: { + details: string | null + id: number + "user-id": number + } | null } Insert: { details?: string | null @@ -1644,6 +3710,13 @@ test('typegen: typescript w/ postgrestVersion', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] @@ -1674,6 +3747,16 @@ test('typegen: typescript w/ postgrestVersion', async () => { }, ] } + user_todos_summary_view: { + Row: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } users_view: { Row: { decimal: number | null @@ -1708,59 +3791,277 @@ test('typegen: typescript w/ postgrestVersion', async () => { Functions: { blurb: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string - } - blurb_varchar: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string + Returns: { + error: true + } & "the function public.blurb with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + blurb_varchar: + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + | { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } details_is_long: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: boolean + Returns: { + error: true + } & "the function public.details_is_long with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } details_length: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: number + Returns: { + error: true + } & "the function public.details_length with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } details_words: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string[] + Returns: { + error: true + } & "the function public.details_words with parameter or with a single unnamed json/jsonb parameter, but no 
matches were found in the schema cache" } function_returning_row: { - Args: Record + Args: never Returns: { decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } + SetofOptions: { + from: "*" + to: "users" + isOneToOne: true + isSetofReturn: false + } } function_returning_set_of_rows: { - Args: Record + Args: never Returns: { decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + function_returning_single_row: { + Args: { todos: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "todos" + to: "users" + isOneToOne: true + isSetofReturn: false + } } function_returning_table: { - Args: Record + Args: never + Returns: { + id: number + name: string + }[] + } + function_returning_table_with_args: { + Args: { user_id: number } Returns: { id: number name: string }[] } - get_todos_setof_rows: { - Args: - | { todo_row: Database["public"]["Tables"]["todos"]["Row"] } - | { user_row: Database["public"]["Tables"]["users"]["Row"] } + function_using_setof_rows_one: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { details: string | null id: number "user-id": number - }[] + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: true + } } + function_using_table_returns: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: false + } + } + get_composite_type_data: { + Args: never + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"][] + SetofOptions: { + from: "*" + to: "composite_type_with_array_attribute" + isOneToOne: false + isSetofReturn: true + } + } + get_single_user_summary_from_view: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users_view" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "*" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + get_todos_by_matview: { + Args: { "": unknown } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "todos_matview" + to: "todos" + isOneToOne: true + isSetofReturn: true + } + } + get_todos_from_user: + | { + Args: { + 
userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + get_todos_setof_rows: + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } get_user_audit_setof_single_row: { Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { @@ -1768,31 +4069,207 @@ test('typegen: typescript w/ postgrestVersion', async () => { id: number previous_value: Json | null user_id: number | null - }[] - } - polymorphic_function: { - Args: { "": boolean } | { "": string } - Returns: undefined + } + SetofOptions: { + from: "users" + to: "users_audit" + isOneToOne: true + isSetofReturn: true + } } - postgres_fdw_disconnect: { + get_user_ids: { Args: never; Returns: number[] } + get_user_summary: { Args: never; Returns: Record[] } + polymorphic_function: { Args: { "": string }; Returns: undefined } + polymorphic_function_with_different_return: { Args: { "": string } - Returns: boolean + Returns: string + } + polymorphic_function_with_no_params_or_unnamed: + | { Args: never; Returns: number } + | { Args: { "": string }; Returns: string } + polymorphic_function_with_unnamed_default: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default(), public.polymorphic_function_with_unnamed_default( => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_default_overload: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default_overload(), public.polymorphic_function_with_unnamed_default_overload( => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_json: { + Args: { "": Json } + Returns: number } - postgres_fdw_disconnect_all: { - Args: Record - Returns: boolean + polymorphic_function_with_unnamed_jsonb: { + Args: { "": Json } + Returns: number + } + polymorphic_function_with_unnamed_text: { + Args: { "": string } + Returns: number } + postgres_fdw_disconnect: { Args: { "": string }; Returns: boolean } + postgres_fdw_disconnect_all: { Args: never; Returns: boolean } postgres_fdw_get_connections: { - Args: Record + Args: never Returns: Record[] } - postgres_fdw_handler: { - Args: Record - Returns: unknown + postgres_fdw_handler: { Args: never; Returns: unknown } + postgrest_resolvable_with_override_function: + | { Args: { a: string }; Returns: number } + | { + Args: { user_id: number } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { completed: boolean; todo_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { Args: { b: number }; Returns: string } + | { Args: never; Returns: undefined } + postgrest_unresolvable_function: + | { + Args: { a: string } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { + Args: { a: number } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: never; Returns: undefined } + search_todos_by_details: { + Args: { search_details: string } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_internal_query: { Args: never; Returns: undefined } + test_unnamed_row_composite: { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + SetofOptions: { + from: "users" + to: "composite_type_with_array_attribute" + isOneToOne: true + isSetofReturn: false + } + } + test_unnamed_row_scalar: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.test_unnamed_row_scalar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } - test_internal_query: { - Args: Record - Returns: undefined + test_unnamed_row_setof: + | { + Args: { user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_unnamed_view_row: { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } } } Enums: { @@ -1964,9 +4441,9 @@ test('typegen: typescript consistent types definitions orders', async () => { DROP FUNCTION IF EXISTS test_func_gamma(integer, text, boolean) CASCADE; -- Alternative signatures for functions (different parameter orders) - DROP FUNCTION IF EXISTS test_func_alpha(text, boolean, integer) CASCADE; - DROP FUNCTION IF EXISTS test_func_beta(boolean, integer, text) CASCADE; - DROP FUNCTION IF EXISTS test_func_gamma(boolean, text, integer) CASCADE; + DROP FUNCTION IF EXISTS test_func_alpha_2(boolean, text, integer) CASCADE; + DROP FUNCTION IF EXISTS test_func_beta_2(text, boolean, integer) CASCADE; + DROP FUNCTION IF EXISTS test_func_gamma_2(boolean, integer, text) CASCADE; -- Drop tables DROP TABLE IF EXISTS test_table_alpha CASCADE; @@ -2136,19 +4613,19 @@ test('typegen: typescript consistent types definitions orders', async () => { }, }) - // Create functions in reverse order: gamma, beta, alpha with different parameter orders + // Create functions in reverse order: gamma, beta, alpha with same parameter orders await app.inject({ method: 'POST', path: '/query', payload: { query: ` - CREATE FUNCTION test_func_gamma(param_c boolean, param_a integer, param_b text) + CREATE FUNCTION test_func_gamma(param_a integer, param_b text, param_c boolean) RETURNS boolean AS 'SELECT NOT param_c' LANGUAGE sql IMMUTABLE; - CREATE FUNCTION test_func_beta(param_b text, param_c boolean, param_a integer) + CREATE FUNCTION test_func_beta(param_a 
integer, param_b text, param_c boolean) RETURNS text AS 'SELECT param_b || ''_processed''' LANGUAGE sql IMMUTABLE; - CREATE FUNCTION test_func_alpha(param_c boolean, param_b text, param_a integer) + CREATE FUNCTION test_func_alpha(param_a integer, param_b text, param_c boolean) RETURNS integer AS 'SELECT param_a + 1' LANGUAGE sql IMMUTABLE; `, }, @@ -2371,6 +4848,10 @@ test('typegen: go', async () => { Status *string \`json:"status"\` } + type PublicAViewSelect struct { + Id *int64 \`json:"id"\` + } + type PublicTodosViewSelect struct { Details *string \`json:"details"\` Id *int64 \`json:"id"\` @@ -2384,8 +4865,12 @@ test('typegen: go', async () => { Status *string \`json:"status"\` } - type PublicAViewSelect struct { - Id *int64 \`json:"id"\` + type PublicUserTodosSummaryViewSelect struct { + TodoCount *int64 \`json:"todo_count"\` + TodoDetails []*string \`json:"todo_details"\` + UserId *int64 \`json:"user_id"\` + UserName *string \`json:"user_name"\` + UserStatus *string \`json:"user_status"\` } type PublicUsersViewWithMultipleRefsToUsersSelect struct { @@ -2738,6 +5223,20 @@ test('typegen: swift', async () => { case userId = "user-id" } } + internal struct UserTodosSummaryViewSelect: Codable, Hashable, Sendable { + internal let todoCount: Int64? + internal let todoDetails: [String]? + internal let userId: Int64? + internal let userName: String? + internal let userStatus: UserStatus? + internal enum CodingKeys: String, CodingKey { + case todoCount = "todo_count" + case todoDetails = "todo_details" + case userId = "user_id" + case userName = "user_name" + case userStatus = "user_status" + } + } internal struct UsersViewSelect: Codable, Hashable, Sendable { internal let decimal: Decimal? internal let id: Int64? @@ -3109,6 +5608,20 @@ test('typegen: swift w/ public access control', async () => { case userId = "user-id" } } + public struct UserTodosSummaryViewSelect: Codable, Hashable, Sendable { + public let todoCount: Int64? + public let todoDetails: [String]? + public let userId: Int64? + public let userName: String? + public let userStatus: UserStatus? + public enum CodingKeys: String, CodingKey { + case todoCount = "todo_count" + case todoDetails = "todo_details" + case userId = "user_id" + case userName = "user_name" + case userStatus = "user_status" + } + } public struct UsersViewSelect: Codable, Hashable, Sendable { public let decimal: Decimal? public let id: Int64? 
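Illustrative sketch (not part of the patch series): one way the generated Database type shown in the expected snapshots above might be consumed from application code. The import path ./database.types.ts is a hypothetical location for the typegen output; the table shape and the overload union for get_todos_setof_rows are taken directly from the snapshot text above.

import type { Database } from './database.types'

// Row type of a table, exactly as emitted under Tables -> todos -> Row.
type Todo = Database['public']['Tables']['todos']['Row']

// Overloaded functions are emitted as a union of { Args; Returns; SetofOptions? }
// members; a specific overload can be selected by narrowing on its Args shape.
type GetTodosSetofRows = Database['public']['Functions']['get_todos_setof_rows']
type TodosForUserRow = Extract<
  GetTodosSetofRows,
  { Args: { user_row: Database['public']['Tables']['users']['Row'] } }
>['Returns'] // -> { details: string | null; id: number; "user-id": number }[]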
From e561d8289eb84e7ea9efa8d42d4299f9d5e578b3 Mon Sep 17 00:00:00 2001 From: Greg Richardson Date: Fri, 17 Oct 2025 09:53:56 -0600 Subject: [PATCH 40/42] feat: optional parameter binding on query endpoint --- src/lib/PostgresMeta.ts | 2 +- src/lib/db.ts | 16 +++--- src/server/routes/query.ts | 21 +++----- test/server/query.ts | 106 +++++++++++++++++++++++++++++++++---- vitest.config.ts | 4 +- 5 files changed, 113 insertions(+), 36 deletions(-) diff --git a/src/lib/PostgresMeta.ts b/src/lib/PostgresMeta.ts index 91050383..eb931624 100644 --- a/src/lib/PostgresMeta.ts +++ b/src/lib/PostgresMeta.ts @@ -24,7 +24,7 @@ import { PostgresMetaResult, PoolConfig } from './types.js' export default class PostgresMeta { query: ( sql: string, - opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean } + opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean; parameters?: unknown[] } ) => Promise> end: () => Promise columnPrivileges: PostgresMetaColumnPrivileges diff --git a/src/lib/db.ts b/src/lib/db.ts index 263be4d8..d43ef8f5 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -23,7 +23,11 @@ pg.types.setTypeParser(1017, (x) => x) // _point // Ensure any query will have an appropriate error handler on the pool to prevent connections errors // to bubble up all the stack eventually killing the server -const poolerQueryHandleError = (pgpool: pg.Pool, sql: string): Promise> => { +const poolerQueryHandleError = ( + pgpool: pg.Pool, + sql: string, + parameters?: unknown[] +): Promise> => { return Sentry.startSpan( { op: 'db', name: 'poolerQuery' }, () => @@ -44,7 +48,7 @@ const poolerQueryHandleError = (pgpool: pg.Pool, sql: string): Promise) => { if (!rejected) { return resolve(results) @@ -64,7 +68,7 @@ const poolerQueryHandleError = (pgpool: pg.Pool, sql: string): Promise { query: ( sql: string, - opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean } + opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean; parameters?: unknown[] } ) => Promise> end: () => Promise } = (config) => { @@ -108,7 +112,7 @@ export const init: (config: PoolConfig) => { return { async query( sql, - { statementQueryTimeout, trackQueryInSentry } = { trackQueryInSentry: true } + { statementQueryTimeout, trackQueryInSentry, parameters } = { trackQueryInSentry: true } ) { return Sentry.startSpan( // For metrics purposes, log the query that will be run if it's not an user provided query (with possibly sentitives infos) @@ -131,7 +135,7 @@ export const init: (config: PoolConfig) => { try { if (!pool) { const pool = new pg.Pool(config) - let res = await poolerQueryHandleError(pool, sqlWithStatementTimeout) + let res = await poolerQueryHandleError(pool, sqlWithStatementTimeout, parameters) if (Array.isArray(res)) { res = res.reverse().find((x) => x.rows.length !== 0) ?? { rows: [] } } @@ -139,7 +143,7 @@ export const init: (config: PoolConfig) => { return { data: res.rows, error: null } } - let res = await poolerQueryHandleError(pool, sqlWithStatementTimeout) + let res = await poolerQueryHandleError(pool, sqlWithStatementTimeout, parameters) if (Array.isArray(res)) { res = res.reverse().find((x) => x.rows.length !== 0) ?? 
{ rows: [] } } diff --git a/src/server/routes/query.ts b/src/server/routes/query.ts index c8f23bc9..467b1558 100644 --- a/src/server/routes/query.ts +++ b/src/server/routes/query.ts @@ -16,12 +16,8 @@ const errorOnEmptyQuery = (request: FastifyRequest) => { export default async (fastify: FastifyInstance) => { fastify.post<{ Headers: { pg: string; 'x-pg-application-name'?: string } - Body: { - query: string - } - Querystring: { - statementTimeoutSecs?: number - } + Body: { query: string; parameters?: any[] } + Querystring: { statementTimeoutSecs?: number } }>('/', async (request, reply) => { const statementTimeoutSecs = request.query.statementTimeoutSecs errorOnEmptyQuery(request) @@ -30,6 +26,7 @@ export default async (fastify: FastifyInstance) => { const { data, error } = await pgMeta.query(request.body.query, { trackQueryInSentry: true, statementQueryTimeout: statementTimeoutSecs, + parameters: request.body.parameters, }) await pgMeta.end() if (error) { @@ -43,9 +40,7 @@ export default async (fastify: FastifyInstance) => { fastify.post<{ Headers: { pg: string; 'x-pg-application-name'?: string } - Body: { - query: string - } + Body: { query: string } }>('/format', async (request, reply) => { errorOnEmptyQuery(request) const { data, error } = await Parser.Format(request.body.query) @@ -61,9 +56,7 @@ export default async (fastify: FastifyInstance) => { fastify.post<{ Headers: { pg: string; 'x-pg-application-name'?: string } - Body: { - query: string - } + Body: { query: string } }>('/parse', async (request, reply) => { errorOnEmptyQuery(request) const { data, error } = Parser.Parse(request.body.query) @@ -79,9 +72,7 @@ export default async (fastify: FastifyInstance) => { fastify.post<{ Headers: { pg: string; 'x-pg-application-name'?: string } - Body: { - ast: object - } + Body: { ast: object } }>('/deparse', async (request, reply) => { const { data, error } = Parser.Deparse(request.body.ast) diff --git a/test/server/query.ts b/test/server/query.ts index 8a9d6076..2cd86f52 100644 --- a/test/server/query.ts +++ b/test/server/query.ts @@ -547,9 +547,7 @@ test('return interval as string', async () => { const res = await app.inject({ method: 'POST', path: '/query', - payload: { - query: `SELECT '1 day 1 hour 45 minutes'::interval`, - }, + payload: { query: `SELECT '1 day 1 hour 45 minutes'::interval` }, }) expect(res.json()).toMatchInlineSnapshot(` [ @@ -703,9 +701,7 @@ test('error with internalQuery property', async () => { const res = await app.inject({ method: 'POST', path: '/query', - payload: { - query: 'SELECT test_internal_query();', - }, + payload: { query: 'SELECT test_internal_query();' }, }) expect(res.json()).toMatchInlineSnapshot(` @@ -737,19 +733,107 @@ test('custom application_name', async () => { const res = await app.inject({ method: 'POST', path: '/query', - headers: { - 'x-pg-application-name': 'test', - }, + headers: { 'x-pg-application-name': 'test' }, + payload: { query: 'SHOW application_name;' }, + }) + + expect(res.json()).toMatchInlineSnapshot(` + [ + { + "application_name": "test", + }, + ] + `) +}) + +test('parameter binding with positional parameters', async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', payload: { - query: 'SHOW application_name;', + query: 'SELECT * FROM users WHERE id = $1 AND status = $2', + parameters: [1, 'ACTIVE'], }, }) + expect(res.json()).toMatchInlineSnapshot(` + [ + { + "decimal": null, + "id": 1, + "name": "Joe Bloggs", + "status": "ACTIVE", + }, + ] + `) +}) +test('parameter binding with single parameter', 
async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { query: 'SELECT name FROM users WHERE id = $1', parameters: [2] }, + }) expect(res.json()).toMatchInlineSnapshot(` [ { - "application_name": "test", + "name": "Jane Doe", }, ] `) }) + +test('parameter binding with no matches', async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { query: 'SELECT * FROM users WHERE id = $1', parameters: [999] }, + }) + expect(res.json()).toMatchInlineSnapshot(`[]`) +}) + +test('no parameters field', async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { query: 'SELECT COUNT(*) as count FROM users' }, + }) + expect(res.json()).toMatchInlineSnapshot(` + [ + { + "count": 2, + }, + ] + `) +}) + +test('parameter binding with empty parameters array', async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { query: 'SELECT COUNT(*) as count FROM users', parameters: [] }, + }) + expect(res.json()).toMatchInlineSnapshot(` + [ + { + "count": 2, + }, + ] + `) +}) + +test('parameter binding error - wrong parameter count', async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: 'SELECT * FROM users WHERE id = $1 AND status = $2', + parameters: [1], // Missing second parameter + }, + }) + expect(res.statusCode).toBe(400) + const json = res.json() + expect(json.code).toBe('08P01') + expect(json.message).toContain( + 'bind message supplies 1 parameters, but prepared statement "" requires 2' + ) +}) diff --git a/vitest.config.ts b/vitest.config.ts index 460baf6e..da50a76b 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -3,9 +3,7 @@ import { defineConfig } from 'vitest/config' export default defineConfig({ test: { - coverage: { - reporter: ['lcov'], - }, + coverage: { reporter: ['lcov'] }, maxConcurrency: 1, // https://github.com/vitest-dev/vitest/issues/317#issuecomment-1542319622 pool: 'forks', From 455b7445eb923169e93ff129f4807200c193a89a Mon Sep 17 00:00:00 2001 From: Greg Richardson Date: Fri, 17 Oct 2025 10:32:32 -0600 Subject: [PATCH 41/42] fix: any -> unknown --- src/server/routes/query.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/server/routes/query.ts b/src/server/routes/query.ts index 467b1558..2cc6ad94 100644 --- a/src/server/routes/query.ts +++ b/src/server/routes/query.ts @@ -16,7 +16,7 @@ const errorOnEmptyQuery = (request: FastifyRequest) => { export default async (fastify: FastifyInstance) => { fastify.post<{ Headers: { pg: string; 'x-pg-application-name'?: string } - Body: { query: string; parameters?: any[] } + Body: { query: string; parameters?: unknown[] } Querystring: { statementTimeoutSecs?: number } }>('/', async (request, reply) => { const statementTimeoutSecs = request.query.statementTimeoutSecs From f8471a30876802b474de529c32474355071d234b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Oct 2025 04:53:31 +0000 Subject: [PATCH 42/42] chore(deps): bump vite from 6.3.6 to 6.4.1 (#1004) Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 6.3.6 to 6.4.1. 
- [Release notes](https://github.com/vitejs/vite/releases) - [Changelog](https://github.com/vitejs/vite/blob/main/packages/vite/CHANGELOG.md) - [Commits](https://github.com/vitejs/vite/commits/create-vite@6.4.1/packages/vite) --- updated-dependencies: - dependency-name: vite dependency-version: 6.4.1 dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 2787f0d6..b19d3a9c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8064,9 +8064,9 @@ } }, "node_modules/vite": { - "version": "6.3.6", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.6.tgz", - "integrity": "sha512-0msEVHJEScQbhkbVTb/4iHZdJ6SXp/AvxL2sjwYQFfBqleHtnCqv1J3sa9zbWz/6kW1m9Tfzn92vW+kZ1WV6QA==", + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.1.tgz", + "integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==", "dev": true, "license": "MIT", "dependencies": {