From 80db2de0d1e4d7478e0a7bb950c9b63fefd2840c Mon Sep 17 00:00:00 2001 From: Yi-Lin Juang Date: Fri, 2 May 2025 21:52:59 +0800 Subject: [PATCH 01/72] Prioritize current schema for pg type generation --- src/server/templates/typescript.ts | 116 ++++++++++++++++++++++------- 1 file changed, 90 insertions(+), 26 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 460887b5..6e3fc750 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -84,7 +84,7 @@ export type Database = { ${[ ...columnsByTableId[table.id].map( (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType(column.format, { + `${JSON.stringify(column.name)}: ${pgTypeToTsType(schema, column.format, { types, schemas, tables, @@ -97,7 +97,12 @@ export type Database = { const type = types.find(({ id }) => id === fn.return_type_id) let tsType = 'unknown' if (type) { - tsType = pgTypeToTsType(type.name, { types, schemas, tables, views }) + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) } return `${JSON.stringify(fn.name)}: ${tsType} | null` }), @@ -121,7 +126,12 @@ export type Database = { output += ':' } - output += pgTypeToTsType(column.format, { types, schemas, tables, views }) + output += pgTypeToTsType(schema, column.format, { + types, + schemas, + tables, + views, + }) if (column.is_nullable) { output += '| null' @@ -138,7 +148,12 @@ export type Database = { return `${output}?: never` } - output += `?: ${pgTypeToTsType(column.format, { types, schemas, tables, views })}` + output += `?: ${pgTypeToTsType(schema, column.format, { + types, + schemas, + tables, + views, + })}` if (column.is_nullable) { output += '| null' @@ -189,7 +204,7 @@ export type Database = { Row: { ${columnsByTableId[view.id].map( (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType(column.format, { + `${JSON.stringify(column.name)}: ${pgTypeToTsType(schema, column.format, { types, schemas, 
tables, @@ -207,7 +222,12 @@ export type Database = { return `${output}?: never` } - output += `?: ${pgTypeToTsType(column.format, { types, schemas, tables, views })} | null` + output += `?: ${pgTypeToTsType(schema, column.format, { + types, + schemas, + tables, + views, + })} | null` return output })} @@ -220,7 +240,12 @@ export type Database = { return `${output}?: never` } - output += `?: ${pgTypeToTsType(column.format, { types, schemas, tables, views })} | null` + output += `?: ${pgTypeToTsType(schema, column.format, { + types, + schemas, + tables, + views, + })} | null` return output })} @@ -290,7 +315,12 @@ export type Database = { const type = types.find(({ id }) => id === type_id) let tsType = 'unknown' if (type) { - tsType = pgTypeToTsType(type.name, { types, schemas, tables, views }) + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) } return { name, type: tsType, has_default } }) @@ -307,7 +337,12 @@ export type Database = { const type = types.find(({ id }) => id === type_id) let tsType = 'unknown' if (type) { - tsType = pgTypeToTsType(type.name, { types, schemas, tables, views }) + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) } return { name, type: tsType } }) @@ -327,12 +362,16 @@ export type Database = { return `{ ${columnsByTableId[relation.id].map( (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType(column.format, { - types, - schemas, - tables, - views, - })} ${column.is_nullable ? '| null' : ''}` + `${JSON.stringify(column.name)}: ${pgTypeToTsType( + schema, + column.format, + { + types, + schemas, + tables, + views, + } + )} ${column.is_nullable ? '| null' : ''}` )} }` } @@ -340,7 +379,12 @@ export type Database = { // Case 3: returns base/array/composite/enum type. 
const type = types.find(({ id }) => id === fns[0].return_type_id) if (type) { - return pgTypeToTsType(type.name, { types, schemas, tables, views }) + return pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) } return 'unknown' @@ -372,7 +416,12 @@ export type Database = { const type = types.find(({ id }) => id === type_id) let tsType = 'unknown' if (type) { - tsType = `${pgTypeToTsType(type.name, { types, schemas, tables, views })} | null` + tsType = `${pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + })} | null` } return `${JSON.stringify(name)}: ${tsType}` })} @@ -519,6 +568,7 @@ export const Constants = { // TODO: Make this more robust. Currently doesn't handle range types - returns them as unknown. const pgTypeToTsType = ( + schema: PostgresSchema, pgType: string, { types, @@ -560,10 +610,16 @@ const pgTypeToTsType = ( } else if (pgType === 'record') { return 'Record' } else if (pgType.startsWith('_')) { - return `(${pgTypeToTsType(pgType.substring(1), { types, schemas, tables, views })})[]` + return `(${pgTypeToTsType(schema, pgType.substring(1), { + types, + schemas, + tables, + views, + })})[]` } else { - const enumType = types.find((type) => type.name === pgType && type.enums.length > 0) - if (enumType) { + const enumTypes = types.filter((type) => type.name === pgType && type.enums.length > 0) + if (enumTypes.length > 0) { + const enumType = enumTypes.find((type) => type.schema === schema.name) || enumTypes[0] if (schemas.some(({ name }) => name === enumType.schema)) { return `Database[${JSON.stringify(enumType.schema)}]['Enums'][${JSON.stringify( enumType.name @@ -572,8 +628,12 @@ const pgTypeToTsType = ( return enumType.enums.map((variant) => JSON.stringify(variant)).join('|') } - const compositeType = types.find((type) => type.name === pgType && type.attributes.length > 0) - if (compositeType) { + const compositeTypes = types.filter( + (type) => type.name === pgType && type.attributes.length > 0 + 
) + if (compositeTypes.length > 0) { + const compositeType = + compositeTypes.find((type) => type.schema === schema.name) || compositeTypes[0] if (schemas.some(({ name }) => name === compositeType.schema)) { return `Database[${JSON.stringify( compositeType.schema @@ -582,8 +642,10 @@ const pgTypeToTsType = ( return 'unknown' } - const tableRowType = tables.find((table) => table.name === pgType) - if (tableRowType) { + const tableRowTypes = tables.filter((table) => table.name === pgType) + if (tableRowTypes.length > 0) { + const tableRowType = + tableRowTypes.find((type) => type.schema === schema.name) || tableRowTypes[0] if (schemas.some(({ name }) => name === tableRowType.schema)) { return `Database[${JSON.stringify(tableRowType.schema)}]['Tables'][${JSON.stringify( tableRowType.name @@ -592,8 +654,10 @@ const pgTypeToTsType = ( return 'unknown' } - const viewRowType = views.find((view) => view.name === pgType) - if (viewRowType) { + const viewRowTypes = views.filter((view) => view.name === pgType) + if (viewRowTypes.length > 0) { + const viewRowType = + viewRowTypes.find((type) => type.schema === schema.name) || viewRowTypes[0] if (schemas.some(({ name }) => name === viewRowType.schema)) { return `Database[${JSON.stringify(viewRowType.schema)}]['Views'][${JSON.stringify( viewRowType.name From fbdc28c9e8c1f71599931335107d8b61a2733856 Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 13 May 2025 15:13:43 +0200 Subject: [PATCH 02/72] chore: ignore sentryclirc --- .gitignore | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.gitignore b/.gitignore index 8d050113..7a26dfc4 100644 --- a/.gitignore +++ b/.gitignore @@ -73,6 +73,9 @@ typings/ .env .env.test +# sentry cli config +.sentryclirc + # parcel-bundler cache (https://parceljs.org/) .cache From f58f5071e8ba2f47e72a23e15434ff490bad7374 Mon Sep 17 00:00:00 2001 From: avallete Date: Mon, 19 May 2025 15:50:21 +0200 Subject: [PATCH 03/72] fix(typescript): prefer current schema typescript typegen See: 
https://github.com/supabase/postgres-meta/commit/80db2de0d1e4d7478e0a7bb950c9b63fefd2840c Trigger new release for this fix From 54347546ac3502989193cef5cb84668ad78b37b6 Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 20 May 2025 13:11:29 +0200 Subject: [PATCH 04/72] fix(query): ensure that open connection are killed after timeout Without statement_timeout set, the query_timeout wont always kill the underlying database query connection leading to possible connections exhaustions --- package.json | 4 ++-- src/server/constants.ts | 3 +++ test/index.test.ts | 1 + test/server/query-timeout.ts | 33 +++++++++++++++++++++++++++++++++ 4 files changed, 39 insertions(+), 2 deletions(-) create mode 100644 test/server/query-timeout.ts diff --git a/package.json b/package.json index e521801c..570ada54 100644 --- a/package.json +++ b/package.json @@ -30,8 +30,8 @@ "test": "run-s db:clean db:run test:run db:clean", "db:clean": "cd test/db && docker compose down", "db:run": "cd test/db && docker compose up --detach --wait", - "test:run": "PG_META_MAX_RESULT_SIZE_MB=20 vitest run --coverage", - "test:update": "run-s db:clean db:run && PG_META_MAX_RESULT_SIZE_MB=20 vitest run --update && run-s db:clean" + "test:run": "PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=3 PG_CONN_TIMEOUT_SECS=30 vitest run --coverage", + "test:update": "run-s db:clean db:run && PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=3 PG_CONN_TIMEOUT_SECS=30 vitest run --update && run-s db:clean" }, "engines": { "node": ">=20", diff --git a/src/server/constants.ts b/src/server/constants.ts index 4d1965f9..731ca117 100644 --- a/src/server/constants.ts +++ b/src/server/constants.ts @@ -59,6 +59,9 @@ export const PG_META_MAX_RESULT_SIZE = process.env.PG_META_MAX_RESULT_SIZE_MB export const DEFAULT_POOL_CONFIG: PoolConfig = { max: 1, connectionTimeoutMillis: PG_CONN_TIMEOUT_SECS * 1000, + // node-postgrest need a statement_timeout to kill the connection when timeout is reached + // otherwise the query will 
keep running on the database even if query timeout was reached + statement_timeout: (PG_QUERY_TIMEOUT_SECS + 1) * 1000, query_timeout: PG_QUERY_TIMEOUT_SECS * 1000, ssl: PG_META_DB_SSL_ROOT_CERT ? { ca: PG_META_DB_SSL_ROOT_CERT } : undefined, application_name: `postgres-meta ${pkg.version}`, diff --git a/test/index.test.ts b/test/index.test.ts index 9a315921..6ca2b87e 100644 --- a/test/index.test.ts +++ b/test/index.test.ts @@ -23,3 +23,4 @@ import './server/ssl' import './server/table-privileges' import './server/typegen' import './server/result-size-limit' +import './server/query-timeout' diff --git a/test/server/query-timeout.ts b/test/server/query-timeout.ts new file mode 100644 index 00000000..c9064d00 --- /dev/null +++ b/test/server/query-timeout.ts @@ -0,0 +1,33 @@ +import { expect, test, describe } from 'vitest' +import { app } from './utils' +import { pgMeta } from '../lib/utils' + +describe('test query timeout', () => { + test('query timeout after 3s and connection cleanup', async () => { + const query = `SELECT pg_sleep(10);` + // Execute a query that will sleep for 10 seconds + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { + query, + }, + }) + + // Check that we get the proper timeout error response + expect(res.statusCode).toBe(408) // Request Timeout + expect(res.json()).toMatchObject({ + error: expect.stringContaining('Query read timeout'), + }) + // wait one second for the statement timeout to take effect + await new Promise((resolve) => setTimeout(resolve, 1000)) + + // Verify that the connection has been cleaned up by checking active connections + const connectionsRes = await pgMeta.query(` + SELECT * FROM pg_stat_activity where application_name = 'postgres-meta 0.0.0-automated' and query ILIKE '%${query}%'; + `) + + // Should have no active connections except for our current query + expect(connectionsRes.data).toHaveLength(0) + }, 5000) +}) From e41138a9c8843f9422a2eb2b926fb804cf4d1a82 Mon Sep 17 00:00:00 2001 
From: avallete Date: Wed, 21 May 2025 14:55:40 +0200 Subject: [PATCH 05/72] fix: exclude pooler from statement_timeout --- src/lib/db.ts | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/lib/db.ts b/src/lib/db.ts index 7ac18783..1b42e538 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -80,6 +80,11 @@ export const init: (config: PoolConfig) => { u.searchParams.delete('sslrootcert') config.connectionString = u.toString() + // For pooler connections like pgbouncer, statement_timeout isn't supported + if (u.port !== '5432') { + config.statement_timeout = undefined + } + // sslmode: null, 'disable', 'prefer', 'require', 'verify-ca', 'verify-full', 'no-verify' // config.ssl: true, false, {} if (sslmode === null) { From 2fa2011dbb3c690388d763abf3fdb16168fef44b Mon Sep 17 00:00:00 2001 From: avallete Date: Thu, 22 May 2025 09:21:51 +0200 Subject: [PATCH 06/72] fix: set the statement_timeout at query level --- src/lib/db.ts | 17 +++++++++-------- src/server/constants.ts | 4 +--- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/src/lib/db.ts b/src/lib/db.ts index 1b42e538..f09b0bcb 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -2,6 +2,7 @@ import pg from 'pg' import * as Sentry from '@sentry/node' import { parse as parseArray } from 'postgres-array' import { PostgresMetaResult, PoolConfig } from './types.js' +import { PG_STATEMENT_TIMEOUT_SECS } from '../server/constants.js' pg.types.setTypeParser(pg.types.builtins.INT8, (x) => { const asNumber = Number(x) @@ -80,11 +81,6 @@ export const init: (config: PoolConfig) => { u.searchParams.delete('sslrootcert') config.connectionString = u.toString() - // For pooler connections like pgbouncer, statement_timeout isn't supported - if (u.port !== '5432') { - config.statement_timeout = undefined - } - // sslmode: null, 'disable', 'prefer', 'require', 'verify-ca', 'verify-full', 'no-verify' // config.ssl: true, false, {} if (sslmode === null) { @@ -117,10 +113,15 @@ export const init: (config: PoolConfig) 
=> { attributes: { sql: trackQueryInSentry ? sql : 'custom' }, }, async () => { + // node-postgres need a statement_timeout to kill the connection when timeout is reached + // otherwise the query will keep running on the database even if query timeout was reached + // This need to be added at query and not connection level because poolers (pgbouncer) doesn't + // allow to set this parameter at connection time + const sqlWithStatementTimeout = `SET statement_timeout='${PG_STATEMENT_TIMEOUT_SECS}s';\n${sql}` try { if (!pool) { const pool = new pg.Pool(config) - let res = await poolerQueryHandleError(pool, sql) + let res = await poolerQueryHandleError(pool, sqlWithStatementTimeout) if (Array.isArray(res)) { res = res.reverse().find((x) => x.rows.length !== 0) ?? { rows: [] } } @@ -128,7 +129,7 @@ export const init: (config: PoolConfig) => { return { data: res.rows, error: null } } - let res = await poolerQueryHandleError(pool, sql) + let res = await poolerQueryHandleError(pool, sqlWithStatementTimeout) if (Array.isArray(res)) { res = res.reverse().find((x) => x.rows.length !== 0) ?? 
{ rows: [] } } @@ -158,7 +159,7 @@ export const init: (config: PoolConfig) => { let lineNumber = 0 let lineOffset = 0 - const lines = sql.split('\n') + const lines = sqlWithStatementTimeout.split('\n') let currentOffset = 0 for (let i = 0; i < lines.length; i++) { if (currentOffset + lines[i].length > position) { diff --git a/src/server/constants.ts b/src/server/constants.ts index 731ca117..759aa8e0 100644 --- a/src/server/constants.ts +++ b/src/server/constants.ts @@ -17,6 +17,7 @@ const PG_META_DB_SSL_MODE = process.env.PG_META_DB_SSL_MODE || 'disable' const PG_CONN_TIMEOUT_SECS = Number(process.env.PG_CONN_TIMEOUT_SECS || 15) const PG_QUERY_TIMEOUT_SECS = Number(process.env.PG_QUERY_TIMEOUT_SECS || 55) +export const PG_STATEMENT_TIMEOUT_SECS = PG_QUERY_TIMEOUT_SECS + 1 export let PG_CONNECTION = process.env.PG_META_DB_URL if (!PG_CONNECTION) { @@ -59,9 +60,6 @@ export const PG_META_MAX_RESULT_SIZE = process.env.PG_META_MAX_RESULT_SIZE_MB export const DEFAULT_POOL_CONFIG: PoolConfig = { max: 1, connectionTimeoutMillis: PG_CONN_TIMEOUT_SECS * 1000, - // node-postgrest need a statement_timeout to kill the connection when timeout is reached - // otherwise the query will keep running on the database even if query timeout was reached - statement_timeout: (PG_QUERY_TIMEOUT_SECS + 1) * 1000, query_timeout: PG_QUERY_TIMEOUT_SECS * 1000, ssl: PG_META_DB_SSL_ROOT_CERT ? 
{ ca: PG_META_DB_SSL_ROOT_CERT } : undefined, application_name: `postgres-meta ${pkg.version}`, From 18d61e21bcf813701c1acd41c6016e687962f3b3 Mon Sep 17 00:00:00 2001 From: avallete Date: Thu, 22 May 2025 10:54:35 +0200 Subject: [PATCH 07/72] fix: use query level statement timeout --- package.json | 4 +-- src/lib/db.ts | 12 ++++++-- test/server/query-timeout.ts | 50 ++++++++++++++++++-------------- test/server/result-size-limit.ts | 16 ++++++---- 4 files changed, 50 insertions(+), 32 deletions(-) diff --git a/package.json b/package.json index 570ada54..62315e9f 100644 --- a/package.json +++ b/package.json @@ -30,8 +30,8 @@ "test": "run-s db:clean db:run test:run db:clean", "db:clean": "cd test/db && docker compose down", "db:run": "cd test/db && docker compose up --detach --wait", - "test:run": "PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=3 PG_CONN_TIMEOUT_SECS=30 vitest run --coverage", - "test:update": "run-s db:clean db:run && PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=3 PG_CONN_TIMEOUT_SECS=30 vitest run --update && run-s db:clean" + "test:run": "PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=5 PG_CONN_TIMEOUT_SECS=30 vitest run --coverage", + "test:update": "run-s db:clean db:run && PG_META_MAX_RESULT_SIZE_MB=20 PG_QUERY_TIMEOUT_SECS=5 PG_CONN_TIMEOUT_SECS=30 vitest run --update && run-s db:clean" }, "engines": { "node": ">=20", diff --git a/src/lib/db.ts b/src/lib/db.ts index f09b0bcb..a1fe3591 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -4,6 +4,8 @@ import { parse as parseArray } from 'postgres-array' import { PostgresMetaResult, PoolConfig } from './types.js' import { PG_STATEMENT_TIMEOUT_SECS } from '../server/constants.js' +const STATEMENT_TIMEOUT_QUERY_PREFIX = `SET statement_timeout='${PG_STATEMENT_TIMEOUT_SECS}s';` + pg.types.setTypeParser(pg.types.builtins.INT8, (x) => { const asNumber = Number(x) if (Number.isSafeInteger(asNumber)) { @@ -117,7 +119,7 @@ export const init: (config: PoolConfig) => { // otherwise the query 
will keep running on the database even if query timeout was reached // This need to be added at query and not connection level because poolers (pgbouncer) doesn't // allow to set this parameter at connection time - const sqlWithStatementTimeout = `SET statement_timeout='${PG_STATEMENT_TIMEOUT_SECS}s';\n${sql}` + const sqlWithStatementTimeout = `${STATEMENT_TIMEOUT_QUERY_PREFIX}${sql}` try { if (!pool) { const pool = new pg.Pool(config) @@ -153,13 +155,17 @@ export const init: (config: PoolConfig) => { formattedError += '\n' if (error.position) { // error.position is 1-based - const position = Number(error.position) - 1 + // we also remove our `SET statement_timeout = 'XXs';\n` from the position + const position = + Number(error.position) - 1 - STATEMENT_TIMEOUT_QUERY_PREFIX.length + // we set the new error position + error.position = `${position + 1}` let line = '' let lineNumber = 0 let lineOffset = 0 - const lines = sqlWithStatementTimeout.split('\n') + const lines = sql.split('\n') let currentOffset = 0 for (let i = 0; i < lines.length; i++) { if (currentOffset + lines[i].length > position) { diff --git a/test/server/query-timeout.ts b/test/server/query-timeout.ts index c9064d00..3dc8010d 100644 --- a/test/server/query-timeout.ts +++ b/test/server/query-timeout.ts @@ -2,32 +2,38 @@ import { expect, test, describe } from 'vitest' import { app } from './utils' import { pgMeta } from '../lib/utils' +const TIMEOUT = (Number(process.env.PG_QUERY_TIMEOUT_SECS) ?? 
10) + 2 + describe('test query timeout', () => { - test('query timeout after 3s and connection cleanup', async () => { - const query = `SELECT pg_sleep(10);` - // Execute a query that will sleep for 10 seconds - const res = await app.inject({ - method: 'POST', - path: '/query', - payload: { - query, - }, - }) + test( + `query timeout after ${TIMEOUT}s and connection cleanup`, + async () => { + const query = `SELECT pg_sleep(${TIMEOUT});` + // Execute a query that will sleep for 10 seconds + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { + query, + }, + }) - // Check that we get the proper timeout error response - expect(res.statusCode).toBe(408) // Request Timeout - expect(res.json()).toMatchObject({ - error: expect.stringContaining('Query read timeout'), - }) - // wait one second for the statement timeout to take effect - await new Promise((resolve) => setTimeout(resolve, 1000)) + // Check that we get the proper timeout error response + expect(res.statusCode).toBe(408) // Request Timeout + expect(res.json()).toMatchObject({ + error: expect.stringContaining('Query read timeout'), + }) + // wait one second for the statement timeout to take effect + await new Promise((resolve) => setTimeout(resolve, 1000)) - // Verify that the connection has been cleaned up by checking active connections - const connectionsRes = await pgMeta.query(` + // Verify that the connection has been cleaned up by checking active connections + const connectionsRes = await pgMeta.query(` SELECT * FROM pg_stat_activity where application_name = 'postgres-meta 0.0.0-automated' and query ILIKE '%${query}%'; `) - // Should have no active connections except for our current query - expect(connectionsRes.data).toHaveLength(0) - }, 5000) + // Should have no active connections except for our current query + expect(connectionsRes.data).toHaveLength(0) + }, + TIMEOUT * 1000 + ) }) diff --git a/test/server/result-size-limit.ts b/test/server/result-size-limit.ts index 
15543d67..7dab1834 100644 --- a/test/server/result-size-limit.ts +++ b/test/server/result-size-limit.ts @@ -72,23 +72,29 @@ describe('test js parser error max result', () => { // Create a table with large data for testing beforeAll(async () => { // Create a table with a large text column - await pgMeta.query(` + await pgMeta.query( + ` CREATE TABLE very_large_data ( id SERIAL PRIMARY KEY, data TEXT ); - `) + `, + false + ) // Insert data that will exceed our limit in tests it's set around ~20MB - await pgMeta.query(` + await pgMeta.query( + ` INSERT INTO very_large_data (data) VALUES (repeat('x', 710 * 1024 * 1024)) -- 700+MB string will raise a JS exception at parse time - `) + `, + false + ) }) afterAll(async () => { // Clean up the test table - await pgMeta.query('DROP TABLE very_large_data;') + await pgMeta.query('DROP TABLE very_large_data;', false) }) test( From 4150b26d15f34073178b2362f3b47ff6a2c02cea Mon Sep 17 00:00:00 2001 From: avallete Date: Thu, 22 May 2025 11:04:15 +0200 Subject: [PATCH 08/72] chore: revert result-size test --- test/server/result-size-limit.ts | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/test/server/result-size-limit.ts b/test/server/result-size-limit.ts index 7dab1834..15543d67 100644 --- a/test/server/result-size-limit.ts +++ b/test/server/result-size-limit.ts @@ -72,29 +72,23 @@ describe('test js parser error max result', () => { // Create a table with large data for testing beforeAll(async () => { // Create a table with a large text column - await pgMeta.query( - ` + await pgMeta.query(` CREATE TABLE very_large_data ( id SERIAL PRIMARY KEY, data TEXT ); - `, - false - ) + `) // Insert data that will exceed our limit in tests it's set around ~20MB - await pgMeta.query( - ` + await pgMeta.query(` INSERT INTO very_large_data (data) VALUES (repeat('x', 710 * 1024 * 1024)) -- 700+MB string will raise a JS exception at parse time - `, - false - ) + `) }) afterAll(async () => { // Clean up the test 
table - await pgMeta.query('DROP TABLE very_large_data;', false) + await pgMeta.query('DROP TABLE very_large_data;') }) test( From 3d9fea49fdc8eb26815a7cf15b250336e8fd6c3e Mon Sep 17 00:00:00 2001 From: avallete Date: Thu, 22 May 2025 11:40:38 +0200 Subject: [PATCH 09/72] chore: fix secret module mock --- src/lib/secrets.ts | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/lib/secrets.ts b/src/lib/secrets.ts index 03df7639..c44578ec 100644 --- a/src/lib/secrets.ts +++ b/src/lib/secrets.ts @@ -1,6 +1,3 @@ -// Use dynamic import to support module mock -const fs = await import('node:fs/promises') - export const getSecret = async (key: string) => { if (!key) { return '' @@ -15,6 +12,8 @@ export const getSecret = async (key: string) => { if (!file) { return '' } + // Use dynamic import to support module mock + const fs = await import('node:fs/promises') return await fs.readFile(file, { encoding: 'utf8' }).catch((e) => { if (e.code == 'ENOENT') { From bf91a7e7c306723462a09349bbad8b566762ba93 Mon Sep 17 00:00:00 2001 From: avallete Date: Thu, 22 May 2025 16:01:26 +0200 Subject: [PATCH 10/72] feat(typegen): add postgrest_version parameter to typegen --- src/server/routes/generators/typescript.ts | 3 + src/server/templates/typescript.ts | 26 + test/server/typegen.ts | 653 +++++++++++++++++++++ 3 files changed, 682 insertions(+) diff --git a/src/server/routes/generators/typescript.ts b/src/server/routes/generators/typescript.ts index 3e615b32..259cd141 100644 --- a/src/server/routes/generators/typescript.ts +++ b/src/server/routes/generators/typescript.ts @@ -11,6 +11,7 @@ export default async (fastify: FastifyInstance) => { excluded_schemas?: string included_schemas?: string detect_one_to_one_relationships?: string + postgrest_version?: string } }>('/', async (request, reply) => { const config = createConnectionConfig(request) @@ -19,6 +20,7 @@ export default async (fastify: FastifyInstance) => { const includedSchemas = 
request.query.included_schemas?.split(',').map((schema) => schema.trim()) ?? [] const detectOneToOneRelationships = request.query.detect_one_to_one_relationships === 'true' + const postgrestVersion = request.query.postgrest_version const pgMeta: PostgresMeta = new PostgresMeta(config) const { data: generatorMeta, error: generatorMetaError } = await getGeneratorMetadata(pgMeta, { @@ -34,6 +36,7 @@ export default async (fastify: FastifyInstance) => { return applyTypescriptTemplate({ ...generatorMeta, detectOneToOneRelationships, + postgrestVersion, }) }) } diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 6e3fc750..5ebe9dc8 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -21,8 +21,10 @@ export const apply = async ({ functions, types, detectOneToOneRelationships, + postgrestVersion, }: GeneratorMetadata & { detectOneToOneRelationships: boolean + postgrestVersion?: string }): Promise => { const columnsByTableId = Object.fromEntries( [...tables, ...foreignTables, ...views, ...materializedViews].map((t) => [t.id, []]) @@ -32,6 +34,29 @@ export const apply = async ({ .sort(({ name: a }, { name: b }) => a.localeCompare(b)) .forEach((c) => columnsByTableId[c.table_id].push(c)) + const internal_supabase_schema = postgrestVersion + ? 
`// Allows to automatically instanciate createClient with right options + // instead of createClient(URL, KEY) + __internal_supabase: { + postgrestVersion: '${postgrestVersion}' + Tables: { + [_ in never]: never + } + Views: { + [_ in never]: never + } + Functions: { + [_ in never]: never + } + Enums: { + [_ in never]: never + } + CompositeTypes: { + [_ in never]: never + } + }` + : '' + let output = ` export type Json = string | number | boolean | null | { [key: string]: Json | undefined } | Json[] @@ -431,6 +456,7 @@ export type Database = { } }` })} + ${internal_supabase_schema} } type DefaultSchema = Database[Extract] diff --git a/test/server/typegen.ts b/test/server/typegen.ts index c0851ef1..9e534d78 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -1875,6 +1875,659 @@ test('typegen: typescript w/ one-to-one relationships', async () => { ) }) +test('typegen: typescript w/ postgrestVersion', async () => { + const { body } = await app.inject({ + method: 'GET', + path: '/generators/typescript', + query: { detect_one_to_one_relationships: 'true', postgrest_version: '13' }, + }) + expect(body).toMatchInlineSnapshot( + ` + "export type Json = + | string + | number + | boolean + | null + | { [key: string]: Json | undefined } + | Json[] + + export type Database = { + public: { + Tables: { + category: { + Row: { + id: number + name: string + } + Insert: { + id?: number + name: string + } + Update: { + id?: number + name?: string + } + Relationships: [] + } + empty: { + Row: {} + Insert: {} + Update: {} + Relationships: [] + } + foreign_table: { + Row: { + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + Insert: { + id: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Update: { + id?: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + memes: { + Row: { + category: number | null + 
created_at: string + id: number + metadata: Json | null + name: string + status: Database["public"]["Enums"]["meme_status"] | null + } + Insert: { + category?: number | null + created_at: string + id?: number + metadata?: Json | null + name: string + status?: Database["public"]["Enums"]["meme_status"] | null + } + Update: { + category?: number | null + created_at?: string + id?: number + metadata?: Json | null + name?: string + status?: Database["public"]["Enums"]["meme_status"] | null + } + Relationships: [ + { + foreignKeyName: "memes_category_fkey" + columns: ["category"] + isOneToOne: false + referencedRelation: "category" + referencedColumns: ["id"] + }, + ] + } + table_with_other_tables_row_type: { + Row: { + col1: Database["public"]["Tables"]["user_details"]["Row"] | null + col2: Database["public"]["Views"]["a_view"]["Row"] | null + } + Insert: { + col1?: Database["public"]["Tables"]["user_details"]["Row"] | null + col2?: Database["public"]["Views"]["a_view"]["Row"] | null + } + Update: { + col1?: Database["public"]["Tables"]["user_details"]["Row"] | null + col2?: Database["public"]["Views"]["a_view"]["Row"] | null + } + Relationships: [] + } + table_with_primary_key_other_than_id: { + Row: { + name: string | null + other_id: number + } + Insert: { + name?: string | null + other_id?: number + } + Update: { + name?: string | null + other_id?: number + } + Relationships: [] + } + todos: { + Row: { + details: string | null + id: number + "user-id": number + blurb: string | null + blurb_varchar: string | null + details_is_long: boolean | null + details_length: number | null + details_words: string[] | null + } + Insert: { + details?: string | null + id?: number + "user-id": number + } + Update: { + details?: string | null + id?: number + "user-id"?: number + } + Relationships: [ + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: 
"todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + user_details: { + Row: { + details: string | null + user_id: number + } + Insert: { + details?: string | null + user_id: number + } + Update: { + details?: string | null + user_id?: number + } + Relationships: [ + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + users: { + Row: { + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + Insert: { + id?: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Update: { + 
id?: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + users_audit: { + Row: { + created_at: string | null + id: number + previous_value: Json | null + user_id: number | null + } + Insert: { + created_at?: string | null + id?: number + previous_value?: Json | null + user_id?: number | null + } + Update: { + created_at?: string | null + id?: number + previous_value?: Json | null + user_id?: number | null + } + Relationships: [] + } + } + Views: { + a_view: { + Row: { + id: number | null + } + Insert: { + id?: number | null + } + Update: { + id?: number | null + } + Relationships: [] + } + todos_matview: { + Row: { + details: string | null + id: number | null + "user-id": number | null + } + Relationships: [ + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + todos_view: { + Row: { + details: string | null + id: number | null + "user-id": number | null + } + Insert: { + details?: string | null + id?: number | null + "user-id"?: number | null + } + Update: { + details?: string | null + id?: number | null + "user-id"?: number | null + } + Relationships: [ + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + 
isOneToOne: false + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + users_view: { + Row: { + id: number | null + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + Insert: { + id?: number | null + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Update: { + id?: number | null + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + users_view_with_multiple_refs_to_users: { + Row: { + initial_id: number | null + initial_name: string | null + second_id: number | null + second_name: string | null + } + Relationships: [] + } + } + Functions: { + blurb: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: string + } + blurb_varchar: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: string + } + details_is_long: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: boolean + } + details_length: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: number + } + details_words: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: string[] + } + function_returning_row: { + Args: Record + Returns: { + id: number + name: string | null + status: 
Database["public"]["Enums"]["user_status"] | null + } + } + function_returning_set_of_rows: { + Args: Record + Returns: { + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + }[] + } + function_returning_table: { + Args: Record + Returns: { + id: number + name: string + }[] + } + get_todos_setof_rows: { + Args: + | { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + | { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + } + get_user_audit_setof_single_row: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + created_at: string | null + id: number + previous_value: Json | null + user_id: number | null + }[] + } + polymorphic_function: { + Args: { "": boolean } | { "": string } + Returns: undefined + } + postgres_fdw_disconnect: { + Args: { "": string } + Returns: boolean + } + postgres_fdw_disconnect_all: { + Args: Record + Returns: boolean + } + postgres_fdw_get_connections: { + Args: Record + Returns: Record[] + } + postgres_fdw_handler: { + Args: Record + Returns: unknown + } + test_internal_query: { + Args: Record + Returns: undefined + } + } + Enums: { + meme_status: "new" | "old" | "retired" + user_status: "ACTIVE" | "INACTIVE" + } + CompositeTypes: { + composite_type_with_array_attribute: { + my_text_array: string[] | null + } + composite_type_with_record_attribute: { + todo: Database["public"]["Tables"]["todos"]["Row"] | null + } + } + } + // Allows to automatically instanciate createClient with right options + // instead of createClient(URL, KEY) + __internal_supabase: { + postgrestVersion: "13" + Tables: { + [_ in never]: never + } + Views: { + [_ in never]: never + } + Functions: { + [_ in never]: never + } + Enums: { + [_ in never]: never + } + CompositeTypes: { + [_ in never]: never + } + } + } + + type DefaultSchema = Database[Extract] + + export type Tables< + 
DefaultSchemaTableNameOrOptions extends + | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) + | { schema: keyof Database }, + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof Database + } + ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + : never = never, + > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } + ? (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + Row: infer R + } + ? R + : never + : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] & + DefaultSchema["Views"]) + ? (DefaultSchema["Tables"] & + DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends { + Row: infer R + } + ? R + : never + : never + + export type TablesInsert< + DefaultSchemaTableNameOrOptions extends + | keyof DefaultSchema["Tables"] + | { schema: keyof Database }, + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof Database + } + ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + : never = never, + > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } + ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + Insert: infer I + } + ? I + : never + : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { + Insert: infer I + } + ? I + : never + : never + + export type TablesUpdate< + DefaultSchemaTableNameOrOptions extends + | keyof DefaultSchema["Tables"] + | { schema: keyof Database }, + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof Database + } + ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + : never = never, + > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } + ? 
Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + Update: infer U + } + ? U + : never + : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { + Update: infer U + } + ? U + : never + : never + + export type Enums< + DefaultSchemaEnumNameOrOptions extends + | keyof DefaultSchema["Enums"] + | { schema: keyof Database }, + EnumName extends DefaultSchemaEnumNameOrOptions extends { + schema: keyof Database + } + ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + : never = never, + > = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } + ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] + ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] + : never + + export type CompositeTypes< + PublicCompositeTypeNameOrOptions extends + | keyof DefaultSchema["CompositeTypes"] + | { schema: keyof Database }, + CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { + schema: keyof Database + } + ? keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + : never = never, + > = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } + ? Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] + ? 
DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] + : never + + export const Constants = { + public: { + Enums: { + meme_status: ["new", "old", "retired"], + user_status: ["ACTIVE", "INACTIVE"], + }, + }, + } as const + " + ` + ) +}) + test('typegen: go', async () => { const { body } = await app.inject({ method: 'GET', path: '/generators/go' }) expect(body).toMatchInlineSnapshot(` From b45bb4431cb91fe51872af82d22ae93384ec9241 Mon Sep 17 00:00:00 2001 From: avallete Date: Thu, 22 May 2025 16:08:07 +0200 Subject: [PATCH 11/72] chore: add env parameter for cli --- src/server/constants.ts | 1 + src/server/server.ts | 2 ++ 2 files changed, 3 insertions(+) diff --git a/src/server/constants.ts b/src/server/constants.ts index 731ca117..8e6a1508 100644 --- a/src/server/constants.ts +++ b/src/server/constants.ts @@ -45,6 +45,7 @@ export const GENERATE_TYPES_DEFAULT_SCHEMA = process.env.PG_META_GENERATE_TYPES_DEFAULT_SCHEMA || 'public' export const GENERATE_TYPES_DETECT_ONE_TO_ONE_RELATIONSHIPS = process.env.PG_META_GENERATE_TYPES_DETECT_ONE_TO_ONE_RELATIONSHIPS === 'true' +export const POSTGREST_VERSION = process.env.PG_META_POSTGREST_VERSION export const GENERATE_TYPES_SWIFT_ACCESS_CONTROL = process.env .PG_META_GENERATE_TYPES_SWIFT_ACCESS_CONTROL ? 
(process.env.PG_META_GENERATE_TYPES_SWIFT_ACCESS_CONTROL as AccessControl) diff --git a/src/server/server.ts b/src/server/server.ts index 5396f9e0..9ac3152e 100644 --- a/src/server/server.ts +++ b/src/server/server.ts @@ -13,6 +13,7 @@ import { PG_CONNECTION, PG_META_HOST, PG_META_PORT, + POSTGREST_VERSION, } from './constants.js' import { apply as applyTypescriptTemplate } from './templates/typescript.js' import { apply as applyGoTemplate } from './templates/go.js' @@ -129,6 +130,7 @@ async function getTypeOutput(): Promise { ), types: types!, detectOneToOneRelationships: GENERATE_TYPES_DETECT_ONE_TO_ONE_RELATIONSHIPS, + postgresVersion: POSTGREST_VERSION, } switch (GENERATE_TYPES?.toLowerCase()) { From 6ca485028cc5cced9c131c5c2e108b18313f2327 Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 27 May 2025 18:18:35 +0200 Subject: [PATCH 12/72] chore: set statement_timeout as /query params --- src/lib/PostgresMeta.ts | 5 ++++- src/lib/db.ts | 21 +++++++++++++-------- src/server/routes/query.ts | 9 ++++++++- test/server/query-timeout.ts | 33 +++++++++++++++++++++++++++++++++ 4 files changed, 58 insertions(+), 10 deletions(-) diff --git a/src/lib/PostgresMeta.ts b/src/lib/PostgresMeta.ts index 379fbb23..91050383 100644 --- a/src/lib/PostgresMeta.ts +++ b/src/lib/PostgresMeta.ts @@ -22,7 +22,10 @@ import { init } from './db.js' import { PostgresMetaResult, PoolConfig } from './types.js' export default class PostgresMeta { - query: (sql: string, trackQueryInSentry?: boolean) => Promise> + query: ( + sql: string, + opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean } + ) => Promise> end: () => Promise columnPrivileges: PostgresMetaColumnPrivileges columns: PostgresMetaColumns diff --git a/src/lib/db.ts b/src/lib/db.ts index a1fe3591..6f7e906a 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -2,9 +2,6 @@ import pg from 'pg' import * as Sentry from '@sentry/node' import { parse as parseArray } from 'postgres-array' import { PostgresMetaResult, 
PoolConfig } from './types.js' -import { PG_STATEMENT_TIMEOUT_SECS } from '../server/constants.js' - -const STATEMENT_TIMEOUT_QUERY_PREFIX = `SET statement_timeout='${PG_STATEMENT_TIMEOUT_SECS}s';` pg.types.setTypeParser(pg.types.builtins.INT8, (x) => { const asNumber = Number(x) @@ -65,7 +62,10 @@ const poolerQueryHandleError = (pgpool: pg.Pool, sql: string): Promise { - query: (sql: string, trackQueryInSentry?: boolean) => Promise> + query: ( + sql: string, + opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean } + ) => Promise> end: () => Promise } = (config) => { return Sentry.startSpan({ op: 'db', name: 'db.init' }, () => { @@ -106,7 +106,10 @@ export const init: (config: PoolConfig) => { let pool: pg.Pool | null = new pg.Pool(config) return { - async query(sql, trackQueryInSentry = true) { + async query( + sql, + { statementQueryTimeout, trackQueryInSentry } = { trackQueryInSentry: true } + ) { return Sentry.startSpan( // For metrics purposes, log the query that will be run if it's not an user provided query (with possibly sentitives infos) { @@ -115,11 +118,14 @@ export const init: (config: PoolConfig) => { attributes: { sql: trackQueryInSentry ? sql : 'custom' }, }, async () => { + const statementTimeoutQueryPrefix = statementQueryTimeout + ? 
`SET statement_timeout='${statementQueryTimeout}s';` + : '' // node-postgres need a statement_timeout to kill the connection when timeout is reached // otherwise the query will keep running on the database even if query timeout was reached // This need to be added at query and not connection level because poolers (pgbouncer) doesn't // allow to set this parameter at connection time - const sqlWithStatementTimeout = `${STATEMENT_TIMEOUT_QUERY_PREFIX}${sql}` + const sqlWithStatementTimeout = `${statementTimeoutQueryPrefix}${sql}` try { if (!pool) { const pool = new pg.Pool(config) @@ -156,8 +162,7 @@ export const init: (config: PoolConfig) => { if (error.position) { // error.position is 1-based // we also remove our `SET statement_timeout = 'XXs';\n` from the position - const position = - Number(error.position) - 1 - STATEMENT_TIMEOUT_QUERY_PREFIX.length + const position = Number(error.position) - 1 - statementTimeoutQueryPrefix.length // we set the new error position error.position = `${position + 1}` diff --git a/src/server/routes/query.ts b/src/server/routes/query.ts index 21788ce8..c8f23bc9 100644 --- a/src/server/routes/query.ts +++ b/src/server/routes/query.ts @@ -19,11 +19,18 @@ export default async (fastify: FastifyInstance) => { Body: { query: string } + Querystring: { + statementTimeoutSecs?: number + } }>('/', async (request, reply) => { + const statementTimeoutSecs = request.query.statementTimeoutSecs errorOnEmptyQuery(request) const config = createConnectionConfig(request) const pgMeta = new PostgresMeta(config) - const { data, error } = await pgMeta.query(request.body.query, false) + const { data, error } = await pgMeta.query(request.body.query, { + trackQueryInSentry: true, + statementQueryTimeout: statementTimeoutSecs, + }) await pgMeta.end() if (error) { request.log.error({ error, request: extractRequestForLogging(request) }) diff --git a/test/server/query-timeout.ts b/test/server/query-timeout.ts index 3dc8010d..e41894fc 100644 --- 
a/test/server/query-timeout.ts +++ b/test/server/query-timeout.ts @@ -13,6 +13,7 @@ describe('test query timeout', () => { const res = await app.inject({ method: 'POST', path: '/query', + query: `statementTimeoutSecs=${TIMEOUT - 2}`, payload: { query, }, @@ -36,4 +37,36 @@ }, TIMEOUT * 1000 ) + + test( + 'query without timeout parameter should not have timeout', + async () => { + const query = `SELECT pg_sleep(${TIMEOUT});` + // Execute a query that will sleep for 10 seconds without specifying timeout + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { + query, + }, + }) + + // Check that we get the proper timeout error response + expect(res.statusCode).toBe(408) // Request Timeout + expect(res.json()).toMatchObject({ + error: expect.stringContaining('Query read timeout'), + }) + // wait one second + await new Promise((resolve) => setTimeout(resolve, 1000)) + + // Verify that the connection has not been cleaned up since there is no statementTimeout + const connectionsRes = await pgMeta.query(` + SELECT * FROM pg_stat_activity where application_name = 'postgres-meta 0.0.0-automated' and query ILIKE '%${query}%'; + `) + + // Should have no active connections except for our current query + expect(connectionsRes.data).toHaveLength(1) + }, + TIMEOUT * 1000 + ) }) From 9f728fef86288d5351f2bfa014ee71c5dbc4388e Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 27 May 2025 18:36:37 +0200 Subject: [PATCH 13/72] chore: add query timeout params test --- test/server/query-timeout.ts | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/test/server/query-timeout.ts b/test/server/query-timeout.ts index e41894fc..47554afc 100--- a/test/server/query-timeout.ts +++ b/test/server/query-timeout.ts @@ -3,17 +3,18 @@ import { app } from './utils' import { pgMeta } from '../lib/utils' const TIMEOUT = (Number(process.env.PG_QUERY_TIMEOUT_SECS) ?? 
10) + 2 +const STATEMENT_TIMEOUT = (Number(process.env.PG_QUERY_TIMEOUT_SECS) ?? 10) + 1 describe('test query timeout', () => { test( `query timeout after ${TIMEOUT}s and connection cleanup`, async () => { - const query = `SELECT pg_sleep(${TIMEOUT});` + const query = `SELECT pg_sleep(${TIMEOUT + 10});` // Execute a query that will sleep for 10 seconds const res = await app.inject({ method: 'POST', path: '/query', - query: `statementTimeoutSecs=${TIMEOUT - 2}`, + query: `statementTimeoutSecs=${STATEMENT_TIMEOUT}`, payload: { query, }, @@ -41,7 +42,7 @@ describe('test query timeout', () => { test( 'query without timeout parameter should not have timeout', async () => { - const query = `SELECT pg_sleep(${TIMEOUT});` + const query = `SELECT pg_sleep(${TIMEOUT + 10});` // Execute a query that will sleep for 10 seconds without specifying timeout const res = await app.inject({ method: 'POST', From f183588ba5bbd516716d3ee417ba9f4a738d230e Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 27 May 2025 18:41:58 +0200 Subject: [PATCH 14/72] chore: cleanup --- src/server/constants.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/server/constants.ts b/src/server/constants.ts index 759aa8e0..4d1965f9 100644 --- a/src/server/constants.ts +++ b/src/server/constants.ts @@ -17,7 +17,6 @@ const PG_META_DB_SSL_MODE = process.env.PG_META_DB_SSL_MODE || 'disable' const PG_CONN_TIMEOUT_SECS = Number(process.env.PG_CONN_TIMEOUT_SECS || 15) const PG_QUERY_TIMEOUT_SECS = Number(process.env.PG_QUERY_TIMEOUT_SECS || 55) -export const PG_STATEMENT_TIMEOUT_SECS = PG_QUERY_TIMEOUT_SECS + 1 export let PG_CONNECTION = process.env.PG_META_DB_URL if (!PG_CONNECTION) { From ed47fd38bd1afecc4c5083bbd0b51ff70f7d5a55 Mon Sep 17 00:00:00 2001 From: avallete Date: Wed, 18 Jun 2025 11:55:27 +0200 Subject: [PATCH 15/72] chore: fix typo --- src/server/server.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/server/server.ts b/src/server/server.ts index 9ac3152e..8b7c1c10 100644 
--- a/src/server/server.ts +++ b/src/server/server.ts @@ -130,7 +130,7 @@ async function getTypeOutput(): Promise { ), types: types!, detectOneToOneRelationships: GENERATE_TYPES_DETECT_ONE_TO_ONE_RELATIONSHIPS, - postgresVersion: POSTGREST_VERSION, + postgrestVersion: POSTGREST_VERSION, } switch (GENERATE_TYPES?.toLowerCase()) { From 2d4d29e2c7acfd38f3b4b67e0542dfca6b9b1d72 Mon Sep 17 00:00:00 2001 From: Copple <10214025+kiwicopple@users.noreply.github.com> Date: Mon, 30 Jun 2025 04:56:51 +0200 Subject: [PATCH 16/72] chore: remove sponsorship ask (#954) --- README.md | 6 ------ 1 file changed, 6 deletions(-) diff --git a/README.md b/README.md index da8c787d..dd73028a 100644 --- a/README.md +++ b/README.md @@ -116,9 +116,3 @@ To use your own database connection string instead of the provided test database Apache 2.0 -## Sponsors - -We are building the features of Firebase using enterprise-grade, open source products. We support existing communities wherever possible, and if the products don’t exist we build them and open source them ourselves. - -[![New Sponsor](https://user-images.githubusercontent.com/10214025/90518111-e74bbb00-e198-11ea-8f88-c9e3c1aa4b5b.png)](https://github.com/sponsors/supabase) - From eb600b8d1b022da1aecace50753a8e7ef370d0e0 Mon Sep 17 00:00:00 2001 From: avallete Date: Wed, 2 Jul 2025 15:31:41 +0200 Subject: [PATCH 17/72] chore: use CamelCasing convention --- src/server/templates/typescript.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 5ebe9dc8..e0aff414 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -36,9 +36,9 @@ export const apply = async ({ const internal_supabase_schema = postgrestVersion ? 
`// Allows to automatically instanciate createClient with right options - // instead of createClient(URL, KEY) - __internal_supabase: { - postgrestVersion: '${postgrestVersion}' + // instead of createClient(URL, KEY) + __InternalSupabase: { + PostgrestVersion: '${postgrestVersion}' Tables: { [_ in never]: never } From c01b4b84bc235ff998fe58d71bfc996650c0beab Mon Sep 17 00:00:00 2001 From: avallete Date: Wed, 2 Jul 2025 15:54:40 +0200 Subject: [PATCH 18/72] chore: use CamelCase --- test/server/typegen.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 9e534d78..8c2ef629 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -2389,9 +2389,9 @@ test('typegen: typescript w/ postgrestVersion', async () => { } } // Allows to automatically instanciate createClient with right options - // instead of createClient(URL, KEY) - __internal_supabase: { - postgrestVersion: "13" + // instead of createClient(URL, KEY) + __InternalSupabase: { + PostgrestVersion: "13" Tables: { [_ in never]: never } From 26be3a2d374902960ad99469e340ab62fa00b645 Mon Sep 17 00:00:00 2001 From: Bobbie Soedirgo Date: Fri, 27 Jun 2025 18:04:39 +0800 Subject: [PATCH 19/72] chore: remove extra props from __internal_supabase --- src/server/templates/typescript.ts | 127 +++++++++++++---------------- 1 file changed, 56 insertions(+), 71 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index e0aff414..29c34c1a 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -39,21 +39,6 @@ export const apply = async ({ // instead of createClient(URL, KEY) __InternalSupabase: { PostgrestVersion: '${postgrestVersion}' - Tables: { - [_ in never]: never - } - Views: { - [_ in never]: never - } - Functions: { - [_ in never]: never - } - Enums: { - [_ in never]: never - } - CompositeTypes: { - [_ in never]: never - } }` : '' @@ -61,6 +46,7 @@ export 
const apply = async ({ export type Json = string | number | boolean | null | { [key: string]: Json | undefined } | Json[] export type Database = { + ${internal_supabase_schema} ${schemas .sort(({ name: a }, { name: b }) => a.localeCompare(b)) .map((schema) => { @@ -456,113 +442,112 @@ export type Database = { } }` })} - ${internal_supabase_schema} } -type DefaultSchema = Database[Extract] +type DatabaseWithoutInternals = Omit + +type DefaultSchema = DatabaseWithoutInternals[Extract] export type Tables< DefaultSchemaTableNameOrOptions extends | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) - : never = never, -> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + ? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + : never = never +> = DefaultSchemaTableNameOrOptions extends { schema: keyof DatabaseWithoutInternals } + ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { Row: infer R } ? R : never - : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] & - DefaultSchema["Views"]) - ? (DefaultSchema["Tables"] & - DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends { - Row: infer R - } - ? 
R - : never + : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) + ? (DefaultSchema["Tables"] & DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends { + Row: infer R + } + ? R : never + : never export type TablesInsert< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] - : never = never, -> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + : never = never +> = DefaultSchemaTableNameOrOptions extends { schema: keyof DatabaseWithoutInternals } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Insert: infer I } ? I : never : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] - ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { - Insert: infer I - } - ? I - : never + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { + Insert: infer I + } + ? I : never + : never export type TablesUpdate< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] - : never = never, -> = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + ? 
keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + : never = never +> = DefaultSchemaTableNameOrOptions extends { schema: keyof DatabaseWithoutInternals } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Update: infer U } ? U : never : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] - ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { - Update: infer U - } - ? U - : never + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { + Update: infer U + } + ? U : never + : never export type Enums< DefaultSchemaEnumNameOrOptions extends | keyof DefaultSchema["Enums"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, EnumName extends DefaultSchemaEnumNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] - : never = never, -> = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + : never = never +> = DefaultSchemaEnumNameOrOptions extends { schema: keyof DatabaseWithoutInternals } + ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] - ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] - : never + ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] + : never export type CompositeTypes< PublicCompositeTypeNameOrOptions extends | keyof DefaultSchema["CompositeTypes"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? 
keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] - : never = never, -> = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } - ? Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + : never = never +> = PublicCompositeTypeNameOrOptions extends { schema: keyof DatabaseWithoutInternals } + ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] - ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] - : never + ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] + : never export const Constants = { ${schemas From be5a28ddfba748e6ec9323efd058a0234c73321a Mon Sep 17 00:00:00 2001 From: avallete Date: Wed, 2 Jul 2025 16:10:27 +0200 Subject: [PATCH 20/72] chore: update test snapshot --- test/server/typegen.ts | 297 +++++++++++++++++++++++------------------ 1 file changed, 165 insertions(+), 132 deletions(-) diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 8c2ef629..fa47cbec 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -491,21 +491,25 @@ test('typegen: typescript', async () => { } } - type DefaultSchema = Database[Extract] + type DatabaseWithoutInternals = Omit + + type DefaultSchema = DatabaseWithoutInternals[Extract] export type Tables< DefaultSchemaTableNameOrOptions extends | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + ? 
keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { Row: infer R } ? R @@ -523,14 +527,16 @@ test('typegen: typescript', async () => { export type TablesInsert< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Insert: infer I } ? 
I @@ -546,14 +552,16 @@ test('typegen: typescript', async () => { export type TablesUpdate< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Update: infer U } ? U @@ -569,14 +577,16 @@ test('typegen: typescript', async () => { export type Enums< DefaultSchemaEnumNameOrOptions extends | keyof DefaultSchema["Enums"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, EnumName extends DefaultSchemaEnumNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] : never = never, - > = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + > = DefaultSchemaEnumNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] ? 
DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] : never @@ -584,14 +594,16 @@ test('typegen: typescript', async () => { export type CompositeTypes< PublicCompositeTypeNameOrOptions extends | keyof DefaultSchema["CompositeTypes"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] : never = never, - > = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } - ? Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + > = PublicCompositeTypeNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] : never @@ -1124,21 +1136,25 @@ test('typegen w/ one-to-one relationships', async () => { } } - type DefaultSchema = Database[Extract] + type DatabaseWithoutInternals = Omit + + type DefaultSchema = DatabaseWithoutInternals[Extract] export type Tables< DefaultSchemaTableNameOrOptions extends | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + ? 
keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { Row: infer R } ? R @@ -1156,14 +1172,16 @@ test('typegen w/ one-to-one relationships', async () => { export type TablesInsert< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Insert: infer I } ? 
I @@ -1179,14 +1197,16 @@ test('typegen w/ one-to-one relationships', async () => { export type TablesUpdate< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Update: infer U } ? U @@ -1202,14 +1222,16 @@ test('typegen w/ one-to-one relationships', async () => { export type Enums< DefaultSchemaEnumNameOrOptions extends | keyof DefaultSchema["Enums"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, EnumName extends DefaultSchemaEnumNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] : never = never, - > = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + > = DefaultSchemaEnumNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] ? 
DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] : never @@ -1217,14 +1239,16 @@ test('typegen w/ one-to-one relationships', async () => { export type CompositeTypes< PublicCompositeTypeNameOrOptions extends | keyof DefaultSchema["CompositeTypes"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] : never = never, - > = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } - ? Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + > = PublicCompositeTypeNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] : never @@ -1757,21 +1781,25 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } } - type DefaultSchema = Database[Extract] + type DatabaseWithoutInternals = Omit + + type DefaultSchema = DatabaseWithoutInternals[Extract] export type Tables< DefaultSchemaTableNameOrOptions extends | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + ? 
keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { Row: infer R } ? R @@ -1789,14 +1817,16 @@ test('typegen: typescript w/ one-to-one relationships', async () => { export type TablesInsert< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Insert: infer I } ? 
I @@ -1812,14 +1842,16 @@ test('typegen: typescript w/ one-to-one relationships', async () => { export type TablesUpdate< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Update: infer U } ? U @@ -1835,14 +1867,16 @@ test('typegen: typescript w/ one-to-one relationships', async () => { export type Enums< DefaultSchemaEnumNameOrOptions extends | keyof DefaultSchema["Enums"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, EnumName extends DefaultSchemaEnumNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] : never = never, - > = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + > = DefaultSchemaEnumNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] ? 
DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] : never @@ -1850,14 +1884,16 @@ test('typegen: typescript w/ one-to-one relationships', async () => { export type CompositeTypes< PublicCompositeTypeNameOrOptions extends | keyof DefaultSchema["CompositeTypes"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] : never = never, - > = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } - ? Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + > = PublicCompositeTypeNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] ? 
DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] : never @@ -1892,6 +1928,11 @@ test('typegen: typescript w/ postgrestVersion', async () => { | Json[] export type Database = { + // Allows to automatically instanciate createClient with right options + // instead of createClient(URL, KEY) + __InternalSupabase: { + PostgrestVersion: "13" + } public: { Tables: { category: { @@ -2388,43 +2429,27 @@ test('typegen: typescript w/ postgrestVersion', async () => { } } } - // Allows to automatically instanciate createClient with right options - // instead of createClient(URL, KEY) - __InternalSupabase: { - PostgrestVersion: "13" - Tables: { - [_ in never]: never - } - Views: { - [_ in never]: never - } - Functions: { - [_ in never]: never - } - Enums: { - [_ in never]: never - } - CompositeTypes: { - [_ in never]: never - } - } } - type DefaultSchema = Database[Extract] + type DatabaseWithoutInternals = Omit + + type DefaultSchema = DatabaseWithoutInternals[Extract] export type Tables< DefaultSchemaTableNameOrOptions extends | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + ? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? (Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - Database[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? 
(DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { Row: infer R } ? R @@ -2442,14 +2467,16 @@ test('typegen: typescript w/ postgrestVersion', async () => { export type TablesInsert< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Insert: infer I } ? I @@ -2465,14 +2492,16 @@ test('typegen: typescript w/ postgrestVersion', async () => { export type TablesUpdate< DefaultSchemaTableNameOrOptions extends | keyof DefaultSchema["Tables"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] : never = never, - > = DefaultSchemaTableNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? 
DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { Update: infer U } ? U @@ -2488,14 +2517,16 @@ test('typegen: typescript w/ postgrestVersion', async () => { export type Enums< DefaultSchemaEnumNameOrOptions extends | keyof DefaultSchema["Enums"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, EnumName extends DefaultSchemaEnumNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] : never = never, - > = DefaultSchemaEnumNameOrOptions extends { schema: keyof Database } - ? Database[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + > = DefaultSchemaEnumNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] : never @@ -2503,14 +2534,16 @@ test('typegen: typescript w/ postgrestVersion', async () => { export type CompositeTypes< PublicCompositeTypeNameOrOptions extends | keyof DefaultSchema["CompositeTypes"] - | { schema: keyof Database }, + | { schema: keyof DatabaseWithoutInternals }, CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { - schema: keyof Database + schema: keyof DatabaseWithoutInternals } - ? keyof Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] : never = never, - > = PublicCompositeTypeNameOrOptions extends { schema: keyof Database } - ? 
Database[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + > = PublicCompositeTypeNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] : never From 5744206a7aa0bd70001100ab6cc2376cca5d32c7 Mon Sep 17 00:00:00 2001 From: avallete Date: Wed, 2 Jul 2025 17:22:32 +0200 Subject: [PATCH 21/72] feat(query): add idle_session_timeout for idle session auto-close --- src/lib/db.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/lib/db.ts b/src/lib/db.ts index 6f7e906a..263be4d8 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -118,8 +118,10 @@ export const init: (config: PoolConfig) => { attributes: { sql: trackQueryInSentry ? sql : 'custom' }, }, async () => { + // Use statement_timeout AND idle_session_timeout to ensure the connection will be killed even if idle after + // timeout time. const statementTimeoutQueryPrefix = statementQueryTimeout - ? `SET statement_timeout='${statementQueryTimeout}s';` + ? `SET statement_timeout='${statementQueryTimeout}s'; SET idle_session_timeout='${statementQueryTimeout}s';` : '' // node-postgres need a statement_timeout to kill the connection when timeout is reached // otherwise the query will keep running on the database even if query timeout was reached From 003391e58afdb4afeebcf82d3d886d99ec50240a Mon Sep 17 00:00:00 2001 From: "Siddharth M. 
Bhatia" Date: Fri, 11 Jul 2025 20:05:48 -0700 Subject: [PATCH 22/72] fix: Add 'case' to list of Swift keywords (#956) --- src/server/templates/swift.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/server/templates/swift.ts b/src/server/templates/swift.ts index fee24297..e596610e 100644 --- a/src/server/templates/swift.ts +++ b/src/server/templates/swift.ts @@ -392,7 +392,7 @@ function formatForSwiftTypeName(name: string): string { ) } -const SWIFT_KEYWORDS = ['in', 'default'] +const SWIFT_KEYWORDS = ['in', 'default', 'case'] /** * Converts a Postgres name to pascalCase. From dd3e9adb08a68ddbcae2e87e181866e4bd839dc8 Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 22 Jul 2025 12:11:34 +0200 Subject: [PATCH 23/72] fix(server): bump bodyLimit to 3MB default allow parameterize --- src/server/app.ts | 9 +++++++-- src/server/constants.ts | 5 +++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/server/app.ts b/src/server/app.ts index 8efa733c..9df05341 100644 --- a/src/server/app.ts +++ b/src/server/app.ts @@ -3,14 +3,19 @@ import * as Sentry from '@sentry/node' import cors from '@fastify/cors' import swagger from '@fastify/swagger' import { fastify, FastifyInstance, FastifyServerOptions } from 'fastify' -import { PG_META_REQ_HEADER } from './constants.js' +import { PG_META_REQ_HEADER, MAX_BODY_LIMIT } from './constants.js' import routes from './routes/index.js' import { extractRequestForLogging } from './utils.js' // Pseudo package declared only for this module import pkg from '#package.json' with { type: 'json' } export const build = (opts: FastifyServerOptions = {}): FastifyInstance => { - const app = fastify({ disableRequestLogging: true, requestIdHeader: PG_META_REQ_HEADER, ...opts }) + const app = fastify({ + disableRequestLogging: true, + requestIdHeader: PG_META_REQ_HEADER, + bodyLimit: MAX_BODY_LIMIT, + ...opts, + }) Sentry.setupFastifyErrorHandler(app) app.setErrorHandler((error, request, reply) => { diff --git 
a/src/server/constants.ts b/src/server/constants.ts index 8bf66417..9354c59f 100644 --- a/src/server/constants.ts +++ b/src/server/constants.ts @@ -57,6 +57,11 @@ export const PG_META_MAX_RESULT_SIZE = process.env.PG_META_MAX_RESULT_SIZE_MB parseInt(process.env.PG_META_MAX_RESULT_SIZE_MB, 10) * 1024 * 1024 : 2 * 1024 * 1024 * 1024 // default to 2GB max query size result +export const MAX_BODY_LIMIT = process.env.PG_META_MAX_BODY_LIMIT_MB + ? // Fastify server max body size allowed, is in bytes, convert from MB to Bytes + parseInt(process.env.PG_META_MAX_BODY_LIMIT_MB, 10) * 1024 * 1024 + : 3 * 1024 * 1024 + export const DEFAULT_POOL_CONFIG: PoolConfig = { max: 1, connectionTimeoutMillis: PG_CONN_TIMEOUT_SECS * 1000, From e583d438a70a99d11c91d71fcf633a8721a3fe9a Mon Sep 17 00:00:00 2001 From: Josh O'Steen <4296435+im-jersh@users.noreply.github.com> Date: Mon, 28 Jul 2025 07:47:25 -0700 Subject: [PATCH 24/72] fix(typegen): Map postgres numeric type to Swift Decimal type (#960) --- src/server/templates/swift.ts | 2 + test/db/00-init.sql | 3 +- test/lib/tables.ts | 18 ++ test/server/query.ts | 2 + test/server/typegen.ts | 436 +++++++++++++++++++--------------- 5 files changed, 268 insertions(+), 193 deletions(-) diff --git a/src/server/templates/swift.ts b/src/server/templates/swift.ts index e596610e..7bb41207 100644 --- a/src/server/templates/swift.ts +++ b/src/server/templates/swift.ts @@ -309,6 +309,8 @@ const pgTypeToSwiftType = ( swiftType = 'Float' } else if (pgType === 'float8') { swiftType = 'Double' + } else if (['numeric', 'decimal'].includes(pgType)) { + swiftType = 'Decimal' } else if (pgType === 'uuid') { swiftType = 'UUID' } else if ( diff --git a/test/db/00-init.sql b/test/db/00-init.sql index 00c6a472..3551a4e7 100644 --- a/test/db/00-init.sql +++ b/test/db/00-init.sql @@ -8,7 +8,8 @@ CREATE TYPE composite_type_with_array_attribute AS (my_text_array text[]); CREATE TABLE public.users ( id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, name text, - 
status user_status DEFAULT 'ACTIVE' + status user_status DEFAULT 'ACTIVE', + decimal numeric ); INSERT INTO public.users (name) diff --git a/test/lib/tables.ts b/test/lib/tables.ts index c4c934e7..c35546b8 100644 --- a/test/lib/tables.ts +++ b/test/lib/tables.ts @@ -78,6 +78,24 @@ test('list', async () => { "schema": "public", "table": "users", }, + { + "check": null, + "comment": null, + "data_type": "numeric", + "default_value": null, + "enums": [], + "format": "numeric", + "identity_generation": null, + "is_generated": false, + "is_identity": false, + "is_nullable": true, + "is_unique": false, + "is_updatable": true, + "name": "decimal", + "ordinal_position": 4, + "schema": "public", + "table": "users", + }, { "check": null, "comment": null, diff --git a/test/server/query.ts b/test/server/query.ts index 2b4bc2ba..8a9d6076 100644 --- a/test/server/query.ts +++ b/test/server/query.ts @@ -10,11 +10,13 @@ test('query', async () => { expect(res.json()).toMatchInlineSnapshot(` [ { + "decimal": null, "id": 1, "name": "Joe Bloggs", "status": "ACTIVE", }, { + "decimal": null, "id": 2, "name": "Jane Doe", "status": "ACTIVE", diff --git a/test/server/typegen.ts b/test/server/typegen.ts index fa47cbec..87996416 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -221,16 +221,19 @@ test('typegen: typescript', async () => { } users: { Row: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -362,16 +365,19 @@ test('typegen: typescript', async () => { } users_view: { Row: { + decimal: number | null id: number | null name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: 
number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -412,6 +418,7 @@ test('typegen: typescript', async () => { function_returning_row: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -420,6 +427,7 @@ test('typegen: typescript', async () => { function_returning_set_of_rows: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -856,16 +864,19 @@ test('typegen w/ one-to-one relationships', async () => { } users: { Row: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -1007,16 +1018,19 @@ test('typegen w/ one-to-one relationships', async () => { } users_view: { Row: { + decimal: number | null id: number | null name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -1057,6 +1071,7 @@ test('typegen w/ one-to-one relationships', async () => { function_returning_row: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -1065,6 +1080,7 @@ test('typegen w/ one-to-one relationships', async () => { 
function_returning_set_of_rows: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -1501,16 +1517,19 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } users: { Row: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -1652,16 +1671,19 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } users_view: { Row: { + decimal: number | null id: number | null name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -1702,6 +1724,7 @@ test('typegen: typescript w/ one-to-one relationships', async () => { function_returning_row: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -1710,6 +1733,7 @@ test('typegen: typescript w/ one-to-one relationships', async () => { function_returning_set_of_rows: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -2151,16 +2175,19 @@ test('typegen: typescript w/ postgrestVersion', async () => { } users: { Row: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number name?: string | null status?: 
Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -2302,16 +2329,19 @@ test('typegen: typescript w/ postgrestVersion', async () => { } users_view: { Row: { + decimal: number | null id: number | null name: string | null status: Database["public"]["Enums"]["user_status"] | null } Insert: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null } Update: { + decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null @@ -2352,6 +2382,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { function_returning_row: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -2360,6 +2391,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { function_returning_set_of_rows: { Args: Record Returns: { + decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null @@ -2566,198 +2598,202 @@ test('typegen: go', async () => { expect(body).toMatchInlineSnapshot(` "package database -type PublicUsersSelect struct { - Id int64 \`json:"id"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicUsersInsert struct { - Id *int64 \`json:"id"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicUsersUpdate struct { - Id *int64 \`json:"id"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicTodosSelect struct { - Details *string \`json:"details"\` - Id int64 \`json:"id"\` - UserId int64 \`json:"user-id"\` -} - -type PublicTodosInsert struct { - Details *string \`json:"details"\` - Id *int64 \`json:"id"\` - UserId int64 \`json:"user-id"\` -} - -type PublicTodosUpdate struct { - 
Details *string \`json:"details"\` - Id *int64 \`json:"id"\` - UserId *int64 \`json:"user-id"\` -} - -type PublicUsersAuditSelect struct { - CreatedAt *string \`json:"created_at"\` - Id int64 \`json:"id"\` - PreviousValue interface{} \`json:"previous_value"\` - UserId *int64 \`json:"user_id"\` -} - -type PublicUsersAuditInsert struct { - CreatedAt *string \`json:"created_at"\` - Id *int64 \`json:"id"\` - PreviousValue interface{} \`json:"previous_value"\` - UserId *int64 \`json:"user_id"\` -} - -type PublicUsersAuditUpdate struct { - CreatedAt *string \`json:"created_at"\` - Id *int64 \`json:"id"\` - PreviousValue interface{} \`json:"previous_value"\` - UserId *int64 \`json:"user_id"\` -} - -type PublicUserDetailsSelect struct { - Details *string \`json:"details"\` - UserId int64 \`json:"user_id"\` -} - -type PublicUserDetailsInsert struct { - Details *string \`json:"details"\` - UserId int64 \`json:"user_id"\` -} - -type PublicUserDetailsUpdate struct { - Details *string \`json:"details"\` - UserId *int64 \`json:"user_id"\` -} - -type PublicEmptySelect struct { - -} - -type PublicEmptyInsert struct { - -} - -type PublicEmptyUpdate struct { - -} - -type PublicTableWithOtherTablesRowTypeSelect struct { - Col1 interface{} \`json:"col1"\` - Col2 interface{} \`json:"col2"\` -} - -type PublicTableWithOtherTablesRowTypeInsert struct { - Col1 interface{} \`json:"col1"\` - Col2 interface{} \`json:"col2"\` -} - -type PublicTableWithOtherTablesRowTypeUpdate struct { - Col1 interface{} \`json:"col1"\` - Col2 interface{} \`json:"col2"\` -} - -type PublicTableWithPrimaryKeyOtherThanIdSelect struct { - Name *string \`json:"name"\` - OtherId int64 \`json:"other_id"\` -} - -type PublicTableWithPrimaryKeyOtherThanIdInsert struct { - Name *string \`json:"name"\` - OtherId *int64 \`json:"other_id"\` -} - -type PublicTableWithPrimaryKeyOtherThanIdUpdate struct { - Name *string \`json:"name"\` - OtherId *int64 \`json:"other_id"\` -} - -type PublicCategorySelect struct { - Id int32 
\`json:"id"\` - Name string \`json:"name"\` -} - -type PublicCategoryInsert struct { - Id *int32 \`json:"id"\` - Name string \`json:"name"\` -} - -type PublicCategoryUpdate struct { - Id *int32 \`json:"id"\` - Name *string \`json:"name"\` -} - -type PublicMemesSelect struct { - Category *int32 \`json:"category"\` - CreatedAt string \`json:"created_at"\` - Id int32 \`json:"id"\` - Metadata interface{} \`json:"metadata"\` - Name string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicMemesInsert struct { - Category *int32 \`json:"category"\` - CreatedAt string \`json:"created_at"\` - Id *int32 \`json:"id"\` - Metadata interface{} \`json:"metadata"\` - Name string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicMemesUpdate struct { - Category *int32 \`json:"category"\` - CreatedAt *string \`json:"created_at"\` - Id *int32 \`json:"id"\` - Metadata interface{} \`json:"metadata"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicTodosViewSelect struct { - Details *string \`json:"details"\` - Id *int64 \`json:"id"\` - UserId *int64 \`json:"user-id"\` -} - -type PublicUsersViewSelect struct { - Id *int64 \`json:"id"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` -} - -type PublicAViewSelect struct { - Id *int64 \`json:"id"\` -} - -type PublicUsersViewWithMultipleRefsToUsersSelect struct { - InitialId *int64 \`json:"initial_id"\` - InitialName *string \`json:"initial_name"\` - SecondId *int64 \`json:"second_id"\` - SecondName *string \`json:"second_name"\` -} - -type PublicTodosMatviewSelect struct { - Details *string \`json:"details"\` - Id *int64 \`json:"id"\` - UserId *int64 \`json:"user-id"\` -} - -type PublicCompositeTypeWithArrayAttribute struct { - MyTextArray interface{} \`json:"my_text_array"\` -} - -type PublicCompositeTypeWithRecordAttribute struct { - Todo interface{} \`json:"todo"\` -}" + type PublicUsersSelect struct { + Decimal *float64 \`json:"decimal"\` + Id int64 
\`json:"id"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicUsersInsert struct { + Decimal *float64 \`json:"decimal"\` + Id *int64 \`json:"id"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicUsersUpdate struct { + Decimal *float64 \`json:"decimal"\` + Id *int64 \`json:"id"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicTodosSelect struct { + Details *string \`json:"details"\` + Id int64 \`json:"id"\` + UserId int64 \`json:"user-id"\` + } + + type PublicTodosInsert struct { + Details *string \`json:"details"\` + Id *int64 \`json:"id"\` + UserId int64 \`json:"user-id"\` + } + + type PublicTodosUpdate struct { + Details *string \`json:"details"\` + Id *int64 \`json:"id"\` + UserId *int64 \`json:"user-id"\` + } + + type PublicUsersAuditSelect struct { + CreatedAt *string \`json:"created_at"\` + Id int64 \`json:"id"\` + PreviousValue interface{} \`json:"previous_value"\` + UserId *int64 \`json:"user_id"\` + } + + type PublicUsersAuditInsert struct { + CreatedAt *string \`json:"created_at"\` + Id *int64 \`json:"id"\` + PreviousValue interface{} \`json:"previous_value"\` + UserId *int64 \`json:"user_id"\` + } + + type PublicUsersAuditUpdate struct { + CreatedAt *string \`json:"created_at"\` + Id *int64 \`json:"id"\` + PreviousValue interface{} \`json:"previous_value"\` + UserId *int64 \`json:"user_id"\` + } + + type PublicUserDetailsSelect struct { + Details *string \`json:"details"\` + UserId int64 \`json:"user_id"\` + } + + type PublicUserDetailsInsert struct { + Details *string \`json:"details"\` + UserId int64 \`json:"user_id"\` + } + + type PublicUserDetailsUpdate struct { + Details *string \`json:"details"\` + UserId *int64 \`json:"user_id"\` + } + + type PublicEmptySelect struct { + + } + + type PublicEmptyInsert struct { + + } + + type PublicEmptyUpdate struct { + + } + + type PublicTableWithOtherTablesRowTypeSelect struct { + Col1 interface{} 
\`json:"col1"\` + Col2 interface{} \`json:"col2"\` + } + + type PublicTableWithOtherTablesRowTypeInsert struct { + Col1 interface{} \`json:"col1"\` + Col2 interface{} \`json:"col2"\` + } + + type PublicTableWithOtherTablesRowTypeUpdate struct { + Col1 interface{} \`json:"col1"\` + Col2 interface{} \`json:"col2"\` + } + + type PublicTableWithPrimaryKeyOtherThanIdSelect struct { + Name *string \`json:"name"\` + OtherId int64 \`json:"other_id"\` + } + + type PublicTableWithPrimaryKeyOtherThanIdInsert struct { + Name *string \`json:"name"\` + OtherId *int64 \`json:"other_id"\` + } + + type PublicTableWithPrimaryKeyOtherThanIdUpdate struct { + Name *string \`json:"name"\` + OtherId *int64 \`json:"other_id"\` + } + + type PublicCategorySelect struct { + Id int32 \`json:"id"\` + Name string \`json:"name"\` + } + + type PublicCategoryInsert struct { + Id *int32 \`json:"id"\` + Name string \`json:"name"\` + } + + type PublicCategoryUpdate struct { + Id *int32 \`json:"id"\` + Name *string \`json:"name"\` + } + + type PublicMemesSelect struct { + Category *int32 \`json:"category"\` + CreatedAt string \`json:"created_at"\` + Id int32 \`json:"id"\` + Metadata interface{} \`json:"metadata"\` + Name string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicMemesInsert struct { + Category *int32 \`json:"category"\` + CreatedAt string \`json:"created_at"\` + Id *int32 \`json:"id"\` + Metadata interface{} \`json:"metadata"\` + Name string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicMemesUpdate struct { + Category *int32 \`json:"category"\` + CreatedAt *string \`json:"created_at"\` + Id *int32 \`json:"id"\` + Metadata interface{} \`json:"metadata"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicTodosViewSelect struct { + Details *string \`json:"details"\` + Id *int64 \`json:"id"\` + UserId *int64 \`json:"user-id"\` + } + + type PublicUsersViewSelect struct { + Decimal *float64 \`json:"decimal"\` + Id 
*int64 \`json:"id"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + } + + type PublicAViewSelect struct { + Id *int64 \`json:"id"\` + } + + type PublicUsersViewWithMultipleRefsToUsersSelect struct { + InitialId *int64 \`json:"initial_id"\` + InitialName *string \`json:"initial_name"\` + SecondId *int64 \`json:"second_id"\` + SecondName *string \`json:"second_name"\` + } + + type PublicTodosMatviewSelect struct { + Details *string \`json:"details"\` + Id *int64 \`json:"id"\` + UserId *int64 \`json:"user-id"\` + } + + type PublicCompositeTypeWithArrayAttribute struct { + MyTextArray interface{} \`json:"my_text_array"\` + } + + type PublicCompositeTypeWithRecordAttribute struct { + Todo interface{} \`json:"todo"\` + }" `) }) @@ -2991,30 +3027,36 @@ test('typegen: swift', async () => { } } internal struct UsersSelect: Codable, Hashable, Sendable, Identifiable { + internal let decimal: Decimal? internal let id: Int64 internal let name: String? internal let status: UserStatus? internal enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" } } internal struct UsersInsert: Codable, Hashable, Sendable, Identifiable { + internal let decimal: Decimal? internal let id: Int64? internal let name: String? internal let status: UserStatus? internal enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" } } internal struct UsersUpdate: Codable, Hashable, Sendable, Identifiable { + internal let decimal: Decimal? internal let id: Int64? internal let name: String? internal let status: UserStatus? internal enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" @@ -3083,10 +3125,12 @@ test('typegen: swift', async () => { } } internal struct UsersViewSelect: Codable, Hashable, Sendable { + internal let decimal: Decimal? internal let id: Int64? internal let name: String? 
internal let status: UserStatus? internal enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" @@ -3354,30 +3398,36 @@ test('typegen: swift w/ public access control', async () => { } } public struct UsersSelect: Codable, Hashable, Sendable, Identifiable { + public let decimal: Decimal? public let id: Int64 public let name: String? public let status: UserStatus? public enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" } } public struct UsersInsert: Codable, Hashable, Sendable, Identifiable { + public let decimal: Decimal? public let id: Int64? public let name: String? public let status: UserStatus? public enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" } } public struct UsersUpdate: Codable, Hashable, Sendable, Identifiable { + public let decimal: Decimal? public let id: Int64? public let name: String? public let status: UserStatus? public enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" @@ -3446,10 +3496,12 @@ test('typegen: swift w/ public access control', async () => { } } public struct UsersViewSelect: Codable, Hashable, Sendable { + public let decimal: Decimal? public let id: Int64? public let name: String? public let status: UserStatus? 
public enum CodingKeys: String, CodingKey { + case decimal = "decimal" case id = "id" case name = "name" case status = "status" From c626519d3a38b6adcea96e7941f4dda00e41be91 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Wed, 30 Jul 2025 18:15:10 +0200 Subject: [PATCH 25/72] fix(typegen): improve ts typegen consistency between generation (#964) Fixes: #959 --- src/server/templates/typescript.ts | 4 +- test/server/typegen.ts | 920 +++++++++-------------------- 2 files changed, 270 insertions(+), 654 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 29c34c1a..f8e6e7ca 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -316,7 +316,9 @@ export type Database = { `${JSON.stringify(fnName)}: { Args: ${fns .map(({ args }) => { - const inArgs = args.filter(({ mode }) => mode === 'in') + const inArgs = args + .toSorted((a, b) => a.name.localeCompare(b.name)) + .filter(({ mode }) => mode === 'in') if (inArgs.length === 0) { return 'Record' diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 87996416..9a03ea9e 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -629,659 +629,6 @@ test('typegen: typescript', async () => { ) }) -test('typegen w/ one-to-one relationships', async () => { - const { body } = await app.inject({ - method: 'GET', - path: '/generators/typescript', - query: { detect_one_to_one_relationships: 'true' }, - }) - expect(body).toMatchInlineSnapshot( - ` - "export type Json = - | string - | number - | boolean - | null - | { [key: string]: Json | undefined } - | Json[] - - export type Database = { - public: { - Tables: { - category: { - Row: { - id: number - name: string - } - Insert: { - id?: number - name: string - } - Update: { - id?: number - name?: string - } - Relationships: [] - } - empty: { - Row: {} - Insert: {} - Update: {} - Relationships: [] - } - foreign_table: { - Row: { - id: number - name: string | null - status: 
Database["public"]["Enums"]["user_status"] | null - } - Insert: { - id: number - name?: string | null - status?: Database["public"]["Enums"]["user_status"] | null - } - Update: { - id?: number - name?: string | null - status?: Database["public"]["Enums"]["user_status"] | null - } - Relationships: [] - } - memes: { - Row: { - category: number | null - created_at: string - id: number - metadata: Json | null - name: string - status: Database["public"]["Enums"]["meme_status"] | null - } - Insert: { - category?: number | null - created_at: string - id?: number - metadata?: Json | null - name: string - status?: Database["public"]["Enums"]["meme_status"] | null - } - Update: { - category?: number | null - created_at?: string - id?: number - metadata?: Json | null - name?: string - status?: Database["public"]["Enums"]["meme_status"] | null - } - Relationships: [ - { - foreignKeyName: "memes_category_fkey" - columns: ["category"] - isOneToOne: false - referencedRelation: "category" - referencedColumns: ["id"] - }, - ] - } - table_with_other_tables_row_type: { - Row: { - col1: Database["public"]["Tables"]["user_details"]["Row"] | null - col2: Database["public"]["Views"]["a_view"]["Row"] | null - } - Insert: { - col1?: Database["public"]["Tables"]["user_details"]["Row"] | null - col2?: Database["public"]["Views"]["a_view"]["Row"] | null - } - Update: { - col1?: Database["public"]["Tables"]["user_details"]["Row"] | null - col2?: Database["public"]["Views"]["a_view"]["Row"] | null - } - Relationships: [] - } - table_with_primary_key_other_than_id: { - Row: { - name: string | null - other_id: number - } - Insert: { - name?: string | null - other_id?: number - } - Update: { - name?: string | null - other_id?: number - } - Relationships: [] - } - todos: { - Row: { - details: string | null - id: number - "user-id": number - blurb: string | null - blurb_varchar: string | null - details_is_long: boolean | null - details_length: number | null - details_words: string[] | null - } - 
Insert: { - details?: string | null - id?: number - "user-id": number - } - Update: { - details?: string | null - id?: number - "user-id"?: number - } - Relationships: [ - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "a_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["initial_id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["second_id"] - }, - ] - } - user_details: { - Row: { - details: string | null - user_id: number - } - Insert: { - details?: string | null - user_id: number - } - Update: { - details?: string | null - user_id?: number - } - Relationships: [ - { - foreignKeyName: "user_details_user_id_fkey" - columns: ["user_id"] - isOneToOne: true - referencedRelation: "a_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "user_details_user_id_fkey" - columns: ["user_id"] - isOneToOne: true - referencedRelation: "users" - referencedColumns: ["id"] - }, - { - foreignKeyName: "user_details_user_id_fkey" - columns: ["user_id"] - isOneToOne: true - referencedRelation: "users_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "user_details_user_id_fkey" - columns: ["user_id"] - isOneToOne: true - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["initial_id"] - }, - { - foreignKeyName: "user_details_user_id_fkey" - columns: ["user_id"] - isOneToOne: true - 
referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["second_id"] - }, - ] - } - users: { - Row: { - decimal: number | null - id: number - name: string | null - status: Database["public"]["Enums"]["user_status"] | null - } - Insert: { - decimal?: number | null - id?: number - name?: string | null - status?: Database["public"]["Enums"]["user_status"] | null - } - Update: { - decimal?: number | null - id?: number - name?: string | null - status?: Database["public"]["Enums"]["user_status"] | null - } - Relationships: [] - } - users_audit: { - Row: { - created_at: string | null - id: number - previous_value: Json | null - user_id: number | null - } - Insert: { - created_at?: string | null - id?: number - previous_value?: Json | null - user_id?: number | null - } - Update: { - created_at?: string | null - id?: number - previous_value?: Json | null - user_id?: number | null - } - Relationships: [] - } - } - Views: { - a_view: { - Row: { - id: number | null - } - Insert: { - id?: number | null - } - Update: { - id?: number | null - } - Relationships: [] - } - todos_matview: { - Row: { - details: string | null - id: number | null - "user-id": number | null - } - Relationships: [ - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "a_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["initial_id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: 
"users_view_with_multiple_refs_to_users" - referencedColumns: ["second_id"] - }, - ] - } - todos_view: { - Row: { - details: string | null - id: number | null - "user-id": number | null - } - Insert: { - details?: string | null - id?: number | null - "user-id"?: number | null - } - Update: { - details?: string | null - id?: number | null - "user-id"?: number | null - } - Relationships: [ - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "a_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view" - referencedColumns: ["id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["initial_id"] - }, - { - foreignKeyName: "todos_user-id_fkey" - columns: ["user-id"] - isOneToOne: false - referencedRelation: "users_view_with_multiple_refs_to_users" - referencedColumns: ["second_id"] - }, - ] - } - users_view: { - Row: { - decimal: number | null - id: number | null - name: string | null - status: Database["public"]["Enums"]["user_status"] | null - } - Insert: { - decimal?: number | null - id?: number | null - name?: string | null - status?: Database["public"]["Enums"]["user_status"] | null - } - Update: { - decimal?: number | null - id?: number | null - name?: string | null - status?: Database["public"]["Enums"]["user_status"] | null - } - Relationships: [] - } - users_view_with_multiple_refs_to_users: { - Row: { - initial_id: number | null - initial_name: string | null - second_id: number | null - second_name: string | null - } - Relationships: [] - } - } - Functions: { - blurb: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - 
Returns: string - } - blurb_varchar: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string - } - details_is_long: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: boolean - } - details_length: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: number - } - details_words: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string[] - } - function_returning_row: { - Args: Record - Returns: { - decimal: number | null - id: number - name: string | null - status: Database["public"]["Enums"]["user_status"] | null - } - } - function_returning_set_of_rows: { - Args: Record - Returns: { - decimal: number | null - id: number - name: string | null - status: Database["public"]["Enums"]["user_status"] | null - }[] - } - function_returning_table: { - Args: Record - Returns: { - id: number - name: string - }[] - } - get_todos_setof_rows: { - Args: - | { todo_row: Database["public"]["Tables"]["todos"]["Row"] } - | { user_row: Database["public"]["Tables"]["users"]["Row"] } - Returns: { - details: string | null - id: number - "user-id": number - }[] - } - get_user_audit_setof_single_row: { - Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } - Returns: { - created_at: string | null - id: number - previous_value: Json | null - user_id: number | null - }[] - } - polymorphic_function: { - Args: { "": boolean } | { "": string } - Returns: undefined - } - postgres_fdw_disconnect: { - Args: { "": string } - Returns: boolean - } - postgres_fdw_disconnect_all: { - Args: Record - Returns: boolean - } - postgres_fdw_get_connections: { - Args: Record - Returns: Record[] - } - postgres_fdw_handler: { - Args: Record - Returns: unknown - } - test_internal_query: { - Args: Record - Returns: undefined - } - } - Enums: { - meme_status: "new" | "old" | "retired" - user_status: "ACTIVE" | "INACTIVE" - } - CompositeTypes: { - composite_type_with_array_attribute: { - my_text_array: string[] | 
null - } - composite_type_with_record_attribute: { - todo: Database["public"]["Tables"]["todos"]["Row"] | null - } - } - } - } - - type DatabaseWithoutInternals = Omit - - type DefaultSchema = DatabaseWithoutInternals[Extract] - - export type Tables< - DefaultSchemaTableNameOrOptions extends - | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) - | { schema: keyof DatabaseWithoutInternals }, - TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) - : never = never, - > = DefaultSchemaTableNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & - DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { - Row: infer R - } - ? R - : never - : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] & - DefaultSchema["Views"]) - ? (DefaultSchema["Tables"] & - DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends { - Row: infer R - } - ? R - : never - : never - - export type TablesInsert< - DefaultSchemaTableNameOrOptions extends - | keyof DefaultSchema["Tables"] - | { schema: keyof DatabaseWithoutInternals }, - TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] - : never = never, - > = DefaultSchemaTableNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { - Insert: infer I - } - ? I - : never - : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] - ? 
DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { - Insert: infer I - } - ? I - : never - : never - - export type TablesUpdate< - DefaultSchemaTableNameOrOptions extends - | keyof DefaultSchema["Tables"] - | { schema: keyof DatabaseWithoutInternals }, - TableName extends DefaultSchemaTableNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] - : never = never, - > = DefaultSchemaTableNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { - Update: infer U - } - ? U - : never - : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] - ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { - Update: infer U - } - ? U - : never - : never - - export type Enums< - DefaultSchemaEnumNameOrOptions extends - | keyof DefaultSchema["Enums"] - | { schema: keyof DatabaseWithoutInternals }, - EnumName extends DefaultSchemaEnumNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] - : never = never, - > = DefaultSchemaEnumNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] - : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] - ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] - : never - - export type CompositeTypes< - PublicCompositeTypeNameOrOptions extends - | keyof DefaultSchema["CompositeTypes"] - | { schema: keyof DatabaseWithoutInternals }, - CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? 
keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] - : never = never, - > = PublicCompositeTypeNameOrOptions extends { - schema: keyof DatabaseWithoutInternals - } - ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] - : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] - ? DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] - : never - - export const Constants = { - public: { - Enums: { - meme_status: ["new", "old", "retired"], - user_status: ["ACTIVE", "INACTIVE"], - }, - }, - } as const - " - ` - ) -}) - test('typegen: typescript w/ one-to-one relationships', async () => { const { body } = await app.inject({ method: 'GET', @@ -2593,6 +1940,273 @@ test('typegen: typescript w/ postgrestVersion', async () => { ) }) +test('typegen: typescript consistent types definitions orders', async () => { + // Helper function to clean up test entities + const cleanupTestEntities = async () => { + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + -- Drop materialized views first (depend on views/tables) + DROP MATERIALIZED VIEW IF EXISTS test_matview_alpha CASCADE; + DROP MATERIALIZED VIEW IF EXISTS test_matview_beta CASCADE; + DROP MATERIALIZED VIEW IF EXISTS test_matview_gamma CASCADE; + + -- Drop views (may depend on tables) + DROP VIEW IF EXISTS test_view_alpha CASCADE; + DROP VIEW IF EXISTS test_view_beta CASCADE; + DROP VIEW IF EXISTS test_view_gamma CASCADE; + + -- Drop functions + DROP FUNCTION IF EXISTS test_func_alpha(integer, text, boolean) CASCADE; + DROP FUNCTION IF EXISTS test_func_beta(integer, text, boolean) CASCADE; + DROP FUNCTION IF EXISTS test_func_gamma(integer, text, boolean) CASCADE; + + -- Alternative signatures for functions (different parameter orders) + DROP FUNCTION IF EXISTS test_func_alpha(text, boolean, integer) CASCADE; + DROP FUNCTION IF EXISTS test_func_beta(boolean, integer, 
text) CASCADE; + DROP FUNCTION IF EXISTS test_func_gamma(boolean, text, integer) CASCADE; + + -- Drop tables + DROP TABLE IF EXISTS test_table_alpha CASCADE; + DROP TABLE IF EXISTS test_table_beta CASCADE; + DROP TABLE IF EXISTS test_table_gamma CASCADE; + + -- Drop types + DROP TYPE IF EXISTS test_enum_alpha CASCADE; + DROP TYPE IF EXISTS test_enum_beta CASCADE; + `, + }, + }) + } + + // Clean up any existing test entities + await cleanupTestEntities() + + // === FIRST ROUND: Create entities in order A->B->G with property order 1 === + + // Create custom types first + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE TYPE test_enum_alpha AS ENUM ('active', 'inactive', 'pending'); + CREATE TYPE test_enum_beta AS ENUM ('high', 'medium', 'low'); + `, + }, + }) + + // Create tables in order: alpha, beta, gamma with specific column orders + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE TABLE test_table_alpha ( + id SERIAL PRIMARY KEY, + name TEXT NOT NULL, + status test_enum_alpha DEFAULT 'active', + created_at TIMESTAMP DEFAULT NOW() + ); + + CREATE TABLE test_table_beta ( + id SERIAL PRIMARY KEY, + priority test_enum_beta DEFAULT 'medium', + description TEXT, + alpha_id INTEGER REFERENCES test_table_alpha(id) + ); + + CREATE TABLE test_table_gamma ( + id SERIAL PRIMARY KEY, + beta_id INTEGER REFERENCES test_table_beta(id), + value NUMERIC(10,2), + is_active BOOLEAN DEFAULT true + ); + `, + }, + }) + + // Create functions in order: alpha, beta, gamma with specific parameter orders + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE FUNCTION test_func_alpha(param_a integer, param_b text, param_c boolean) + RETURNS integer AS 'SELECT param_a + 1' LANGUAGE sql IMMUTABLE; + + CREATE FUNCTION test_func_beta(param_a integer, param_b text, param_c boolean) + RETURNS text AS 'SELECT param_b || ''_processed''' LANGUAGE sql IMMUTABLE; + + CREATE FUNCTION 
test_func_gamma(param_a integer, param_b text, param_c boolean) + RETURNS boolean AS 'SELECT NOT param_c' LANGUAGE sql IMMUTABLE; + `, + }, + }) + + // Create views in order: alpha, beta, gamma + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE VIEW test_view_alpha AS + SELECT id, name, status, created_at FROM test_table_alpha; + + CREATE VIEW test_view_beta AS + SELECT id, priority, description, alpha_id FROM test_table_beta; + + CREATE VIEW test_view_gamma AS + SELECT id, beta_id, value, is_active FROM test_table_gamma; + `, + }, + }) + + // Create materialized views in order: alpha, beta, gamma + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE MATERIALIZED VIEW test_matview_alpha AS + SELECT id, name, status FROM test_table_alpha; + + CREATE MATERIALIZED VIEW test_matview_beta AS + SELECT id, priority, description FROM test_table_beta; + + CREATE MATERIALIZED VIEW test_matview_gamma AS + SELECT id, value, is_active FROM test_table_gamma; + `, + }, + }) + + // Generate types for first configuration + const { body: firstCall } = await app.inject({ + method: 'GET', + path: '/generators/typescript', + query: { detect_one_to_one_relationships: 'true', postgrest_version: '13' }, + }) + + // === SECOND ROUND: Drop and recreate in reverse order G->B->A with different property orders === + + // Clean up all test entities + await cleanupTestEntities() + + // Create custom types in reverse order but keep the enum internal ordering (typegen is rightfully dependent on the enum order) + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE TYPE test_enum_beta AS ENUM ('high', 'medium', 'low'); + CREATE TYPE test_enum_alpha AS ENUM ('active', 'inactive', 'pending'); + `, + }, + }) + + // Create tables in reverse order: gamma, beta, alpha with different column orders + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE TABLE 
test_table_alpha ( + created_at TIMESTAMP DEFAULT NOW(), + status test_enum_alpha DEFAULT 'active', + name TEXT NOT NULL, + id SERIAL PRIMARY KEY + ); + + CREATE TABLE test_table_beta ( + alpha_id INTEGER REFERENCES test_table_alpha(id), + description TEXT, + priority test_enum_beta DEFAULT 'medium', + id SERIAL PRIMARY KEY + ); + + CREATE TABLE test_table_gamma ( + is_active BOOLEAN DEFAULT true, + value NUMERIC(10,2), + beta_id INTEGER REFERENCES test_table_beta(id), + id SERIAL PRIMARY KEY + ); + `, + }, + }) + + // Create functions in reverse order: gamma, beta, alpha with different parameter orders + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE FUNCTION test_func_gamma(param_c boolean, param_a integer, param_b text) + RETURNS boolean AS 'SELECT NOT param_c' LANGUAGE sql IMMUTABLE; + + CREATE FUNCTION test_func_beta(param_b text, param_c boolean, param_a integer) + RETURNS text AS 'SELECT param_b || ''_processed''' LANGUAGE sql IMMUTABLE; + + CREATE FUNCTION test_func_alpha(param_c boolean, param_b text, param_a integer) + RETURNS integer AS 'SELECT param_a + 1' LANGUAGE sql IMMUTABLE; + `, + }, + }) + + // Create views in reverse order: gamma, beta, alpha + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE VIEW test_view_gamma AS + SELECT is_active, value, beta_id, id FROM test_table_gamma; + + CREATE VIEW test_view_beta AS + SELECT alpha_id, description, priority, id FROM test_table_beta; + + CREATE VIEW test_view_alpha AS + SELECT created_at, status, name, id FROM test_table_alpha; + `, + }, + }) + + // Create materialized views in reverse order: gamma, beta, alpha + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE MATERIALIZED VIEW test_matview_gamma AS + SELECT is_active, value, id FROM test_table_gamma; + + CREATE MATERIALIZED VIEW test_matview_beta AS + SELECT description, priority, id FROM test_table_beta; + + CREATE MATERIALIZED VIEW 
test_matview_alpha AS + SELECT status, name, id FROM test_table_alpha; + `, + }, + }) + + // Generate types for second configuration + const { body: secondCall } = await app.inject({ + method: 'GET', + path: '/generators/typescript', + query: { detect_one_to_one_relationships: 'true', postgrest_version: '13' }, + }) + + // Clean up test entities + await cleanupTestEntities() + + // The generated types should be identical regardless of: + // 1. Entity creation order (alpha->beta->gamma vs gamma->beta->alpha) + // 2. Property declaration order (columns, function parameters) + // 3. Enum value order + expect(firstCall).toEqual(secondCall) +}) + test('typegen: go', async () => { const { body } = await app.inject({ method: 'GET', path: '/generators/go' }) expect(body).toMatchInlineSnapshot(` From a142d7334b9e0f6326c1573b6ba264c4c7d4f56b Mon Sep 17 00:00:00 2001 From: Daniel Sticker <63877413+stickerdaniel@users.noreply.github.com> Date: Wed, 30 Jul 2025 19:24:40 -0400 Subject: [PATCH 26/72] chore(ts-template): correct "instanciate" to "instantiate" misspelling (#965) Fix spelling mistake in TypeScript generation template that was propagating to all generated database type files. --- src/server/templates/typescript.ts | 2 +- test/server/typegen.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index f8e6e7ca..c3cae645 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -35,7 +35,7 @@ export const apply = async ({ .forEach((c) => columnsByTableId[c.table_id].push(c)) const internal_supabase_schema = postgrestVersion - ? `// Allows to automatically instanciate createClient with right options + ? 
`// Allows to automatically instantiate createClient with right options // instead of createClient(URL, KEY) __InternalSupabase: { PostgrestVersion: '${postgrestVersion}' diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 9a03ea9e..76ac6218 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -1299,7 +1299,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { | Json[] export type Database = { - // Allows to automatically instanciate createClient with right options + // Allows to automatically instantiate createClient with right options // instead of createClient(URL, KEY) __InternalSupabase: { PostgrestVersion: "13" From 60397be39f3aabdb7fa0c253da986498a0eed308 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Mon, 4 Aug 2025 12:44:42 +0200 Subject: [PATCH 27/72] fix(typegen): ensure determinism in functions returns properties (#970) Fixes #959 --- src/server/templates/typescript.ts | 34 ++++++++++++++++-------------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index c3cae645..4f9cac03 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -361,9 +361,9 @@ export type Database = { }) return `{ - ${argsNameAndType.map( - ({ name, type }) => `${JSON.stringify(name)}: ${type}` - )} + ${argsNameAndType + .toSorted((a, b) => a.name.localeCompare(b.name)) + .map(({ name, type }) => `${JSON.stringify(name)}: ${type}`)} }` } @@ -373,19 +373,21 @@ export type Database = { ) if (relation) { return `{ - ${columnsByTableId[relation.id].map( - (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType( - schema, - column.format, - { - types, - schemas, - tables, - views, - } - )} ${column.is_nullable ? 
'| null' : ''}` - )} + ${columnsByTableId[relation.id] + .toSorted((a, b) => a.name.localeCompare(b.name)) + .map( + (column) => + `${JSON.stringify(column.name)}: ${pgTypeToTsType( + schema, + column.format, + { + types, + schemas, + tables, + views, + } + )} ${column.is_nullable ? '| null' : ''}` + )} }` } From 4205b26dd47998172b4d6f4b3af7394b8d8beb86 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Aug 2025 06:36:23 +0000 Subject: [PATCH 28/72] chore(deps): bump actions/checkout from 4 to 5 Bumps [actions/checkout](https://github.com/actions/checkout) from 4 to 5. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v4...v5) --- updated-dependencies: - dependency-name: actions/checkout dependency-version: '5' dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] --- .github/workflows/ci.yml | 6 +++--- .github/workflows/docs.yml | 2 +- .github/workflows/publish-deps.yml | 2 +- .github/workflows/release.yml | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index ac0d7539..c531213f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,7 +20,7 @@ jobs: name: Test runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-node@v4 with: @@ -40,7 +40,7 @@ jobs: name: Prettier check runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - name: Setup node uses: actions/setup-node@v4 @@ -64,7 +64,7 @@ jobs: contents: read packages: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 name: Checkout Repo - uses: docker/setup-buildx-action@v3 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index d3645502..3630904a 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -20,7 +20,7 @@ jobs: name: Publish docs runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-node@v4 with: diff --git a/.github/workflows/publish-deps.yml b/.github/workflows/publish-deps.yml index 7e50ecf5..693a3edd 100644 --- a/.github/workflows/publish-deps.yml +++ b/.github/workflows/publish-deps.yml @@ -13,7 +13,7 @@ jobs: # Must match glibc verison in node:20 runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: repository: 'pyramation/libpg-query-node' ref: 'v15' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 686752cd..5afcd6ee 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -19,7 +19,7 @@ jobs: permissions: contents: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 - uses: actions/setup-node@v4 with: From 
28cd3e6780353c1ba39da774c94ea91b9b3477bb Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Wed, 3 Sep 2025 08:52:07 +0200 Subject: [PATCH 29/72] perf: suboptimal tables query schema filter happens late (#980) * wip: optimize queries * wip: refactor queries for root filtering * perf: add root level filtering to all queries * feat: add functions args retrieval * chore: fix functions * fix(table): use or for table relationships * test(functions): add tests for retrival argument based --- src/lib/PostgresMetaColumnPrivileges.ts | 37 ++------- src/lib/PostgresMetaColumns.ts | 38 +++------- src/lib/PostgresMetaConfig.ts | 10 +-- src/lib/PostgresMetaExtensions.ts | 14 ++-- src/lib/PostgresMetaForeignTables.ts | 53 +++++++------ src/lib/PostgresMetaFunctions.ts | 76 ++++--------------- src/lib/PostgresMetaIndexes.ts | 33 ++------ src/lib/PostgresMetaMaterializedViews.ts | 67 ++++++++-------- src/lib/PostgresMetaPolicies.ts | 28 +++---- src/lib/PostgresMetaPublications.ts | 21 +++-- src/lib/PostgresMetaRelationships.ts | 40 ++++++---- src/lib/PostgresMetaRoles.ts | 36 ++------- src/lib/PostgresMetaSchemas.ts | 33 ++++---- src/lib/PostgresMetaTablePrivileges.ts | 53 +++---------- src/lib/PostgresMetaTables.ts | 53 ++++++++----- src/lib/PostgresMetaTriggers.ts | 37 +++------ src/lib/PostgresMetaTypes.ts | 38 +--------- src/lib/PostgresMetaVersion.ts | 4 +- src/lib/PostgresMetaViews.ts | 67 ++++++++-------- src/lib/generators.ts | 26 +++++-- src/lib/helpers.ts | 13 +++- ...rivileges.sql => column_privileges.sql.ts} | 20 ++++- src/lib/sql/{columns.sql => columns.sql.ts} | 18 +++++ src/lib/sql/common.ts | 17 +++++ src/lib/sql/{config.sql => config.sql.ts} | 6 ++ src/lib/sql/extensions.sql | 10 --- src/lib/sql/extensions.sql.ts | 19 +++++ src/lib/sql/foreign_tables.sql | 10 --- src/lib/sql/foreign_tables.sql.ts | 25 ++++++ .../sql/{functions.sql => functions.sql.ts} | 46 ++++++++++- src/lib/sql/index.ts | 34 --------- src/lib/sql/{indexes.sql => indexes.sql.ts} | 11 ++- 
src/lib/sql/materialized_views.sql | 11 --- src/lib/sql/materialized_views.sql.ts | 24 ++++++ src/lib/sql/{policies.sql => policies.sql.ts} | 12 +++ .../{publications.sql => publications.sql.ts} | 11 +++ src/lib/sql/{roles.sql => roles.sql.ts} | 17 +++++ src/lib/sql/schemas.sql | 17 ----- src/lib/sql/schemas.sql.ts | 27 +++++++ src/lib/sql/{tables.sql => table.sql.ts} | 15 ++++ ...privileges.sql => table_privileges.sql.ts} | 15 +++- ...onships.sql => table_relationships.sql.ts} | 7 ++ src/lib/sql/{triggers.sql => triggers.sql.ts} | 22 +++++- src/lib/sql/types.sql | 35 --------- src/lib/sql/types.sql.ts | 72 ++++++++++++++++++ src/lib/sql/{version.sql => version.sql.ts} | 2 + src/lib/sql/views.sql | 12 --- src/lib/sql/views.sql.ts | 25 ++++++ ...cies.sql => views_key_dependencies.sql.ts} | 42 +++++----- src/server/templates/typescript.ts | 4 +- test/lib/functions.ts | 49 ++++++++++++ test/lib/tables.ts | 30 ++++---- 52 files changed, 788 insertions(+), 654 deletions(-) rename src/lib/sql/{column_privileges.sql => column_privileges.sql.ts} (88%) rename src/lib/sql/{columns.sql => columns.sql.ts} (80%) create mode 100644 src/lib/sql/common.ts rename src/lib/sql/{config.sql => config.sql.ts} (57%) delete mode 100644 src/lib/sql/extensions.sql create mode 100644 src/lib/sql/extensions.sql.ts delete mode 100644 src/lib/sql/foreign_tables.sql create mode 100644 src/lib/sql/foreign_tables.sql.ts rename src/lib/sql/{functions.sql => functions.sql.ts} (70%) delete mode 100644 src/lib/sql/index.ts rename src/lib/sql/{indexes.sql => indexes.sql.ts} (79%) delete mode 100644 src/lib/sql/materialized_views.sql create mode 100644 src/lib/sql/materialized_views.sql.ts rename src/lib/sql/{policies.sql => policies.sql.ts} (66%) rename src/lib/sql/{publications.sql => publications.sql.ts} (67%) rename src/lib/sql/{roles.sql => roles.sql.ts} (52%) delete mode 100644 src/lib/sql/schemas.sql create mode 100644 src/lib/sql/schemas.sql.ts rename src/lib/sql/{tables.sql => table.sql.ts} 
(73%) rename src/lib/sql/{table_privileges.sql => table_privileges.sql.ts} (74%) rename src/lib/sql/{table_relationships.sql => table_relationships.sql.ts} (80%) rename src/lib/sql/{triggers.sql => triggers.sql.ts} (62%) delete mode 100644 src/lib/sql/types.sql create mode 100644 src/lib/sql/types.sql.ts rename src/lib/sql/{version.sql => version.sql.ts} (84%) delete mode 100644 src/lib/sql/views.sql create mode 100644 src/lib/sql/views.sql.ts rename src/lib/sql/{views_key_dependencies.sql => views_key_dependencies.sql.ts} (78%) diff --git a/src/lib/PostgresMetaColumnPrivileges.ts b/src/lib/PostgresMetaColumnPrivileges.ts index 4df0d39a..b2a0b6fe 100644 --- a/src/lib/PostgresMetaColumnPrivileges.ts +++ b/src/lib/PostgresMetaColumnPrivileges.ts @@ -1,7 +1,7 @@ import { ident, literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { filterByList } from './helpers.js' -import { columnPrivilegesSql } from './sql/index.js' +import { filterByValue, filterByList } from './helpers.js' +import { COLUMN_PRIVILEGES_SQL } from './sql/column_privileges.sql.js' import { PostgresMetaResult, PostgresColumnPrivileges, @@ -29,25 +29,12 @@ export default class PostgresMetaColumnPrivileges { limit?: number offset?: number } = {}): Promise> { - let sql = ` -with column_privileges as (${columnPrivilegesSql}) -select * -from column_privileges -` - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` where relation_schema ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const sql = COLUMN_PRIVILEGES_SQL({ schemaFilter, limit, offset }) return await this.query(sql) } @@ -86,12 +73,8 @@ end $$; // Return the updated column privileges for modified columns. 
const columnIds = [...new Set(grants.map(({ column_id }) => column_id))] - sql = ` -with column_privileges as (${columnPrivilegesSql}) -select * -from column_privileges -where column_id in (${columnIds.map(literal).join(',')}) -` + const columnIdsFilter = filterByValue(columnIds) + sql = COLUMN_PRIVILEGES_SQL({ columnIdsFilter }) return await this.query(sql) } @@ -130,12 +113,8 @@ end $$; // Return the updated column privileges for modified columns. const columnIds = [...new Set(revokes.map(({ column_id }) => column_id))] - sql = ` -with column_privileges as (${columnPrivilegesSql}) -select * -from column_privileges -where column_id in (${columnIds.map(literal).join(',')}) -` + const columnIdsFilter = filterByValue(columnIds) + sql = COLUMN_PRIVILEGES_SQL({ columnIdsFilter }) return await this.query(sql) } } diff --git a/src/lib/PostgresMetaColumns.ts b/src/lib/PostgresMetaColumns.ts index 15e56507..613c8ea2 100644 --- a/src/lib/PostgresMetaColumns.ts +++ b/src/lib/PostgresMetaColumns.ts @@ -1,9 +1,9 @@ import { ident, literal } from 'pg-format' import PostgresMetaTables from './PostgresMetaTables.js' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { columnsSql } from './sql/index.js' import { PostgresMetaResult, PostgresColumn } from './types.js' -import { filterByList } from './helpers.js' +import { filterByValue, filterByList } from './helpers.js' +import { COLUMNS_SQL } from './sql/columns.sql.js' export default class PostgresMetaColumns { query: (sql: string) => Promise> @@ -29,32 +29,13 @@ export default class PostgresMetaColumns { limit?: number offset?: number } = {}): Promise> { - let sql = ` -WITH - columns AS (${columnsSql}) -SELECT - * -FROM - columns -WHERE - true` - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? 
DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` AND schema ${filter}` - } - if (tableId !== undefined) { - sql += ` AND table_id = ${literal(tableId)}` - } - if (limit) { - sql += ` LIMIT ${limit}` - } - if (offset) { - sql += ` OFFSET ${offset}` - } + const tableIdFilter = tableId ? filterByValue([`${tableId}`]) : undefined + const sql = COLUMNS_SQL({ schemaFilter, tableIdFilter, limit, offset }) return await this.query(sql) } @@ -79,6 +60,7 @@ WHERE table?: string schema?: string }): Promise> { + const schemaFilter = schema ? filterByList([schema], []) : undefined if (id) { const regexp = /^(\d+)\.(\d+)$/ if (!regexp.test(id)) { @@ -86,7 +68,8 @@ WHERE } const matches = id.match(regexp) as RegExpMatchArray const [tableId, ordinalPos] = matches.slice(1).map(Number) - const sql = `${columnsSql} AND c.oid = ${tableId} AND a.attnum = ${ordinalPos};` + const idsFilter = filterByValue([`${tableId}.${ordinalPos}`]) + const sql = COLUMNS_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -96,9 +79,8 @@ WHERE return { data: data[0], error } } } else if (name && table) { - const sql = `${columnsSql} AND a.attname = ${literal(name)} AND c.relname = ${literal( - table - )} AND nc.nspname = ${literal(schema)};` + const columnNameFilter = filterByValue([`${table}.${name}`]) + const sql = `${COLUMNS_SQL({ schemaFilter, columnNameFilter })};` const { data, error } = await this.query(sql) if (error) { return { data, error } diff --git a/src/lib/PostgresMetaConfig.ts b/src/lib/PostgresMetaConfig.ts index d362641b..35b194d8 100644 --- a/src/lib/PostgresMetaConfig.ts +++ b/src/lib/PostgresMetaConfig.ts @@ -1,4 +1,4 @@ -import { configSql } from './sql/index.js' +import { CONFIG_SQL } from './sql/config.sql.js' import { PostgresMetaResult, PostgresConfig } from './types.js' export default class PostgresMetaConfig { @@ -15,13 +15,7 @@ export default class PostgresMetaConfig { limit?: number offset?: number } = {}): 
Promise> { - let sql = configSql - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + const sql = CONFIG_SQL({ limit, offset }) return await this.query(sql) } } diff --git a/src/lib/PostgresMetaExtensions.ts b/src/lib/PostgresMetaExtensions.ts index 4589057f..9543dc2c 100644 --- a/src/lib/PostgresMetaExtensions.ts +++ b/src/lib/PostgresMetaExtensions.ts @@ -1,6 +1,7 @@ import { ident, literal } from 'pg-format' -import { extensionsSql } from './sql/index.js' import { PostgresMetaResult, PostgresExtension } from './types.js' +import { EXTENSIONS_SQL } from './sql/extensions.sql.js' +import { filterByValue } from './helpers.js' export default class PostgresMetaExtensions { query: (sql: string) => Promise> @@ -16,18 +17,13 @@ export default class PostgresMetaExtensions { limit?: number offset?: number } = {}): Promise> { - let sql = extensionsSql - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + const sql = EXTENSIONS_SQL({ limit, offset }) return await this.query(sql) } async retrieve({ name }: { name: string }): Promise> { - const sql = `${extensionsSql} WHERE name = ${literal(name)};` + const nameFilter = filterByValue([name]) + const sql = EXTENSIONS_SQL({ nameFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } diff --git a/src/lib/PostgresMetaForeignTables.ts b/src/lib/PostgresMetaForeignTables.ts index 40ed859f..e565da43 100644 --- a/src/lib/PostgresMetaForeignTables.ts +++ b/src/lib/PostgresMetaForeignTables.ts @@ -1,7 +1,7 @@ -import { literal } from 'pg-format' -import { coalesceRowsToArray, filterByList } from './helpers.js' -import { columnsSql, foreignTablesSql } from './sql/index.js' +import { coalesceRowsToArray, filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresForeignTable } from './types.js' +import { FOREIGN_TABLES_SQL } from './sql/foreign_tables.sql.js' +import { 
COLUMNS_SQL } from './sql/columns.sql.js' export default class PostgresMetaForeignTables { query: (sql: string) => Promise> @@ -37,17 +37,8 @@ export default class PostgresMetaForeignTables { offset?: number includeColumns?: boolean } = {}): Promise> { - let sql = generateEnrichedForeignTablesSql({ includeColumns }) - const filter = filterByList(includedSchemas, excludedSchemas) - if (filter) { - sql += ` where schema ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const schemaFilter = filterByList(includedSchemas, excludedSchemas) + const sql = generateEnrichedForeignTablesSql({ includeColumns, schemaFilter, limit, offset }) return await this.query(sql) } @@ -69,9 +60,11 @@ export default class PostgresMetaForeignTables { schema?: string }): Promise> { if (id) { - const sql = `${generateEnrichedForeignTablesSql({ + const idsFilter = filterByValue([`${id}`]) + const sql = generateEnrichedForeignTablesSql({ includeColumns: true, - })} where foreign_tables.id = ${literal(id)};` + idsFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -81,11 +74,11 @@ export default class PostgresMetaForeignTables { return { data: data[0], error } } } else if (name) { - const sql = `${generateEnrichedForeignTablesSql({ + const nameFilter = filterByValue([`${schema}.${name}`]) + const sql = generateEnrichedForeignTablesSql({ includeColumns: true, - })} where foreign_tables.name = ${literal(name)} and foreign_tables.schema = ${literal( - schema - )};` + tableIdentifierFilter: nameFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -103,9 +96,23 @@ export default class PostgresMetaForeignTables { } } -const generateEnrichedForeignTablesSql = ({ includeColumns }: { includeColumns: boolean }) => ` -with foreign_tables as (${foreignTablesSql}) - ${includeColumns ? 
`, columns as (${columnsSql})` : ''} +const generateEnrichedForeignTablesSql = ({ + includeColumns, + schemaFilter, + idsFilter, + tableIdentifierFilter, + limit, + offset, +}: { + includeColumns: boolean + schemaFilter?: string + idsFilter?: string + tableIdentifierFilter?: string + limit?: number + offset?: number +}) => ` +with foreign_tables as (${FOREIGN_TABLES_SQL({ schemaFilter, tableIdentifierFilter, limit, offset })}) + ${includeColumns ? `, columns as (${COLUMNS_SQL({ schemaFilter, tableIdentifierFilter, tableIdFilter: idsFilter })})` : ''} select * ${ diff --git a/src/lib/PostgresMetaFunctions.ts b/src/lib/PostgresMetaFunctions.ts index b50e6761..b6e2a39c 100644 --- a/src/lib/PostgresMetaFunctions.ts +++ b/src/lib/PostgresMetaFunctions.ts @@ -1,8 +1,8 @@ import { ident, literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { filterByList } from './helpers.js' -import { functionsSql } from './sql/index.js' +import { filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresFunction, PostgresFunctionCreate } from './types.js' +import { FUNCTIONS_SQL } from './sql/functions.sql.js' export default class PostgresMetaFunctions { query: (sql: string) => Promise> @@ -24,21 +24,12 @@ export default class PostgresMetaFunctions { limit?: number offset?: number } = {}): Promise> { - let sql = enrichedFunctionsSql - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` WHERE schema ${filter}` - } - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + const sql = FUNCTIONS_SQL({ schemaFilter, limit, offset }) return await this.query(sql) } @@ -63,8 +54,10 @@ export default class PostgresMetaFunctions { schema?: string args?: string[] }): Promise> { + const schemaFilter = schema ? 
filterByList([schema], []) : undefined if (id) { - const sql = `${enrichedFunctionsSql} WHERE id = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = FUNCTIONS_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -74,7 +67,8 @@ export default class PostgresMetaFunctions { return { data: data[0], error } } } else if (name && schema && args) { - const sql = this.generateRetrieveFunctionSql({ name, schema, args }) + const nameFilter = filterByValue([name]) + const sql = FUNCTIONS_SQL({ schemaFilter, nameFilter, args: args.map(literal) }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -169,6 +163,11 @@ export default class PostgresMetaFunctions { )}(${identityArgs}) SET SCHEMA ${ident(schema)};` : '' + const currentSchemaFilter = currentFunc!.schema + ? filterByList([currentFunc!.schema], []) + : undefined + const currentNameFilter = currentFunc!.name ? filterByValue([currentFunc!.name]) : undefined + const sql = ` DO LANGUAGE plpgsql $$ BEGIN @@ -177,7 +176,7 @@ export default class PostgresMetaFunctions { IF ( SELECT id - FROM (${functionsSql}) AS f + FROM (${FUNCTIONS_SQL({ schemaFilter: currentSchemaFilter, nameFilter: currentNameFilter })}) AS f WHERE f.schema = ${literal(currentFunc!.schema)} AND f.name = ${literal(currentFunc!.name)} AND f.identity_argument_types = ${literal(identityArgs)} @@ -262,49 +261,4 @@ export default class PostgresMetaFunctions { }; ` } - - private generateRetrieveFunctionSql({ - schema, - name, - args, - }: { - schema: string - name: string - args: string[] - }): string { - return `${enrichedFunctionsSql} JOIN pg_proc AS p ON id = p.oid WHERE schema = ${literal( - schema - )} AND name = ${literal(name)} AND p.proargtypes::text = ${ - args.length - ? 
`( - SELECT STRING_AGG(type_oid::text, ' ') FROM ( - SELECT ( - split_args.arr[ - array_length( - split_args.arr, - 1 - ) - ]::regtype::oid - ) AS type_oid FROM ( - SELECT STRING_TO_ARRAY( - UNNEST( - ARRAY[${args.map(literal)}] - ), - ' ' - ) AS arr - ) AS split_args - ) args - )` - : literal('') - }` - } } - -const enrichedFunctionsSql = ` - WITH f AS ( - ${functionsSql} - ) - SELECT - f.* - FROM f -` diff --git a/src/lib/PostgresMetaIndexes.ts b/src/lib/PostgresMetaIndexes.ts index 14ffbba7..84f7f100 100644 --- a/src/lib/PostgresMetaIndexes.ts +++ b/src/lib/PostgresMetaIndexes.ts @@ -1,10 +1,9 @@ -import { ident, literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { filterByList } from './helpers.js' -import { indexesSql } from './sql/index.js' +import { filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresIndex } from './types.js' +import { INDEXES_SQL } from './sql/indexes.sql.js' -export default class PostgresMetaFunctions { +export default class PostgresMetaIndexes { query: (sql: string) => Promise> constructor(query: (sql: string) => Promise>) { @@ -24,21 +23,12 @@ export default class PostgresMetaFunctions { limit?: number offset?: number } = {}): Promise> { - let sql = enrichedSql - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? 
DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` WHERE schema ${filter}` - } - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + const sql = INDEXES_SQL({ schemaFilter, limit, offset }) return await this.query(sql) } @@ -54,13 +44,13 @@ export default class PostgresMetaFunctions { }): Promise> async retrieve({ id, - args = [], }: { id?: number args?: string[] }): Promise> { if (id) { - const sql = `${enrichedSql} WHERE id = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = INDEXES_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -74,12 +64,3 @@ export default class PostgresMetaFunctions { } } } - -const enrichedSql = ` - WITH x AS ( - ${indexesSql} - ) - SELECT - x.* - FROM x -` diff --git a/src/lib/PostgresMetaMaterializedViews.ts b/src/lib/PostgresMetaMaterializedViews.ts index 7f1efac5..0a32793a 100644 --- a/src/lib/PostgresMetaMaterializedViews.ts +++ b/src/lib/PostgresMetaMaterializedViews.ts @@ -1,7 +1,7 @@ -import { literal } from 'pg-format' -import { coalesceRowsToArray, filterByList } from './helpers.js' -import { columnsSql, materializedViewsSql } from './sql/index.js' +import { filterByList, coalesceRowsToArray, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresMaterializedView } from './types.js' +import { MATERIALIZED_VIEWS_SQL } from './sql/materialized_views.sql.js' +import { COLUMNS_SQL } from './sql/columns.sql.js' export default class PostgresMetaMaterializedViews { query: (sql: string) => Promise> @@ -10,20 +10,6 @@ export default class PostgresMetaMaterializedViews { this.query = query } - async list(options: { - includedSchemas?: string[] - excludedSchemas?: string[] - limit?: number - offset?: number - includeColumns: true - }): Promise> - async list(options?: { - includedSchemas?: string[] - excludedSchemas?: string[] - limit?: number - offset?: number - includeColumns?: 
boolean - }): Promise> async list({ includedSchemas, excludedSchemas, @@ -37,17 +23,8 @@ export default class PostgresMetaMaterializedViews { offset?: number includeColumns?: boolean } = {}): Promise> { - let sql = generateEnrichedMaterializedViewsSql({ includeColumns }) - const filter = filterByList(includedSchemas, excludedSchemas, undefined) - if (filter) { - sql += ` where schema ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const schemaFilter = filterByList(includedSchemas, excludedSchemas, undefined) + let sql = generateEnrichedMaterializedViewsSql({ includeColumns, schemaFilter, limit, offset }) return await this.query(sql) } @@ -69,9 +46,11 @@ export default class PostgresMetaMaterializedViews { schema?: string }): Promise> { if (id) { - const sql = `${generateEnrichedMaterializedViewsSql({ + const idsFilter = filterByValue([id]) + const sql = generateEnrichedMaterializedViewsSql({ includeColumns: true, - })} where materialized_views.id = ${literal(id)};` + idsFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -81,11 +60,11 @@ export default class PostgresMetaMaterializedViews { return { data: data[0], error } } } else if (name) { - const sql = `${generateEnrichedMaterializedViewsSql({ + const materializedViewIdentifierFilter = filterByValue([`${schema}.${name}`]) + const sql = generateEnrichedMaterializedViewsSql({ includeColumns: true, - })} where materialized_views.name = ${literal( - name - )} and materialized_views.schema = ${literal(schema)};` + materializedViewIdentifierFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -103,9 +82,23 @@ export default class PostgresMetaMaterializedViews { } } -const generateEnrichedMaterializedViewsSql = ({ includeColumns }: { includeColumns: boolean }) => ` -with materialized_views as (${materializedViewsSql}) - ${includeColumns ? 
`, columns as (${columnsSql})` : ''} +const generateEnrichedMaterializedViewsSql = ({ + includeColumns, + schemaFilter, + materializedViewIdentifierFilter, + idsFilter, + limit, + offset, +}: { + includeColumns: boolean + schemaFilter?: string + materializedViewIdentifierFilter?: string + idsFilter?: string + limit?: number + offset?: number +}) => ` +with materialized_views as (${MATERIALIZED_VIEWS_SQL({ schemaFilter, limit, offset, materializedViewIdentifierFilter, idsFilter })}) + ${includeColumns ? `, columns as (${COLUMNS_SQL({ schemaFilter, limit, offset, tableIdentifierFilter: materializedViewIdentifierFilter, tableIdFilter: idsFilter })})` : ''} select * ${ diff --git a/src/lib/PostgresMetaPolicies.ts b/src/lib/PostgresMetaPolicies.ts index fa476c12..72d3157b 100644 --- a/src/lib/PostgresMetaPolicies.ts +++ b/src/lib/PostgresMetaPolicies.ts @@ -1,8 +1,8 @@ -import { ident, literal } from 'pg-format' +import { ident } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { filterByList } from './helpers.js' -import { policiesSql } from './sql/index.js' +import { filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresPolicy } from './types.js' +import { POLICIES_SQL } from './sql/policies.sql.js' export default class PostgresMetaPolicies { query: (sql: string) => Promise> @@ -24,21 +24,12 @@ export default class PostgresMetaPolicies { limit?: number offset?: number } = {}): Promise> { - let sql = policiesSql - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? 
DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` WHERE n.nspname ${filter}` - } - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + let sql = POLICIES_SQL({ schemaFilter, limit, offset }) return await this.query(sql) } @@ -63,8 +54,10 @@ export default class PostgresMetaPolicies { table?: string schema?: string }): Promise> { + const schemaFilter = schema ? filterByList([schema], []) : undefined if (id) { - const sql = `${policiesSql} WHERE pol.oid = ${literal(id)};` + const idsFilter = filterByValue([`${id}`]) + const sql = POLICIES_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -74,9 +67,8 @@ export default class PostgresMetaPolicies { return { data: data[0], error } } } else if (name && table) { - const sql = `${policiesSql} WHERE pol.polname = ${literal(name)} AND n.nspname = ${literal( - schema - )} AND c.relname = ${literal(table)};` + const functionNameIdentifierFilter = filterByValue([`${table}.${name}`]) + const sql = POLICIES_SQL({ schemaFilter, functionNameIdentifierFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } diff --git a/src/lib/PostgresMetaPublications.ts b/src/lib/PostgresMetaPublications.ts index 63c1bafe..f3fdc549 100644 --- a/src/lib/PostgresMetaPublications.ts +++ b/src/lib/PostgresMetaPublications.ts @@ -1,6 +1,7 @@ import { ident, literal } from 'pg-format' -import { publicationsSql } from './sql/index.js' -import { PostgresMetaResult, PostgresPublication, PostgresTable } from './types.js' +import { PostgresMetaResult, PostgresPublication } from './types.js' +import { PUBLICATIONS_SQL } from './sql/publications.sql.js' +import { filterByValue } from './helpers.js' export default class PostgresMetaPublications { query: (sql: string) => Promise> @@ -16,13 +17,7 @@ export default class PostgresMetaPublications { limit?: number offset?: number }): Promise> { - let sql = 
publicationsSql - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + let sql = PUBLICATIONS_SQL({ limit, offset }) return await this.query(sql) } @@ -36,7 +31,8 @@ export default class PostgresMetaPublications { name?: string }): Promise> { if (id) { - const sql = `${publicationsSql} WHERE p.oid = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = PUBLICATIONS_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -46,7 +42,8 @@ export default class PostgresMetaPublications { return { data: data[0], error } } } else if (name) { - const sql = `${publicationsSql} WHERE p.pubname = ${literal(name)};` + const nameFilter = filterByValue([name]) + const sql = PUBLICATIONS_SQL({ nameFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -223,7 +220,7 @@ begin create temp table pg_meta_publication_tmp (name) on commit drop as values (coalesce(new_name, old.pubname)); end $$; -with publications as (${publicationsSql}) select * from publications where name = (select name from pg_meta_publication_tmp); +with publications as (${PUBLICATIONS_SQL({})}) select * from publications where name = (select name from pg_meta_publication_tmp); ` const { data, error } = await this.query(sql) if (error) { diff --git a/src/lib/PostgresMetaRelationships.ts b/src/lib/PostgresMetaRelationships.ts index 059762c3..e4e47d60 100644 --- a/src/lib/PostgresMetaRelationships.ts +++ b/src/lib/PostgresMetaRelationships.ts @@ -1,23 +1,37 @@ -import { literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { tableRelationshipsSql, viewsKeyDependenciesSql } from './sql/index.js' -import { PostgresMetaResult, PostgresRelationship } from './types.js' +import { filterByList } from './helpers.js' +import type { PostgresMetaResult, PostgresRelationship } from './types.js' +import { TABLE_RELATIONSHIPS_SQL } from 
'./sql/table_relationships.sql.js' +import { VIEWS_KEY_DEPENDENCIES_SQL } from './sql/views_key_dependencies.sql.js' /* * Only used for generating types at the moment. Will need some cleanups before * using it for other things, e.g. /relationships endpoint. */ export default class PostgresMetaRelationships { - query: (sql: string) => Promise> + query: (sql: string) => Promise> - constructor(query: (sql: string) => Promise>) { + constructor(query: (sql: string) => Promise>) { this.query = query } - async list(): Promise> { + async list({ + includeSystemSchemas = false, + includedSchemas, + excludedSchemas, + }: { + includeSystemSchemas?: boolean + includedSchemas?: string[] + excludedSchemas?: string[] + } = {}): Promise> { + const schemaFilter = filterByList( + includedSchemas, + excludedSchemas, + !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined + ) let allTableM2oAndO2oRelationships: PostgresRelationship[] { - let sql = tableRelationshipsSql + const sql = TABLE_RELATIONSHIPS_SQL({ schemaFilter }) const { data, error } = (await this.query(sql)) as PostgresMetaResult if (error) { return { data: null, error } @@ -45,8 +59,9 @@ export default class PostgresMetaRelationships { column_dependencies: ColDep[] } + const viewsKeyDependenciesSql = VIEWS_KEY_DEPENDENCIES_SQL({ schemaFilter }) const { data: viewsKeyDependencies, error } = (await this.query( - allViewsKeyDependenciesSql + viewsKeyDependenciesSql )) as PostgresMetaResult if (error) { return { data: null, error } @@ -62,8 +77,8 @@ export default class PostgresMetaRelationships { return allEntries.reduce( (results, entries) => results - .map((result) => entries.map((entry) => [...result, entry])) - .reduce((subResults, result) => [...subResults, ...result], []), + .map((result) => entries.map((entry) => result.concat(entry))) + .reduce((subResults, result) => subResults.concat(result), []), [[]] ) } @@ -147,8 +162,3 @@ export default class PostgresMetaRelationships { } } } - -const 
allViewsKeyDependenciesSql = viewsKeyDependenciesSql.replaceAll( - '__EXCLUDED_SCHEMAS', - literal(DEFAULT_SYSTEM_SCHEMAS) -) diff --git a/src/lib/PostgresMetaRoles.ts b/src/lib/PostgresMetaRoles.ts index f55fb4a9..537b0622 100644 --- a/src/lib/PostgresMetaRoles.ts +++ b/src/lib/PostgresMetaRoles.ts @@ -1,11 +1,12 @@ import { ident, literal } from 'pg-format' -import { rolesSql } from './sql/index.js' +import { ROLES_SQL } from './sql/roles.sql.js' import { PostgresMetaResult, PostgresRole, PostgresRoleCreate, PostgresRoleUpdate, } from './types.js' +import { filterByValue } from './helpers.js' export function changeRoleConfig2Object(config: string[]) { if (!config) { return null @@ -32,32 +33,7 @@ export default class PostgresMetaRoles { limit?: number offset?: number } = {}): Promise> { - let sql = ` -WITH - roles AS (${rolesSql}) -SELECT - * -FROM - roles -WHERE - true` - if (!includeDefaultRoles) { - // All default/predefined roles start with pg_: https://www.postgresql.org/docs/15/predefined-roles.html - // The pg_ prefix is also reserved: - // - // ``` - // postgres=# create role pg_mytmp; - // ERROR: role name "pg_mytmp" is reserved - // DETAIL: Role names starting with "pg_" are reserved. 
- // ``` - sql += ` AND NOT pg_catalog.starts_with(name, 'pg_')` - } - if (limit) { - sql += ` LIMIT ${limit}` - } - if (offset) { - sql += ` OFFSET ${offset}` - } + const sql = ROLES_SQL({ limit, offset, includeDefaultRoles }) const result = await this.query(sql) if (result.data) { result.data = result.data.map((role: any) => { @@ -78,7 +54,8 @@ WHERE name?: string }): Promise> { if (id) { - const sql = `${rolesSql} WHERE oid = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = ROLES_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { @@ -90,7 +67,8 @@ WHERE return { data: data[0], error } } } else if (name) { - const sql = `${rolesSql} WHERE rolname = ${literal(name)};` + const nameFilter = filterByValue([name]) + const sql = ROLES_SQL({ nameFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } diff --git a/src/lib/PostgresMetaSchemas.ts b/src/lib/PostgresMetaSchemas.ts index b84a64cc..aa17bcfd 100644 --- a/src/lib/PostgresMetaSchemas.ts +++ b/src/lib/PostgresMetaSchemas.ts @@ -1,12 +1,13 @@ -import { ident, literal } from 'pg-format' -import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { schemasSql } from './sql/index.js' +import { ident } from 'pg-format' +import { SCHEMAS_SQL } from './sql/schemas.sql.js' import { PostgresMetaResult, PostgresSchema, PostgresSchemaCreate, PostgresSchemaUpdate, } from './types.js' +import { filterByList, filterByValue } from './helpers.js' +import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' export default class PostgresMetaSchemas { query: (sql: string) => Promise> @@ -16,24 +17,24 @@ export default class PostgresMetaSchemas { } async list({ + includedSchemas, + excludedSchemas, includeSystemSchemas = false, limit, offset, }: { + includedSchemas?: string[] + excludedSchemas?: string[] includeSystemSchemas?: boolean limit?: number offset?: number } = {}): Promise> { - let sql = schemasSql - if (!includeSystemSchemas) { - sql = 
`${sql} AND NOT (n.nspname IN (${DEFAULT_SYSTEM_SCHEMAS.map(literal).join(',')}))` - } - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + const schemaFilter = filterByList( + includedSchemas, + excludedSchemas, + !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined + ) + const sql = SCHEMAS_SQL({ limit, offset, includeSystemSchemas, nameFilter: schemaFilter }) return await this.query(sql) } @@ -47,7 +48,8 @@ export default class PostgresMetaSchemas { name?: string }): Promise> { if (id) { - const sql = `${schemasSql} AND n.oid = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = SCHEMAS_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -57,7 +59,8 @@ export default class PostgresMetaSchemas { return { data: data[0], error } } } else if (name) { - const sql = `${schemasSql} AND n.nspname = ${literal(name)};` + const nameFilter = filterByValue([name]) + const sql = SCHEMAS_SQL({ nameFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } diff --git a/src/lib/PostgresMetaTablePrivileges.ts b/src/lib/PostgresMetaTablePrivileges.ts index 9edb32e9..e0e79a05 100644 --- a/src/lib/PostgresMetaTablePrivileges.ts +++ b/src/lib/PostgresMetaTablePrivileges.ts @@ -1,13 +1,13 @@ -import { ident, literal } from 'pg-format' +import { ident } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { filterByList } from './helpers.js' -import { tablePrivilegesSql } from './sql/index.js' +import { filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresTablePrivileges, PostgresTablePrivilegesGrant, PostgresTablePrivilegesRevoke, } from './types.js' +import { TABLE_PRIVILEGES_SQL } from './sql/table_privileges.sql.js' export default class PostgresMetaTablePrivileges { query: (sql: string) => Promise> @@ -29,25 +29,12 @@ export default class 
PostgresMetaTablePrivileges { limit?: number offset?: number } = {}): Promise> { - let sql = ` -with table_privileges as (${tablePrivilegesSql}) -select * -from table_privileges -` - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` where schema ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const sql = TABLE_PRIVILEGES_SQL({ schemaFilter, limit, offset }) return await this.query(sql) } @@ -69,11 +56,8 @@ from table_privileges schema?: string }): Promise> { if (id) { - const sql = ` -with table_privileges as (${tablePrivilegesSql}) -select * -from table_privileges -where table_privileges.relation_id = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = TABLE_PRIVILEGES_SQL({ idsFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -83,13 +67,8 @@ where table_privileges.relation_id = ${literal(id)};` return { data: data[0], error } } } else if (name) { - const sql = ` -with table_privileges as (${tablePrivilegesSql}) -select * -from table_privileges -where table_privileges.schema = ${literal(schema)} - and table_privileges.name = ${literal(name)} -` + const nameIdentifierFilter = filterByValue([`${schema}.${name}`]) + const sql = TABLE_PRIVILEGES_SQL({ nameIdentifierFilter }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -129,12 +108,7 @@ end $$; // Return the updated table privileges for modified relations. 
const relationIds = [...new Set(grants.map(({ relation_id }) => relation_id))] - sql = ` -with table_privileges as (${tablePrivilegesSql}) -select * -from table_privileges -where relation_id in (${relationIds.map(literal).join(',')}) -` + sql = TABLE_PRIVILEGES_SQL({ idsFilter: filterByList(relationIds) }) return await this.query(sql) } @@ -159,12 +133,7 @@ end $$; // Return the updated table privileges for modified relations. const relationIds = [...new Set(revokes.map(({ relation_id }) => relation_id))] - sql = ` -with table_privileges as (${tablePrivilegesSql}) -select * -from table_privileges -where relation_id in (${relationIds.map(literal).join(',')}) -` + sql = TABLE_PRIVILEGES_SQL({ idsFilter: filterByList(relationIds) }) return await this.query(sql) } } diff --git a/src/lib/PostgresMetaTables.ts b/src/lib/PostgresMetaTables.ts index 5b97c253..8d3d9a47 100644 --- a/src/lib/PostgresMetaTables.ts +++ b/src/lib/PostgresMetaTables.ts @@ -1,13 +1,14 @@ import { ident, literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { coalesceRowsToArray, filterByList } from './helpers.js' -import { columnsSql, tablesSql } from './sql/index.js' +import { coalesceRowsToArray, filterByValue, filterByList } from './helpers.js' import { PostgresMetaResult, PostgresTable, PostgresTableCreate, PostgresTableUpdate, } from './types.js' +import { TABLES_SQL } from './sql/table.sql.js' +import { COLUMNS_SQL } from './sql/columns.sql.js' export default class PostgresMetaTables { query: (sql: string) => Promise> @@ -47,21 +48,12 @@ export default class PostgresMetaTables { offset?: number includeColumns?: boolean } = {}): Promise> { - let sql = generateEnrichedTablesSql({ includeColumns }) - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? 
DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` where schema ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const sql = generateEnrichedTablesSql({ includeColumns, schemaFilter, limit, offset }) return await this.query(sql) } @@ -82,10 +74,14 @@ export default class PostgresMetaTables { name?: string schema?: string }): Promise> { + const schemaFilter = schema ? filterByList([schema], []) : undefined if (id) { - const sql = `${generateEnrichedTablesSql({ + const idsFilter = filterByValue([id]) + const sql = generateEnrichedTablesSql({ + schemaFilter, includeColumns: true, - })} where tables.id = ${literal(id)};` + idsFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -95,9 +91,12 @@ export default class PostgresMetaTables { return { data: data[0], error } } } else if (name) { - const sql = `${generateEnrichedTablesSql({ + const tableIdentifierFilter = filterByValue([`${schema}.${name}`]) + const sql = generateEnrichedTablesSql({ + schemaFilter, includeColumns: true, - })} where tables.name = ${literal(name)} and tables.schema = ${literal(schema)};` + tableIdentifierFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -247,9 +246,23 @@ COMMIT;` } } -const generateEnrichedTablesSql = ({ includeColumns }: { includeColumns: boolean }) => ` -with tables as (${tablesSql}) - ${includeColumns ? `, columns as (${columnsSql})` : ''} +const generateEnrichedTablesSql = ({ + includeColumns, + schemaFilter, + tableIdentifierFilter, + idsFilter, + limit, + offset, +}: { + includeColumns: boolean + schemaFilter?: string + tableIdentifierFilter?: string + idsFilter?: string + limit?: number + offset?: number +}) => ` +with tables as (${TABLES_SQL({ schemaFilter, tableIdentifierFilter, idsFilter, limit, offset })}) + ${includeColumns ? 
`, columns as (${COLUMNS_SQL({ schemaFilter, tableIdFilter: idsFilter, tableIdentifierFilter: tableIdentifierFilter })})` : ''} select * ${includeColumns ? `, ${coalesceRowsToArray('columns', 'columns.table_id = tables.id')}` : ''} diff --git a/src/lib/PostgresMetaTriggers.ts b/src/lib/PostgresMetaTriggers.ts index 5ce05f76..f7dfbc95 100644 --- a/src/lib/PostgresMetaTriggers.ts +++ b/src/lib/PostgresMetaTriggers.ts @@ -1,8 +1,8 @@ import { ident, literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { filterByList } from './helpers.js' -import { triggersSql } from './sql/index.js' +import { filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresTrigger } from './types.js' +import { TRIGGERS_SQL } from './sql/triggers.sql.js' export default class PostgresMetaTriggers { query: (sql: string) => Promise> @@ -24,21 +24,12 @@ export default class PostgresMetaTriggers { limit?: number offset?: number } = {}): Promise> { - let sql = enrichedTriggersSql - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` WHERE schema ${filter}` - } - if (limit) { - sql = `${sql} LIMIT ${limit}` - } - if (offset) { - sql = `${sql} OFFSET ${offset}` - } + let sql = TRIGGERS_SQL({ schemaFilter, limit, offset }) return await this.query(sql) } @@ -63,8 +54,10 @@ export default class PostgresMetaTriggers { schema?: string table?: string }): Promise> { + const schemaFilter = schema ? 
filterByList([schema], []) : undefined if (id) { - const sql = `${enrichedTriggersSql} WHERE id = ${literal(id)};` + const idsFilter = filterByValue([id]) + const sql = TRIGGERS_SQL({ idsFilter }) const { data, error } = await this.query(sql) @@ -82,9 +75,9 @@ export default class PostgresMetaTriggers { } if (name && schema && table) { - const sql = `${enrichedTriggersSql} WHERE name = ${literal(name)} AND schema = ${literal( - schema - )} AND triggers.table = ${literal(table)};` + const nameFilter = filterByValue([name]) + const tableNameFilter = filterByValue([table]) + const sql = TRIGGERS_SQL({ schemaFilter, nameFilter, tableNameFilter }) const { data, error } = await this.query(sql) @@ -168,7 +161,6 @@ export default class PostgresMetaTriggers { if (error) { return { data: null, error } } - return await this.retrieve({ name, table, @@ -254,12 +246,3 @@ export default class PostgresMetaTriggers { return { data: triggerRecord!, error: null } } } - -const enrichedTriggersSql = ` - WITH triggers AS ( - ${triggersSql} - ) - SELECT - * - FROM triggers -` diff --git a/src/lib/PostgresMetaTypes.ts b/src/lib/PostgresMetaTypes.ts index 35371d55..990c94e3 100644 --- a/src/lib/PostgresMetaTypes.ts +++ b/src/lib/PostgresMetaTypes.ts @@ -1,7 +1,7 @@ import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' import { filterByList } from './helpers.js' -import { typesSql } from './sql/index.js' import { PostgresMetaResult, PostgresType } from './types.js' +import { TYPES_SQL } from './sql/types.sql.js' export default class PostgresMetaTypes { query: (sql: string) => Promise> @@ -27,44 +27,12 @@ export default class PostgresMetaTypes { limit?: number offset?: number } = {}): Promise> { - let sql = `${typesSql} - where - ( - t.typrelid = 0 - or ( - select - c.relkind ${includeTableTypes ? 
`in ('c', 'r')` : `= 'c'`} - from - pg_class c - where - c.oid = t.typrelid - ) - ) - ` - if (!includeArrayTypes) { - sql += ` and not exists ( - select - from - pg_type el - where - el.oid = t.typelem - and el.typarray = t.oid - )` - } - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` and n.nspname ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const sql = TYPES_SQL({ schemaFilter, limit, offset, includeTableTypes, includeArrayTypes }) return await this.query(sql) } } diff --git a/src/lib/PostgresMetaVersion.ts b/src/lib/PostgresMetaVersion.ts index 38e0299f..5ea23f37 100644 --- a/src/lib/PostgresMetaVersion.ts +++ b/src/lib/PostgresMetaVersion.ts @@ -1,4 +1,4 @@ -import { versionSql } from './sql/index.js' +import { VERSION_SQL } from './sql/version.sql.js' import { PostgresMetaResult, PostgresVersion } from './types.js' export default class PostgresMetaVersion { @@ -9,7 +9,7 @@ export default class PostgresMetaVersion { } async retrieve(): Promise> { - const { data, error } = await this.query(versionSql) + const { data, error } = await this.query(VERSION_SQL()) if (error) { return { data, error } } diff --git a/src/lib/PostgresMetaViews.ts b/src/lib/PostgresMetaViews.ts index 0f6ad09c..a9e7b0ce 100644 --- a/src/lib/PostgresMetaViews.ts +++ b/src/lib/PostgresMetaViews.ts @@ -1,8 +1,8 @@ -import { literal } from 'pg-format' import { DEFAULT_SYSTEM_SCHEMAS } from './constants.js' -import { coalesceRowsToArray, filterByList } from './helpers.js' -import { columnsSql, viewsSql } from './sql/index.js' +import { coalesceRowsToArray, filterByList, filterByValue } from './helpers.js' import { PostgresMetaResult, PostgresView } from './types.js' +import { VIEWS_SQL } from './sql/views.sql.js' +import { COLUMNS_SQL } from './sql/columns.sql.js' export default class 
PostgresMetaViews { query: (sql: string) => Promise> @@ -11,22 +11,6 @@ export default class PostgresMetaViews { this.query = query } - async list(options: { - includeSystemSchemas?: boolean - includedSchemas?: string[] - excludedSchemas?: string[] - limit?: number - offset?: number - includeColumns: false - }): Promise> - async list(options?: { - includeSystemSchemas?: boolean - includedSchemas?: string[] - excludedSchemas?: string[] - limit?: number - offset?: number - includeColumns?: boolean - }): Promise> async list({ includeSystemSchemas = false, includedSchemas, @@ -42,21 +26,12 @@ export default class PostgresMetaViews { offset?: number includeColumns?: boolean } = {}): Promise> { - let sql = generateEnrichedViewsSql({ includeColumns }) - const filter = filterByList( + const schemaFilter = filterByList( includedSchemas, excludedSchemas, !includeSystemSchemas ? DEFAULT_SYSTEM_SCHEMAS : undefined ) - if (filter) { - sql += ` where schema ${filter}` - } - if (limit) { - sql += ` limit ${limit}` - } - if (offset) { - sql += ` offset ${offset}` - } + const sql = generateEnrichedViewsSql({ includeColumns, schemaFilter, limit, offset }) return await this.query(sql) } @@ -78,9 +53,11 @@ export default class PostgresMetaViews { schema?: string }): Promise> { if (id) { - const sql = `${generateEnrichedViewsSql({ + const idsFilter = filterByValue([id]) + const sql = generateEnrichedViewsSql({ includeColumns: true, - })} where views.id = ${literal(id)};` + idsFilter, + }) const { data, error } = await this.query(sql) if (error) { return { data, error } @@ -90,9 +67,11 @@ export default class PostgresMetaViews { return { data: data[0], error } } } else if (name) { - const sql = `${generateEnrichedViewsSql({ + const viewIdentifierFilter = filterByValue([`${schema}.${name}`]) + const sql = generateEnrichedViewsSql({ includeColumns: true, - })} where views.name = ${literal(name)} and views.schema = ${literal(schema)};` + viewIdentifierFilter, + }) const { data, error } = 
await this.query(sql) if (error) { return { data, error } @@ -110,9 +89,23 @@ export default class PostgresMetaViews { } } -const generateEnrichedViewsSql = ({ includeColumns }: { includeColumns: boolean }) => ` -with views as (${viewsSql}) - ${includeColumns ? `, columns as (${columnsSql})` : ''} +const generateEnrichedViewsSql = ({ + includeColumns, + schemaFilter, + idsFilter, + viewIdentifierFilter, + limit, + offset, +}: { + includeColumns: boolean + schemaFilter?: string + idsFilter?: string + viewIdentifierFilter?: string + limit?: number + offset?: number +}) => ` +with views as (${VIEWS_SQL({ schemaFilter, limit, offset, viewIdentifierFilter, idsFilter })}) + ${includeColumns ? `, columns as (${COLUMNS_SQL({ schemaFilter, tableIdentifierFilter: viewIdentifierFilter, tableIdFilter: idsFilter })})` : ''} select * ${includeColumns ? `, ${coalesceRowsToArray('columns', 'columns.table_id = views.id')}` : ''} diff --git a/src/lib/generators.ts b/src/lib/generators.ts index c916a44c..6b5f55e5 100644 --- a/src/lib/generators.ts +++ b/src/lib/generators.ts @@ -34,14 +34,18 @@ export async function getGeneratorMetadata( const includedSchemas = filters.includedSchemas ?? [] const excludedSchemas = filters.excludedSchemas ?? [] - const { data: schemas, error: schemasError } = await pgMeta.schemas.list() + const { data: schemas, error: schemasError } = await pgMeta.schemas.list({ + includeSystemSchemas: false, + includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, + }) if (schemasError) { return { data: null, error: schemasError } } const { data: tables, error: tablesError } = await pgMeta.tables.list({ includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, - excludedSchemas, + excludedSchemas: excludedSchemas.length > 0 ? 
excludedSchemas : undefined, includeColumns: false, }) if (tablesError) { @@ -50,7 +54,7 @@ export async function getGeneratorMetadata( const { data: foreignTables, error: foreignTablesError } = await pgMeta.foreignTables.list({ includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, - excludedSchemas, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, includeColumns: false, }) if (foreignTablesError) { @@ -59,7 +63,7 @@ export async function getGeneratorMetadata( const { data: views, error: viewsError } = await pgMeta.views.list({ includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, - excludedSchemas, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, includeColumns: false, }) if (viewsError) { @@ -69,7 +73,7 @@ export async function getGeneratorMetadata( const { data: materializedViews, error: materializedViewsError } = await pgMeta.materializedViews.list({ includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, - excludedSchemas, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, includeColumns: false, }) if (materializedViewsError) { @@ -78,20 +82,26 @@ export async function getGeneratorMetadata( const { data: columns, error: columnsError } = await pgMeta.columns.list({ includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, - excludedSchemas, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, + includeSystemSchemas: false, }) if (columnsError) { return { data: null, error: columnsError } } - const { data: relationships, error: relationshipsError } = await pgMeta.relationships.list() + const { data: relationships, error: relationshipsError } = await pgMeta.relationships.list({ + includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, + excludedSchemas: excludedSchemas.length > 0 ? 
excludedSchemas : undefined, + includeSystemSchemas: false, + }) if (relationshipsError) { return { data: null, error: relationshipsError } } const { data: functions, error: functionsError } = await pgMeta.functions.list({ includedSchemas: includedSchemas.length > 0 ? includedSchemas : undefined, - excludedSchemas, + excludedSchemas: excludedSchemas.length > 0 ? excludedSchemas : undefined, + includeSystemSchemas: false, }) if (functionsError) { return { data: null, error: functionsError } diff --git a/src/lib/helpers.ts b/src/lib/helpers.ts index 7145bb40..4fca3124 100644 --- a/src/lib/helpers.ts +++ b/src/lib/helpers.ts @@ -13,7 +13,11 @@ COALESCE( ) AS ${source}` } -export const filterByList = (include?: string[], exclude?: string[], defaultExclude?: string[]) => { +export const filterByList = ( + include?: (string | number)[], + exclude?: (string | number)[], + defaultExclude?: (string | number)[] +) => { if (defaultExclude) { exclude = defaultExclude.concat(exclude ?? []) } @@ -24,3 +28,10 @@ export const filterByList = (include?: string[], exclude?: string[], defaultExcl } return '' } + +export const filterByValue = (ids?: (string | number)[]) => { + if (ids?.length) { + return `IN (${ids.map(literal).join(',')})` + } + return '' +} diff --git a/src/lib/sql/column_privileges.sql b/src/lib/sql/column_privileges.sql.ts similarity index 88% rename from src/lib/sql/column_privileges.sql rename to src/lib/sql/column_privileges.sql.ts index 8540c583..f60101dc 100644 --- a/src/lib/sql/column_privileges.sql +++ b/src/lib/sql/column_privileges.sql.ts @@ -1,3 +1,10 @@ +import type { SQLQueryPropsWithSchemaFilter } from './common.js' + +export const COLUMN_PRIVILEGES_SQL = ( + props: SQLQueryPropsWithSchemaFilter & { + columnIdsFilter?: string + } +) => /* SQL */ ` -- Lists each column's privileges in the form of: -- -- [ @@ -28,8 +35,8 @@ -- - we include column privileges for materialized views -- (reason for exclusion in information_schema.column_privileges: -- 
https://www.postgresql.org/message-id/9136.1502740844%40sss.pgh.pa.us) --- - we query a.attrelid and a.attnum to generate `column_id` --- - `table_catalog` is omitted +-- - we query a.attrelid and a.attnum to generate \`column_id\` +-- - \`table_catalog\` is omitted -- - table_schema -> relation_schema, table_name -> relation_name -- -- Column privileges are intertwined with table privileges in that table @@ -37,12 +44,12 @@ -- -- grant all on mytable to myrole; -- --- Then `myrole` is granted privileges for ALL columns. Likewise, if we do: +-- Then \`myrole\` is granted privileges for ALL columns. Likewise, if we do: -- -- grant all (id) on mytable to myrole; -- revoke all on mytable from myrole; -- --- Then the grant on the `id` column is revoked. +-- Then the grant on the \`id\` column is revoked. -- -- This is unlike how grants for schemas and tables interact, where you need -- privileges for BOTH the schema the table is in AND the table itself in order @@ -130,6 +137,8 @@ from union all select (0)::oid as oid, 'PUBLIC') grantee(oid, rolname) where ((x.relnamespace = nc.oid) + ${props.schemaFilter ? `and nc.nspname ${props.schemaFilter}` : ''} + ${props.columnIdsFilter ? `and (x.attrelid || '.' || x.attnum) ${props.columnIdsFilter}` : ''} and (x.grantee = grantee.oid) and (x.grantor = u_grantor.oid) and (x.prtype = any (ARRAY['INSERT', @@ -143,3 +152,6 @@ group by column_id, nc.nspname, x.relname, x.attname +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/columns.sql b/src/lib/sql/columns.sql.ts similarity index 80% rename from src/lib/sql/columns.sql rename to src/lib/sql/columns.sql.ts index ad01e22a..d19c968c 100644 --- a/src/lib/sql/columns.sql +++ b/src/lib/sql/columns.sql.ts @@ -1,3 +1,13 @@ +import type { SQLQueryPropsWithSchemaFilter } from './common.js' + +export const COLUMNS_SQL = ( + props: SQLQueryPropsWithSchemaFilter & { + tableIdFilter?: string + tableIdentifierFilter?: string + columnNameFilter?: string + idsFilter?: string + } +) => /* SQL */ ` -- Adapted from information_schema.columns SELECT @@ -97,6 +107,11 @@ FROM ORDER BY table_id, ordinal_position, oid asc ) AS check_constraints ON check_constraints.table_id = c.oid AND check_constraints.ordinal_position = a.attnum WHERE + ${props.schemaFilter ? `nc.nspname ${props.schemaFilter} AND` : ''} + ${props.idsFilter ? `(c.oid || '.' || a.attnum) ${props.idsFilter} AND` : ''} + ${props.columnNameFilter ? `(c.relname || '.' || a.attname) ${props.columnNameFilter} AND` : ''} + ${props.tableIdFilter ? `c.oid ${props.tableIdFilter} AND` : ''} + ${props.tableIdentifierFilter ? `nc.nspname || '.' || c.relname ${props.tableIdentifierFilter} AND` : ''} NOT pg_is_other_temp_schema(nc.oid) AND a.attnum > 0 AND NOT a.attisdropped @@ -109,3 +124,6 @@ WHERE 'SELECT, INSERT, UPDATE, REFERENCES' ) ) +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/common.ts b/src/lib/sql/common.ts new file mode 100644 index 00000000..b9c37ec9 --- /dev/null +++ b/src/lib/sql/common.ts @@ -0,0 +1,17 @@ +export type SQLQueryProps = { + limit?: number + offset?: number +} + +export type SQLQueryPropsWithSchemaFilter = SQLQueryProps & { + schemaFilter?: string +} + +export type SQLQueryPropsWithIdsFilter = SQLQueryProps & { + idsFilter?: string +} + +export type SQLQueryPropsWithSchemaFilterAndIdsFilter = SQLQueryProps & { + schemaFilter?: string + idsFilter?: string +} diff --git a/src/lib/sql/config.sql b/src/lib/sql/config.sql.ts similarity index 57% rename from src/lib/sql/config.sql rename to src/lib/sql/config.sql.ts index 553e4426..f33305d5 100644 --- a/src/lib/sql/config.sql +++ b/src/lib/sql/config.sql.ts @@ -1,3 +1,6 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const CONFIG_SQL = (props: SQLQueryPropsWithSchemaFilterAndIdsFilter) => /* SQL */ ` SELECT name, setting, @@ -23,3 +26,6 @@ FROM ORDER BY category, name +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/extensions.sql b/src/lib/sql/extensions.sql deleted file mode 100644 index 9a8700f8..00000000 --- a/src/lib/sql/extensions.sql +++ /dev/null @@ -1,10 +0,0 @@ -SELECT - e.name, - n.nspname AS schema, - e.default_version, - x.extversion AS installed_version, - e.comment -FROM - pg_available_extensions() e(name, default_version, comment) - LEFT JOIN pg_extension x ON e.name = x.extname - LEFT JOIN pg_namespace n ON x.extnamespace = n.oid diff --git a/src/lib/sql/extensions.sql.ts b/src/lib/sql/extensions.sql.ts new file mode 100644 index 00000000..fe65b0c2 --- /dev/null +++ b/src/lib/sql/extensions.sql.ts @@ -0,0 +1,19 @@ +import type { SQLQueryProps } from './common.js' + +export const EXTENSIONS_SQL = (props: SQLQueryProps & { nameFilter?: string }) => /* SQL */ ` +SELECT + e.name, + n.nspname AS schema, + e.default_version, + x.extversion AS installed_version, + e.comment +FROM + pg_available_extensions() e(name, default_version, comment) + LEFT JOIN pg_extension x ON e.name = x.extname + LEFT JOIN pg_namespace n ON x.extnamespace = n.oid +WHERE + true + ${props.nameFilter ? `AND e.name ${props.nameFilter}` : ''} +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/foreign_tables.sql b/src/lib/sql/foreign_tables.sql deleted file mode 100644 index e3e5e14f..00000000 --- a/src/lib/sql/foreign_tables.sql +++ /dev/null @@ -1,10 +0,0 @@ -SELECT - c.oid :: int8 AS id, - n.nspname AS schema, - c.relname AS name, - obj_description(c.oid) AS comment -FROM - pg_class c - JOIN pg_namespace n ON n.oid = c.relnamespace -WHERE - c.relkind = 'f' diff --git a/src/lib/sql/foreign_tables.sql.ts b/src/lib/sql/foreign_tables.sql.ts new file mode 100644 index 00000000..00541f0f --- /dev/null +++ b/src/lib/sql/foreign_tables.sql.ts @@ -0,0 +1,25 @@ +import type { SQLQueryProps } from './common.js' + +export const FOREIGN_TABLES_SQL = ( + props: SQLQueryProps & { + schemaFilter?: string + idsFilter?: string + tableIdentifierFilter?: string + } +) => /* SQL */ ` +SELECT + c.oid :: int8 AS id, + n.nspname AS schema, + c.relname AS name, + obj_description(c.oid) AS comment +FROM + pg_class c + JOIN pg_namespace n ON n.oid = c.relnamespace +WHERE + ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} + ${props.idsFilter ? `c.oid ${props.idsFilter} AND` : ''} + ${props.tableIdentifierFilter ? `(n.nspname || '.' || c.relname) ${props.tableIdentifierFilter} AND` : ''} + c.relkind = 'f' +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/functions.sql b/src/lib/sql/functions.sql.ts similarity index 70% rename from src/lib/sql/functions.sql rename to src/lib/sql/functions.sql.ts index d2258402..92715b95 100644 --- a/src/lib/sql/functions.sql +++ b/src/lib/sql/functions.sql.ts @@ -1,9 +1,17 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const FUNCTIONS_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { + nameFilter?: string + args?: string[] + } +) => /* SQL */ ` -- CTE with sane arg_modes, arg_names, and arg_types. -- All three are always of the same length. 
-- All three include all args, including OUT and TABLE args. with functions as ( select - *, + p.*, -- proargmodes is null when all arg modes are IN coalesce( p.proargmodes, @@ -21,7 +29,40 @@ with functions as ( array_fill(true, array[pronargdefaults])) as arg_has_defaults from pg_proc as p + ${props.schemaFilter ? `join pg_namespace n on p.pronamespace = n.oid` : ''} where + ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} + ${props.idsFilter ? `p.oid ${props.idsFilter} AND` : ''} + ${props.nameFilter ? `p.proname ${props.nameFilter} AND` : ''} + ${ + props.args === undefined + ? '' + : props.args.length > 0 + ? `p.proargtypes::text = ${ + props.args.length + ? `( + SELECT STRING_AGG(type_oid::text, ' ') FROM ( + SELECT ( + split_args.arr[ + array_length( + split_args.arr, + 1 + ) + ]::regtype::oid + ) AS type_oid FROM ( + SELECT STRING_TO_ARRAY( + UNNEST( + ARRAY[${props.args}] + ), + ' ' + ) AS arr + ) AS split_args + ) args + )` + : "''" + } AND` + : '' + } p.prokind = 'f' ) select @@ -105,3 +146,6 @@ from group by t1.oid ) f_args on f_args.oid = f.oid +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/index.ts b/src/lib/sql/index.ts deleted file mode 100644 index 64be3aa8..00000000 --- a/src/lib/sql/index.ts +++ /dev/null @@ -1,34 +0,0 @@ -import { readFile } from 'node:fs/promises' -import { dirname, join } from 'node:path' -import { fileURLToPath } from 'node:url' - -const __dirname = dirname(fileURLToPath(import.meta.url)) -export const columnPrivilegesSql = await readFile(join(__dirname, 'column_privileges.sql'), 'utf-8') -export const columnsSql = await readFile(join(__dirname, 'columns.sql'), 'utf-8') -export const configSql = await readFile(join(__dirname, 'config.sql'), 'utf-8') -export const extensionsSql = await readFile(join(__dirname, 'extensions.sql'), 'utf-8') -export const foreignTablesSql = await readFile(join(__dirname, 'foreign_tables.sql'), 'utf-8') -export const functionsSql = await readFile(join(__dirname, 'functions.sql'), 'utf-8') -export const indexesSql = await readFile(join(__dirname, 'indexes.sql'), 'utf-8') -export const materializedViewsSql = await readFile( - join(__dirname, 'materialized_views.sql'), - 'utf-8' -) -export const policiesSql = await readFile(join(__dirname, 'policies.sql'), 'utf-8') -export const publicationsSql = await readFile(join(__dirname, 'publications.sql'), 'utf-8') -export const tableRelationshipsSql = await readFile( - join(__dirname, 'table_relationships.sql'), - 'utf-8' -) -export const rolesSql = await readFile(join(__dirname, 'roles.sql'), 'utf-8') -export const schemasSql = await readFile(join(__dirname, 'schemas.sql'), 'utf-8') -export const tablePrivilegesSql = await readFile(join(__dirname, 'table_privileges.sql'), 'utf-8') -export const tablesSql = await readFile(join(__dirname, 'tables.sql'), 'utf-8') -export const triggersSql = await readFile(join(__dirname, 'triggers.sql'), 'utf-8') -export const typesSql = await readFile(join(__dirname, 'types.sql'), 'utf-8') -export const versionSql = await readFile(join(__dirname, 'version.sql'), 
'utf-8') -export const viewsKeyDependenciesSql = await readFile( - join(__dirname, 'views_key_dependencies.sql'), - 'utf-8' -) -export const viewsSql = await readFile(join(__dirname, 'views.sql'), 'utf-8') diff --git a/src/lib/sql/indexes.sql b/src/lib/sql/indexes.sql.ts similarity index 79% rename from src/lib/sql/indexes.sql rename to src/lib/sql/indexes.sql.ts index ff0c8f36..5f893a8f 100644 --- a/src/lib/sql/indexes.sql +++ b/src/lib/sql/indexes.sql.ts @@ -1,3 +1,6 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const INDEXES_SQL = (props: SQLQueryPropsWithSchemaFilterAndIdsFilter) => /* SQL */ ` SELECT idx.indexrelid::int8 AS id, idx.indrelid::int8 AS table_id, @@ -37,5 +40,11 @@ SELECT JOIN pg_am am ON c.relam = am.oid JOIN pg_attribute a ON a.attrelid = c.oid AND a.attnum = ANY(idx.indkey) JOIN pg_indexes ix ON c.relname = ix.indexname + WHERE + ${props.schemaFilter ? `n.nspname ${props.schemaFilter}` : 'true'} + ${props.idsFilter ? `AND idx.indexrelid ${props.idsFilter}` : ''} GROUP BY - idx.indexrelid, idx.indrelid, n.nspname, idx.indnatts, idx.indnkeyatts, idx.indisunique, idx.indisprimary, idx.indisexclusion, idx.indimmediate, idx.indisclustered, idx.indisvalid, idx.indcheckxmin, idx.indisready, idx.indislive, idx.indisreplident, idx.indkey, idx.indcollation, idx.indclass, idx.indoption, idx.indexprs, idx.indpred, ix.indexdef, am.amname \ No newline at end of file + idx.indexrelid, idx.indrelid, n.nspname, idx.indnatts, idx.indnkeyatts, idx.indisunique, idx.indisprimary, idx.indisexclusion, idx.indimmediate, idx.indisclustered, idx.indisvalid, idx.indcheckxmin, idx.indisready, idx.indislive, idx.indisreplident, idx.indkey, idx.indcollation, idx.indclass, idx.indoption, idx.indexprs, idx.indpred, ix.indexdef, am.amname +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/materialized_views.sql b/src/lib/sql/materialized_views.sql deleted file mode 100644 index 5281f7da..00000000 --- a/src/lib/sql/materialized_views.sql +++ /dev/null @@ -1,11 +0,0 @@ -select - c.oid::int8 as id, - n.nspname as schema, - c.relname as name, - c.relispopulated as is_populated, - obj_description(c.oid) as comment -from - pg_class c - join pg_namespace n on n.oid = c.relnamespace -where - c.relkind = 'm' diff --git a/src/lib/sql/materialized_views.sql.ts b/src/lib/sql/materialized_views.sql.ts new file mode 100644 index 00000000..aae179e8 --- /dev/null +++ b/src/lib/sql/materialized_views.sql.ts @@ -0,0 +1,24 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const MATERIALIZED_VIEWS_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { + materializedViewIdentifierFilter?: string + } +) => /* SQL */ ` +select + c.oid::int8 as id, + n.nspname as schema, + c.relname as name, + c.relispopulated as is_populated, + obj_description(c.oid) as comment +from + pg_class c + join pg_namespace n on n.oid = c.relnamespace +where + ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} + ${props.idsFilter ? `c.oid ${props.idsFilter} AND` : ''} + ${props.materializedViewIdentifierFilter ? `(n.nspname || '.' || c.relname) ${props.materializedViewIdentifierFilter} AND` : ''} + c.relkind = 'm' +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/policies.sql b/src/lib/sql/policies.sql.ts similarity index 66% rename from src/lib/sql/policies.sql rename to src/lib/sql/policies.sql.ts index 20a09327..9e354931 100644 --- a/src/lib/sql/policies.sql +++ b/src/lib/sql/policies.sql.ts @@ -1,3 +1,8 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const POLICIES_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { functionNameIdentifierFilter?: string } +) => /* SQL */ ` SELECT pol.oid :: int8 AS id, n.nspname AS schema, @@ -40,3 +45,10 @@ FROM pg_policy pol JOIN pg_class c ON c.oid = pol.polrelid LEFT JOIN pg_namespace n ON n.oid = c.relnamespace +WHERE + ${props.schemaFilter ? `n.nspname ${props.schemaFilter}` : 'true'} + ${props.idsFilter ? `AND pol.oid ${props.idsFilter}` : ''} + ${props.functionNameIdentifierFilter ? `AND (c.relname || '.' || pol.polname) ${props.functionNameIdentifierFilter}` : ''} +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/publications.sql b/src/lib/sql/publications.sql.ts similarity index 67% rename from src/lib/sql/publications.sql rename to src/lib/sql/publications.sql.ts index ed0a2e20..cd04e05b 100644 --- a/src/lib/sql/publications.sql +++ b/src/lib/sql/publications.sql.ts @@ -1,3 +1,8 @@ +import type { SQLQueryPropsWithIdsFilter } from './common.js' + +export const PUBLICATIONS_SQL = ( + props: SQLQueryPropsWithIdsFilter & { nameFilter?: string } +) => /* SQL */ ` SELECT p.oid :: int8 AS id, p.pubname AS name, @@ -34,3 +39,9 @@ FROM WHERE pr.prpubid = p.oid ) AS pr ON 1 = 1 +WHERE + ${props.idsFilter ? `p.oid ${props.idsFilter}` : 'true'} + ${props.nameFilter ? `AND p.pubname ${props.nameFilter}` : ''} +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/roles.sql b/src/lib/sql/roles.sql.ts similarity index 52% rename from src/lib/sql/roles.sql rename to src/lib/sql/roles.sql.ts index a0c79d6f..b3d29358 100644 --- a/src/lib/sql/roles.sql +++ b/src/lib/sql/roles.sql.ts @@ -1,3 +1,11 @@ +import type { SQLQueryPropsWithIdsFilter } from './common.js' + +export const ROLES_SQL = ( + props: SQLQueryPropsWithIdsFilter & { + includeDefaultRoles?: boolean + nameFilter?: string + } +) => /* SQL */ ` -- TODO: Consider using pg_authid vs. pg_roles for unencrypted password field SELECT oid :: int8 AS id, @@ -25,3 +33,12 @@ SELECT rolconfig AS config FROM pg_roles +WHERE + ${props.idsFilter ? `oid ${props.idsFilter}` : 'true'} + -- All default/predefined roles start with pg_: https://www.postgresql.org/docs/15/predefined-roles.html + -- The pg_ prefix is also reserved. + ${!props.includeDefaultRoles ? `AND NOT pg_catalog.starts_with(rolname, 'pg_')` : ''} + ${props.nameFilter ? `AND rolname ${props.nameFilter}` : ''} +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/schemas.sql b/src/lib/sql/schemas.sql deleted file mode 100644 index a4859fff..00000000 --- a/src/lib/sql/schemas.sql +++ /dev/null @@ -1,17 +0,0 @@ --- Adapted from information_schema.schemata - -select - n.oid::int8 as id, - n.nspname as name, - u.rolname as owner -from - pg_namespace n, - pg_roles u -where - n.nspowner = u.oid - and ( - pg_has_role(n.nspowner, 'USAGE') - or has_schema_privilege(n.oid, 'CREATE, USAGE') - ) - and not pg_catalog.starts_with(n.nspname, 'pg_temp_') - and not pg_catalog.starts_with(n.nspname, 'pg_toast_temp_') diff --git a/src/lib/sql/schemas.sql.ts b/src/lib/sql/schemas.sql.ts new file mode 100644 index 00000000..a9e5d85b --- /dev/null +++ b/src/lib/sql/schemas.sql.ts @@ -0,0 +1,27 @@ +import type { SQLQueryProps } from './common.js' + +export const SCHEMAS_SQL = ( + props: SQLQueryProps & { nameFilter?: string; idsFilter?: string; includeSystemSchemas?: boolean } +) => /* SQL */ ` +-- Adapted from information_schema.schemata +select + n.oid::int8 as id, + n.nspname as name, + u.rolname as owner +from + pg_namespace n, + pg_roles u +where + n.nspowner = u.oid + ${props.idsFilter ? `and n.oid ${props.idsFilter}` : ''} + ${props.nameFilter ? `and n.nspname ${props.nameFilter}` : ''} + ${!props.includeSystemSchemas ? `and not pg_catalog.starts_with(n.nspname, 'pg_')` : ''} + and ( + pg_has_role(n.nspowner, 'USAGE') + or has_schema_privilege(n.oid, 'CREATE, USAGE') + ) + and not pg_catalog.starts_with(n.nspname, 'pg_temp_') + and not pg_catalog.starts_with(n.nspname, 'pg_toast_temp_') +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/tables.sql b/src/lib/sql/table.sql.ts similarity index 73% rename from src/lib/sql/tables.sql rename to src/lib/sql/table.sql.ts index d0bb9df3..d7f70331 100644 --- a/src/lib/sql/tables.sql +++ b/src/lib/sql/table.sql.ts @@ -1,3 +1,8 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const TABLES_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { tableIdentifierFilter?: string } +) => /* SQL */ ` SELECT c.oid :: int8 AS id, nc.nspname AS schema, @@ -41,6 +46,8 @@ FROM pg_attribute a, pg_namespace n where + ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} + ${props.tableIdentifierFilter ? `n.nspname || '.' || c.relname ${props.tableIdentifierFilter} AND` : ''} i.indrelid = c.oid and c.relnamespace = n.oid and a.attrelid = c.oid @@ -73,11 +80,16 @@ FROM join pg_namespace nta on cta.relnamespace = nta.oid ) on ta.attrelid = c.confrelid and ta.attnum = any (c.confkey) where + ${props.schemaFilter ? `nsa.nspname ${props.schemaFilter} OR nta.nspname ${props.schemaFilter} AND` : ''} + ${props.tableIdentifierFilter ? `(nsa.nspname || '.' || csa.relname) ${props.tableIdentifierFilter} OR (nta.nspname || '.' || cta.relname) ${props.tableIdentifierFilter} AND` : ''} c.contype = 'f' ) as relationships on (relationships.source_schema = nc.nspname and relationships.source_table_name = c.relname) or (relationships.target_table_schema = nc.nspname and relationships.target_table_name = c.relname) WHERE + ${props.schemaFilter ? `nc.nspname ${props.schemaFilter} AND` : ''} + ${props.idsFilter ? `c.oid ${props.idsFilter} AND` : ''} + ${props.tableIdentifierFilter ? `nc.nspname || '.' || c.relname ${props.tableIdentifierFilter} AND` : ''} c.relkind IN ('r', 'p') AND NOT pg_is_other_temp_schema(nc.oid) AND ( @@ -96,3 +108,6 @@ group by c.relreplident, nc.nspname, pk.primary_keys +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/table_privileges.sql b/src/lib/sql/table_privileges.sql.ts similarity index 74% rename from src/lib/sql/table_privileges.sql rename to src/lib/sql/table_privileges.sql.ts index 435409dc..ca4ea122 100644 --- a/src/lib/sql/table_privileges.sql +++ b/src/lib/sql/table_privileges.sql.ts @@ -1,4 +1,11 @@ --- Despite the name `table_privileges`, this includes other kinds of relations: +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const TABLE_PRIVILEGES_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { + nameIdentifierFilter?: string + } +) => /* SQL */ ` +-- Despite the name \`table_privileges\`, this includes other kinds of relations: -- views, matviews, etc. "Relation privileges" just doesn't roll off the tongue. -- -- For each relation, get its relacl in a jsonb format, @@ -59,6 +66,9 @@ left join ( ) as grantee (oid, rolname) on grantee.oid = _priv.grantee where c.relkind in ('r', 'v', 'm', 'f', 'p') + ${props.schemaFilter ? `and nc.nspname ${props.schemaFilter}` : ''} + ${props.idsFilter ? `and c.oid ${props.idsFilter}` : ''} + ${props.nameIdentifierFilter ? `and (nc.nspname || '.' || c.relname) ${props.nameIdentifierFilter}` : ''} and not pg_is_other_temp_schema(c.relnamespace) and ( pg_has_role(c.relowner, 'USAGE') @@ -73,3 +83,6 @@ group by nc.nspname, c.relname, c.relkind +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/table_relationships.sql b/src/lib/sql/table_relationships.sql.ts similarity index 80% rename from src/lib/sql/table_relationships.sql rename to src/lib/sql/table_relationships.sql.ts index 53b80ded..d74c12a4 100644 --- a/src/lib/sql/table_relationships.sql +++ b/src/lib/sql/table_relationships.sql.ts @@ -1,3 +1,6 @@ +import type { SQLQueryPropsWithSchemaFilter } from './common.js' + +export const TABLE_RELATIONSHIPS_SQL = (props: SQLQueryPropsWithSchemaFilter) => /* SQL */ ` -- Adapted from -- https://github.com/PostgREST/postgrest/blob/f9f0f79fa914ac00c11fbf7f4c558e14821e67e2/src/PostgREST/SchemaCache.hs#L722 WITH @@ -15,6 +18,7 @@ pks_uniques_cols AS ( WHERE contype IN ('p', 'u') and connamespace::regnamespace::text <> 'pg_catalog' + ${props.schemaFilter ? `and connamespace::regnamespace::text ${props.schemaFilter}` : ''} GROUP BY connamespace, conrelid ) SELECT @@ -34,6 +38,7 @@ JOIN LATERAL ( FROM unnest(traint.conkey, traint.confkey) WITH ORDINALITY AS _(col, ref, ord) JOIN pg_attribute cols ON cols.attrelid = traint.conrelid AND cols.attnum = col JOIN pg_attribute refs ON refs.attrelid = traint.confrelid AND refs.attnum = ref + WHERE ${props.schemaFilter ? `traint.connamespace::regnamespace::text ${props.schemaFilter}` : 'true'} ) AS column_info ON TRUE JOIN pg_namespace ns1 ON ns1.oid = traint.connamespace JOIN pg_class tab ON tab.oid = traint.conrelid @@ -42,3 +47,5 @@ JOIN pg_namespace ns2 ON ns2.oid = other.relnamespace LEFT JOIN pks_uniques_cols pks_uqs ON pks_uqs.connamespace = traint.connamespace AND pks_uqs.conrelid = traint.conrelid WHERE traint.contype = 'f' AND traint.conparentid = 0 +${props.schemaFilter ? 
`and ns1.nspname ${props.schemaFilter}` : ''} +` diff --git a/src/lib/sql/triggers.sql b/src/lib/sql/triggers.sql.ts similarity index 62% rename from src/lib/sql/triggers.sql rename to src/lib/sql/triggers.sql.ts index 09fcef14..5580373e 100644 --- a/src/lib/sql/triggers.sql +++ b/src/lib/sql/triggers.sql.ts @@ -1,3 +1,11 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const TRIGGERS_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { + tableNameFilter?: string + nameFilter?: string + } +) => /* SQL */ ` SELECT pg_t.oid AS id, pg_t.tgrelid AS table_id, @@ -6,10 +14,10 @@ SELECT WHEN pg_t.tgenabled = 'O' THEN 'ORIGIN' WHEN pg_t.tgenabled = 'R' THEN 'REPLICA' WHEN pg_t.tgenabled = 'A' THEN 'ALWAYS' - END AS enabled_mode, + END AS enabled_mode, ( STRING_TO_ARRAY( - ENCODE(pg_t.tgargs, 'escape'), '\000' + ENCODE(pg_t.tgargs, 'escape'), '\\000' ) )[:pg_t.tgnargs] AS function_args, is_t.trigger_name AS name, @@ -26,6 +34,8 @@ FROM JOIN pg_class AS pg_c ON pg_t.tgrelid = pg_c.oid +JOIN pg_namespace AS table_ns +ON pg_c.relnamespace = table_ns.oid JOIN information_schema.triggers AS is_t ON is_t.trigger_name = pg_t.tgname AND pg_c.relname = is_t.event_object_table @@ -34,6 +44,11 @@ JOIN pg_proc AS pg_p ON pg_t.tgfoid = pg_p.oid JOIN pg_namespace AS pg_n ON pg_p.pronamespace = pg_n.oid +WHERE + ${props.schemaFilter ? `table_ns.nspname ${props.schemaFilter}` : 'true'} + ${props.tableNameFilter ? `AND pg_c.relname ${props.tableNameFilter}` : ''} + ${props.nameFilter ? `AND is_t.trigger_name ${props.nameFilter}` : ''} + ${props.idsFilter ? `AND pg_t.oid ${props.idsFilter}` : ''} GROUP BY pg_t.oid, pg_t.tgrelid, @@ -48,3 +63,6 @@ GROUP BY is_t.action_timing, pg_p.proname, pg_n.nspname +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? 
`offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/types.sql b/src/lib/sql/types.sql deleted file mode 100644 index 7a628ed1..00000000 --- a/src/lib/sql/types.sql +++ /dev/null @@ -1,35 +0,0 @@ -select - t.oid::int8 as id, - t.typname as name, - n.nspname as schema, - format_type (t.oid, null) as format, - coalesce(t_enums.enums, '[]') as enums, - coalesce(t_attributes.attributes, '[]') as attributes, - obj_description (t.oid, 'pg_type') as comment -from - pg_type t - left join pg_namespace n on n.oid = t.typnamespace - left join ( - select - enumtypid, - jsonb_agg(enumlabel order by enumsortorder) as enums - from - pg_enum - group by - enumtypid - ) as t_enums on t_enums.enumtypid = t.oid - left join ( - select - oid, - jsonb_agg( - jsonb_build_object('name', a.attname, 'type_id', a.atttypid::int8) - order by a.attnum asc - ) as attributes - from - pg_class c - join pg_attribute a on a.attrelid = c.oid - where - c.relkind = 'c' and not a.attisdropped - group by - c.oid - ) as t_attributes on t_attributes.oid = t.typrelid diff --git a/src/lib/sql/types.sql.ts b/src/lib/sql/types.sql.ts new file mode 100644 index 00000000..990fa22f --- /dev/null +++ b/src/lib/sql/types.sql.ts @@ -0,0 +1,72 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const TYPES_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { + includeTableTypes?: boolean + includeArrayTypes?: boolean + } +) => /* SQL */ ` +select + t.oid::int8 as id, + t.typname as name, + n.nspname as schema, + format_type (t.oid, null) as format, + coalesce(t_enums.enums, '[]') as enums, + coalesce(t_attributes.attributes, '[]') as attributes, + obj_description (t.oid, 'pg_type') as comment +from + pg_type t + left join pg_namespace n on n.oid = t.typnamespace + left join ( + select + enumtypid, + jsonb_agg(enumlabel order by enumsortorder) as enums + from + pg_enum + group by + enumtypid + ) as t_enums on t_enums.enumtypid = t.oid + left join ( + select + oid, 
+ jsonb_agg( + jsonb_build_object('name', a.attname, 'type_id', a.atttypid::int8) + order by a.attnum asc + ) as attributes + from + pg_class c + join pg_attribute a on a.attrelid = c.oid + where + c.relkind = 'c' and not a.attisdropped + group by + c.oid + ) as t_attributes on t_attributes.oid = t.typrelid + where + ( + t.typrelid = 0 + or ( + select + c.relkind ${props.includeTableTypes ? `in ('c', 'r')` : `= 'c'`} + from + pg_class c + where + c.oid = t.typrelid + ) + ) + ${ + !props.includeArrayTypes + ? `and not exists ( + select + from + pg_type el + where + el.oid = t.typelem + and el.typarray = t.oid + )` + : '' + } + ${props.schemaFilter ? `and n.nspname ${props.schemaFilter}` : ''} + ${props.idsFilter ? `and t.oid ${props.idsFilter}` : ''} +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/version.sql b/src/lib/sql/version.sql.ts similarity index 84% rename from src/lib/sql/version.sql rename to src/lib/sql/version.sql.ts index ed7fab7e..f959c5fd 100644 --- a/src/lib/sql/version.sql +++ b/src/lib/sql/version.sql.ts @@ -1,3 +1,4 @@ +export const VERSION_SQL = () => /* SQL */ ` SELECT version(), current_setting('server_version_num') :: int8 AS version_number, @@ -8,3 +9,4 @@ SELECT pg_stat_activity ) AS active_connections, current_setting('max_connections') :: int8 AS max_connections +` diff --git a/src/lib/sql/views.sql b/src/lib/sql/views.sql deleted file mode 100644 index bd60da2b..00000000 --- a/src/lib/sql/views.sql +++ /dev/null @@ -1,12 +0,0 @@ -SELECT - c.oid :: int8 AS id, - n.nspname AS schema, - c.relname AS name, - -- See definition of information_schema.views - (pg_relation_is_updatable(c.oid, false) & 20) = 20 AS is_updatable, - obj_description(c.oid) AS comment -FROM - pg_class c - JOIN pg_namespace n ON n.oid = c.relnamespace -WHERE - c.relkind = 'v' diff --git a/src/lib/sql/views.sql.ts b/src/lib/sql/views.sql.ts new file mode 100644 index 00000000..95a707e2 --- 
/dev/null +++ b/src/lib/sql/views.sql.ts @@ -0,0 +1,25 @@ +import type { SQLQueryPropsWithSchemaFilterAndIdsFilter } from './common.js' + +export const VIEWS_SQL = ( + props: SQLQueryPropsWithSchemaFilterAndIdsFilter & { + viewIdentifierFilter?: string + } +) => /* SQL */ ` +SELECT + c.oid :: int8 AS id, + n.nspname AS schema, + c.relname AS name, + -- See definition of information_schema.views + (pg_relation_is_updatable(c.oid, false) & 20) = 20 AS is_updatable, + obj_description(c.oid) AS comment +FROM + pg_class c + JOIN pg_namespace n ON n.oid = c.relnamespace +WHERE + ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} + ${props.idsFilter ? `c.oid ${props.idsFilter} AND` : ''} + ${props.viewIdentifierFilter ? `(n.nspname || '.' || c.relname) ${props.viewIdentifierFilter} AND` : ''} + c.relkind = 'v' +${props.limit ? `limit ${props.limit}` : ''} +${props.offset ? `offset ${props.offset}` : ''} +` diff --git a/src/lib/sql/views_key_dependencies.sql b/src/lib/sql/views_key_dependencies.sql.ts similarity index 78% rename from src/lib/sql/views_key_dependencies.sql rename to src/lib/sql/views_key_dependencies.sql.ts index c8534486..31035012 100644 --- a/src/lib/sql/views_key_dependencies.sql +++ b/src/lib/sql/views_key_dependencies.sql.ts @@ -1,3 +1,6 @@ +import type { SQLQueryPropsWithSchemaFilter } from './common.js' + +export const VIEWS_KEY_DEPENDENCIES_SQL = (props: SQLQueryPropsWithSchemaFilter) => /* SQL */ ` -- Adapted from -- https://github.com/PostgREST/postgrest/blob/f9f0f79fa914ac00c11fbf7f4c558e14821e67e2/src/PostgREST/SchemaCache.hs#L820 with recursive @@ -25,6 +28,7 @@ pks_fks as ( from pg_constraint left join lateral unnest(confkey) with ordinality as _(col, ord) on true where contype='f' + ${props.schemaFilter ? 
`and connamespace::regnamespace::text ${props.schemaFilter}` : ''} ), views as ( select @@ -35,7 +39,8 @@ views as ( from pg_class c join pg_namespace n on n.oid = c.relnamespace join pg_rewrite r on r.ev_class = c.oid - where c.relkind in ('v', 'm') and n.nspname not in (__EXCLUDED_SCHEMAS) + where c.relkind in ('v', 'm') + ${props.schemaFilter ? `and n.nspname ${props.schemaFilter}` : ''} ), transform_json as ( select @@ -71,48 +76,48 @@ transform_json as ( -- ----------------------------------------------- -- pattern | replacement | flags -- ----------------------------------------------- - -- `<>` in pg_node_tree is the same as `null` in JSON, but due to very poor performance of json_typeof + -- <> in pg_node_tree is the same as null in JSON, but due to very poor performance of json_typeof -- we need to make this an empty array here to prevent json_array_elements from throwing an error -- when the targetList is null. -- We'll need to put it first, to make the node protection below work for node lists that start with - -- null: `(<> ...`, too. This is the case for coldefexprs, when the first column does not have a default value. + -- null: (<> ..., too. This is the case for coldefexprs, when the first column does not have a default value. '<>' , '()' - -- `,` is not part of the pg_node_tree format, but used in the regex. - -- This removes all `,` that might be part of column names. + -- , is not part of the pg_node_tree format, but used in the regex. + -- This removes all , that might be part of column names. ), ',' , '' - -- The same applies for `{` and `}`, although those are used a lot in pg_node_tree. + -- The same applies for { and }, although those are used a lot in pg_node_tree. -- We remove the escaped ones, which might be part of column names again. - ), E'\\{' , '' - ), E'\\}' , '' + ), E'\\\\{' , '' + ), E'\\\\}' , '' -- The fields we need are formatted as json manually to protect them from the regex. 
), ' :targetList ' , ',"targetList":' ), ' :resno ' , ',"resno":' ), ' :resorigtbl ' , ',"resorigtbl":' ), ' :resorigcol ' , ',"resorigcol":' - -- Make the regex also match the node type, e.g. `{QUERY ...`, to remove it in one pass. + -- Make the regex also match the node type, e.g. \`{QUERY ...\`, to remove it in one pass. ), '{' , '{ :' - -- Protect node lists, which start with `({` or `((` from the greedy regex. - -- The extra `{` is removed again later. + -- Protect node lists, which start with \`({\` or \`((\` from the greedy regex. + -- The extra \`{\` is removed again later. ), '((' , '{((' ), '({' , '{({' -- This regex removes all unused fields to avoid the need to format all of them correctly. -- This leads to a smaller json result as well. - -- Removal stops at `,` for used fields (see above) and `}` for the end of the current node. - -- Nesting can't be parsed correctly with a regex, so we stop at `{` as well and + -- Removal stops at \`,\` for used fields (see above) and \`}\` for the end of the current node. + -- Nesting can't be parsed correctly with a regex, so we stop at \`{\` as well and -- add an empty key for the followig node. ), ' :[^}{,]+' , ',"":' , 'g' - -- For performance, the regex also added those empty keys when hitting a `,` or `}`. + -- For performance, the regex also added those empty keys when hitting a \`,\` or \`}\`. -- Those are removed next. ), ',"":}' , '}' ), ',"":,' , ',' -- This reverses the "node list protection" from above. ), '{(' , '(' - -- Every key above has been added with a `,` so far. The first key in an object doesn't need it. + -- Every key above has been added with a \`,\` so far. The first key in an object doesn't need it. 
), '{,' , '{' - -- pg_node_tree has `()` around lists, but JSON uses `[]` + -- pg_node_tree has \`()\` around lists, but JSON uses \`[]\` ), '(' , '[' ), ')' , ']' - -- pg_node_tree has ` ` between list items, but JSON uses `,` + -- pg_node_tree has \` \` between list items, but JSON uses \`,\` ), ' ' , ',' )::json as view_definition from views @@ -139,7 +144,7 @@ recursion(view_id, view_schema, view_name, view_column, resorigtbl, resorigcol, false, ARRAY[resorigtbl] from results r - where view_schema not in (__EXCLUDED_SCHEMAS) + where ${props.schemaFilter ? `view_schema ${props.schemaFilter}` : 'true'} union all select view.view_id, @@ -189,3 +194,4 @@ join pg_namespace sch on sch.oid = tbl.relnamespace group by sch.nspname, tbl.relname, rep.view_schema, rep.view_name, pks_fks.conname, pks_fks.contype, pks_fks.ncol -- make sure we only return key for which all columns are referenced in the view - no partial PKs or FKs having ncol = array_length(array_agg(row(col.attname, view_columns) order by pks_fks.ord), 1) +` diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 4f9cac03..03b407d4 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -32,7 +32,9 @@ export const apply = async ({ columns .filter((c) => c.table_id in columnsByTableId) .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - .forEach((c) => columnsByTableId[c.table_id].push(c)) + .forEach((c) => { + columnsByTableId[c.table_id].push(c) + }) const internal_supabase_schema = postgrestVersion ? 
`// Allows to automatically instantiate createClient with right options diff --git a/test/lib/functions.ts b/test/lib/functions.ts index 05de3244..fb2c4692 100644 --- a/test/lib/functions.ts +++ b/test/lib/functions.ts @@ -354,3 +354,52 @@ test('retrieve set-returning function', async () => { ` ) }) + +test('retrieve function by args filter - polymorphic function with text argument', async () => { + const res = await pgMeta.functions.retrieve({ + schema: 'public', + name: 'polymorphic_function', + args: ['text'], + }) + expect(res.data).toMatchObject({ + name: 'polymorphic_function', + schema: 'public', + argument_types: 'text', + args: [ + { type_id: 25, mode: 'in' }, // text type_id is 25 + ], + }) + expect(res.error).toBeNull() +}) + +test('retrieve function by args filter - polymorphic function with boolean argument', async () => { + const res = await pgMeta.functions.retrieve({ + schema: 'public', + name: 'polymorphic_function', + args: ['boolean'], + }) + expect(res.data).toMatchObject({ + name: 'polymorphic_function', + schema: 'public', + argument_types: 'boolean', + args: [ + { type_id: 16, mode: 'in' }, // boolean type_id is 16 + ], + }) + expect(res.error).toBeNull() +}) + +test('retrieve function by args filter - function with no arguments', async () => { + const res = await pgMeta.functions.retrieve({ + schema: 'public', + name: 'function_returning_set_of_rows', + args: [], + }) + expect(res.data).toMatchObject({ + name: 'function_returning_set_of_rows', + schema: 'public', + argument_types: '', + args: [], + }) + expect(res.error).toBeNull() +}) diff --git a/test/lib/tables.ts b/test/lib/tables.ts index c35546b8..00230ab4 100644 --- a/test/lib/tables.ts +++ b/test/lib/tables.ts @@ -81,39 +81,39 @@ test('list', async () => { { "check": null, "comment": null, - "data_type": "numeric", - "default_value": null, - "enums": [], - "format": "numeric", + "data_type": "USER-DEFINED", + "default_value": "'ACTIVE'::user_status", + "enums": [ + "ACTIVE", + 
"INACTIVE", + ], + "format": "user_status", "identity_generation": null, "is_generated": false, "is_identity": false, "is_nullable": true, "is_unique": false, "is_updatable": true, - "name": "decimal", - "ordinal_position": 4, + "name": "status", + "ordinal_position": 3, "schema": "public", "table": "users", }, { "check": null, "comment": null, - "data_type": "USER-DEFINED", - "default_value": "'ACTIVE'::user_status", - "enums": [ - "ACTIVE", - "INACTIVE", - ], - "format": "user_status", + "data_type": "numeric", + "default_value": null, + "enums": [], + "format": "numeric", "identity_generation": null, "is_generated": false, "is_identity": false, "is_nullable": true, "is_unique": false, "is_updatable": true, - "name": "status", - "ordinal_position": 3, + "name": "decimal", + "ordinal_position": 4, "schema": "public", "table": "users", }, From 4e899128e4944f9ffffb107525cc463c3aa4447f Mon Sep 17 00:00:00 2001 From: georgRusanov Date: Fri, 5 Sep 2025 10:13:02 +0200 Subject: [PATCH 30/72] chore: added tests (#979) * chore: added tests * chore: added more tests * chore: try to fix test on ci --------- Co-authored-by: georgiy.rusanov --- test/admin-app.test.ts | 17 ++++ test/app.test.ts | 31 ++++++ test/config.test.ts | 127 +++++++++++++++++++++++ test/extensions.test.ts | 144 ++++++++++++++++++++++++++ test/functions.test.ts | 205 ++++++++++++++++++++++++++++++++++++++ test/index.test.ts | 12 +++ test/lib/utils.ts | 4 +- test/publications.test.ts | 187 ++++++++++++++++++++++++++++++++++ test/roles.test.ts | 181 +++++++++++++++++++++++++++++++++ test/schemas.test.ts | 137 +++++++++++++++++++++++++ test/triggers.test.ts | 186 ++++++++++++++++++++++++++++++++++ test/types.test.ts | 46 +++++++++ test/utils.test.ts | 105 +++++++++++++++++++ test/views.test.ts | 51 ++++++++++ 14 files changed, 1432 insertions(+), 1 deletion(-) create mode 100644 test/admin-app.test.ts create mode 100644 test/app.test.ts create mode 100644 test/config.test.ts create mode 100644 
test/extensions.test.ts create mode 100644 test/functions.test.ts create mode 100644 test/publications.test.ts create mode 100644 test/roles.test.ts create mode 100644 test/schemas.test.ts create mode 100644 test/triggers.test.ts create mode 100644 test/types.test.ts create mode 100644 test/utils.test.ts create mode 100644 test/views.test.ts diff --git a/test/admin-app.test.ts b/test/admin-app.test.ts new file mode 100644 index 00000000..0bc93e2e --- /dev/null +++ b/test/admin-app.test.ts @@ -0,0 +1,17 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/admin-app.js' + +describe('admin-app', () => { + test('should register metrics endpoint', async () => { + const app = build() + + // Test that the app can be started (this will trigger plugin registration) + await app.ready() + + // Verify that metrics endpoint is available + const routes = app.printRoutes() + expect(routes).toContain('metrics') + + await app.close() + }) +}) diff --git a/test/app.test.ts b/test/app.test.ts new file mode 100644 index 00000000..c705dd9b --- /dev/null +++ b/test/app.test.ts @@ -0,0 +1,31 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' + +describe('server/app', () => { + test('should handle root endpoint', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/', + }) + expect(response.statusCode).toBe(200) + const data = JSON.parse(response.body) + expect(data).toHaveProperty('status') + expect(data).toHaveProperty('name') + expect(data).toHaveProperty('version') + expect(data).toHaveProperty('documentation') + await app.close() + }) + + test('should handle health endpoint', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/health', + }) + expect(response.statusCode).toBe(200) + const data = JSON.parse(response.body) + expect(data).toHaveProperty('date') + await app.close() + }) +}) diff --git 
a/test/config.test.ts b/test/config.test.ts new file mode 100644 index 00000000..2736eb57 --- /dev/null +++ b/test/config.test.ts @@ -0,0 +1,127 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/config', () => { + test('should list config with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/config?limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(response.json()).toMatchInlineSnapshot(` + [ + { + "boot_val": "on", + "category": "Autovacuum", + "context": "sighup", + "enumvals": null, + "extra_desc": null, + "group": "Autovacuum", + "max_val": null, + "min_val": null, + "name": "autovacuum", + "pending_restart": false, + "reset_val": "on", + "setting": "on", + "short_desc": "Starts the autovacuum subprocess.", + "source": "default", + "sourcefile": null, + "sourceline": null, + "subgroup": "", + "unit": null, + "vartype": "bool", + }, + { + "boot_val": "0.1", + "category": "Autovacuum", + "context": "sighup", + "enumvals": null, + "extra_desc": null, + "group": "Autovacuum", + "max_val": "100", + "min_val": "0", + "name": "autovacuum_analyze_scale_factor", + "pending_restart": false, + "reset_val": "0.1", + "setting": "0.1", + "short_desc": "Number of tuple inserts, updates, or deletes prior to analyze as a fraction of reltuples.", + "source": "default", + "sourcefile": null, + "sourceline": null, + "subgroup": "", + "unit": null, + "vartype": "real", + }, + { + "boot_val": "50", + "category": "Autovacuum", + "context": "sighup", + "enumvals": null, + "extra_desc": null, + "group": "Autovacuum", + "max_val": "2147483647", + "min_val": "0", + "name": "autovacuum_analyze_threshold", + "pending_restart": false, + "reset_val": "50", + "setting": "50", + "short_desc": "Minimum number of tuple inserts, 
updates, or deletes prior to analyze.", + "source": "default", + "sourcefile": null, + "sourceline": null, + "subgroup": "", + "unit": null, + "vartype": "integer", + }, + { + "boot_val": "200000000", + "category": "Autovacuum", + "context": "postmaster", + "enumvals": null, + "extra_desc": null, + "group": "Autovacuum", + "max_val": "2000000000", + "min_val": "100000", + "name": "autovacuum_freeze_max_age", + "pending_restart": false, + "reset_val": "200000000", + "setting": "200000000", + "short_desc": "Age at which to autovacuum a table to prevent transaction ID wraparound.", + "source": "default", + "sourcefile": null, + "sourceline": null, + "subgroup": "", + "unit": null, + "vartype": "integer", + }, + { + "boot_val": "3", + "category": "Autovacuum", + "context": "postmaster", + "enumvals": null, + "extra_desc": null, + "group": "Autovacuum", + "max_val": "262143", + "min_val": "1", + "name": "autovacuum_max_workers", + "pending_restart": false, + "reset_val": "3", + "setting": "3", + "short_desc": "Sets the maximum number of simultaneously running autovacuum worker processes.", + "source": "default", + "sourcefile": null, + "sourceline": null, + "subgroup": "", + "unit": null, + "vartype": "integer", + }, + ] + `) + await app.close() + }) +}) diff --git a/test/extensions.test.ts b/test/extensions.test.ts new file mode 100644 index 00000000..f6966475 --- /dev/null +++ b/test/extensions.test.ts @@ -0,0 +1,144 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/extensions', () => { + test('should list extensions', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/extensions', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list extensions 
with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/extensions?limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent extension', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/extensions/non-existent-extension', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) + + test('should create extension, retrieve, update, delete', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/extensions', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { name: 'pgcrypto', version: '1.3' }, + }) + expect(response.statusCode).toBe(200) + expect(response.json()).toMatchInlineSnapshot(` + { + "comment": "cryptographic functions", + "default_version": "1.3", + "installed_version": "1.3", + "name": "pgcrypto", + "schema": "public", + } + `) + + const retrieveResponse = await app.inject({ + method: 'GET', + url: '/extensions/pgcrypto', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(retrieveResponse.statusCode).toBe(200) + expect(retrieveResponse.json()).toMatchInlineSnapshot(` + { + "comment": "cryptographic functions", + "default_version": "1.3", + "installed_version": "1.3", + "name": "pgcrypto", + "schema": "public", + } + `) + + const updateResponse = await app.inject({ + method: 'PATCH', + url: '/extensions/pgcrypto', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { schema: 'public' }, + }) + expect(updateResponse.statusCode).toBe(200) + expect(updateResponse.json()).toMatchInlineSnapshot(` + { + "comment": "cryptographic functions", + "default_version": "1.3", + "installed_version": "1.3", + "name": "pgcrypto", + "schema": 
"public", + } + `) + + const deleteResponse = await app.inject({ + method: 'DELETE', + url: '/extensions/pgcrypto', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(deleteResponse.statusCode).toBe(200) + expect(deleteResponse.json()).toMatchInlineSnapshot(` + { + "comment": "cryptographic functions", + "default_version": "1.3", + "installed_version": "1.3", + "name": "pgcrypto", + "schema": "public", + } + `) + + await app.close() + }) + + test('should return 400 for invalid extension name', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/extensions', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { name: 'invalid-extension', version: '1.3' }, + }) + expect(response.statusCode).toBe(400) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "could not open extension control file "/usr/share/postgresql/14/extension/invalid-extension.control": No such file or directory", + } + `) + await app.close() + }) +}) diff --git a/test/functions.test.ts b/test/functions.test.ts new file mode 100644 index 00000000..f37e850e --- /dev/null +++ b/test/functions.test.ts @@ -0,0 +1,205 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/functions', () => { + test('should list functions', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/functions', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list functions with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/functions?include_system_schemas=true&limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + 
expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent function', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/functions/non-existent-function', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) + + test('should create function, retrieve, update, delete', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/functions', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_function', + schema: 'public', + language: 'plpgsql', + definition: 'BEGIN RETURN 42; END;', + return_type: 'integer', + }, + }) + expect(response.statusCode).toBe(200) + const responseData = response.json() + expect(responseData).toMatchObject({ + args: [], + argument_types: '', + behavior: 'VOLATILE', + complete_statement: expect.stringContaining( + 'CREATE OR REPLACE FUNCTION public.test_function()' + ), + config_params: null, + definition: 'BEGIN RETURN 42; END;', + id: expect.any(Number), + identity_argument_types: '', + is_set_returning_function: false, + language: 'plpgsql', + name: 'test_function', + return_type: 'integer', + return_type_id: 23, + return_type_relation_id: null, + schema: 'public', + security_definer: false, + }) + + const { id } = responseData + + const retrieveResponse = await app.inject({ + method: 'GET', + url: `/functions/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(retrieveResponse.statusCode).toBe(200) + const retrieveData = retrieveResponse.json() + expect(retrieveData).toMatchObject({ + args: [], + argument_types: '', + behavior: 'VOLATILE', + complete_statement: expect.stringContaining( + 'CREATE OR REPLACE FUNCTION public.test_function()' + ), + config_params: null, + definition: 'BEGIN RETURN 42; END;', + id: expect.any(Number), + 
identity_argument_types: '', + is_set_returning_function: false, + language: 'plpgsql', + name: 'test_function', + return_type: 'integer', + return_type_id: 23, + return_type_relation_id: null, + schema: 'public', + security_definer: false, + }) + + const updateResponse = await app.inject({ + method: 'PATCH', + url: `/functions/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_function', + schema: 'public', + language: 'plpgsql', + definition: 'BEGIN RETURN 50; END;', + return_type: 'integer', + }, + }) + expect(updateResponse.statusCode).toBe(200) + const updateData = updateResponse.json() + expect(updateData).toMatchObject({ + args: [], + argument_types: '', + behavior: 'VOLATILE', + complete_statement: expect.stringContaining( + 'CREATE OR REPLACE FUNCTION public.test_function()' + ), + config_params: null, + definition: 'BEGIN RETURN 50; END;', + id: expect.any(Number), + identity_argument_types: '', + is_set_returning_function: false, + language: 'plpgsql', + name: 'test_function', + return_type: 'integer', + return_type_id: 23, + return_type_relation_id: null, + schema: 'public', + security_definer: false, + }) + + const deleteResponse = await app.inject({ + method: 'DELETE', + url: `/functions/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(deleteResponse.statusCode).toBe(200) + const deleteData = deleteResponse.json() + expect(deleteData).toMatchObject({ + args: [], + argument_types: '', + behavior: 'VOLATILE', + complete_statement: expect.stringContaining( + 'CREATE OR REPLACE FUNCTION public.test_function()' + ), + config_params: null, + definition: 'BEGIN RETURN 50; END;', + id: expect.any(Number), + identity_argument_types: '', + is_set_returning_function: false, + language: 'plpgsql', + name: 'test_function', + return_type: 'integer', + return_type_id: 23, + return_type_relation_id: null, + schema: 'public', + security_definer: false, + }) + }) + + test('should return 400 for invalid payload', 
async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/functions', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_function12', + }, + }) + expect(response.statusCode).toBe(400) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "syntax error at or near "NULL"", + } + `) + }) +}) diff --git a/test/index.test.ts b/test/index.test.ts index 6ca2b87e..d879d232 100644 --- a/test/index.test.ts +++ b/test/index.test.ts @@ -24,3 +24,15 @@ import './server/table-privileges' import './server/typegen' import './server/result-size-limit' import './server/query-timeout' +// New tests for increased coverage - commented out to avoid import issues +// import './server/app' +// import './server/utils' +// import './server/functions' +// import './server/config' +// import './server/extensions' +// import './server/publications' +// import './server/schemas' +// import './server/roles' +// import './server/triggers' +// import './server/types' +// import './server/views' diff --git a/test/lib/utils.ts b/test/lib/utils.ts index e4d48fe7..a88391ed 100644 --- a/test/lib/utils.ts +++ b/test/lib/utils.ts @@ -1,9 +1,11 @@ import { afterAll } from 'vitest' import { PostgresMeta } from '../../src/lib' +export const TEST_CONNECTION_STRING = 'postgresql://postgres:postgres@localhost:5432' + export const pgMeta = new PostgresMeta({ max: 1, - connectionString: 'postgresql://postgres:postgres@localhost:5432/postgres', + connectionString: TEST_CONNECTION_STRING, }) afterAll(() => pgMeta.end()) diff --git a/test/publications.test.ts b/test/publications.test.ts new file mode 100644 index 00000000..0687c9dd --- /dev/null +++ b/test/publications.test.ts @@ -0,0 +1,187 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/publications', () => { + test('should list publications', async () => 
{ + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/publications', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list publications with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/publications?limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent publication', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/publications/non-existent-publication', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) + + test('should create publication, retrieve, update, delete', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/publications', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_publication', + publish_insert: true, + publish_update: true, + publish_delete: true, + publish_truncate: false, + tables: ['users'], + }, + }) + expect(response.statusCode).toBe(200) + const responseData = response.json() + expect(responseData).toMatchObject({ + id: expect.any(Number), + name: 'test_publication', + owner: 'postgres', + publish_delete: true, + publish_insert: true, + publish_truncate: false, + publish_update: true, + tables: [ + { + id: expect.any(Number), + name: 'users', + schema: 'public', + }, + ], + }) + + const { id } = responseData + + const retrieveResponse = await app.inject({ + method: 'GET', + url: `/publications/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(retrieveResponse.statusCode).toBe(200) + const 
retrieveData = retrieveResponse.json() + expect(retrieveData).toMatchObject({ + id: expect.any(Number), + name: 'test_publication', + owner: 'postgres', + publish_delete: true, + publish_insert: true, + publish_truncate: false, + publish_update: true, + tables: [ + { + id: expect.any(Number), + name: 'users', + schema: 'public', + }, + ], + }) + + const updateResponse = await app.inject({ + method: 'PATCH', + url: `/publications/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + publish_delete: false, + }, + }) + expect(updateResponse.statusCode).toBe(200) + const updateData = updateResponse.json() + expect(updateData).toMatchObject({ + id: expect.any(Number), + name: 'test_publication', + owner: 'postgres', + publish_delete: false, + publish_insert: true, + publish_truncate: false, + publish_update: true, + tables: [ + { + id: expect.any(Number), + name: 'users', + schema: 'public', + }, + ], + }) + + const deleteResponse = await app.inject({ + method: 'DELETE', + url: `/publications/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(deleteResponse.statusCode).toBe(200) + const deleteData = deleteResponse.json() + expect(deleteData).toMatchObject({ + id: expect.any(Number), + name: 'test_publication', + owner: 'postgres', + publish_delete: false, + publish_insert: true, + publish_truncate: false, + publish_update: true, + tables: [ + { + id: expect.any(Number), + name: 'users', + schema: 'public', + }, + ], + }) + }) + + test('should return 400 for invalid payload', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/publications', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_publication', + tables: ['non_existent_table'], + }, + }) + expect(response.statusCode).toBe(400) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "relation "non_existent_table" does not exist", + } + `) + }) +}) diff --git a/test/roles.test.ts 
b/test/roles.test.ts new file mode 100644 index 00000000..77b98c06 --- /dev/null +++ b/test/roles.test.ts @@ -0,0 +1,181 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/roles', () => { + test('should list roles', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/roles', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list roles with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/roles?limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent role', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/roles/non-existent-role', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) + + test('should create role, retrieve, update, delete', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/roles', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_role', + }, + }) + expect(response.statusCode).toBe(200) + const responseData = response.json() + expect(responseData).toMatchObject({ + active_connections: 0, + can_bypass_rls: false, + can_create_db: false, + can_create_role: false, + can_login: false, + config: null, + connection_limit: 100, + id: expect.any(Number), + inherit_role: true, + is_replication_role: false, + is_superuser: false, + name: 'test_role', + password: '********', + valid_until: null, + }) + 
+ const { id } = responseData + + const retrieveResponse = await app.inject({ + method: 'GET', + url: `/roles/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(retrieveResponse.statusCode).toBe(200) + const retrieveData = retrieveResponse.json() + expect(retrieveData).toMatchObject({ + active_connections: 0, + can_bypass_rls: false, + can_create_db: false, + can_create_role: false, + can_login: false, + config: null, + connection_limit: 100, + id: expect.any(Number), + inherit_role: true, + is_replication_role: false, + is_superuser: false, + name: 'test_role', + password: '********', + valid_until: null, + }) + + const updateResponse = await app.inject({ + method: 'PATCH', + url: `/roles/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_role_updated', + }, + }) + expect(updateResponse.statusCode).toBe(200) + const updateData = updateResponse.json() + expect(updateData).toMatchObject({ + active_connections: 0, + can_bypass_rls: false, + can_create_db: false, + can_create_role: false, + can_login: false, + config: null, + connection_limit: 100, + id: expect.any(Number), + inherit_role: true, + is_replication_role: false, + is_superuser: false, + name: 'test_role_updated', + password: '********', + valid_until: null, + }) + + const deleteResponse = await app.inject({ + method: 'DELETE', + url: `/roles/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(deleteResponse.statusCode).toBe(200) + const deleteData = deleteResponse.json() + expect(deleteData).toMatchObject({ + active_connections: 0, + can_bypass_rls: false, + can_create_db: false, + can_create_role: false, + can_login: false, + config: null, + connection_limit: 100, + id: expect.any(Number), + inherit_role: true, + is_replication_role: false, + is_superuser: false, + name: 'test_role_updated', + password: '********', + valid_until: null, + }) + }) + + test('should return 400 for invalid payload', async () => { + const app = build() + 
const response = await app.inject({ + method: 'POST', + url: '/roles', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'pg_', + }, + }) + expect(response.statusCode).toBe(400) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "role name "pg_" is reserved", + } + `) + }) +}) diff --git a/test/schemas.test.ts b/test/schemas.test.ts new file mode 100644 index 00000000..73ed73c1 --- /dev/null +++ b/test/schemas.test.ts @@ -0,0 +1,137 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/schemas', () => { + test('should list schemas', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/schemas', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list schemas with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/schemas?include_system_schemas=true&limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent schema', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/schemas/non-existent-schema', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) + + test('should create schema, retrieve, update, delete', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/schemas', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_schema', + }, + }) + expect(response.statusCode).toBe(200) + const 
responseData = response.json() + expect(responseData).toMatchObject({ + id: expect.any(Number), + name: 'test_schema', + owner: 'postgres', + }) + + const { id } = responseData + + const retrieveResponse = await app.inject({ + method: 'GET', + url: `/schemas/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(retrieveResponse.statusCode).toBe(200) + const retrieveData = retrieveResponse.json() + expect(retrieveData).toMatchObject({ + id: expect.any(Number), + name: 'test_schema', + owner: 'postgres', + }) + + const updateResponse = await app.inject({ + method: 'PATCH', + url: `/schemas/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_schema_updated', + }, + }) + expect(updateResponse.statusCode).toBe(200) + const updateData = updateResponse.json() + expect(updateData).toMatchObject({ + id: expect.any(Number), + name: 'test_schema_updated', + owner: 'postgres', + }) + + const deleteResponse = await app.inject({ + method: 'DELETE', + url: `/schemas/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(deleteResponse.statusCode).toBe(200) + const deleteData = deleteResponse.json() + expect(deleteData).toMatchObject({ + id: expect.any(Number), + name: 'test_schema_updated', + owner: 'postgres', + }) + }) + + test('should return 400 for invalid payload', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/schemas', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'pg_', + }, + }) + expect(response.statusCode).toBe(400) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "unacceptable schema name "pg_"", + } + `) + }) +}) diff --git a/test/triggers.test.ts b/test/triggers.test.ts new file mode 100644 index 00000000..c537093c --- /dev/null +++ b/test/triggers.test.ts @@ -0,0 +1,186 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from 
'./lib/utils.js' + +describe('server/routes/triggers', () => { + test('should list triggers', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/triggers', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list triggers with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/triggers?include_system_schemas=true&limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent trigger', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/triggers/non-existent-trigger', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) + + test('should create trigger, retrieve, update, delete', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', + url: '/triggers', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_trigger1', + table: 'users_audit', + function_name: 'audit_action', + activation: 'AFTER', + events: ['UPDATE'], + }, + }) + expect(response.statusCode).toBe(200) + const responseData = response.json() + expect(responseData).toMatchObject({ + activation: 'AFTER', + condition: null, + enabled_mode: 'ORIGIN', + events: ['UPDATE'], + function_args: [], + function_name: 'audit_action', + function_schema: 'public', + id: expect.any(Number), + name: 'test_trigger1', + orientation: 'STATEMENT', + schema: 'public', + table: 'users_audit', + table_id: expect.any(Number), + }) + + const { id } = responseData + + const retrieveResponse = await app.inject({ + method: 'GET', 
+ url: `/triggers/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(retrieveResponse.statusCode).toBe(200) + const retrieveData = retrieveResponse.json() + expect(retrieveData).toMatchObject({ + activation: 'AFTER', + condition: null, + enabled_mode: 'ORIGIN', + events: ['UPDATE'], + function_args: [], + function_name: 'audit_action', + function_schema: 'public', + id: expect.any(Number), + name: 'test_trigger1', + orientation: 'STATEMENT', + schema: 'public', + table: 'users_audit', + table_id: expect.any(Number), + }) + + const updateResponse = await app.inject({ + method: 'PATCH', + url: `/triggers/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_trigger1_updated', + enabled_mode: 'DISABLED', + }, + }) + expect(updateResponse.statusCode).toBe(200) + const updateData = updateResponse.json() + expect(updateData).toMatchObject({ + activation: 'AFTER', + condition: null, + enabled_mode: 'DISABLED', + events: ['UPDATE'], + function_args: [], + function_name: 'audit_action', + function_schema: 'public', + id: expect.any(Number), + name: 'test_trigger1_updated', + orientation: 'STATEMENT', + schema: 'public', + table: 'users_audit', + table_id: expect.any(Number), + }) + + const deleteResponse = await app.inject({ + method: 'DELETE', + url: `/triggers/${id}`, + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(deleteResponse.statusCode).toBe(200) + const deleteData = deleteResponse.json() + expect(deleteData).toMatchObject({ + activation: 'AFTER', + condition: null, + enabled_mode: 'DISABLED', + events: ['UPDATE'], + function_args: [], + function_name: 'audit_action', + function_schema: 'public', + id: expect.any(Number), + name: 'test_trigger1_updated', + orientation: 'STATEMENT', + schema: 'public', + table: 'users_audit', + table_id: expect.any(Number), + }) + }) + + test('should return 400 for invalid payload', async () => { + const app = build() + const response = await app.inject({ + method: 'POST', 
+ url: '/triggers', + headers: { + pg: TEST_CONNECTION_STRING, + }, + payload: { + name: 'test_trigger_invalid', + table: 'non_existent_table', + function_name: 'audit_action', + activation: 'AFTER', + events: ['UPDATE'], + }, + }) + expect(response.statusCode).toBe(400) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "relation "public.non_existent_table" does not exist", + } + `) + }) +}) diff --git a/test/types.test.ts b/test/types.test.ts new file mode 100644 index 00000000..df2af697 --- /dev/null +++ b/test/types.test.ts @@ -0,0 +1,46 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/types', () => { + test('should list types', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/types', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list types with query parameters', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/types?include_system_schemas=true&limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent type', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/types/non-existent-type', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + await app.close() + }) +}) diff --git a/test/utils.test.ts b/test/utils.test.ts new file mode 100644 index 00000000..3d70b1a5 --- /dev/null +++ b/test/utils.test.ts @@ -0,0 +1,105 @@ +import { expect, test, describe } from 'vitest' +import { FastifyRequest } from 
'fastify' +import { + extractRequestForLogging, + createConnectionConfig, + translateErrorToResponseCode, +} from '../src/server/utils.js' + +describe('server/utils', () => { + describe('extractRequestForLogging', () => { + test('should extract request information for logging', () => { + const mockRequest = { + method: 'GET', + url: '/test', + headers: { + 'user-agent': 'test-agent', + 'x-supabase-info': 'test-info', + }, + query: { param: 'value' }, + } as FastifyRequest + + const result = extractRequestForLogging(mockRequest) + expect(result).toHaveProperty('method') + expect(result).toHaveProperty('url') + expect(result).toHaveProperty('pg') + expect(result).toHaveProperty('opt') + }) + + test('should handle request with minimal properties', () => { + const mockRequest = { + method: 'POST', + url: '/api/test', + headers: {}, + } as FastifyRequest + + const result = extractRequestForLogging(mockRequest) + expect(result.method).toBe('POST') + expect(result.url).toBe('/api/test') + expect(result.pg).toBe('unknown') + }) + }) + + describe('createConnectionConfig', () => { + test('should create connection config from request headers', () => { + const mockRequest = { + headers: { + pg: 'postgresql://user:pass@localhost:5432/db', + 'x-pg-application-name': 'test-app', + }, + } as FastifyRequest + + const result = createConnectionConfig(mockRequest) + expect(result).toHaveProperty('connectionString') + expect(result).toHaveProperty('application_name') + expect(result.connectionString).toBe('postgresql://user:pass@localhost:5432/db') + expect(result.application_name).toBe('test-app') + }) + + test('should handle request without application name', () => { + const mockRequest = { + headers: { + pg: 'postgresql://user:pass@localhost:5432/db', + }, + } as FastifyRequest + + const result = createConnectionConfig(mockRequest) + expect(result).toHaveProperty('connectionString') + expect(result.connectionString).toBe('postgresql://user:pass@localhost:5432/db') + // 
application_name should have default value if not provided + expect(result.application_name).toBe('postgres-meta 0.0.0-automated') + }) + }) + + describe('translateErrorToResponseCode', () => { + test('should return 504 for connection timeout errors', () => { + const error = { message: 'Connection terminated due to connection timeout' } + const result = translateErrorToResponseCode(error) + expect(result).toBe(504) + }) + + test('should return 503 for too many clients errors', () => { + const error = { message: 'sorry, too many clients already' } + const result = translateErrorToResponseCode(error) + expect(result).toBe(503) + }) + + test('should return 408 for query timeout errors', () => { + const error = { message: 'Query read timeout' } + const result = translateErrorToResponseCode(error) + expect(result).toBe(408) + }) + + test('should return default 400 for other errors', () => { + const error = { message: 'database connection failed' } + const result = translateErrorToResponseCode(error) + expect(result).toBe(400) + }) + + test('should return custom default for other errors', () => { + const error = { message: 'some other error' } + const result = translateErrorToResponseCode(error, 500) + expect(result).toBe(500) + }) + }) +}) diff --git a/test/views.test.ts b/test/views.test.ts new file mode 100644 index 00000000..d713e919 --- /dev/null +++ b/test/views.test.ts @@ -0,0 +1,51 @@ +import { expect, test, describe } from 'vitest' +import { build } from '../src/server/app.js' +import { TEST_CONNECTION_STRING } from './lib/utils.js' + +describe('server/routes/views', () => { + test('should list views', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/views', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should list views with query parameters', async () => { + const app 
= build() + const response = await app.inject({ + method: 'GET', + url: '/views?include_system_schemas=true&limit=5&offset=0', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(200) + expect(Array.isArray(JSON.parse(response.body))).toBe(true) + await app.close() + }) + + test('should return 404 for non-existent view', async () => { + const app = build() + const response = await app.inject({ + method: 'GET', + url: '/views/1', + headers: { + pg: TEST_CONNECTION_STRING, + }, + }) + expect(response.statusCode).toBe(404) + expect(response.json()).toMatchInlineSnapshot(` + { + "error": "Cannot find a view with ID 1", + } + `) + await app.close() + }) +}) From 479aa1cff51816f7d5d60ab7efd49cf65294e719 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Sep 2025 01:37:50 +0000 Subject: [PATCH 31/72] chore(deps): bump vite from 6.3.4 to 6.3.6 (#986) Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 6.3.4 to 6.3.6. - [Release notes](https://github.com/vitejs/vite/releases) - [Changelog](https://github.com/vitejs/vite/blob/v6.3.6/packages/vite/CHANGELOG.md) - [Commits](https://github.com/vitejs/vite/commits/v6.3.6/packages/vite) --- updated-dependencies: - dependency-name: vite dependency-version: 6.3.6 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 752051a3..44c9457e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8175,9 +8175,9 @@ } }, "node_modules/vite": { - "version": "6.3.4", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.4.tgz", - "integrity": "sha512-BiReIiMS2fyFqbqNT/Qqt4CVITDU9M9vE+DKcVAsB+ZV0wvTKd+3hMbkpxz1b+NmEDMegpVbisKiAZOnvO92Sw==", + "version": "6.3.6", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.6.tgz", + "integrity": "sha512-0msEVHJEScQbhkbVTb/4iHZdJ6SXp/AvxL2sjwYQFfBqleHtnCqv1J3sa9zbWz/6kW1m9Tfzn92vW+kZ1WV6QA==", "dev": true, "license": "MIT", "dependencies": { From 0300c491f5549f36960d7830236ab0cf0cfaf48e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 10 Sep 2025 01:41:34 +0000 Subject: [PATCH 32/72] chore(deps): bump brace-expansion (#987) Bumps and [brace-expansion](https://github.com/juliangruber/brace-expansion). These dependencies needed to be updated together. Updates `brace-expansion` from 1.1.11 to 1.1.12 - [Release notes](https://github.com/juliangruber/brace-expansion/releases) - [Commits](https://github.com/juliangruber/brace-expansion/compare/1.1.11...v1.1.12) Updates `brace-expansion` from 2.0.1 to 2.0.2 - [Release notes](https://github.com/juliangruber/brace-expansion/releases) - [Commits](https://github.com/juliangruber/brace-expansion/compare/1.1.11...v1.1.12) --- updated-dependencies: - dependency-name: brace-expansion dependency-version: 1.1.12 dependency-type: indirect - dependency-name: brace-expansion dependency-version: 2.0.2 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/package-lock.json b/package-lock.json index 44c9457e..75d2da17 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2601,9 +2601,9 @@ "license": "MIT" }, "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "version": "1.1.12", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", + "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", @@ -6861,9 +6861,9 @@ } }, "node_modules/rimraf/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { @@ -7717,9 +7717,9 @@ } }, "node_modules/test-exclude/node_modules/brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": 
"sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", "dev": true, "license": "MIT", "dependencies": { From 8b5b74f61bc4f1b7f82c02d1f374ce7744795b64 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Tue, 16 Sep 2025 17:52:57 +0200 Subject: [PATCH 33/72] ci: canary deploy process (#988) * ci: canary deploy process * chore: upgrade a dep * chore: reword canary comment --- .github/workflows/canary-comment.yml | 129 +++++++++++++++++++++ .github/workflows/canary-deploy.yml | 123 ++++++++++++++++++++ CONTRIBUTING.md | 25 ++++- package-lock.json | 161 +++++---------------------- package.json | 2 +- 5 files changed, 302 insertions(+), 138 deletions(-) create mode 100644 .github/workflows/canary-comment.yml create mode 100644 .github/workflows/canary-deploy.yml diff --git a/.github/workflows/canary-comment.yml b/.github/workflows/canary-comment.yml new file mode 100644 index 00000000..aae0a7dc --- /dev/null +++ b/.github/workflows/canary-comment.yml @@ -0,0 +1,129 @@ +name: Update Canary PR Comment + +permissions: + pull-requests: write + actions: read + +on: + workflow_run: + workflows: ['Canary Deploy'] + types: [completed] + +jobs: + update-comment: + # Only run on the correct repository + if: github.repository == 'supabase/postgres-meta' + runs-on: ubuntu-latest + timeout-minutes: 5 + steps: + # Get PR number from the workflow run + - name: Get PR info + id: pr-info + uses: actions/github-script@v7 + with: + script: | + // Get the workflow run details + const workflowRun = context.payload.workflow_run; + + // Find associated PR + const prs = await github.rest.pulls.list({ + owner: context.repo.owner, + repo: context.repo.repo, + state: 'open', + head: `${workflowRun.head_repository.owner.login}:${workflowRun.head_branch}` + }); + + if (prs.data.length > 0) { + const pr = prs.data[0]; + core.setOutput('pr_number', pr.number); + core.setOutput('found', 'true'); + console.log(`Found PR #${pr.number}`); + } else { + 
core.setOutput('found', 'false'); + console.log('No associated PR found'); + } + + # Only continue if we found a PR and the workflow succeeded + - name: Download canary info + if: ${{ steps.pr-info.outputs.found == 'true' && github.event.workflow_run.conclusion == 'success' }} + uses: actions/download-artifact@v4 + with: + name: canary-info + path: canary-info/ + run-id: ${{ github.event.workflow_run.id }} + continue-on-error: true + + - name: Read canary info + if: ${{ steps.pr-info.outputs.found == 'true' && github.event.workflow_run.conclusion == 'success' }} + id: canary-info + run: | + if [ -f "canary-info/canary-tags.txt" ]; then + # Read the first tag (DockerHub) from the tags + FIRST_TAG=$(head -n1 canary-info/canary-tags.txt) + echo "tag=$FIRST_TAG" >> $GITHUB_OUTPUT + echo "found=true" >> $GITHUB_OUTPUT + echo "commit-sha=$(cat canary-info/commit-sha.txt)" >> $GITHUB_OUTPUT + else + echo "found=false" >> $GITHUB_OUTPUT + fi + continue-on-error: true + + # Find existing comment + - name: Find existing comment + if: ${{ steps.pr-info.outputs.found == 'true' }} + uses: peter-evans/find-comment@v3 + id: find-comment + with: + issue-number: ${{ steps.pr-info.outputs.pr_number }} + comment-author: 'github-actions[bot]' + body-includes: '' + + # Create or update comment based on workflow status + - name: Create or update canary comment + if: ${{ steps.pr-info.outputs.found == 'true' }} + uses: peter-evans/create-or-update-comment@v4 + with: + comment-id: ${{ steps.find-comment.outputs.comment-id }} + issue-number: ${{ steps.pr-info.outputs.pr_number }} + body: | + + ## 🚀 Canary Deployment Status + + ${{ github.event.workflow_run.conclusion == 'success' && steps.canary-info.outputs.found == 'true' && format('✅ **Canary image deployed successfully!** + + 🐳 **Docker Image:** `{0}` + 📝 **Commit:** `{1}` + + You can test this canary deployment by pulling the image: + ```bash + docker pull {0} + ``` + + You can also set the version in a supabase local project by 
running: + ```bash + echo "{0}" > supabase/.temp/pgmeta-version + ``` + + Or use it in your docker-compose.yml: + ```yaml + services: + postgres-meta: + image: {0} + # ... other configuration + ``` + + The canary image is available on: + - 🐳 [Docker Hub](https://hub.docker.com/r/supabase/postgres-meta) + - 📦 [GitHub Container Registry](https://ghcr.io/supabase/postgres-meta) + - ☁️ [AWS ECR Public](https://gallery.ecr.aws/supabase/postgres-meta) + ', steps.canary-info.outputs.tag, steps.canary-info.outputs.commit-sha) || '' }} + + ${{ github.event.workflow_run.conclusion == 'failure' && '❌ **Canary deployment failed** + + Please check the [workflow logs](' }}${{ github.event.workflow_run.conclusion == 'failure' && github.event.workflow_run.html_url || '' }}${{ github.event.workflow_run.conclusion == 'failure' && ') for more details. + + Make sure your PR has the `deploy-canary` label and targets the `master` branch.' || '' }} + + --- + Last updated: ${{ github.event.workflow_run.updated_at }} + edit-mode: replace diff --git a/.github/workflows/canary-deploy.yml b/.github/workflows/canary-deploy.yml new file mode 100644 index 00000000..872fcc41 --- /dev/null +++ b/.github/workflows/canary-deploy.yml @@ -0,0 +1,123 @@ +name: Canary Deploy + +permissions: + contents: read + pull-requests: read + packages: write + id-token: write + +on: + pull_request: + types: [opened, synchronize, labeled] + paths: + - 'src/**' + - 'package.json' + - 'package-lock.json' + - 'tsconfig.json' + - 'Dockerfile' + +jobs: + build-canary: + # Only run if PR has the 'deploy-canary' label, is on the correct repository, and targets master branch + if: | + github.repository == 'supabase/postgres-meta' && + github.event.pull_request.base.ref == 'master' && + contains(github.event.pull_request.labels.*.name, 'deploy-canary') + runs-on: ubuntu-22.04 + timeout-minutes: 30 + outputs: + canary-tag: ${{ steps.meta.outputs.tags }} + pr-number: ${{ github.event.pull_request.number }} + steps: + # 
Checkout fork code - safe because no secrets are available for building + - name: Checkout code + uses: actions/checkout@v5 + + # Log PR author for auditing + - name: Log PR author + run: | + echo "Canary deploy triggered by: ${{ github.event.pull_request.user.login }}" + echo "PR #${{ github.event.pull_request.number }} from fork: ${{ github.event.pull_request.head.repo.full_name }}" + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version-file: '.nvmrc' + cache: 'npm' + + - name: Install dependencies and build + run: | + npm clean-install + npm run build + + # Generate canary tag + - id: meta + uses: docker/metadata-action@v5 + with: + images: | + supabase/postgres-meta + public.ecr.aws/supabase/postgres-meta + ghcr.io/supabase/postgres-meta + tags: | + type=raw,value=canary-pr-${{ github.event.pull_request.number }}-${{ github.event.pull_request.head.sha }} + type=raw,value=canary-pr-${{ github.event.pull_request.number }} + + - uses: docker/setup-qemu-action@v3 + with: + platforms: amd64,arm64 + - uses: docker/setup-buildx-action@v3 + + - name: Login to DockerHub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKER_USERNAME }} + password: ${{ secrets.DOCKER_PASSWORD }} + + - name: configure aws credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: ${{ secrets.PROD_AWS_ROLE }} + aws-region: us-east-1 + + - name: Login to ECR + uses: docker/login-action@v3 + with: + registry: public.ecr.aws + + - name: Login to GHCR + uses: docker/login-action@v3 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push canary image + uses: docker/build-push-action@v6 + with: + context: . 
+ push: true + platforms: linux/amd64,linux/arm64 + tags: ${{ steps.meta.outputs.tags }} + labels: | + org.opencontainers.image.title=postgres-meta-canary + org.opencontainers.image.description=Canary build for PR #${{ github.event.pull_request.number }} + org.opencontainers.image.source=${{ github.event.pull_request.head.repo.html_url }} + org.opencontainers.image.revision=${{ github.event.pull_request.head.sha }} + canary.pr.number=${{ github.event.pull_request.number }} + canary.pr.author=${{ github.event.pull_request.user.login }} + + # Save canary info for the comment workflow + - name: Save canary info + run: | + mkdir -p canary-info + echo "${{ steps.meta.outputs.tags }}" > canary-info/canary-tags.txt + echo "${{ github.event.pull_request.number }}" > canary-info/pr-number.txt + echo "${{ github.event.pull_request.head.sha }}" > canary-info/commit-sha.txt + echo "postgres-meta" > canary-info/package-name.txt + + - name: Upload canary info + uses: actions/upload-artifact@v4 + with: + name: canary-info + path: canary-info/ + retention-days: 7 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c7f56a71..c6d17b80 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -11,4 +11,27 @@ 2. Run the tests: `npm run test:run` 3. Make changes in code (`/src`) and tests (`/test/lib` and `/test/server`) 4. Run the tests again: `npm run test:run` -5. Commit + PR \ No newline at end of file +5. Commit + PR + +### Canary Deployments + +For testing your changes when they impact other things (like type generation and postgrest-js), you can deploy a canary version of postgres-meta: + +1. **Create a Pull Request** targeting the `master` branch +2. **Add the `deploy-canary` label** to your PR +3. **Wait for the canary build** - GitHub Actions will automatically build and push a canary Docker image +4. 
**Use the canary image** - The bot will comment on your PR with the exact image tag and usage instructions + +The canary image will be tagged as: + +- `supabase/postgres-meta:canary-pr-{PR_NUMBER}-{COMMIT_SHA}` +- `supabase/postgres-meta:canary-pr-{PR_NUMBER}` + +Example usage: + +```bash +docker pull supabase/postgres-meta:canary-pr-123-abc1234 +echo "canary-pr-123-abc1234" > supabase/.temp/pgmeta-version +``` + +**Note:** Only maintainers can add the `deploy-canary` label for security reasons. The canary deployment requires access to production Docker registries. diff --git a/package-lock.json b/package-lock.json index 75d2da17..cd6f7b63 100644 --- a/package-lock.json +++ b/package-lock.json @@ -38,7 +38,7 @@ "cpy-cli": "^5.0.0", "nodemon": "^3.1.7", "npm-run-all": "^4.1.5", - "pino-pretty": "^12.0.0", + "pino-pretty": "^13.1.1", "rimraf": "^6.0.1", "ts-node": "^10.9.1", "typescript": "^5.6.3", @@ -2230,19 +2230,6 @@ "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", "license": "ISC" }, - "node_modules/abort-controller": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", - "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", - "dev": true, - "license": "MIT", - "dependencies": { - "event-target-shim": "^5.0.0" - }, - "engines": { - "node": ">=6.5" - } - }, "node_modules/abstract-logging": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/abstract-logging/-/abstract-logging-2.0.1.tgz", @@ -2551,27 +2538,6 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "license": "MIT" }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": 
"sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, "node_modules/big-integer": { "version": "1.6.52", "resolved": "https://registry.npmjs.org/big-integer/-/big-integer-1.6.52.tgz", @@ -2623,31 +2589,6 @@ "node": ">=8" } }, - "node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, "node_modules/cac": { "version": "6.7.14", "resolved": "https://registry.npmjs.org/cac/-/cac-6.7.14.tgz", @@ -3500,26 +3441,6 @@ "@types/estree": "^1.0.0" } }, - "node_modules/event-target-shim": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", - "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/events": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/events/-/events-3.3.0.tgz", - "integrity": "sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8.x" - } - }, "node_modules/expect-type": { "version": "1.2.0", 
"resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.2.0.tgz", @@ -4277,27 +4198,6 @@ "node": ">=0.10.0" } }, - "node_modules/ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "BSD-3-Clause" - }, "node_modules/ignore": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", @@ -6329,9 +6229,9 @@ } }, "node_modules/pino-pretty": { - "version": "12.1.0", - "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-12.1.0.tgz", - "integrity": "sha512-Z7JdCPqggoRyo0saJyCe1BN8At5qE+ZBElNbyx+znCaCVN+ohOqlWb+/WSYnamzfi2e6P6pXq/3H66KwFQHXWg==", + "version": "13.1.1", + "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.1.1.tgz", + "integrity": "sha512-TNNEOg0eA0u+/WuqH0MH0Xui7uqVk9D74ESOpjtebSQYbNWJk/dIxCXIxFsNfeN53JmtWqYHP2OrIZjT/CBEnA==", "dev": true, "license": "MIT", "dependencies": { @@ -6345,31 +6245,30 @@ "on-exit-leak-free": "^2.1.0", "pino-abstract-transport": "^2.0.0", "pump": "^3.0.0", - "readable-stream": "^4.0.0", - "secure-json-parse": "^2.4.0", + "secure-json-parse": "^4.0.0", "sonic-boom": "^4.0.1", - "strip-json-comments": "^3.1.1" + "strip-json-comments": "^5.0.2" }, "bin": { "pino-pretty": "bin.js" } }, - "node_modules/pino-pretty/node_modules/readable-stream": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-4.7.0.tgz", - "integrity": "sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==", + "node_modules/pino-pretty/node_modules/secure-json-parse": { + 
"version": "4.0.0", + "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-4.0.0.tgz", + "integrity": "sha512-dxtLJO6sc35jWidmLxo7ij+Eg48PM/kleBsxpC8QJE0qJICe+KawkDQmvCMZUr9u7WKVHgMW6vy3fQ7zMiFZMA==", "dev": true, - "license": "MIT", - "dependencies": { - "abort-controller": "^3.0.0", - "buffer": "^6.0.3", - "events": "^3.3.0", - "process": "^0.11.10", - "string_decoder": "^1.3.0" - }, - "engines": { - "node": "^12.22.0 || ^14.17.0 || >=16.0.0" - } + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ], + "license": "BSD-3-Clause" }, "node_modules/pino-std-serializers": { "version": "7.0.0", @@ -6513,16 +6412,6 @@ "prettier": "^3.0.3" } }, - "node_modules/process": { - "version": "0.11.10", - "resolved": "https://registry.npmjs.org/process/-/process-0.11.10.tgz", - "integrity": "sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6.0" - } - }, "node_modules/process-warning": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-3.0.0.tgz", @@ -7629,13 +7518,13 @@ } }, "node_modules/strip-json-comments": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", - "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-5.0.3.tgz", + "integrity": "sha512-1tB5mhVo7U+ETBKNf92xT4hrQa3pm0MZ0PQvuDnWgAAGHDsfp4lPSpiS6psrSiet87wyGPh9ft6wmhOMQ0hDiw==", "dev": true, "license": "MIT", "engines": { - "node": ">=8" + "node": ">=14.16" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" diff --git a/package.json b/package.json index 62315e9f..941df965 
100644 --- a/package.json +++ b/package.json @@ -67,7 +67,7 @@ "cpy-cli": "^5.0.0", "nodemon": "^3.1.7", "npm-run-all": "^4.1.5", - "pino-pretty": "^12.0.0", + "pino-pretty": "^13.1.1", "rimraf": "^6.0.1", "ts-node": "^10.9.1", "typescript": "^5.6.3", From dcb8e9ba9a70f05aab4865144743233ed285db59 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Tue, 16 Sep 2025 23:36:28 +0200 Subject: [PATCH 34/72] ci: fix canary-comment dependency (#989) * ci: fix canary-comment dependency * chore: change dependency for trigger * chore: only comment on canary label * Revert "chore: change dependency for trigger" This reverts commit ea759f017e55d7499bdc0a6001e696f88fd4ceaa. --- .github/workflows/canary-comment.yml | 64 ++++++++++++++++------------ .github/workflows/canary-deploy.yml | 16 ------- 2 files changed, 37 insertions(+), 43 deletions(-) diff --git a/.github/workflows/canary-comment.yml b/.github/workflows/canary-comment.yml index aae0a7dc..69cebc03 100644 --- a/.github/workflows/canary-comment.yml +++ b/.github/workflows/canary-comment.yml @@ -35,42 +35,52 @@ jobs: if (prs.data.length > 0) { const pr = prs.data[0]; - core.setOutput('pr_number', pr.number); - core.setOutput('found', 'true'); - console.log(`Found PR #${pr.number}`); + + // Check if PR has the deploy-canary label + const labels = pr.labels.map(label => label.name); + const hasCanaryLabel = labels.includes('deploy-canary'); + + if (hasCanaryLabel) { + core.setOutput('pr_number', pr.number); + core.setOutput('found', 'true'); + core.setOutput('has_canary_label', 'true'); + console.log(`Found PR #${pr.number} with deploy-canary label`); + } else { + core.setOutput('found', 'false'); + core.setOutput('has_canary_label', 'false'); + console.log(`Found PR #${pr.number} but it doesn't have deploy-canary label`); + } } else { core.setOutput('found', 'false'); + core.setOutput('has_canary_label', 'false'); console.log('No associated PR found'); } - # Only continue if we found a PR and the workflow succeeded - - 
name: Download canary info - if: ${{ steps.pr-info.outputs.found == 'true' && github.event.workflow_run.conclusion == 'success' }} - uses: actions/download-artifact@v4 + # Extract canary info from the workflow run + - name: Extract canary info + if: ${{ steps.pr-info.outputs.found == 'true' && steps.pr-info.outputs.has_canary_label == 'true' && github.event.workflow_run.conclusion == 'success' }} + id: canary-info + uses: actions/github-script@v7 with: - name: canary-info - path: canary-info/ - run-id: ${{ github.event.workflow_run.id }} - continue-on-error: true + script: | + const workflowRun = context.payload.workflow_run; - - name: Read canary info - if: ${{ steps.pr-info.outputs.found == 'true' && github.event.workflow_run.conclusion == 'success' }} - id: canary-info - run: | - if [ -f "canary-info/canary-tags.txt" ]; then - # Read the first tag (DockerHub) from the tags - FIRST_TAG=$(head -n1 canary-info/canary-tags.txt) - echo "tag=$FIRST_TAG" >> $GITHUB_OUTPUT - echo "found=true" >> $GITHUB_OUTPUT - echo "commit-sha=$(cat canary-info/commit-sha.txt)" >> $GITHUB_OUTPUT - else - echo "found=false" >> $GITHUB_OUTPUT - fi - continue-on-error: true + // Extract PR number from the branch name or workflow run + const prNumber = '${{ steps.pr-info.outputs.pr_number }}'; + const commitSha = workflowRun.head_sha; + + // Generate the canary tag based on the pattern used in canary-deploy.yml + const canaryTag = `supabase/postgres-meta:canary-pr-${prNumber}-${commitSha}`; + + core.setOutput('tag', canaryTag); + core.setOutput('found', 'true'); + core.setOutput('commit-sha', commitSha); + + console.log(`Generated canary tag: ${canaryTag}`); # Find existing comment - name: Find existing comment - if: ${{ steps.pr-info.outputs.found == 'true' }} + if: ${{ steps.pr-info.outputs.found == 'true' && steps.pr-info.outputs.has_canary_label == 'true' }} uses: peter-evans/find-comment@v3 id: find-comment with: @@ -80,7 +90,7 @@ jobs: # Create or update comment based on workflow 
status - name: Create or update canary comment - if: ${{ steps.pr-info.outputs.found == 'true' }} + if: ${{ steps.pr-info.outputs.found == 'true' && steps.pr-info.outputs.has_canary_label == 'true' }} uses: peter-evans/create-or-update-comment@v4 with: comment-id: ${{ steps.find-comment.outputs.comment-id }} diff --git a/.github/workflows/canary-deploy.yml b/.github/workflows/canary-deploy.yml index 872fcc41..f40f7a0f 100644 --- a/.github/workflows/canary-deploy.yml +++ b/.github/workflows/canary-deploy.yml @@ -105,19 +105,3 @@ jobs: org.opencontainers.image.revision=${{ github.event.pull_request.head.sha }} canary.pr.number=${{ github.event.pull_request.number }} canary.pr.author=${{ github.event.pull_request.user.login }} - - # Save canary info for the comment workflow - - name: Save canary info - run: | - mkdir -p canary-info - echo "${{ steps.meta.outputs.tags }}" > canary-info/canary-tags.txt - echo "${{ github.event.pull_request.number }}" > canary-info/pr-number.txt - echo "${{ github.event.pull_request.head.sha }}" > canary-info/commit-sha.txt - echo "postgres-meta" > canary-info/package-name.txt - - - name: Upload canary info - uses: actions/upload-artifact@v4 - with: - name: canary-info - path: canary-info/ - retention-days: 7 From 2b82470a2e4b22946c3ee7ddfe9e725d5c2a80fa Mon Sep 17 00:00:00 2001 From: avallete Date: Tue, 23 Sep 2025 20:00:45 +0200 Subject: [PATCH 35/72] chore: refactor typegen to reduce loops --- src/server/constants.ts | 4 + src/server/templates/typescript.ts | 186 +++++++++++++++++++---------- 2 files changed, 125 insertions(+), 65 deletions(-) diff --git a/src/server/constants.ts b/src/server/constants.ts index 9354c59f..c64b45e6 100644 --- a/src/server/constants.ts +++ b/src/server/constants.ts @@ -51,6 +51,10 @@ export const GENERATE_TYPES_SWIFT_ACCESS_CONTROL = process.env ? 
(process.env.PG_META_GENERATE_TYPES_SWIFT_ACCESS_CONTROL as AccessControl) : 'internal' +// json/jsonb/text types +export const VALID_UNNAMED_FUNCTION_ARG_TYPES = new Set([114, 3802, 25]) +export const VALID_FUNCTION_ARGS_MODE = new Set(['in', 'inout', 'variadic']) + export const PG_META_MAX_RESULT_SIZE = process.env.PG_META_MAX_RESULT_SIZE_MB ? // Node-postgres get a maximum size in bytes make the conversion from the env variable // from MB to Bytes diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 03b407d4..1c262fae 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -8,7 +8,7 @@ import type { PostgresView, } from '../../lib/index.js' import type { GeneratorMetadata } from '../../lib/generators.js' -import { GENERATE_TYPES_DEFAULT_SCHEMA } from '../constants.js' +import { GENERATE_TYPES_DEFAULT_SCHEMA, VALID_FUNCTION_ARGS_MODE } from '../constants.js' export const apply = async ({ schemas, @@ -26,15 +26,99 @@ export const apply = async ({ detectOneToOneRelationships: boolean postgrestVersion?: string }): Promise => { + schemas.sort((a, b) => a.name.localeCompare(b.name)) + const columnsByTableId = Object.fromEntries( [...tables, ...foreignTables, ...views, ...materializedViews].map((t) => [t.id, []]) ) - columns - .filter((c) => c.table_id in columnsByTableId) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - .forEach((c) => { - columnsByTableId[c.table_id].push(c) - }) + for (const column of columns) { + if (column.table_id in columnsByTableId) { + columnsByTableId[column.table_id].push(column) + } + } + for (const tableId in columnsByTableId) { + columnsByTableId[tableId].sort((a, b) => a.name.localeCompare(b.name)) + } + + const introspectionBySchema = Object.fromEntries<{ + tables: Pick[] + views: PostgresView[] + functions: { fn: PostgresFunction; inArgs: PostgresFunction['args'] }[] + enums: PostgresType[] + compositeTypes: PostgresType[] + }>( + schemas.map((s) => [ 
+ s.name, + { tables: [], views: [], functions: [], enums: [], compositeTypes: [] }, + ]) + ) + for (const table of tables) { + if (table.schema in introspectionBySchema) { + introspectionBySchema[table.schema].tables.push(table) + } + } + for (const table of foreignTables) { + if (table.schema in introspectionBySchema) { + introspectionBySchema[table.schema].tables.push(table) + } + } + for (const view of views) { + if (view.schema in introspectionBySchema) { + introspectionBySchema[view.schema].views.push(view) + } + } + for (const materializedView of materializedViews) { + if (materializedView.schema in introspectionBySchema) { + introspectionBySchema[materializedView.schema].views.push({ + ...materializedView, + is_updatable: false, + }) + } + } + for (const func of functions) { + if (func.schema in introspectionBySchema) { + func.args.sort((a, b) => a.name.localeCompare(b.name)) + // Either: + // 1. All input args are be named, or + // 2. There is only one input arg which is unnamed + const inArgs = func.args.filter(({ mode }) => VALID_FUNCTION_ARGS_MODE.has(mode)) + + if ( + // Case 1: Function has a single parameter + inArgs.length === 1 || + // Case 2: All input args are named + !inArgs.some(({ name }) => name === '') + ) { + introspectionBySchema[func.schema].functions.push({ fn: func, inArgs }) + } + } + } + for (const type of types) { + if (type.schema in introspectionBySchema) { + if (type.enums.length > 0) { + introspectionBySchema[type.schema].enums.push(type) + } + if (type.attributes.length > 0) { + introspectionBySchema[type.schema].compositeTypes.push(type) + } + } + } + for (const schema in introspectionBySchema) { + introspectionBySchema[schema].tables.sort((a, b) => a.name.localeCompare(b.name)) + introspectionBySchema[schema].views.sort((a, b) => a.name.localeCompare(b.name)) + introspectionBySchema[schema].functions.sort((a, b) => a.fn.name.localeCompare(b.fn.name)) + introspectionBySchema[schema].enums.sort((a, b) => 
a.name.localeCompare(b.name)) + introspectionBySchema[schema].compositeTypes.sort((a, b) => a.name.localeCompare(b.name)) + } + + // group types by id for quicker lookup + const typesById = types.reduce( + (acc, type) => { + acc[type.id] = type + return acc + }, + {} as Record + ) const internal_supabase_schema = postgrestVersion ? `// Allows to automatically instantiate createClient with right options @@ -49,44 +133,15 @@ export type Json = string | number | boolean | null | { [key: string]: Json | un export type Database = { ${internal_supabase_schema} - ${schemas - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - .map((schema) => { - const schemaTables = [...tables, ...foreignTables] - .filter((table) => table.schema === schema.name) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - const schemaViews = [...views, ...materializedViews] - .filter((view) => view.schema === schema.name) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - const schemaFunctions = functions - .filter((func) => { - if (func.schema !== schema.name) { - return false - } - - // Either: - // 1. All input args are be named, or - // 2. 
There is only one input arg which is unnamed - const inArgs = func.args.filter(({ mode }) => ['in', 'inout', 'variadic'].includes(mode)) - - if (!inArgs.some(({ name }) => name === '')) { - return true - } - - if (inArgs.length === 1) { - return true - } - - return false - }) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - const schemaEnums = types - .filter((type) => type.schema === schema.name && type.enums.length > 0) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - const schemaCompositeTypes = types - .filter((type) => type.schema === schema.name && type.attributes.length > 0) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - return `${JSON.stringify(schema.name)}: { + ${schemas.map((schema) => { + const { + tables: schemaTables, + views: schemaViews, + functions: schemaFunctions, + enums: schemaEnums, + compositeTypes: schemaCompositeTypes, + } = introspectionBySchema[schema.name] + return `${JSON.stringify(schema.name)}: { Tables: { ${ schemaTables.length === 0 @@ -105,9 +160,9 @@ export type Database = { })} ${column.is_nullable ? '| null' : ''}` ), ...schemaFunctions - .filter((fn) => fn.argument_types === table.name) - .map((fn) => { - const type = types.find(({ id }) => id === fn.return_type_id) + .filter(({ fn }) => fn.argument_types === table.name) + .map(({ fn }) => { + const type = typesById[fn.return_type_id] let tsType = 'unknown' if (type) { tsType = pgTypeToTsType(schema, type.name, { @@ -226,7 +281,7 @@ export type Database = { )} } ${ - 'is_updatable' in view && view.is_updatable + view.is_updatable ? 
`Insert: { ${columnsByTableId[view.id].map((column) => { let output = JSON.stringify(column.name) @@ -306,28 +361,29 @@ export type Database = { const schemaFunctionsGroupedByName = schemaFunctions.reduce( (acc, curr) => { - acc[curr.name] ??= [] - acc[curr.name].push(curr) + acc[curr.fn.name] ??= [] + acc[curr.fn.name].push(curr) return acc }, - {} as Record + {} as Record ) + for (const fnName in schemaFunctionsGroupedByName) { + schemaFunctionsGroupedByName[fnName].sort((a, b) => + b.fn.definition.localeCompare(a.fn.definition) + ) + } return Object.entries(schemaFunctionsGroupedByName).map( ([fnName, fns]) => `${JSON.stringify(fnName)}: { Args: ${fns - .map(({ args }) => { - const inArgs = args - .toSorted((a, b) => a.name.localeCompare(b.name)) - .filter(({ mode }) => mode === 'in') - + .map(({ inArgs }) => { if (inArgs.length === 0) { return 'Record' } const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { - const type = types.find(({ id }) => id === type_id) + const type = typesById[type_id] let tsType = 'unknown' if (type) { tsType = pgTypeToTsType(schema, type.name, { @@ -346,10 +402,10 @@ export type Database = { .join(' | ')} Returns: ${(() => { // Case 1: `returns table`. - const tableArgs = fns[0].args.filter(({ mode }) => mode === 'table') + const tableArgs = fns[0].fn.args.filter(({ mode }) => mode === 'table') if (tableArgs.length > 0) { const argsNameAndType = tableArgs.map(({ name, type_id }) => { - const type = types.find(({ id }) => id === type_id) + const type = typesById[type_id] let tsType = 'unknown' if (type) { tsType = pgTypeToTsType(schema, type.name, { @@ -371,7 +427,7 @@ export type Database = { // Case 2: returns a relation's row type. const relation = [...tables, ...views].find( - ({ id }) => id === fns[0].return_type_relation_id + ({ id }) => id === fns[0].fn.return_type_relation_id ) if (relation) { return `{ @@ -394,7 +450,7 @@ export type Database = { } // Case 3: returns base/array/composite/enum type. 
- const type = types.find(({ id }) => id === fns[0].return_type_id) + const type = typesById[fns[0].fn.return_type_id] if (type) { return pgTypeToTsType(schema, type.name, { types, @@ -405,7 +461,7 @@ export type Database = { } return 'unknown' - })()}${fns[0].is_set_returning_function ? '[]' : ''} + })()}${fns[0].fn.is_set_returning_function ? '[]' : ''} }` ) })()} @@ -430,7 +486,7 @@ export type Database = { ({ name, attributes }) => `${JSON.stringify(name)}: { ${attributes.map(({ name, type_id }) => { - const type = types.find(({ id }) => id === type_id) + const type = typesById[type_id] let tsType = 'unknown' if (type) { tsType = `${pgTypeToTsType(schema, type.name, { @@ -447,7 +503,7 @@ export type Database = { } } }` - })} + })} } type DatabaseWithoutInternals = Omit From 09155b08b43992c349712c1841e8162ff2645d9f Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Wed, 1 Oct 2025 10:18:11 +0200 Subject: [PATCH 36/72] chore: dedup typescript typegen logic (#993) * chore: dedup typescript typegen logic * chore: reduce loops --- src/server/templates/typescript.ts | 568 ++++++++++++++--------------- 1 file changed, 282 insertions(+), 286 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 1c262fae..f0079874 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -1,4 +1,5 @@ import prettier from 'prettier' +import type { GeneratorMetadata } from '../../lib/generators.js' import type { PostgresColumn, PostgresFunction, @@ -7,9 +8,13 @@ import type { PostgresType, PostgresView, } from '../../lib/index.js' -import type { GeneratorMetadata } from '../../lib/generators.js' import { GENERATE_TYPES_DEFAULT_SCHEMA, VALID_FUNCTION_ARGS_MODE } from '../constants.js' +type TsRelationship = Pick< + GeneratorMetadata['relationships'][number], + 'foreign_key_name' | 'columns' | 'is_one_to_one' | 'referenced_relation' | 'referenced_columns' +> + export const apply = async ({ schemas, 
tables, @@ -27,10 +32,37 @@ export const apply = async ({ postgrestVersion?: string }): Promise => { schemas.sort((a, b) => a.name.localeCompare(b.name)) + relationships.sort( + (a, b) => + a.foreign_key_name.localeCompare(b.foreign_key_name) || + a.referenced_relation.localeCompare(b.referenced_relation) || + JSON.stringify(a.referenced_columns).localeCompare(JSON.stringify(b.referenced_columns)) + ) + const introspectionBySchema = Object.fromEntries<{ + tables: { + table: Pick + relationships: TsRelationship[] + }[] + views: { + view: PostgresView + relationships: TsRelationship[] + }[] + functions: { fn: PostgresFunction; inArgs: PostgresFunction['args'] }[] + enums: PostgresType[] + compositeTypes: PostgresType[] + }>( + schemas.map((s) => [ + s.name, + { tables: [], views: [], functions: [], enums: [], compositeTypes: [] }, + ]) + ) const columnsByTableId = Object.fromEntries( [...tables, ...foreignTables, ...views, ...materializedViews].map((t) => [t.id, []]) ) + // group types by id for quicker lookup + const typesById = new Map() + for (const column of columns) { if (column.table_id in columnsByTableId) { columnsByTableId[column.table_id].push(column) @@ -40,38 +72,74 @@ export const apply = async ({ columnsByTableId[tableId].sort((a, b) => a.name.localeCompare(b.name)) } - const introspectionBySchema = Object.fromEntries<{ - tables: Pick[] - views: PostgresView[] - functions: { fn: PostgresFunction; inArgs: PostgresFunction['args'] }[] - enums: PostgresType[] - compositeTypes: PostgresType[] - }>( - schemas.map((s) => [ - s.name, - { tables: [], views: [], functions: [], enums: [], compositeTypes: [] }, - ]) - ) + for (const type of types) { + typesById.set(type.id, type) + if (type.schema in introspectionBySchema) { + if (type.enums.length > 0) { + introspectionBySchema[type.schema].enums.push(type) + } + if (type.attributes.length > 0) { + introspectionBySchema[type.schema].compositeTypes.push(type) + } + } + } + + function getRelationships( + object: { 
schema: string; name: string }, + relationships: GeneratorMetadata['relationships'] + ): Pick< + GeneratorMetadata['relationships'][number], + 'foreign_key_name' | 'columns' | 'is_one_to_one' | 'referenced_relation' | 'referenced_columns' + >[] { + return relationships.filter( + (relationship) => + relationship.schema === object.schema && + relationship.referenced_schema === object.schema && + relationship.relation === object.name + ) + } + + function generateRelationshiptTsDefinition(relationship: TsRelationship): string { + return `{ + foreignKeyName: ${JSON.stringify(relationship.foreign_key_name)} + columns: ${JSON.stringify(relationship.columns)}${detectOneToOneRelationships ? `\nisOneToOne: ${relationship.is_one_to_one}` : ''} + referencedRelation: ${JSON.stringify(relationship.referenced_relation)} + referencedColumns: ${JSON.stringify(relationship.referenced_columns)} + }` + } + for (const table of tables) { if (table.schema in introspectionBySchema) { - introspectionBySchema[table.schema].tables.push(table) + introspectionBySchema[table.schema].tables.push({ + table, + relationships: getRelationships(table, relationships), + }) } } for (const table of foreignTables) { if (table.schema in introspectionBySchema) { - introspectionBySchema[table.schema].tables.push(table) + introspectionBySchema[table.schema].tables.push({ + table, + relationships: getRelationships(table, relationships), + }) } } for (const view of views) { if (view.schema in introspectionBySchema) { - introspectionBySchema[view.schema].views.push(view) + introspectionBySchema[view.schema].views.push({ + view, + relationships: getRelationships(view, relationships), + }) } } for (const materializedView of materializedViews) { if (materializedView.schema in introspectionBySchema) { introspectionBySchema[materializedView.schema].views.push({ - ...materializedView, - is_updatable: false, + view: { + ...materializedView, + is_updatable: false, + }, + relationships: 
getRelationships(materializedView, relationships), }) } } @@ -93,32 +161,105 @@ export const apply = async ({ } } } - for (const type of types) { - if (type.schema in introspectionBySchema) { - if (type.enums.length > 0) { - introspectionBySchema[type.schema].enums.push(type) - } - if (type.attributes.length > 0) { - introspectionBySchema[type.schema].compositeTypes.push(type) - } - } - } for (const schema in introspectionBySchema) { - introspectionBySchema[schema].tables.sort((a, b) => a.name.localeCompare(b.name)) - introspectionBySchema[schema].views.sort((a, b) => a.name.localeCompare(b.name)) + introspectionBySchema[schema].tables.sort((a, b) => a.table.name.localeCompare(b.table.name)) + introspectionBySchema[schema].views.sort((a, b) => a.view.name.localeCompare(b.view.name)) introspectionBySchema[schema].functions.sort((a, b) => a.fn.name.localeCompare(b.fn.name)) introspectionBySchema[schema].enums.sort((a, b) => a.name.localeCompare(b.name)) introspectionBySchema[schema].compositeTypes.sort((a, b) => a.name.localeCompare(b.name)) } - // group types by id for quicker lookup - const typesById = types.reduce( - (acc, type) => { - acc[type.id] = type - return acc - }, - {} as Record - ) + const getFunctionTsReturnType = (fn: PostgresFunction, returnType: string) => { + return `${returnType}${fn.is_set_returning_function ? '[]' : ''}` + } + + const getFunctionReturnType = (schema: PostgresSchema, fn: PostgresFunction): string => { + const tableArgs = fn.args.filter(({ mode }) => mode === 'table') + if (tableArgs.length > 0) { + const argsNameAndType = tableArgs.map(({ name, type_id }) => { + const type = typesById.get(type_id) + let tsType = 'unknown' + if (type) { + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) + } + return { name, type: tsType } + }) + + return `{ + ${argsNameAndType.map(({ name, type }) => `${JSON.stringify(name)}: ${type}`)} + }` + } + + // Case 2: returns a relation's row type. 
+ const relation = + introspectionBySchema[schema.name]?.tables.find( + ({ table: { id } }) => id === fn.return_type_relation_id + )?.table || + introspectionBySchema[schema.name]?.views.find( + ({ view: { id } }) => id === fn.return_type_relation_id + )?.view + if (relation) { + return `{ + ${columnsByTableId[relation.id].map( + (column) => + `${JSON.stringify(column.name)}: ${pgTypeToTsType(schema, column.format, { + types, + schemas, + tables, + views, + })} ${column.is_nullable ? '| null' : ''}` + )} + }` + } + + // Case 3: returns base/array/composite/enum type. + const type = typesById.get(fn.return_type_id) + if (type) { + return pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) + } + + return 'unknown' + } + + const getFunctionSignatures = ( + schema: PostgresSchema, + fns: Array<{ fn: PostgresFunction; inArgs: PostgresFunction['args'] }> + ) => { + const args = fns + .map(({ inArgs }) => { + if (inArgs.length === 0) { + return 'Record' + } + const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { + const type = typesById.get(type_id) + let tsType = 'unknown' + if (type) { + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) + } + return { name, type: tsType, has_default } + }) + return `{ ${argsNameAndType.map(({ name, type, has_default }) => `${JSON.stringify(name)}${has_default ? '?' : ''}: ${type}`)} }` + }) + .toSorted() + // A function can have multiples definitions with differents args, but will always return the same type + .join(' | ') + return `{\nArgs: ${args}\n Returns: ${getFunctionTsReturnType(fns[0].fn, getFunctionReturnType(schema, fns[0].fn))}\n}` + } const internal_supabase_schema = postgrestVersion ? 
`// Allows to automatically instantiate createClient with right options @@ -128,6 +269,24 @@ export const apply = async ({ }` : '' + function generateColumnTsDefinition( + schema: PostgresSchema, + column: { + name: string + format: string + is_nullable: boolean + is_optional: boolean + }, + context: { + types: PostgresType[] + schemas: PostgresSchema[] + tables: PostgresTable[] + views: PostgresView[] + } + ) { + return `${JSON.stringify(column.name)}${column.is_optional ? '?' : ''}: ${pgTypeToTsType(schema, column.format, context)} ${column.is_nullable ? '| null' : ''}` + } + let output = ` export type Json = string | number | boolean | null | { [key: string]: Json | undefined } | Json[] @@ -147,117 +306,68 @@ export type Database = { schemaTables.length === 0 ? '[_ in never]: never' : schemaTables.map( - (table) => `${JSON.stringify(table.name)}: { + ({ table, relationships }) => `${JSON.stringify(table.name)}: { Row: { ${[ - ...columnsByTableId[table.id].map( - (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - })} ${column.is_nullable ? 
'| null' : ''}` + ...columnsByTableId[table.id].map((column) => + generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: column.is_nullable, + is_optional: false, + }, + { types, schemas, tables, views } + ) ), ...schemaFunctions .filter(({ fn }) => fn.argument_types === table.name) .map(({ fn }) => { - const type = typesById[fn.return_type_id] - let tsType = 'unknown' - if (type) { - tsType = pgTypeToTsType(schema, type.name, { - types, - schemas, - tables, - views, - }) - } - return `${JSON.stringify(fn.name)}: ${tsType} | null` + return `${JSON.stringify(fn.name)}: ${getFunctionReturnType(schema, fn)} | null` }), ]} } Insert: { ${columnsByTableId[table.id].map((column) => { - let output = JSON.stringify(column.name) - if (column.identity_generation === 'ALWAYS') { - return `${output}?: never` - } - - if ( - column.is_nullable || - column.is_identity || - column.default_value !== null - ) { - output += '?:' - } else { - output += ':' + return `${JSON.stringify(column.name)}?: never` } - - output += pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - }) - - if (column.is_nullable) { - output += '| null' - } - - return output + return generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: column.is_nullable, + is_optional: + column.is_nullable || + column.is_identity || + column.default_value !== null, + }, + { types, schemas, tables, views } + ) })} } Update: { ${columnsByTableId[table.id].map((column) => { - let output = JSON.stringify(column.name) - if (column.identity_generation === 'ALWAYS') { - return `${output}?: never` + return `${JSON.stringify(column.name)}?: never` } - output += `?: ${pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - })}` - - if (column.is_nullable) { - output += '| null' - } - - return output + return generateColumnTsDefinition( + schema, + { + name: column.name, + format: 
column.format, + is_nullable: column.is_nullable, + is_optional: true, + }, + { types, schemas, tables, views } + ) })} } Relationships: [ - ${relationships - .filter( - (relationship) => - relationship.schema === table.schema && - relationship.referenced_schema === table.schema && - relationship.relation === table.name - ) - .sort( - (a, b) => - a.foreign_key_name.localeCompare(b.foreign_key_name) || - a.referenced_relation.localeCompare(b.referenced_relation) || - JSON.stringify(a.referenced_columns).localeCompare( - JSON.stringify(b.referenced_columns) - ) - ) - .map( - (relationship) => `{ - foreignKeyName: ${JSON.stringify(relationship.foreign_key_name)} - columns: ${JSON.stringify(relationship.columns)} - ${ - detectOneToOneRelationships - ? `isOneToOne: ${relationship.is_one_to_one};` - : '' - }referencedRelation: ${JSON.stringify(relationship.referenced_relation)} - referencedColumns: ${JSON.stringify(relationship.referenced_columns)} - }` - )} + ${relationships.map(generateRelationshiptTsDefinition)} ] }` ) @@ -268,86 +378,61 @@ export type Database = { schemaViews.length === 0 ? '[_ in never]: never' : schemaViews.map( - (view) => `${JSON.stringify(view.name)}: { + ({ view, relationships }) => `${JSON.stringify(view.name)}: { Row: { - ${columnsByTableId[view.id].map( - (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - })} ${column.is_nullable ? '| null' : ''}` + ${columnsByTableId[view.id].map((column) => + generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: column.is_nullable, + is_optional: false, + }, + { types, schemas, tables, views } + ) )} } ${ view.is_updatable ? 
`Insert: { ${columnsByTableId[view.id].map((column) => { - let output = JSON.stringify(column.name) - if (!column.is_updatable) { - return `${output}?: never` + return `${JSON.stringify(column.name)}?: never` } - - output += `?: ${pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - })} | null` - - return output + return generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: true, + is_optional: true, + }, + { types, schemas, tables, views } + ) })} } Update: { ${columnsByTableId[view.id].map((column) => { - let output = JSON.stringify(column.name) - if (!column.is_updatable) { - return `${output}?: never` + return `${JSON.stringify(column.name)}?: never` } - - output += `?: ${pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - })} | null` - - return output + return generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: true, + is_optional: true, + }, + { types, schemas, tables, views } + ) })} } ` : '' }Relationships: [ - ${relationships - .filter( - (relationship) => - relationship.schema === view.schema && - relationship.referenced_schema === view.schema && - relationship.relation === view.name - ) - .sort( - (a, b) => - a.foreign_key_name.localeCompare(b.foreign_key_name) || - a.referenced_relation.localeCompare(b.referenced_relation) || - JSON.stringify(a.referenced_columns).localeCompare( - JSON.stringify(b.referenced_columns) - ) - ) - .map( - (relationship) => `{ - foreignKeyName: ${JSON.stringify(relationship.foreign_key_name)} - columns: ${JSON.stringify(relationship.columns)} - ${ - detectOneToOneRelationships - ? 
`isOneToOne: ${relationship.is_one_to_one};` - : '' - }referencedRelation: ${JSON.stringify(relationship.referenced_relation)} - referencedColumns: ${JSON.stringify(relationship.referenced_columns)} - }` - )} + ${relationships.map(generateRelationshiptTsDefinition)} ] }` ) @@ -373,97 +458,12 @@ export type Database = { ) } - return Object.entries(schemaFunctionsGroupedByName).map( - ([fnName, fns]) => - `${JSON.stringify(fnName)}: { - Args: ${fns - .map(({ inArgs }) => { - if (inArgs.length === 0) { - return 'Record' - } - - const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { - const type = typesById[type_id] - let tsType = 'unknown' - if (type) { - tsType = pgTypeToTsType(schema, type.name, { - types, - schemas, - tables, - views, - }) - } - return { name, type: tsType, has_default } - }) - return `{ ${argsNameAndType.map(({ name, type, has_default }) => `${JSON.stringify(name)}${has_default ? '?' : ''}: ${type}`)} }` - }) - .toSorted() - // A function can have multiples definitions with differents args, but will always return the same type - .join(' | ')} - Returns: ${(() => { - // Case 1: `returns table`. - const tableArgs = fns[0].fn.args.filter(({ mode }) => mode === 'table') - if (tableArgs.length > 0) { - const argsNameAndType = tableArgs.map(({ name, type_id }) => { - const type = typesById[type_id] - let tsType = 'unknown' - if (type) { - tsType = pgTypeToTsType(schema, type.name, { - types, - schemas, - tables, - views, - }) - } - return { name, type: tsType } - }) - - return `{ - ${argsNameAndType - .toSorted((a, b) => a.name.localeCompare(b.name)) - .map(({ name, type }) => `${JSON.stringify(name)}: ${type}`)} - }` - } - - // Case 2: returns a relation's row type. 
- const relation = [...tables, ...views].find( - ({ id }) => id === fns[0].fn.return_type_relation_id - ) - if (relation) { - return `{ - ${columnsByTableId[relation.id] - .toSorted((a, b) => a.name.localeCompare(b.name)) - .map( - (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType( - schema, - column.format, - { - types, - schemas, - tables, - views, - } - )} ${column.is_nullable ? '| null' : ''}` - )} - }` - } - - // Case 3: returns base/array/composite/enum type. - const type = typesById[fns[0].fn.return_type_id] - if (type) { - return pgTypeToTsType(schema, type.name, { - types, - schemas, - tables, - views, - }) - } - - return 'unknown' - })()}${fns[0].fn.is_set_returning_function ? '[]' : ''} - }` - ) + return Object.entries(schemaFunctionsGroupedByName) + .map(([fnName, fns]) => { + const functionSignatures = getFunctionSignatures(schema, fns) + return `${JSON.stringify(fnName)}:\n${functionSignatures}` + }) + .join(',\n') })()} } Enums: { @@ -486,7 +486,7 @@ export type Database = { ({ name, attributes }) => `${JSON.stringify(name)}: { ${attributes.map(({ name, type_id }) => { - const type = typesById[type_id] + const type = typesById.get(type_id) let tsType = 'unknown' if (type) { tsType = `${pgTypeToTsType(schema, type.name, { @@ -612,13 +612,9 @@ export type CompositeTypes< : never export const Constants = { - ${schemas - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - .map((schema) => { - const schemaEnums = types - .filter((type) => type.schema === schema.name && type.enums.length > 0) - .sort(({ name: a }, { name: b }) => a.localeCompare(b)) - return `${JSON.stringify(schema.name)}: { + ${schemas.map((schema) => { + const schemaEnums = introspectionBySchema[schema.name].enums + return `${JSON.stringify(schema.name)}: { Enums: { ${schemaEnums.map( (enum_) => @@ -628,7 +624,7 @@ export const Constants = { )} } }` - })} + })} } as const ` From 0aeceb212acf4310103eca6fa8af5a23716fc021 Mon Sep 17 00:00:00 2001 From: Charis 
<26616127+charislam@users.noreply.github.com> Date: Wed, 8 Oct 2025 14:18:13 -0400 Subject: [PATCH 37/72] fix(tables): ensure order of composite pks preserved (#996) * fix(tables): ensure order of composite pks preserved * test: add test for preserving composite pk order --- src/lib/sql/table.sql.ts | 43 +++++++++++++++++++--------------------- test/lib/tables.ts | 17 ++++++++++++++++ 2 files changed, 37 insertions(+), 23 deletions(-) diff --git a/src/lib/sql/table.sql.ts b/src/lib/sql/table.sql.ts index d7f70331..446c1f40 100644 --- a/src/lib/sql/table.sql.ts +++ b/src/lib/sql/table.sql.ts @@ -32,29 +32,26 @@ FROM JOIN pg_class c ON nc.oid = c.relnamespace left join ( select - table_id, - jsonb_agg(_pk.*) as primary_keys - from ( - select - n.nspname as schema, - c.relname as table_name, - a.attname as name, - c.oid :: int8 as table_id - from - pg_index i, - pg_class c, - pg_attribute a, - pg_namespace n - where - ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} - ${props.tableIdentifierFilter ? `n.nspname || '.' || c.relname ${props.tableIdentifierFilter} AND` : ''} - i.indrelid = c.oid - and c.relnamespace = n.oid - and a.attrelid = c.oid - and a.attnum = any (i.indkey) - and i.indisprimary - ) as _pk - group by table_id + c.oid::int8 as table_id, + jsonb_agg( + jsonb_build_object( + 'table_id', c.oid::int8, + 'schema', n.nspname, + 'table_name', c.relname, + 'name', a.attname + ) + order by array_position(i.indkey, a.attnum) + ) as primary_keys + from + pg_index i + join pg_class c on i.indrelid = c.oid + join pg_namespace n on c.relnamespace = n.oid + join pg_attribute a on a.attrelid = c.oid and a.attnum = any(i.indkey) + where + ${props.schemaFilter ? `n.nspname ${props.schemaFilter} AND` : ''} + ${props.tableIdentifierFilter ? `n.nspname || '.' 
|| c.relname ${props.tableIdentifierFilter} AND` : ''} + i.indisprimary + group by c.oid ) as pk on pk.table_id = c.oid left join ( diff --git a/test/lib/tables.ts b/test/lib/tables.ts index 00230ab4..677204fc 100644 --- a/test/lib/tables.ts +++ b/test/lib/tables.ts @@ -525,3 +525,20 @@ test('primary keys', async () => { ) await pgMeta.tables.remove(res.data!.id) }) + +test('composite primary keys preserve order', async () => { + let res = await pgMeta.tables.create({ name: 't_pk_order' }) + await pgMeta.columns.create({ table_id: res.data!.id, name: 'col_a', type: 'int8' }) + await pgMeta.columns.create({ table_id: res.data!.id, name: 'col_b', type: 'text' }) + await pgMeta.columns.create({ table_id: res.data!.id, name: 'col_c', type: 'int4' }) + + // Set primary keys in specific order: col_c, col_a, col_b + res = await pgMeta.tables.update(res.data!.id, { + primary_keys: [{ name: 'col_c' }, { name: 'col_a' }, { name: 'col_b' }], + }) + + // Verify the order is preserved + expect(res.data!.primary_keys.map((pk: any) => pk.name)).toEqual(['col_c', 'col_a', 'col_b']) + + await pgMeta.tables.remove(res.data!.id) +}) From 1d0dad13da8313f5c2d6b8d0a54ecad82095b945 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 10 Oct 2025 22:06:18 +0000 Subject: [PATCH 38/72] chore(deps-dev): bump pino-pretty from 13.1.1 to 13.1.2 (#998) Bumps [pino-pretty](https://github.com/pinojs/pino-pretty) from 13.1.1 to 13.1.2. - [Release notes](https://github.com/pinojs/pino-pretty/releases) - [Commits](https://github.com/pinojs/pino-pretty/compare/v13.1.1...v13.1.2) --- updated-dependencies: - dependency-name: pino-pretty dependency-version: 13.1.2 dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index cd6f7b63..2787f0d6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -6229,9 +6229,9 @@ } }, "node_modules/pino-pretty": { - "version": "13.1.1", - "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.1.1.tgz", - "integrity": "sha512-TNNEOg0eA0u+/WuqH0MH0Xui7uqVk9D74ESOpjtebSQYbNWJk/dIxCXIxFsNfeN53JmtWqYHP2OrIZjT/CBEnA==", + "version": "13.1.2", + "resolved": "https://registry.npmjs.org/pino-pretty/-/pino-pretty-13.1.2.tgz", + "integrity": "sha512-3cN0tCakkT4f3zo9RXDIhy6GTvtYD6bK4CRBLN9j3E/ePqN1tugAXD5rGVfoChW6s0hiek+eyYlLNqc/BG7vBQ==", "dev": true, "license": "MIT", "dependencies": { From d521264e6f99780cfea77d4fd00c111aebcba9d1 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Thu, 16 Oct 2025 16:11:31 +0200 Subject: [PATCH 39/72] feat(typegen): add functions setof type introspection (#971) * feat(typegen): add setof function type introspection - Introspect the setof function fields for functions - Restore functions as unions of args + returns * chore: update snapshots * chore: unify sort and dedup loops * chore: remove duplicate sort * chore: include view in type * fix: isOneToOne * fix: tests * chore: dedup typescript typegen logic * chore: re-use generateColumn * fix: retrieve prorows only * chore: refactor typegen for prorows only * fix: only get reltype in types * chore: reuse relationTypeByIds * chore: reduce functions changes to minimum * chore: only single loop for types * chore: single sort for relationships * chore: reduce loops * fix: relationtype setof functions generation * chore: fix prettier * chore: update snapshots * chore: fix types test * fix: test types * fix: include materializedView types * test: add search_todos_by_details function * fix: add setof from * for all 
relation functions * fix(typescript): union unknown null (#995) * fix(typescript): unknown is already nullable Fixes: https://github.com/supabase/cli/issues/4234 https://github.com/supabase/cli/issues/577 * fix: also exclude any from null union --- Dockerfile | 2 +- package.json | 3 +- src/lib/sql/functions.sql.ts | 4 + src/lib/sql/types.sql.ts | 5 +- src/lib/types.ts | 2 + src/server/templates/typescript.ts | 357 +++- test/db/00-init.sql | 247 +++ test/lib/functions.ts | 128 ++ test/lib/types.ts | 5 +- test/server/typegen.ts | 2733 ++++++++++++++++++++++++++-- 10 files changed, 3308 insertions(+), 178 deletions(-) diff --git a/Dockerfile b/Dockerfile index 8756b7ac..df79412a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,4 +1,4 @@ -FROM node:20 as build +FROM node:20 AS build WORKDIR /usr/src/app # Do `npm ci` separately so we can cache `node_modules` # https://nodejs.org/en/docs/guides/nodejs-docker-webapp/ diff --git a/package.json b/package.json index 941df965..e903e455 100644 --- a/package.json +++ b/package.json @@ -26,7 +26,8 @@ "gen:types:go": "PG_META_GENERATE_TYPES=go node --loader ts-node/esm src/server/server.ts", "gen:types:swift": "PG_META_GENERATE_TYPES=swift node --loader ts-node/esm src/server/server.ts", "start": "node dist/server/server.js", - "dev": "trap 'npm run db:clean' INT && run-s db:clean db:run && nodemon --exec node --loader ts-node/esm src/server/server.ts | pino-pretty --colorize", + "dev": "trap 'npm run db:clean' INT && run-s db:clean db:run && run-s dev:code", + "dev:code": "nodemon --exec node --loader ts-node/esm src/server/server.ts | pino-pretty --colorize", "test": "run-s db:clean db:run test:run db:clean", "db:clean": "cd test/db && docker compose down", "db:run": "cd test/db && docker compose up --detach --wait", diff --git a/src/lib/sql/functions.sql.ts b/src/lib/sql/functions.sql.ts index 92715b95..97dad2f3 100644 --- a/src/lib/sql/functions.sql.ts +++ b/src/lib/sql/functions.sql.ts @@ -85,6 +85,10 @@ select 
pg_get_function_result(f.oid) as return_type, nullif(rt.typrelid::int8, 0) as return_type_relation_id, f.proretset as is_set_returning_function, + case + when f.proretset then nullif(f.prorows, 0) + else null + end as prorows, case when f.provolatile = 'i' then 'IMMUTABLE' when f.provolatile = 's' then 'STABLE' diff --git a/src/lib/sql/types.sql.ts b/src/lib/sql/types.sql.ts index 990fa22f..c230f23f 100644 --- a/src/lib/sql/types.sql.ts +++ b/src/lib/sql/types.sql.ts @@ -13,7 +13,8 @@ select format_type (t.oid, null) as format, coalesce(t_enums.enums, '[]') as enums, coalesce(t_attributes.attributes, '[]') as attributes, - obj_description (t.oid, 'pg_type') as comment + obj_description (t.oid, 'pg_type') as comment, + nullif(t.typrelid::int8, 0) as type_relation_id from pg_type t left join pg_namespace n on n.oid = t.typnamespace @@ -46,7 +47,7 @@ from t.typrelid = 0 or ( select - c.relkind ${props.includeTableTypes ? `in ('c', 'r')` : `= 'c'`} + c.relkind ${props.includeTableTypes ? `in ('c', 'r', 'v', 'm')` : `= 'c'`} from pg_class c where diff --git a/src/lib/types.ts b/src/lib/types.ts index bfd60250..26b3bc78 100644 --- a/src/lib/types.ts +++ b/src/lib/types.ts @@ -156,6 +156,7 @@ const postgresFunctionSchema = Type.Object({ return_type: Type.String(), return_type_relation_id: Type.Union([Type.Integer(), Type.Null()]), is_set_returning_function: Type.Boolean(), + prorows: Type.Union([Type.Number(), Type.Null()]), behavior: Type.Union([ Type.Literal('IMMUTABLE'), Type.Literal('STABLE'), @@ -442,6 +443,7 @@ export const postgresTypeSchema = Type.Object({ enums: Type.Array(Type.String()), attributes: Type.Array(Type.Object({ name: Type.String(), type_id: Type.Integer() })), comment: Type.Union([Type.String(), Type.Null()]), + type_relation_id: Type.Union([Type.Integer(), Type.Null()]), }) export type PostgresType = Static diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index f0079874..1b527686 100644 --- 
a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -8,7 +8,11 @@ import type { PostgresType, PostgresView, } from '../../lib/index.js' -import { GENERATE_TYPES_DEFAULT_SCHEMA, VALID_FUNCTION_ARGS_MODE } from '../constants.js' +import { + GENERATE_TYPES_DEFAULT_SCHEMA, + VALID_FUNCTION_ARGS_MODE, + VALID_UNNAMED_FUNCTION_ARG_TYPES, +} from '../constants.js' type TsRelationship = Pick< GeneratorMetadata['relationships'][number], @@ -56,13 +60,17 @@ export const apply = async ({ { tables: [], views: [], functions: [], enums: [], compositeTypes: [] }, ]) ) - - const columnsByTableId = Object.fromEntries( - [...tables, ...foreignTables, ...views, ...materializedViews].map((t) => [t.id, []]) - ) + const columnsByTableId: Record = {} + const tablesNamesByTableId: Record = {} + const relationTypeByIds = new Map() // group types by id for quicker lookup const typesById = new Map() + const tablesLike = [...tables, ...foreignTables, ...views, ...materializedViews] + for (const tableLike of tablesLike) { + columnsByTableId[tableLike.id] = [] + tablesNamesByTableId[tableLike.id] = tableLike.name + } for (const column of columns) { if (column.table_id in columnsByTableId) { columnsByTableId[column.table_id].push(column) @@ -74,6 +82,10 @@ export const apply = async ({ for (const type of types) { typesById.set(type.id, type) + // Save all the types that are relation types for quicker lookup + if (type.type_relation_id) { + relationTypeByIds.set(type.id, type) + } if (type.schema in introspectionBySchema) { if (type.enums.length > 0) { introspectionBySchema[type.schema].enums.push(type) @@ -143,19 +155,49 @@ export const apply = async ({ }) } } + // Helper function to get table/view name from relation id + const getTableNameFromRelationId = ( + relationId: number | null, + returnTypeId: number | null + ): string | null => { + if (!relationId) return null + + if (tablesNamesByTableId[relationId]) return tablesNamesByTableId[relationId] + // if it's a 
composite type we use the type name as relation name to allow sub-selecting fields of the composite type + const reltype = returnTypeId ? relationTypeByIds.get(returnTypeId) : null + return reltype ? reltype.name : null + } + for (const func of functions) { if (func.schema in introspectionBySchema) { func.args.sort((a, b) => a.name.localeCompare(b.name)) - // Either: - // 1. All input args are be named, or - // 2. There is only one input arg which is unnamed + // Get all input args (in, inout, variadic modes) const inArgs = func.args.filter(({ mode }) => VALID_FUNCTION_ARGS_MODE.has(mode)) if ( - // Case 1: Function has a single parameter - inArgs.length === 1 || + // Case 1: Function has no parameters + inArgs.length === 0 || // Case 2: All input args are named - !inArgs.some(({ name }) => name === '') + !inArgs.some(({ name }) => name === '') || + // Case 3: All unnamed args have default values AND are valid types + inArgs.every((arg) => { + if (arg.name === '') { + return arg.has_default && VALID_UNNAMED_FUNCTION_ARG_TYPES.has(arg.type_id) + } + return true + }) || + // Case 4: Single unnamed parameter of valid type (json, jsonb, text) + // Exclude all functions definitions that have only one single argument unnamed argument that isn't + // a json/jsonb/text as it won't be considered by PostgREST + (inArgs.length === 1 && + inArgs[0].name === '' && + (VALID_UNNAMED_FUNCTION_ARG_TYPES.has(inArgs[0].type_id) || + // OR if the function have a single unnamed args which is another table (embeded function) + (relationTypeByIds.get(inArgs[0].type_id) && + getTableNameFromRelationId(func.return_type_relation_id, func.return_type_id)) || + // OR if the function takes a table row but doesn't qualify as embedded (for error reporting) + (relationTypeByIds.get(inArgs[0].type_id) && + !getTableNameFromRelationId(func.return_type_relation_id, func.return_type_id)))) ) { introspectionBySchema[func.schema].functions.push({ fn: func, inArgs }) } @@ -170,10 +212,62 @@ export const 
apply = async ({ } const getFunctionTsReturnType = (fn: PostgresFunction, returnType: string) => { - return `${returnType}${fn.is_set_returning_function ? '[]' : ''}` + // Determine if this function should have SetofOptions + let setofOptionsInfo = '' + + const returnTableName = getTableNameFromRelationId( + fn.return_type_relation_id, + fn.return_type_id + ) + const returnsSetOfTable = fn.is_set_returning_function && fn.return_type_relation_id !== null + const returnsMultipleRows = fn.prorows !== null && fn.prorows > 1 + // Case 1: if the function returns a table, we need to add SetofOptions to allow selecting sub fields of the table + // Those can be used in rpc to select sub fields of a table + if (returnTableName) { + setofOptionsInfo = `SetofOptions: { + from: "*" + to: ${JSON.stringify(returnTableName)} + isOneToOne: ${Boolean(!returnsMultipleRows)} + isSetofReturn: ${fn.is_set_returning_function} + }` + } + // Case 2: if the function has a single table argument, we need to add SetofOptions to allow selecting sub fields of the table + // and set the right "from" and "to" values to allow selecting from a table row + if (fn.args.length === 1) { + const relationType = relationTypeByIds.get(fn.args[0].type_id) + + // Only add SetofOptions for functions with table arguments (embedded functions) + // or specific functions that RETURNS table-name + if (relationType) { + const sourceTable = relationType.format + // Case 1: Standard embedded function with proper setof detection + if (returnsSetOfTable && returnTableName) { + setofOptionsInfo = `SetofOptions: { + from: ${JSON.stringify(sourceTable)} + to: ${JSON.stringify(returnTableName)} + isOneToOne: ${Boolean(!returnsMultipleRows)} + isSetofReturn: true + }` + } + // Case 2: Handle RETURNS table-name those are always a one to one relationship + else if (returnTableName && !returnsSetOfTable) { + const targetTable = returnTableName + setofOptionsInfo = `SetofOptions: { + from: ${JSON.stringify(sourceTable)} + to: 
${JSON.stringify(targetTable)} + isOneToOne: true + isSetofReturn: false + }` + } + } + } + + return `${returnType}${fn.is_set_returning_function && returnsMultipleRows ? '[]' : ''} + ${setofOptionsInfo ? `${setofOptionsInfo}` : ''}` } const getFunctionReturnType = (schema: PostgresSchema, fn: PostgresFunction): string => { + // Case 1: `returns table`. const tableArgs = fn.args.filter(({ mode }) => mode === 'table') if (tableArgs.length > 0) { const argsNameAndType = tableArgs.map(({ name, type_id }) => { @@ -205,15 +299,25 @@ export const apply = async ({ )?.view if (relation) { return `{ - ${columnsByTableId[relation.id].map( - (column) => - `${JSON.stringify(column.name)}: ${pgTypeToTsType(schema, column.format, { - types, - schemas, - tables, - views, - })} ${column.is_nullable ? '| null' : ''}` - )} + ${columnsByTableId[relation.id] + .map((column) => + generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: column.is_nullable, + is_optional: false, + }, + { + types, + schemas, + tables, + views, + } + ) + ) + .join(',\n')} }` } @@ -230,35 +334,144 @@ export const apply = async ({ return 'unknown' } + // Special error case for functions that take table row but don't qualify as embedded functions + const hasTableRowError = (fn: PostgresFunction, inArgs: PostgresFunction['args']) => { + if ( + inArgs.length === 1 && + inArgs[0].name === '' && + relationTypeByIds.get(inArgs[0].type_id) && + !getTableNameFromRelationId(fn.return_type_relation_id, fn.return_type_id) + ) { + return true + } + return false + } + + // Check for generic conflict cases that need error reporting + const getConflictError = ( + schema: PostgresSchema, + fns: Array<{ fn: PostgresFunction; inArgs: PostgresFunction['args'] }>, + fn: PostgresFunction, + inArgs: PostgresFunction['args'] + ) => { + // If there is a single function definition, there is no conflict + if (fns.length <= 1) return null + + // Generic conflict detection patterns + // 
Pattern 1: No-args vs default-args conflicts + if (inArgs.length === 0) { + const conflictingFns = fns.filter(({ fn: otherFn, inArgs: otherInArgs }) => { + if (otherFn === fn) return false + return otherInArgs.length === 1 && otherInArgs[0].name === '' && otherInArgs[0].has_default + }) + + if (conflictingFns.length > 0) { + const conflictingFn = conflictingFns[0] + const returnTypeName = typesById.get(conflictingFn.fn.return_type_id)?.name || 'unknown' + return `Could not choose the best candidate function between: ${schema.name}.${fn.name}(), ${schema.name}.${fn.name}( => ${returnTypeName}). Try renaming the parameters or the function itself in the database so function overloading can be resolved` + } + } + + // Pattern 2: Same parameter name but different types (unresolvable overloads) + if (inArgs.length === 1 && inArgs[0].name !== '') { + const conflictingFns = fns.filter(({ fn: otherFn, inArgs: otherInArgs }) => { + if (otherFn === fn) return false + return ( + otherInArgs.length === 1 && + otherInArgs[0].name === inArgs[0].name && + otherInArgs[0].type_id !== inArgs[0].type_id + ) + }) + + if (conflictingFns.length > 0) { + const allConflictingFunctions = [{ fn, inArgs }, ...conflictingFns] + const conflictList = allConflictingFunctions + .sort((a, b) => { + const aArgs = a.inArgs + const bArgs = b.inArgs + return (aArgs[0]?.type_id || 0) - (bArgs[0]?.type_id || 0) + }) + .map((f) => { + const args = f.inArgs + return `${schema.name}.${fn.name}(${args.map((a) => `${a.name || ''} => ${typesById.get(a.type_id)?.name || 'unknown'}`).join(', ')})` + }) + .join(', ') + + return `Could not choose the best candidate function between: ${conflictList}. 
Try renaming the parameters or the function itself in the database so function overloading can be resolved` + } + } + + return null + } const getFunctionSignatures = ( schema: PostgresSchema, fns: Array<{ fn: PostgresFunction; inArgs: PostgresFunction['args'] }> ) => { - const args = fns - .map(({ inArgs }) => { - if (inArgs.length === 0) { - return 'Record' - } - const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { - const type = typesById.get(type_id) - let tsType = 'unknown' - if (type) { - tsType = pgTypeToTsType(schema, type.name, { - types, - schemas, - tables, - views, + return fns + .map(({ fn, inArgs }) => { + let argsType = 'never' + let returnType = getFunctionReturnType(schema, fn) + + // Check for specific error cases + const conflictError = getConflictError(schema, fns, fn, inArgs) + if (conflictError) { + if (inArgs.length > 0) { + const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { + const type = typesById.get(type_id) + let tsType = 'unknown' + if (type) { + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) + } + return { name, type: tsType, has_default } + }) + argsType = `{ ${argsNameAndType.map(({ name, type, has_default }) => `${JSON.stringify(name)}${has_default ? '?' : ''}: ${type}`)} }` + } + returnType = `{ error: true } & ${JSON.stringify(conflictError)}` + } else if (hasTableRowError(fn, inArgs)) { + // Special case for computed fields returning scalars functions + if (inArgs.length > 0) { + const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { + const type = typesById.get(type_id) + let tsType = 'unknown' + if (type) { + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) + } + return { name, type: tsType, has_default } }) + argsType = `{ ${argsNameAndType.map(({ name, type, has_default }) => `${JSON.stringify(name)}${has_default ? '?' 
: ''}: ${type}`)} }` } - return { name, type: tsType, has_default } - }) - return `{ ${argsNameAndType.map(({ name, type, has_default }) => `${JSON.stringify(name)}${has_default ? '?' : ''}: ${type}`)} }` + returnType = `{ error: true } & ${JSON.stringify(`the function ${schema.name}.${fn.name} with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache`)}` + } else if (inArgs.length > 0) { + const argsNameAndType = inArgs.map(({ name, type_id, has_default }) => { + const type = typesById.get(type_id) + let tsType = 'unknown' + if (type) { + tsType = pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }) + } + return { name, type: tsType, has_default } + }) + argsType = `{ ${argsNameAndType.map(({ name, type, has_default }) => `${JSON.stringify(name)}${has_default ? '?' : ''}: ${type}`)} }` + } + + return `{ Args: ${argsType}; Returns: ${getFunctionTsReturnType(fn, returnType)} }` }) - .toSorted() - // A function can have multiples definitions with differents args, but will always return the same type - .join(' | ') - return `{\nArgs: ${args}\n Returns: ${getFunctionTsReturnType(fns[0].fn, getFunctionReturnType(schema, fns[0].fn))}\n}` + .join(' |\n') } const internal_supabase_schema = postgrestVersion @@ -269,6 +482,14 @@ export const apply = async ({ }` : '' + function generateNullableUnionTsType(tsType: string, isNullable: boolean) { + // Only add the null union if the type is not unknown as unknown already includes null + if (tsType === 'unknown' || tsType === 'any' || !isNullable) { + return tsType + } + return `${tsType} | null` + } + function generateColumnTsDefinition( schema: PostgresSchema, column: { @@ -284,7 +505,7 @@ export const apply = async ({ views: PostgresView[] } ) { - return `${JSON.stringify(column.name)}${column.is_optional ? '?' : ''}: ${pgTypeToTsType(schema, column.format, context)} ${column.is_nullable ? 
'| null' : ''}` + return `${JSON.stringify(column.name)}${column.is_optional ? '?' : ''}: ${generateNullableUnionTsType(pgTypeToTsType(schema, column.format, context), column.is_nullable)}` } let output = ` @@ -324,7 +545,7 @@ export type Database = { ...schemaFunctions .filter(({ fn }) => fn.argument_types === table.name) .map(({ fn }) => { - return `${JSON.stringify(fn.name)}: ${getFunctionReturnType(schema, fn)} | null` + return `${JSON.stringify(fn.name)}: ${generateNullableUnionTsType(getFunctionReturnType(schema, fn), true)}` }), ]} } @@ -380,18 +601,26 @@ export type Database = { : schemaViews.map( ({ view, relationships }) => `${JSON.stringify(view.name)}: { Row: { - ${columnsByTableId[view.id].map((column) => - generateColumnTsDefinition( - schema, - { - name: column.name, - format: column.format, - is_nullable: column.is_nullable, - is_optional: false, - }, - { types, schemas, tables, views } - ) - )} + ${[ + ...columnsByTableId[view.id].map((column) => + generateColumnTsDefinition( + schema, + { + name: column.name, + format: column.format, + is_nullable: column.is_nullable, + is_optional: false, + }, + { types, schemas, tables, views } + ) + ), + ...schemaFunctions + .filter(({ fn }) => fn.argument_types === view.name) + .map( + ({ fn }) => + `${JSON.stringify(fn.name)}: ${generateNullableUnionTsType(getFunctionReturnType(schema, fn), true)}` + ), + ]} } ${ view.is_updatable @@ -443,7 +672,6 @@ export type Database = { if (schemaFunctions.length === 0) { return '[_ in never]: never' } - const schemaFunctionsGroupedByName = schemaFunctions.reduce( (acc, curr) => { acc[curr.fn.name] ??= [] @@ -489,12 +717,15 @@ export type Database = { const type = typesById.get(type_id) let tsType = 'unknown' if (type) { - tsType = `${pgTypeToTsType(schema, type.name, { - types, - schemas, - tables, - views, - })} | null` + tsType = `${generateNullableUnionTsType( + pgTypeToTsType(schema, type.name, { + types, + schemas, + tables, + views, + }), + true + )}` } return 
`${JSON.stringify(name)}: ${tsType}` })} diff --git a/test/db/00-init.sql b/test/db/00-init.sql index 3551a4e7..64107713 100644 --- a/test/db/00-init.sql +++ b/test/db/00-init.sql @@ -56,6 +56,17 @@ $$ language plpgsql; CREATE VIEW todos_view AS SELECT * FROM public.todos; -- For testing typegen on view-to-view relationships create view users_view as select * from public.users; +-- Create a more complex view for testing +CREATE VIEW user_todos_summary_view AS +SELECT + u.id as user_id, + u.name as user_name, + u.status as user_status, + COUNT(t.id) as todo_count, + array_agg(t.details) FILTER (WHERE t.details IS NOT NULL) as todo_details +FROM public.users u +LEFT JOIN public.todos t ON t."user-id" = u.id +GROUP BY u.id, u.name, u.status; create materialized view todos_matview as select * from public.todos; @@ -69,6 +80,11 @@ $$ select substring($1.details, 1, 3); $$ language sql stable; +create function public.blurb_varchar(public.todos_view) returns character varying as +$$ +select substring($1.details, 1, 3); +$$ language sql stable; + create function public.details_length(public.todos) returns integer as $$ select length($1.details); @@ -101,6 +117,15 @@ as $$ select * from public.users limit 1; $$; +create or replace function public.function_returning_single_row(todos public.todos) +returns public.users +language sql +stable +as $$ + select * from public.users limit 1; +$$; + + create or replace function public.function_returning_set_of_rows() returns setof public.users language sql @@ -117,6 +142,15 @@ as $$ select id, name from public.users; $$; +create or replace function public.function_returning_table_with_args(user_id int) +returns table (id int, name text) +language sql +stable +as $$ + select id, name from public.users WHERE id = user_id; +$$; + + create or replace function public.polymorphic_function(text) returns void language sql as ''; create or replace function public.polymorphic_function(bool) returns void language sql as ''; @@ -169,6 +203,20 @@ 
AS $$ SELECT * FROM public.users_audit WHERE user_id = user_row.id; $$; +CREATE OR REPLACE FUNCTION public.get_todos_by_matview(todos_matview) +RETURNS SETOF todos ROWS 1 +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM public.todos LIMIT 1; +$$; + +CREATE OR REPLACE FUNCTION public.search_todos_by_details(search_details text) +RETURNS SETOF todos +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM public.todos WHERE details ilike search_details; +$$; + CREATE OR REPLACE FUNCTION public.get_todos_setof_rows(user_row users) RETURNS SETOF todos LANGUAGE SQL STABLE @@ -182,3 +230,202 @@ LANGUAGE SQL STABLE AS $$ SELECT * FROM public.todos WHERE "user-id" = todo_row."user-id"; $$; + +-- SETOF composite_type - Returns multiple rows of a custom composite type +CREATE OR REPLACE FUNCTION public.get_composite_type_data() +RETURNS SETOF composite_type_with_array_attribute +LANGUAGE SQL STABLE +AS $$ + SELECT ROW(ARRAY['hello', 'world']::text[])::composite_type_with_array_attribute + UNION ALL + SELECT ROW(ARRAY['foo', 'bar']::text[])::composite_type_with_array_attribute; +$$; + +-- SETOF record - Returns multiple rows with structure defined in the function +CREATE OR REPLACE FUNCTION public.get_user_summary() +RETURNS SETOF record +LANGUAGE SQL STABLE +AS $$ + SELECT u.id, name, count(t.id) as todo_count + FROM public.users u + LEFT JOIN public.todos t ON t."user-id" = u.id + GROUP BY u.id, u.name; +$$; + +-- SETOF scalar_type - Returns multiple values of a basic type +CREATE OR REPLACE FUNCTION public.get_user_ids() +RETURNS SETOF bigint +LANGUAGE SQL STABLE +AS $$ + SELECT id FROM public.users; +$$; + + +-- Function returning view using scalar as input +CREATE OR REPLACE FUNCTION public.get_single_user_summary_from_view(search_user_id bigint) +RETURNS SETOF user_todos_summary_view +LANGUAGE SQL STABLE +ROWS 1 +AS $$ + SELECT * FROM user_todos_summary_view WHERE user_id = search_user_id; +$$; +-- Function returning view using table row as input +CREATE OR REPLACE FUNCTION 
public.get_single_user_summary_from_view(user_row users) +RETURNS SETOF user_todos_summary_view +LANGUAGE SQL STABLE +ROWS 1 +AS $$ + SELECT * FROM user_todos_summary_view WHERE user_id = user_row.id; +$$; +-- Function returning view using another view row as input +CREATE OR REPLACE FUNCTION public.get_single_user_summary_from_view(userview_row users_view) +RETURNS SETOF user_todos_summary_view +LANGUAGE SQL STABLE +ROWS 1 +AS $$ + SELECT * FROM user_todos_summary_view WHERE user_id = userview_row.id; +$$; + + +-- Function returning set of table rows using scalar as input +CREATE OR REPLACE FUNCTION public.get_todos_from_user(search_user_id bigint) +RETURNS SETOF todos +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM todos WHERE "user-id" = search_user_id; +$$; +-- Function returning set of table rows using table row as input +CREATE OR REPLACE FUNCTION public.get_todos_from_user(user_row users) +RETURNS SETOF todos +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM todos WHERE "user-id" = user_row.id; +$$; +-- Function returning set of table rows using another view row as input +CREATE OR REPLACE FUNCTION public.get_todos_from_user(userview_row users_view) +RETURNS SETOF todos +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM todos WHERE "user-id" = userview_row.id; +$$; + +-- Valid PostgreSQL function override that produces an unresolvable PostgREST function call +create function postgrest_unresolvable_function() returns void language sql as ''; +create function postgrest_unresolvable_function(a text) returns int language sql as 'select 1'; +create function postgrest_unresolvable_function(a int) returns text language sql as $$ + SELECT 'toto' +$$; +-- Valid PostgreSQL function override with different return types depending on the arguments +create function postgrest_resolvable_with_override_function() returns void language sql as ''; +create function postgrest_resolvable_with_override_function(a text) returns int language sql as 'select 1'; +create function postgrest_resolvable_with_override_function(b int)
returns text language sql as $$ + SELECT 'toto' +$$; +-- Function overrides returning setof tables +create function postgrest_resolvable_with_override_function(user_id bigint) returns setof users language sql stable as $$ + SELECT * FROM users WHERE id = user_id; +$$; +create function postgrest_resolvable_with_override_function(todo_id bigint, completed boolean) returns setof todos language sql stable as $$ + SELECT * FROM todos WHERE id = todo_id AND completed = completed; +$$; +-- Function override taking a table as argument and returning a setof +create function postgrest_resolvable_with_override_function(user_row users) returns setof todos language sql stable as $$ + SELECT * FROM todos WHERE "user-id" = user_row.id; +$$; + +create or replace function public.polymorphic_function_with_different_return(bool) returns int language sql as 'SELECT 1'; +create or replace function public.polymorphic_function_with_different_return(int) returns int language sql as 'SELECT 2'; +create or replace function public.polymorphic_function_with_different_return(text) returns text language sql as $$ SELECT 'foo' $$; + +create or replace function public.polymorphic_function_with_no_params_or_unnamed() returns int language sql as 'SELECT 1'; +create or replace function public.polymorphic_function_with_no_params_or_unnamed(bool) returns int language sql as 'SELECT 2'; +create or replace function public.polymorphic_function_with_no_params_or_unnamed(text) returns text language sql as $$ SELECT 'foo' $$; +-- Function with a single unnamed params that isn't a json/jsonb/text should never appears in the type gen as it won't be in postgrest schema +create or replace function public.polymorphic_function_with_unnamed_integer(int) returns int language sql as 'SELECT 1'; +create or replace function public.polymorphic_function_with_unnamed_json(json) returns int language sql as 'SELECT 1'; +create or replace function public.polymorphic_function_with_unnamed_jsonb(jsonb) returns int language 
sql as 'SELECT 1'; +create or replace function public.polymorphic_function_with_unnamed_text(text) returns int language sql as 'SELECT 1'; + +-- Functions with unnamed parameters that have default values +create or replace function public.polymorphic_function_with_unnamed_default() returns int language sql as 'SELECT 1'; +create or replace function public.polymorphic_function_with_unnamed_default(int default 42) returns int language sql as 'SELECT 2'; +create or replace function public.polymorphic_function_with_unnamed_default(text default 'default') returns text language sql as $$ SELECT 'foo' $$; + +-- Functions with unnamed parameters that have default values and multiple overloads +create or replace function public.polymorphic_function_with_unnamed_default_overload() returns int language sql as 'SELECT 1'; +create or replace function public.polymorphic_function_with_unnamed_default_overload(int default 42) returns int language sql as 'SELECT 2'; +create or replace function public.polymorphic_function_with_unnamed_default_overload(text default 'default') returns text language sql as $$ SELECT 'foo' $$; +create or replace function public.polymorphic_function_with_unnamed_default_overload(bool default true) returns int language sql as 'SELECT 3'; + +-- Test function with unnamed row parameter returning setof +CREATE OR REPLACE FUNCTION public.test_unnamed_row_setof(todos) +RETURNS SETOF todos +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM public.todos WHERE "user-id" = $1."user-id"; +$$; + +CREATE OR REPLACE FUNCTION public.test_unnamed_row_setof(users) +RETURNS SETOF todos +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM public.todos WHERE "user-id" = $1."id"; +$$; + + +CREATE OR REPLACE FUNCTION public.test_unnamed_row_setof(user_id bigint) +RETURNS SETOF todos +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM public.todos WHERE "user-id" = user_id; +$$; + +-- Test function with unnamed row parameter returning scalar +CREATE OR REPLACE FUNCTION 
public.test_unnamed_row_scalar(todos) +RETURNS integer +LANGUAGE SQL STABLE +AS $$ + SELECT COUNT(*) FROM public.todos WHERE "user-id" = $1."user-id"; +$$; + +-- Test function with unnamed view row parameter +CREATE OR REPLACE FUNCTION public.test_unnamed_view_row(todos_view) +RETURNS SETOF todos +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM public.todos WHERE "user-id" = $1."user-id"; +$$; + +-- Test function with multiple unnamed row parameters +CREATE OR REPLACE FUNCTION public.test_unnamed_multiple_rows(users, todos) +RETURNS SETOF todos +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM public.todos + WHERE "user-id" = $1.id + AND id = $2.id; +$$; + +-- Test function with unnamed row parameter returning composite +CREATE OR REPLACE FUNCTION public.test_unnamed_row_composite(users) +RETURNS composite_type_with_array_attribute +LANGUAGE SQL STABLE +AS $$ + SELECT ROW(ARRAY[$1.name])::composite_type_with_array_attribute; +$$; + +-- Function that returns a single element +CREATE OR REPLACE FUNCTION public.function_using_table_returns(user_row users) +RETURNS todos +LANGUAGE SQL STABLE +AS $$ + SELECT * FROM public.todos WHERE todos."user-id" = user_row.id LIMIT 1; +$$; + +CREATE OR REPLACE FUNCTION public.function_using_setof_rows_one(user_row users) +RETURNS SETOF todos +LANGUAGE SQL STABLE +ROWS 1 +AS $$ + SELECT * FROM public.todos WHERE todos."user-id" = user_row.id LIMIT 1; +$$; diff --git a/test/lib/functions.ts b/test/lib/functions.ts index fb2c4692..9d6088b6 100644 --- a/test/lib/functions.ts +++ b/test/lib/functions.ts @@ -36,6 +36,7 @@ test('list', async () => { "is_set_returning_function": false, "language": "sql", "name": "add", + "prorows": null, "return_type": "integer", "return_type_id": 23, "return_type_relation_id": null, @@ -46,6 +47,128 @@ test('list', async () => { ) }) +test('list set-returning function with single object limit', async () => { + const res = await pgMeta.functions.list() + expect(res.data?.filter(({ name }) => name === 
'get_user_audit_setof_single_row')) + .toMatchInlineSnapshot(` + [ + { + "args": [ + { + "has_default": false, + "mode": "in", + "name": "user_row", + "type_id": 16395, + }, + ], + "argument_types": "user_row users", + "behavior": "STABLE", + "complete_statement": "CREATE OR REPLACE FUNCTION public.get_user_audit_setof_single_row(user_row users) + RETURNS SETOF users_audit + LANGUAGE sql + STABLE ROWS 1 + AS $function$ + SELECT * FROM public.users_audit WHERE user_id = user_row.id; + $function$ + ", + "config_params": null, + "definition": " + SELECT * FROM public.users_audit WHERE user_id = user_row.id; + ", + "id": 16506, + "identity_argument_types": "user_row users", + "is_set_returning_function": true, + "language": "sql", + "name": "get_user_audit_setof_single_row", + "prorows": 1, + "return_type": "SETOF users_audit", + "return_type_id": 16418, + "return_type_relation_id": 16416, + "schema": "public", + "security_definer": false, + }, + ] + `) +}) + +test('list set-returning function with multiples definitions', async () => { + const res = await pgMeta.functions.list() + expect(res.data?.filter(({ name }) => name === 'get_todos_setof_rows')).toMatchInlineSnapshot(` + [ + { + "args": [ + { + "has_default": false, + "mode": "in", + "name": "user_row", + "type_id": 16395, + }, + ], + "argument_types": "user_row users", + "behavior": "STABLE", + "complete_statement": "CREATE OR REPLACE FUNCTION public.get_todos_setof_rows(user_row users) + RETURNS SETOF todos + LANGUAGE sql + STABLE + AS $function$ + SELECT * FROM public.todos WHERE "user-id" = user_row.id; + $function$ + ", + "config_params": null, + "definition": " + SELECT * FROM public.todos WHERE "user-id" = user_row.id; + ", + "id": 16509, + "identity_argument_types": "user_row users", + "is_set_returning_function": true, + "language": "sql", + "name": "get_todos_setof_rows", + "prorows": 1000, + "return_type": "SETOF todos", + "return_type_id": 16404, + "return_type_relation_id": 16402, + "schema": 
"public", + "security_definer": false, + }, + { + "args": [ + { + "has_default": false, + "mode": "in", + "name": "todo_row", + "type_id": 16404, + }, + ], + "argument_types": "todo_row todos", + "behavior": "STABLE", + "complete_statement": "CREATE OR REPLACE FUNCTION public.get_todos_setof_rows(todo_row todos) + RETURNS SETOF todos + LANGUAGE sql + STABLE + AS $function$ + SELECT * FROM public.todos WHERE "user-id" = todo_row."user-id"; + $function$ + ", + "config_params": null, + "definition": " + SELECT * FROM public.todos WHERE "user-id" = todo_row."user-id"; + ", + "id": 16510, + "identity_argument_types": "todo_row todos", + "is_set_returning_function": true, + "language": "sql", + "name": "get_todos_setof_rows", + "prorows": 1000, + "return_type": "SETOF todos", + "return_type_id": 16404, + "return_type_relation_id": 16402, + "schema": "public", + "security_definer": false, + }, + ] + `) +}) + test('list functions with included schemas', async () => { let res = await pgMeta.functions.list({ includedSchemas: ['public'], @@ -136,6 +259,7 @@ test('retrieve, create, update, delete', async () => { "is_set_returning_function": false, "language": "sql", "name": "test_func", + "prorows": null, "return_type": "integer", "return_type_id": 23, "return_type_relation_id": null, @@ -186,6 +310,7 @@ test('retrieve, create, update, delete', async () => { "is_set_returning_function": false, "language": "sql", "name": "test_func", + "prorows": null, "return_type": "integer", "return_type_id": 23, "return_type_relation_id": null, @@ -240,6 +365,7 @@ test('retrieve, create, update, delete', async () => { "is_set_returning_function": false, "language": "sql", "name": "test_func_renamed", + "prorows": null, "return_type": "integer", "return_type_id": 23, "return_type_relation_id": null, @@ -290,6 +416,7 @@ test('retrieve, create, update, delete', async () => { "is_set_returning_function": false, "language": "sql", "name": "test_func_renamed", + "prorows": null, "return_type": 
"integer", "return_type_id": 23, "return_type_relation_id": null, @@ -345,6 +472,7 @@ test('retrieve set-returning function', async () => { "is_set_returning_function": true, "language": "sql", "name": "function_returning_set_of_rows", + "prorows": 1000, "return_type": "SETOF users", "return_type_id": Any, "return_type_relation_id": Any, diff --git a/test/lib/types.ts b/test/lib/types.ts index fb8c8f30..349a1b80 100644 --- a/test/lib/types.ts +++ b/test/lib/types.ts @@ -17,6 +17,7 @@ test('list', async () => { "id": Any, "name": "user_status", "schema": "public", + "type_relation_id": null, } ` ) @@ -73,6 +74,7 @@ test('list types with include Table Types', async () => { "id": Any, "name": "todos", "schema": "public", + "type_relation_id": 16402, } ` ) @@ -93,7 +95,7 @@ test('composite type attributes', async () => { const res = await pgMeta.types.list() expect(res.data?.find(({ name }) => name === 'test_composite')).toMatchInlineSnapshot( - { id: expect.any(Number) }, + { id: expect.any(Number), type_relation_id: expect.any(Number) }, ` { "attributes": [ @@ -112,6 +114,7 @@ test('composite type attributes', async () => { "id": Any, "name": "test_composite", "schema": "public", + "type_relation_id": Any, } ` ) diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 76ac6218..f101adde 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -129,6 +129,12 @@ test('typegen: typescript', async () => { details_is_long: boolean | null details_length: number | null details_words: string[] | null + test_unnamed_row_scalar: number | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null } Insert: { details?: string | null @@ -150,24 +156,1122 @@ test('typegen: typescript', async () => { { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + 
referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + user_details: { + Row: { + details: string | null + user_id: number + } + Insert: { + details?: string | null + user_id: number + } + Update: { + details?: string | null + user_id?: number + } + Relationships: [ + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + users: { + Row: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + test_unnamed_row_composite: + | Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + | null + test_unnamed_row_setof: { + details: string | 
null + id: number + "user-id": number + } | null + } + Insert: { + decimal?: number | null + id?: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Update: { + decimal?: number | null + id?: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + users_audit: { + Row: { + created_at: string | null + id: number + previous_value: Json | null + user_id: number | null + } + Insert: { + created_at?: string | null + id?: number + previous_value?: Json | null + user_id?: number | null + } + Update: { + created_at?: string | null + id?: number + previous_value?: Json | null + user_id?: number | null + } + Relationships: [] + } + } + Views: { + a_view: { + Row: { + id: number | null + } + Insert: { + id?: number | null + } + Update: { + id?: number | null + } + Relationships: [] + } + todos_matview: { + Row: { + details: string | null + id: number | null + "user-id": number | null + get_todos_by_matview: { + details: string | null + id: number + "user-id": number + } | null + } + Relationships: [ + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, 
+ ] + } + todos_view: { + Row: { + details: string | null + id: number | null + "user-id": number | null + blurb_varchar: string | null + test_unnamed_view_row: { + details: string | null + id: number + "user-id": number + } | null + } + Insert: { + details?: string | null + id?: number | null + "user-id"?: number | null + } + Update: { + details?: string | null + id?: number | null + "user-id"?: number | null + } + Relationships: [ + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["initial_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + referencedRelation: "users_view_with_multiple_refs_to_users" + referencedColumns: ["second_id"] + }, + ] + } + user_todos_summary_view: { + Row: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + users_view: { + Row: { + decimal: number | null + id: number | null + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + Insert: { + decimal?: number | null + id?: number | null + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Update: { + decimal?: number | null + id?: number | null + name?: string | null + status?: 
Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + users_view_with_multiple_refs_to_users: { + Row: { + initial_id: number | null + initial_name: string | null + second_id: number | null + second_name: string | null + } + Relationships: [] + } + } + Functions: { + blurb: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.blurb with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + blurb_varchar: + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + | { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + details_is_long: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.details_is_long with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + details_length: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.details_length with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + details_words: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.details_words with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + function_returning_row: { + Args: never + Returns: { + decimal: number | null + id: number + name: string | null + status: 
Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "*" + to: "users" + isOneToOne: true + isSetofReturn: false + } + } + function_returning_set_of_rows: { + Args: never + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + function_returning_single_row: { + Args: { todos: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "todos" + to: "users" + isOneToOne: true + isSetofReturn: false + } + } + function_returning_table: { + Args: never + Returns: { + id: number + name: string + }[] + } + function_returning_table_with_args: { + Args: { user_id: number } + Returns: { + id: number + name: string + }[] + } + function_using_setof_rows_one: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: true + } + } + function_using_table_returns: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: false + } + } + get_composite_type_data: { + Args: never + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"][] + SetofOptions: { + from: "*" + to: "composite_type_with_array_attribute" + isOneToOne: false + isSetofReturn: true + } + } + get_single_user_summary_from_view: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | 
null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users_view" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "*" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + get_todos_by_matview: { + Args: { "": unknown } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "todos_matview" + to: "todos" + isOneToOne: true + isSetofReturn: true + } + } + get_todos_from_user: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + get_todos_setof_rows: + | { + Args: { user_row: 
Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + get_user_audit_setof_single_row: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + created_at: string | null + id: number + previous_value: Json | null + user_id: number | null + } + SetofOptions: { + from: "users" + to: "users_audit" + isOneToOne: true + isSetofReturn: true + } + } + get_user_ids: { Args: never; Returns: number[] } + get_user_summary: { Args: never; Returns: Record[] } + polymorphic_function: { Args: { "": string }; Returns: undefined } + polymorphic_function_with_different_return: { + Args: { "": string } + Returns: string + } + polymorphic_function_with_no_params_or_unnamed: + | { Args: never; Returns: number } + | { Args: { "": string }; Returns: string } + polymorphic_function_with_unnamed_default: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default(), public.polymorphic_function_with_unnamed_default( => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_default_overload: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default_overload(), public.polymorphic_function_with_unnamed_default_overload( => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_json: { + Args: { "": Json } + Returns: number + } + polymorphic_function_with_unnamed_jsonb: { + Args: { "": Json } + Returns: number + } + polymorphic_function_with_unnamed_text: { + Args: { "": string } + Returns: number + } + postgres_fdw_disconnect: { Args: { "": string }; Returns: boolean } + postgres_fdw_disconnect_all: { Args: never; Returns: boolean } + postgres_fdw_get_connections: { + Args: never + Returns: Record[] + } + postgres_fdw_handler: { Args: never; Returns: unknown } + postgrest_resolvable_with_override_function: + | { Args: { a: string }; Returns: number } + | { + Args: { user_id: number } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { completed: boolean; todo_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { Args: { b: number }; Returns: string } + | { Args: never; Returns: undefined } + postgrest_unresolvable_function: + | { + Args: { a: string } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { + Args: { a: number } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: never; Returns: undefined } + search_todos_by_details: { + Args: { search_details: string } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_internal_query: { Args: never; Returns: undefined } + test_unnamed_row_composite: { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + SetofOptions: { + from: "users" + to: "composite_type_with_array_attribute" + isOneToOne: true + isSetofReturn: false + } + } + test_unnamed_row_scalar: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.test_unnamed_row_scalar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + test_unnamed_row_setof: + | { + Args: { user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { 
+ from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_unnamed_view_row: { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + } + Enums: { + meme_status: "new" | "old" | "retired" + user_status: "ACTIVE" | "INACTIVE" + } + CompositeTypes: { + composite_type_with_array_attribute: { + my_text_array: string[] | null + } + composite_type_with_record_attribute: { + todo: Database["public"]["Tables"]["todos"]["Row"] | null + } + } + } + } + + type DatabaseWithoutInternals = Omit + + type DefaultSchema = DatabaseWithoutInternals[Extract] + + export type Tables< + DefaultSchemaTableNameOrOptions extends + | keyof (DefaultSchema["Tables"] & DefaultSchema["Views"]) + | { schema: keyof DatabaseWithoutInternals }, + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? keyof (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"]) + : never = never, + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? (DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] & + DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Views"])[TableName] extends { + Row: infer R + } + ? R + : never + : DefaultSchemaTableNameOrOptions extends keyof (DefaultSchema["Tables"] & + DefaultSchema["Views"]) + ? (DefaultSchema["Tables"] & + DefaultSchema["Views"])[DefaultSchemaTableNameOrOptions] extends { + Row: infer R + } + ? 
R + : never + : never + + export type TablesInsert< + DefaultSchemaTableNameOrOptions extends + | keyof DefaultSchema["Tables"] + | { schema: keyof DatabaseWithoutInternals }, + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + : never = never, + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + Insert: infer I + } + ? I + : never + : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { + Insert: infer I + } + ? I + : never + : never + + export type TablesUpdate< + DefaultSchemaTableNameOrOptions extends + | keyof DefaultSchema["Tables"] + | { schema: keyof DatabaseWithoutInternals }, + TableName extends DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? keyof DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"] + : never = never, + > = DefaultSchemaTableNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaTableNameOrOptions["schema"]]["Tables"][TableName] extends { + Update: infer U + } + ? U + : never + : DefaultSchemaTableNameOrOptions extends keyof DefaultSchema["Tables"] + ? DefaultSchema["Tables"][DefaultSchemaTableNameOrOptions] extends { + Update: infer U + } + ? U + : never + : never + + export type Enums< + DefaultSchemaEnumNameOrOptions extends + | keyof DefaultSchema["Enums"] + | { schema: keyof DatabaseWithoutInternals }, + EnumName extends DefaultSchemaEnumNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? 
keyof DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"] + : never = never, + > = DefaultSchemaEnumNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[DefaultSchemaEnumNameOrOptions["schema"]]["Enums"][EnumName] + : DefaultSchemaEnumNameOrOptions extends keyof DefaultSchema["Enums"] + ? DefaultSchema["Enums"][DefaultSchemaEnumNameOrOptions] + : never + + export type CompositeTypes< + PublicCompositeTypeNameOrOptions extends + | keyof DefaultSchema["CompositeTypes"] + | { schema: keyof DatabaseWithoutInternals }, + CompositeTypeName extends PublicCompositeTypeNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? keyof DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"] + : never = never, + > = PublicCompositeTypeNameOrOptions extends { + schema: keyof DatabaseWithoutInternals + } + ? DatabaseWithoutInternals[PublicCompositeTypeNameOrOptions["schema"]]["CompositeTypes"][CompositeTypeName] + : PublicCompositeTypeNameOrOptions extends keyof DefaultSchema["CompositeTypes"] + ? 
DefaultSchema["CompositeTypes"][PublicCompositeTypeNameOrOptions] + : never + + export const Constants = { + public: { + Enums: { + meme_status: ["new", "old", "retired"], + user_status: ["ACTIVE", "INACTIVE"], + }, + }, + } as const + " + ` + ) +}) + +test('typegen w/ one-to-one relationships', async () => { + const { body } = await app.inject({ + method: 'GET', + path: '/generators/typescript', + query: { detect_one_to_one_relationships: 'true' }, + }) + expect(body).toMatchInlineSnapshot( + ` + "export type Json = + | string + | number + | boolean + | null + | { [key: string]: Json | undefined } + | Json[] + + export type Database = { + public: { + Tables: { + category: { + Row: { + id: number + name: string + } + Insert: { + id?: number + name: string + } + Update: { + id?: number + name?: string + } + Relationships: [] + } + empty: { + Row: {} + Insert: {} + Update: {} + Relationships: [] + } + foreign_table: { + Row: { + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + Insert: { + id: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Update: { + id?: number + name?: string | null + status?: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } + memes: { + Row: { + category: number | null + created_at: string + id: number + metadata: Json | null + name: string + status: Database["public"]["Enums"]["meme_status"] | null + } + Insert: { + category?: number | null + created_at: string + id?: number + metadata?: Json | null + name: string + status?: Database["public"]["Enums"]["meme_status"] | null + } + Update: { + category?: number | null + created_at?: string + id?: number + metadata?: Json | null + name?: string + status?: Database["public"]["Enums"]["meme_status"] | null + } + Relationships: [ + { + foreignKeyName: "memes_category_fkey" + columns: ["category"] + isOneToOne: false + referencedRelation: "category" + referencedColumns: ["id"] 
+ }, + ] + } + table_with_other_tables_row_type: { + Row: { + col1: Database["public"]["Tables"]["user_details"]["Row"] | null + col2: Database["public"]["Views"]["a_view"]["Row"] | null + } + Insert: { + col1?: Database["public"]["Tables"]["user_details"]["Row"] | null + col2?: Database["public"]["Views"]["a_view"]["Row"] | null + } + Update: { + col1?: Database["public"]["Tables"]["user_details"]["Row"] | null + col2?: Database["public"]["Views"]["a_view"]["Row"] | null + } + Relationships: [] + } + table_with_primary_key_other_than_id: { + Row: { + name: string | null + other_id: number + } + Insert: { + name?: string | null + other_id?: number + } + Update: { + name?: string | null + other_id?: number + } + Relationships: [] + } + todos: { + Row: { + details: string | null + id: number + "user-id": number + blurb: string | null + blurb_varchar: string | null + details_is_long: boolean | null + details_length: number | null + details_words: string[] | null + test_unnamed_row_scalar: number | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null + } + Insert: { + details?: string | null + id?: number + "user-id": number + } + Update: { + details?: string | null + id?: number + "user-id"?: number + } + Relationships: [ + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "a_view" + referencedColumns: ["id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false referencedRelation: "users" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: 
"users_view_with_multiple_refs_to_users" referencedColumns: ["initial_id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["second_id"] }, @@ -190,30 +1294,42 @@ test('typegen: typescript', async () => { { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] + isOneToOne: true referencedRelation: "a_view" referencedColumns: ["id"] }, { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] + isOneToOne: true + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true referencedRelation: "users" referencedColumns: ["id"] }, { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] + isOneToOne: true referencedRelation: "users_view" referencedColumns: ["id"] }, { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] + isOneToOne: true referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["initial_id"] }, { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] + isOneToOne: true referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["second_id"] }, @@ -225,6 +1341,14 @@ test('typegen: typescript', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + test_unnamed_row_composite: + | Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null } Insert: { decimal?: number | null @@ -280,35 +1404,52 @@ test('typegen: typescript', async () => { details: string | null id: number | null "user-id": number | null + get_todos_by_matview: { + details: string | null + id: number + "user-id": number + } | null } Relationships: [ { foreignKeyName: "todos_user-id_fkey" columns: 
["user-id"] + isOneToOne: false referencedRelation: "a_view" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false referencedRelation: "users" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["initial_id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["second_id"] }, @@ -319,6 +1460,12 @@ test('typegen: typescript', async () => { details: string | null id: number | null "user-id": number | null + blurb_varchar: string | null + test_unnamed_view_row: { + details: string | null + id: number + "user-id": number + } | null } Insert: { details?: string | null @@ -334,35 +1481,57 @@ test('typegen: typescript', async () => { { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "a_view" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false referencedRelation: "users" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view" referencedColumns: ["id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view_with_multiple_refs_to_users" 
referencedColumns: ["initial_id"] }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] + isOneToOne: false referencedRelation: "users_view_with_multiple_refs_to_users" referencedColumns: ["second_id"] }, ] } + user_todos_summary_view: { + Row: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } users_view: { Row: { decimal: number | null @@ -397,59 +1566,277 @@ test('typegen: typescript', async () => { Functions: { blurb: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string - } - blurb_varchar: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string + Returns: { + error: true + } & "the function public.blurb with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + blurb_varchar: + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + | { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } details_is_long: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: boolean + Returns: { + error: true + } & "the function public.details_is_long with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } details_length: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: number + Returns: { + error: true + } & "the function public.details_length with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the 
schema cache" } details_words: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string[] + Returns: { + error: true + } & "the function public.details_words with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } function_returning_row: { - Args: Record + Args: never Returns: { decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } + SetofOptions: { + from: "*" + to: "users" + isOneToOne: true + isSetofReturn: false + } } function_returning_set_of_rows: { - Args: Record + Args: never Returns: { decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + function_returning_single_row: { + Args: { todos: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "todos" + to: "users" + isOneToOne: true + isSetofReturn: false + } } function_returning_table: { - Args: Record + Args: never + Returns: { + id: number + name: string + }[] + } + function_returning_table_with_args: { + Args: { user_id: number } + Returns: { + id: number + name: string + }[] + } + function_using_setof_rows_one: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: true + } + } + function_using_table_returns: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { + details: string | null id: number - name: string - }[] + "user-id": number + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: false + } + } + get_composite_type_data: { 
+ Args: never + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"][] + SetofOptions: { + from: "*" + to: "composite_type_with_array_attribute" + isOneToOne: false + isSetofReturn: true + } } - get_todos_setof_rows: { - Args: - | { todo_row: Database["public"]["Tables"]["todos"]["Row"] } - | { user_row: Database["public"]["Tables"]["users"]["Row"] } + get_single_user_summary_from_view: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users_view" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "*" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + get_todos_by_matview: { + Args: { "": unknown } Returns: { details: string | null id: number "user-id": number - }[] + } + SetofOptions: { + from: "todos_matview" + to: "todos" + isOneToOne: true + isSetofReturn: true + } } + get_todos_from_user: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users_view" 
+ to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + get_todos_setof_rows: + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } get_user_audit_setof_single_row: { Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { @@ -457,31 +1844,207 @@ test('typegen: typescript', async () => { id: number previous_value: Json | null user_id: number | null - }[] - } - polymorphic_function: { - Args: { "": boolean } | { "": string } - Returns: undefined + } + SetofOptions: { + from: "users" + to: "users_audit" + isOneToOne: true + isSetofReturn: true + } } - postgres_fdw_disconnect: { + get_user_ids: { Args: never; Returns: number[] } + get_user_summary: { Args: never; Returns: Record[] } + polymorphic_function: { Args: { "": string }; Returns: undefined } + polymorphic_function_with_different_return: { Args: { "": string } - Returns: boolean + Returns: string } - postgres_fdw_disconnect_all: { - Args: Record - Returns: boolean + polymorphic_function_with_no_params_or_unnamed: + | { Args: never; Returns: number } + | { Args: { "": string }; Returns: string } + 
polymorphic_function_with_unnamed_default: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default(), public.polymorphic_function_with_unnamed_default( => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_default_overload: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default_overload(), public.polymorphic_function_with_unnamed_default_overload( => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_json: { + Args: { "": Json } + Returns: number + } + polymorphic_function_with_unnamed_jsonb: { + Args: { "": Json } + Returns: number + } + polymorphic_function_with_unnamed_text: { + Args: { "": string } + Returns: number } + postgres_fdw_disconnect: { Args: { "": string }; Returns: boolean } + postgres_fdw_disconnect_all: { Args: never; Returns: boolean } postgres_fdw_get_connections: { - Args: Record + Args: never Returns: Record[] } - postgres_fdw_handler: { - Args: Record - Returns: unknown + postgres_fdw_handler: { Args: never; Returns: unknown } + postgrest_resolvable_with_override_function: + | { Args: { a: string }; Returns: number } + | { + Args: { user_id: number } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { completed: boolean; todo_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: 
"todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { Args: { b: number }; Returns: string } + | { Args: never; Returns: undefined } + postgrest_unresolvable_function: + | { + Args: { a: string } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { + Args: { a: number } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: never; Returns: undefined } + search_todos_by_details: { + Args: { search_details: string } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_internal_query: { Args: never; Returns: undefined } + test_unnamed_row_composite: { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + SetofOptions: { + from: "users" + to: "composite_type_with_array_attribute" + isOneToOne: true + isSetofReturn: false + } + } + test_unnamed_row_scalar: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.test_unnamed_row_scalar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema 
cache" } - test_internal_query: { - Args: Record - Returns: undefined + test_unnamed_row_setof: + | { + Args: { user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_unnamed_view_row: { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } } } Enums: { @@ -762,6 +2325,12 @@ test('typegen: typescript w/ one-to-one relationships', async () => { details_is_long: boolean | null details_length: number | null details_words: string[] | null + test_unnamed_row_scalar: number | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null } Insert: { details?: string | null @@ -781,6 +2350,13 @@ test('typegen: typescript w/ one-to-one relationships', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] @@ -832,6 +2408,13 @@ test('typegen: typescript w/ one-to-one relationships', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: 
"user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] @@ -868,6 +2451,14 @@ test('typegen: typescript w/ one-to-one relationships', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + test_unnamed_row_composite: + | Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null } Insert: { decimal?: number | null @@ -923,6 +2514,11 @@ test('typegen: typescript w/ one-to-one relationships', async () => { details: string | null id: number | null "user-id": number | null + get_todos_by_matview: { + details: string | null + id: number + "user-id": number + } | null } Relationships: [ { @@ -932,6 +2528,13 @@ test('typegen: typescript w/ one-to-one relationships', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] @@ -967,6 +2570,12 @@ test('typegen: typescript w/ one-to-one relationships', async () => { details: string | null id: number | null "user-id": number | null + blurb_varchar: string | null + test_unnamed_view_row: { + details: string | null + id: number + "user-id": number + } | null } Insert: { details?: string | null @@ -986,6 +2595,13 @@ test('typegen: typescript w/ one-to-one relationships', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "todos_user-id_fkey" 
columns: ["user-id"] @@ -1016,6 +2632,16 @@ test('typegen: typescript w/ one-to-one relationships', async () => { }, ] } + user_todos_summary_view: { + Row: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } users_view: { Row: { decimal: number | null @@ -1050,59 +2676,277 @@ test('typegen: typescript w/ one-to-one relationships', async () => { Functions: { blurb: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string - } - blurb_varchar: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string + Returns: { + error: true + } & "the function public.blurb with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + blurb_varchar: + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + | { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } details_is_long: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: boolean + Returns: { + error: true + } & "the function public.details_is_long with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } details_length: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: number + Returns: { + error: true + } & "the function public.details_length with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } details_words: { Args: { "": 
Database["public"]["Tables"]["todos"]["Row"] } - Returns: string[] + Returns: { + error: true + } & "the function public.details_words with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } function_returning_row: { - Args: Record + Args: never Returns: { decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } + SetofOptions: { + from: "*" + to: "users" + isOneToOne: true + isSetofReturn: false + } } function_returning_set_of_rows: { - Args: Record + Args: never Returns: { decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + function_returning_single_row: { + Args: { todos: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "todos" + to: "users" + isOneToOne: true + isSetofReturn: false + } } function_returning_table: { - Args: Record + Args: never + Returns: { + id: number + name: string + }[] + } + function_returning_table_with_args: { + Args: { user_id: number } Returns: { id: number name: string }[] } - get_todos_setof_rows: { - Args: - | { todo_row: Database["public"]["Tables"]["todos"]["Row"] } - | { user_row: Database["public"]["Tables"]["users"]["Row"] } + function_using_setof_rows_one: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { details: string | null id: number "user-id": number - }[] + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: true + } + } + function_using_table_returns: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "users" + to: 
"todos" + isOneToOne: true + isSetofReturn: false + } + } + get_composite_type_data: { + Args: never + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"][] + SetofOptions: { + from: "*" + to: "composite_type_with_array_attribute" + isOneToOne: false + isSetofReturn: true + } + } + get_single_user_summary_from_view: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users_view" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "*" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + get_todos_by_matview: { + Args: { "": unknown } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "todos_matview" + to: "todos" + isOneToOne: true + isSetofReturn: true + } } + get_todos_from_user: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { 
+ Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + get_todos_setof_rows: + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } get_user_audit_setof_single_row: { Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { @@ -1110,31 +2954,207 @@ test('typegen: typescript w/ one-to-one relationships', async () => { id: number previous_value: Json | null user_id: number | null - }[] - } - polymorphic_function: { - Args: { "": boolean } | { "": string } - Returns: undefined + } + SetofOptions: { + from: "users" + to: "users_audit" + isOneToOne: true + isSetofReturn: true + } } - postgres_fdw_disconnect: { + get_user_ids: { Args: never; Returns: number[] } + get_user_summary: { Args: never; Returns: Record[] } + polymorphic_function: { Args: { "": string }; Returns: undefined } + polymorphic_function_with_different_return: { Args: { "": string } - Returns: boolean + Returns: string + } + polymorphic_function_with_no_params_or_unnamed: + | { Args: never; Returns: number } + | { Args: { "": string }; Returns: string } + polymorphic_function_with_unnamed_default: + | { + Args: never + Returns: { + error: true + } & "Could 
not choose the best candidate function between: public.polymorphic_function_with_unnamed_default(), public.polymorphic_function_with_unnamed_default( => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_default_overload: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default_overload(), public.polymorphic_function_with_unnamed_default_overload( => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_json: { + Args: { "": Json } + Returns: number + } + polymorphic_function_with_unnamed_jsonb: { + Args: { "": Json } + Returns: number } - postgres_fdw_disconnect_all: { - Args: Record - Returns: boolean + polymorphic_function_with_unnamed_text: { + Args: { "": string } + Returns: number } + postgres_fdw_disconnect: { Args: { "": string }; Returns: boolean } + postgres_fdw_disconnect_all: { Args: never; Returns: boolean } postgres_fdw_get_connections: { - Args: Record + Args: never Returns: Record[] } - postgres_fdw_handler: { - Args: Record - Returns: unknown + postgres_fdw_handler: { Args: never; Returns: unknown } + postgrest_resolvable_with_override_function: + | { Args: { a: string }; Returns: number } + | { + Args: { user_id: number } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { completed: boolean; todo_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + 
isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { Args: { b: number }; Returns: string } + | { Args: never; Returns: undefined } + postgrest_unresolvable_function: + | { + Args: { a: string } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { + Args: { a: number } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: never; Returns: undefined } + search_todos_by_details: { + Args: { search_details: string } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_internal_query: { Args: never; Returns: undefined } + test_unnamed_row_composite: { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + SetofOptions: { + from: "users" + to: "composite_type_with_array_attribute" + isOneToOne: true + isSetofReturn: false + } + } + test_unnamed_row_scalar: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.test_unnamed_row_scalar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } - test_internal_query: 
{ - Args: Record - Returns: undefined + test_unnamed_row_setof: + | { + Args: { user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_unnamed_view_row: { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } } } Enums: { @@ -1420,6 +3440,12 @@ test('typegen: typescript w/ postgrestVersion', async () => { details_is_long: boolean | null details_length: number | null details_words: string[] | null + test_unnamed_row_scalar: number | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null } Insert: { details?: string | null @@ -1439,6 +3465,13 @@ test('typegen: typescript w/ postgrestVersion', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] @@ -1490,6 +3523,13 @@ test('typegen: typescript w/ postgrestVersion', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "user_details_user_id_fkey" + columns: ["user_id"] + isOneToOne: true + 
referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "user_details_user_id_fkey" columns: ["user_id"] @@ -1526,6 +3566,14 @@ test('typegen: typescript w/ postgrestVersion', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + test_unnamed_row_composite: + | Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + | null + test_unnamed_row_setof: { + details: string | null + id: number + "user-id": number + } | null } Insert: { decimal?: number | null @@ -1581,6 +3629,11 @@ test('typegen: typescript w/ postgrestVersion', async () => { details: string | null id: number | null "user-id": number | null + get_todos_by_matview: { + details: string | null + id: number + "user-id": number + } | null } Relationships: [ { @@ -1590,6 +3643,13 @@ test('typegen: typescript w/ postgrestVersion', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] @@ -1625,6 +3685,12 @@ test('typegen: typescript w/ postgrestVersion', async () => { details: string | null id: number | null "user-id": number | null + blurb_varchar: string | null + test_unnamed_view_row: { + details: string | null + id: number + "user-id": number + } | null } Insert: { details?: string | null @@ -1644,6 +3710,13 @@ test('typegen: typescript w/ postgrestVersion', async () => { referencedRelation: "a_view" referencedColumns: ["id"] }, + { + foreignKeyName: "todos_user-id_fkey" + columns: ["user-id"] + isOneToOne: false + referencedRelation: "user_todos_summary_view" + referencedColumns: ["user_id"] + }, { foreignKeyName: "todos_user-id_fkey" columns: ["user-id"] @@ -1674,6 +3747,16 @@ test('typegen: typescript w/ postgrestVersion', async () => { }, ] } 
+ user_todos_summary_view: { + Row: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + Relationships: [] + } users_view: { Row: { decimal: number | null @@ -1708,59 +3791,277 @@ test('typegen: typescript w/ postgrestVersion', async () => { Functions: { blurb: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string - } - blurb_varchar: { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string + Returns: { + error: true + } & "the function public.blurb with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + blurb_varchar: + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + | { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + error: true + } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } details_is_long: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: boolean + Returns: { + error: true + } & "the function public.details_is_long with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } details_length: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: number + Returns: { + error: true + } & "the function public.details_length with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } details_words: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } - Returns: string[] + Returns: { + error: true + } & "the function public.details_words with parameter or 
with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } function_returning_row: { - Args: Record + Args: never Returns: { decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null } + SetofOptions: { + from: "*" + to: "users" + isOneToOne: true + isSetofReturn: false + } } function_returning_set_of_rows: { - Args: Record + Args: never Returns: { decimal: number | null id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + function_returning_single_row: { + Args: { todos: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "todos" + to: "users" + isOneToOne: true + isSetofReturn: false + } } function_returning_table: { - Args: Record + Args: never + Returns: { + id: number + name: string + }[] + } + function_returning_table_with_args: { + Args: { user_id: number } Returns: { id: number name: string }[] } - get_todos_setof_rows: { - Args: - | { todo_row: Database["public"]["Tables"]["todos"]["Row"] } - | { user_row: Database["public"]["Tables"]["users"]["Row"] } + function_using_setof_rows_one: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { details: string | null id: number "user-id": number - }[] + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: true + } } + function_using_table_returns: { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: true + isSetofReturn: false + } + } + get_composite_type_data: { + Args: never + Returns: 
Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"][] + SetofOptions: { + from: "*" + to: "composite_type_with_array_attribute" + isOneToOne: false + isSetofReturn: true + } + } + get_single_user_summary_from_view: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users_view" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "users" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + todo_count: number | null + todo_details: string[] | null + user_id: number | null + user_name: string | null + user_status: Database["public"]["Enums"]["user_status"] | null + } + SetofOptions: { + from: "*" + to: "user_todos_summary_view" + isOneToOne: true + isSetofReturn: true + } + } + get_todos_by_matview: { + Args: { "": unknown } + Returns: { + details: string | null + id: number + "user-id": number + } + SetofOptions: { + from: "todos_matview" + to: "todos" + isOneToOne: true + isSetofReturn: true + } + } + get_todos_from_user: + | { + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + 
id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { search_user_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + get_todos_setof_rows: + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } get_user_audit_setof_single_row: { Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { @@ -1768,31 +4069,207 @@ test('typegen: typescript w/ postgrestVersion', async () => { id: number previous_value: Json | null user_id: number | null - }[] - } - polymorphic_function: { - Args: { "": boolean } | { "": string } - Returns: undefined + } + SetofOptions: { + from: "users" + to: "users_audit" + isOneToOne: true + isSetofReturn: true + } } - postgres_fdw_disconnect: { + get_user_ids: { Args: never; Returns: number[] } + get_user_summary: { Args: never; Returns: Record[] } + polymorphic_function: { Args: { "": string }; Returns: undefined } + polymorphic_function_with_different_return: { Args: { "": string } - Returns: boolean + Returns: string + } + polymorphic_function_with_no_params_or_unnamed: + | { Args: never; Returns: number } + | { Args: { "": string }; Returns: string } + polymorphic_function_with_unnamed_default: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default(), 
public.polymorphic_function_with_unnamed_default( => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_default_overload: + | { + Args: never + Returns: { + error: true + } & "Could not choose the best candidate function between: public.polymorphic_function_with_unnamed_default_overload(), public.polymorphic_function_with_unnamed_default_overload( => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: { ""?: string }; Returns: string } + polymorphic_function_with_unnamed_json: { + Args: { "": Json } + Returns: number } - postgres_fdw_disconnect_all: { - Args: Record - Returns: boolean + polymorphic_function_with_unnamed_jsonb: { + Args: { "": Json } + Returns: number + } + polymorphic_function_with_unnamed_text: { + Args: { "": string } + Returns: number } + postgres_fdw_disconnect: { Args: { "": string }; Returns: boolean } + postgres_fdw_disconnect_all: { Args: never; Returns: boolean } postgres_fdw_get_connections: { - Args: Record + Args: never Returns: Record[] } - postgres_fdw_handler: { - Args: Record - Returns: unknown + postgres_fdw_handler: { Args: never; Returns: unknown } + postgrest_resolvable_with_override_function: + | { Args: { a: string }; Returns: number } + | { + Args: { user_id: number } + Returns: { + decimal: number | null + id: number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null + }[] + SetofOptions: { + from: "*" + to: "users" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { completed: boolean; todo_id: number } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Returns: 
{ + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { Args: { b: number }; Returns: string } + | { Args: never; Returns: undefined } + postgrest_unresolvable_function: + | { + Args: { a: string } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { + Args: { a: number } + Returns: { + error: true + } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" + } + | { Args: never; Returns: undefined } + search_todos_by_details: { + Args: { search_details: string } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_internal_query: { Args: never; Returns: undefined } + test_unnamed_row_composite: { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] + SetofOptions: { + from: "users" + to: "composite_type_with_array_attribute" + isOneToOne: true + isSetofReturn: false + } + } + test_unnamed_row_scalar: { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + error: true + } & "the function public.test_unnamed_row_scalar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } - test_internal_query: { - Args: Record - Returns: undefined + test_unnamed_row_setof: + | { + Args: { user_id: number } + Returns: { 
+ details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "*" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + | { + Args: { "": Database["public"]["Tables"]["users"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "users" + to: "todos" + isOneToOne: false + isSetofReturn: true + } + } + test_unnamed_view_row: { + Args: { "": Database["public"]["Views"]["todos_view"]["Row"] } + Returns: { + details: string | null + id: number + "user-id": number + }[] + SetofOptions: { + from: "todos_view" + to: "todos" + isOneToOne: false + isSetofReturn: true + } } } Enums: { @@ -1964,9 +4441,9 @@ test('typegen: typescript consistent types definitions orders', async () => { DROP FUNCTION IF EXISTS test_func_gamma(integer, text, boolean) CASCADE; -- Alternative signatures for functions (different parameter orders) - DROP FUNCTION IF EXISTS test_func_alpha(text, boolean, integer) CASCADE; - DROP FUNCTION IF EXISTS test_func_beta(boolean, integer, text) CASCADE; - DROP FUNCTION IF EXISTS test_func_gamma(boolean, text, integer) CASCADE; + DROP FUNCTION IF EXISTS test_func_alpha_2(boolean, text, integer) CASCADE; + DROP FUNCTION IF EXISTS test_func_beta_2(text, boolean, integer) CASCADE; + DROP FUNCTION IF EXISTS test_func_gamma_2(boolean, integer, text) CASCADE; -- Drop tables DROP TABLE IF EXISTS test_table_alpha CASCADE; @@ -2136,19 +4613,19 @@ test('typegen: typescript consistent types definitions orders', async () => { }, }) - // Create functions in reverse order: gamma, beta, alpha with different parameter orders + // Create functions in reverse order: gamma, beta, alpha with same parameter orders await app.inject({ method: 
'POST', path: '/query', payload: { query: ` - CREATE FUNCTION test_func_gamma(param_c boolean, param_a integer, param_b text) + CREATE FUNCTION test_func_gamma(param_a integer, param_b text, param_c boolean) RETURNS boolean AS 'SELECT NOT param_c' LANGUAGE sql IMMUTABLE; - CREATE FUNCTION test_func_beta(param_b text, param_c boolean, param_a integer) + CREATE FUNCTION test_func_beta(param_a integer, param_b text, param_c boolean) RETURNS text AS 'SELECT param_b || ''_processed''' LANGUAGE sql IMMUTABLE; - CREATE FUNCTION test_func_alpha(param_c boolean, param_b text, param_a integer) + CREATE FUNCTION test_func_alpha(param_a integer, param_b text, param_c boolean) RETURNS integer AS 'SELECT param_a + 1' LANGUAGE sql IMMUTABLE; `, }, @@ -2371,6 +4848,10 @@ test('typegen: go', async () => { Status *string \`json:"status"\` } + type PublicAViewSelect struct { + Id *int64 \`json:"id"\` + } + type PublicTodosViewSelect struct { Details *string \`json:"details"\` Id *int64 \`json:"id"\` @@ -2384,8 +4865,12 @@ test('typegen: go', async () => { Status *string \`json:"status"\` } - type PublicAViewSelect struct { - Id *int64 \`json:"id"\` + type PublicUserTodosSummaryViewSelect struct { + TodoCount *int64 \`json:"todo_count"\` + TodoDetails []*string \`json:"todo_details"\` + UserId *int64 \`json:"user_id"\` + UserName *string \`json:"user_name"\` + UserStatus *string \`json:"user_status"\` } type PublicUsersViewWithMultipleRefsToUsersSelect struct { @@ -2738,6 +5223,20 @@ test('typegen: swift', async () => { case userId = "user-id" } } + internal struct UserTodosSummaryViewSelect: Codable, Hashable, Sendable { + internal let todoCount: Int64? + internal let todoDetails: [String]? + internal let userId: Int64? + internal let userName: String? + internal let userStatus: UserStatus? 
+ internal enum CodingKeys: String, CodingKey { + case todoCount = "todo_count" + case todoDetails = "todo_details" + case userId = "user_id" + case userName = "user_name" + case userStatus = "user_status" + } + } internal struct UsersViewSelect: Codable, Hashable, Sendable { internal let decimal: Decimal? internal let id: Int64? @@ -3109,6 +5608,20 @@ test('typegen: swift w/ public access control', async () => { case userId = "user-id" } } + public struct UserTodosSummaryViewSelect: Codable, Hashable, Sendable { + public let todoCount: Int64? + public let todoDetails: [String]? + public let userId: Int64? + public let userName: String? + public let userStatus: UserStatus? + public enum CodingKeys: String, CodingKey { + case todoCount = "todo_count" + case todoDetails = "todo_details" + case userId = "user_id" + case userName = "user_name" + case userStatus = "user_status" + } + } public struct UsersViewSelect: Codable, Hashable, Sendable { public let decimal: Decimal? public let id: Int64? 
From e561d8289eb84e7ea9efa8d42d4299f9d5e578b3 Mon Sep 17 00:00:00 2001 From: Greg Richardson Date: Fri, 17 Oct 2025 09:53:56 -0600 Subject: [PATCH 40/72] feat: optional parameter binding on query endpoint --- src/lib/PostgresMeta.ts | 2 +- src/lib/db.ts | 16 +++--- src/server/routes/query.ts | 21 +++----- test/server/query.ts | 106 +++++++++++++++++++++++++++++++++---- vitest.config.ts | 4 +- 5 files changed, 113 insertions(+), 36 deletions(-) diff --git a/src/lib/PostgresMeta.ts b/src/lib/PostgresMeta.ts index 91050383..eb931624 100644 --- a/src/lib/PostgresMeta.ts +++ b/src/lib/PostgresMeta.ts @@ -24,7 +24,7 @@ import { PostgresMetaResult, PoolConfig } from './types.js' export default class PostgresMeta { query: ( sql: string, - opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean } + opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean; parameters?: unknown[] } ) => Promise> end: () => Promise columnPrivileges: PostgresMetaColumnPrivileges diff --git a/src/lib/db.ts b/src/lib/db.ts index 263be4d8..d43ef8f5 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -23,7 +23,11 @@ pg.types.setTypeParser(1017, (x) => x) // _point // Ensure any query will have an appropriate error handler on the pool to prevent connections errors // to bubble up all the stack eventually killing the server -const poolerQueryHandleError = (pgpool: pg.Pool, sql: string): Promise> => { +const poolerQueryHandleError = ( + pgpool: pg.Pool, + sql: string, + parameters?: unknown[] +): Promise> => { return Sentry.startSpan( { op: 'db', name: 'poolerQuery' }, () => @@ -44,7 +48,7 @@ const poolerQueryHandleError = (pgpool: pg.Pool, sql: string): Promise) => { if (!rejected) { return resolve(results) @@ -64,7 +68,7 @@ const poolerQueryHandleError = (pgpool: pg.Pool, sql: string): Promise { query: ( sql: string, - opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean } + opts?: { statementQueryTimeout?: number; trackQueryInSentry?: boolean; parameters?: 
unknown[] } ) => Promise> end: () => Promise } = (config) => { @@ -108,7 +112,7 @@ export const init: (config: PoolConfig) => { return { async query( sql, - { statementQueryTimeout, trackQueryInSentry } = { trackQueryInSentry: true } + { statementQueryTimeout, trackQueryInSentry, parameters } = { trackQueryInSentry: true } ) { return Sentry.startSpan( // For metrics purposes, log the query that will be run if it's not an user provided query (with possibly sentitives infos) @@ -131,7 +135,7 @@ export const init: (config: PoolConfig) => { try { if (!pool) { const pool = new pg.Pool(config) - let res = await poolerQueryHandleError(pool, sqlWithStatementTimeout) + let res = await poolerQueryHandleError(pool, sqlWithStatementTimeout, parameters) if (Array.isArray(res)) { res = res.reverse().find((x) => x.rows.length !== 0) ?? { rows: [] } } @@ -139,7 +143,7 @@ export const init: (config: PoolConfig) => { return { data: res.rows, error: null } } - let res = await poolerQueryHandleError(pool, sqlWithStatementTimeout) + let res = await poolerQueryHandleError(pool, sqlWithStatementTimeout, parameters) if (Array.isArray(res)) { res = res.reverse().find((x) => x.rows.length !== 0) ?? 
{ rows: [] } } diff --git a/src/server/routes/query.ts b/src/server/routes/query.ts index c8f23bc9..467b1558 100644 --- a/src/server/routes/query.ts +++ b/src/server/routes/query.ts @@ -16,12 +16,8 @@ const errorOnEmptyQuery = (request: FastifyRequest) => { export default async (fastify: FastifyInstance) => { fastify.post<{ Headers: { pg: string; 'x-pg-application-name'?: string } - Body: { - query: string - } - Querystring: { - statementTimeoutSecs?: number - } + Body: { query: string; parameters?: any[] } + Querystring: { statementTimeoutSecs?: number } }>('/', async (request, reply) => { const statementTimeoutSecs = request.query.statementTimeoutSecs errorOnEmptyQuery(request) @@ -30,6 +26,7 @@ export default async (fastify: FastifyInstance) => { const { data, error } = await pgMeta.query(request.body.query, { trackQueryInSentry: true, statementQueryTimeout: statementTimeoutSecs, + parameters: request.body.parameters, }) await pgMeta.end() if (error) { @@ -43,9 +40,7 @@ export default async (fastify: FastifyInstance) => { fastify.post<{ Headers: { pg: string; 'x-pg-application-name'?: string } - Body: { - query: string - } + Body: { query: string } }>('/format', async (request, reply) => { errorOnEmptyQuery(request) const { data, error } = await Parser.Format(request.body.query) @@ -61,9 +56,7 @@ export default async (fastify: FastifyInstance) => { fastify.post<{ Headers: { pg: string; 'x-pg-application-name'?: string } - Body: { - query: string - } + Body: { query: string } }>('/parse', async (request, reply) => { errorOnEmptyQuery(request) const { data, error } = Parser.Parse(request.body.query) @@ -79,9 +72,7 @@ export default async (fastify: FastifyInstance) => { fastify.post<{ Headers: { pg: string; 'x-pg-application-name'?: string } - Body: { - ast: object - } + Body: { ast: object } }>('/deparse', async (request, reply) => { const { data, error } = Parser.Deparse(request.body.ast) diff --git a/test/server/query.ts b/test/server/query.ts index 
8a9d6076..2cd86f52 100644 --- a/test/server/query.ts +++ b/test/server/query.ts @@ -547,9 +547,7 @@ test('return interval as string', async () => { const res = await app.inject({ method: 'POST', path: '/query', - payload: { - query: `SELECT '1 day 1 hour 45 minutes'::interval`, - }, + payload: { query: `SELECT '1 day 1 hour 45 minutes'::interval` }, }) expect(res.json()).toMatchInlineSnapshot(` [ @@ -703,9 +701,7 @@ test('error with internalQuery property', async () => { const res = await app.inject({ method: 'POST', path: '/query', - payload: { - query: 'SELECT test_internal_query();', - }, + payload: { query: 'SELECT test_internal_query();' }, }) expect(res.json()).toMatchInlineSnapshot(` @@ -737,19 +733,107 @@ test('custom application_name', async () => { const res = await app.inject({ method: 'POST', path: '/query', - headers: { - 'x-pg-application-name': 'test', - }, + headers: { 'x-pg-application-name': 'test' }, + payload: { query: 'SHOW application_name;' }, + }) + + expect(res.json()).toMatchInlineSnapshot(` + [ + { + "application_name": "test", + }, + ] + `) +}) + +test('parameter binding with positional parameters', async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', payload: { - query: 'SHOW application_name;', + query: 'SELECT * FROM users WHERE id = $1 AND status = $2', + parameters: [1, 'ACTIVE'], }, }) + expect(res.json()).toMatchInlineSnapshot(` + [ + { + "decimal": null, + "id": 1, + "name": "Joe Bloggs", + "status": "ACTIVE", + }, + ] + `) +}) +test('parameter binding with single parameter', async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { query: 'SELECT name FROM users WHERE id = $1', parameters: [2] }, + }) expect(res.json()).toMatchInlineSnapshot(` [ { - "application_name": "test", + "name": "Jane Doe", }, ] `) }) + +test('parameter binding with no matches', async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { query: 'SELECT 
* FROM users WHERE id = $1', parameters: [999] }, + }) + expect(res.json()).toMatchInlineSnapshot(`[]`) +}) + +test('no parameters field', async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { query: 'SELECT COUNT(*) as count FROM users' }, + }) + expect(res.json()).toMatchInlineSnapshot(` + [ + { + "count": 2, + }, + ] + `) +}) + +test('parameter binding with empty parameters array', async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { query: 'SELECT COUNT(*) as count FROM users', parameters: [] }, + }) + expect(res.json()).toMatchInlineSnapshot(` + [ + { + "count": 2, + }, + ] + `) +}) + +test('parameter binding error - wrong parameter count', async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: 'SELECT * FROM users WHERE id = $1 AND status = $2', + parameters: [1], // Missing second parameter + }, + }) + expect(res.statusCode).toBe(400) + const json = res.json() + expect(json.code).toBe('08P01') + expect(json.message).toContain( + 'bind message supplies 1 parameters, but prepared statement "" requires 2' + ) +}) diff --git a/vitest.config.ts b/vitest.config.ts index 460baf6e..da50a76b 100644 --- a/vitest.config.ts +++ b/vitest.config.ts @@ -3,9 +3,7 @@ import { defineConfig } from 'vitest/config' export default defineConfig({ test: { - coverage: { - reporter: ['lcov'], - }, + coverage: { reporter: ['lcov'] }, maxConcurrency: 1, // https://github.com/vitest-dev/vitest/issues/317#issuecomment-1542319622 pool: 'forks', From 455b7445eb923169e93ff129f4807200c193a89a Mon Sep 17 00:00:00 2001 From: Greg Richardson Date: Fri, 17 Oct 2025 10:32:32 -0600 Subject: [PATCH 41/72] fix: any -> unknown --- src/server/routes/query.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/server/routes/query.ts b/src/server/routes/query.ts index 467b1558..2cc6ad94 100644 --- a/src/server/routes/query.ts +++ 
b/src/server/routes/query.ts @@ -16,7 +16,7 @@ const errorOnEmptyQuery = (request: FastifyRequest) => { export default async (fastify: FastifyInstance) => { fastify.post<{ Headers: { pg: string; 'x-pg-application-name'?: string } - Body: { query: string; parameters?: any[] } + Body: { query: string; parameters?: unknown[] } Querystring: { statementTimeoutSecs?: number } }>('/', async (request, reply) => { const statementTimeoutSecs = request.query.statementTimeoutSecs From f8471a30876802b474de529c32474355071d234b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Oct 2025 04:53:31 +0000 Subject: [PATCH 42/72] chore(deps): bump vite from 6.3.6 to 6.4.1 (#1004) Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 6.3.6 to 6.4.1. - [Release notes](https://github.com/vitejs/vite/releases) - [Changelog](https://github.com/vitejs/vite/blob/main/packages/vite/CHANGELOG.md) - [Commits](https://github.com/vitejs/vite/commits/create-vite@6.4.1/packages/vite) --- updated-dependencies: - dependency-name: vite dependency-version: 6.4.1 dependency-type: indirect ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 2787f0d6..b19d3a9c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8064,9 +8064,9 @@ } }, "node_modules/vite": { - "version": "6.3.6", - "resolved": "https://registry.npmjs.org/vite/-/vite-6.3.6.tgz", - "integrity": "sha512-0msEVHJEScQbhkbVTb/4iHZdJ6SXp/AvxL2sjwYQFfBqleHtnCqv1J3sa9zbWz/6kW1m9Tfzn92vW+kZ1WV6QA==", + "version": "6.4.1", + "resolved": "https://registry.npmjs.org/vite/-/vite-6.4.1.tgz", + "integrity": "sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==", "dev": true, "license": "MIT", "dependencies": { From dc50199ca163b32ba4bdfa601dd3a9076ed2b640 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Mon, 27 Oct 2025 16:21:27 +0100 Subject: [PATCH 43/72] fix(typegen): include partitioned tables types to types introspection (#1006) --- src/lib/sql/types.sql.ts | 2 +- test/db/00-init.sql | 36 +++ test/server/typegen.ts | 587 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 624 insertions(+), 1 deletion(-) diff --git a/src/lib/sql/types.sql.ts b/src/lib/sql/types.sql.ts index c230f23f..cc94ba54 100644 --- a/src/lib/sql/types.sql.ts +++ b/src/lib/sql/types.sql.ts @@ -47,7 +47,7 @@ from t.typrelid = 0 or ( select - c.relkind ${props.includeTableTypes ? `in ('c', 'r', 'v', 'm')` : `= 'c'`} + c.relkind ${props.includeTableTypes ? 
`in ('c', 'r', 'v', 'm', 'p')` : `= 'c'`} from pg_class c where diff --git a/test/db/00-init.sql b/test/db/00-init.sql index 64107713..f2161591 100644 --- a/test/db/00-init.sql +++ b/test/db/00-init.sql @@ -429,3 +429,39 @@ ROWS 1 AS $$ SELECT * FROM public.todos WHERE todos."user-id" = user_row.id LIMIT 1; $$; + +-- Function that return the created_ago computed field +CREATE OR REPLACE FUNCTION "public"."created_ago" ("public"."users_audit") RETURNS numeric LANGUAGE "sql" +SET + "search_path" TO '' AS $_$ + SELECT ROUND(EXTRACT(EPOCH FROM (NOW() - $1.created_at))); +$_$; + +-- Create a partitioned table for testing computed fields on partitioned tables +CREATE TABLE public.events ( + id bigint generated by default as identity, + created_at timestamptz default now(), + event_type text, + data jsonb, + primary key (id, created_at) +) partition by range (created_at); + +-- Create partitions for the events table +CREATE TABLE public.events_2024 PARTITION OF public.events +FOR VALUES FROM ('2024-01-01') TO ('2025-01-01'); + +CREATE TABLE public.events_2025 PARTITION OF public.events +FOR VALUES FROM ('2025-01-01') TO ('2026-01-01'); + +-- Insert some test data +INSERT INTO public.events (created_at, event_type, data) +VALUES + ('2024-06-15', 'login', '{"user": "alice"}'), + ('2025-03-20', 'logout', '{"user": "bob"}'); + +-- Function that returns computed field for partitioned table +CREATE OR REPLACE FUNCTION "public"."days_since_event" ("public"."events") RETURNS numeric LANGUAGE "sql" +SET + "search_path" TO '' AS $_$ + SELECT ROUND(EXTRACT(EPOCH FROM (NOW() - $1.created_at)) / 86400); +$_$; \ No newline at end of file diff --git a/test/server/typegen.ts b/test/server/typegen.ts index f101adde..4bb83d94 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -37,6 +37,70 @@ test('typegen: typescript', async () => { Update: {} Relationships: [] } + events: { + Row: { + created_at: string + data: Json | null + event_type: string | null + id: number + 
days_since_event: number | null + } + Insert: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Update: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Relationships: [] + } + events_2024: { + Row: { + created_at: string + data: Json | null + event_type: string | null + id: number + } + Insert: { + created_at?: string + data?: Json | null + event_type?: string | null + id: number + } + Update: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Relationships: [] + } + events_2025: { + Row: { + created_at: string + data: Json | null + event_type: string | null + id: number + } + Insert: { + created_at?: string + data?: Json | null + event_type?: string | null + id: number + } + Update: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Relationships: [] + } foreign_table: { Row: { id: number @@ -272,6 +336,7 @@ test('typegen: typescript', async () => { id: number previous_value: Json | null user_id: number | null + created_ago: number | null } Insert: { created_at?: string | null @@ -473,6 +538,18 @@ test('typegen: typescript', async () => { error: true } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + created_ago: { + Args: { "": Database["public"]["Tables"]["users_audit"]["Row"] } + Returns: { + error: true + } & "the function public.created_ago with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + days_since_event: { + Args: { "": Database["public"]["Tables"]["events"]["Row"] } + Returns: { + error: true + } & "the function public.days_since_event with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } details_is_long: { Args: { "": 
Database["public"]["Tables"]["todos"]["Row"] } Returns: { @@ -1122,6 +1199,70 @@ test('typegen w/ one-to-one relationships', async () => { Update: {} Relationships: [] } + events: { + Row: { + created_at: string + data: Json | null + event_type: string | null + id: number + days_since_event: number | null + } + Insert: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Update: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Relationships: [] + } + events_2024: { + Row: { + created_at: string + data: Json | null + event_type: string | null + id: number + } + Insert: { + created_at?: string + data?: Json | null + event_type?: string | null + id: number + } + Update: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Relationships: [] + } + events_2025: { + Row: { + created_at: string + data: Json | null + event_type: string | null + id: number + } + Insert: { + created_at?: string + data?: Json | null + event_type?: string | null + id: number + } + Update: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Relationships: [] + } foreign_table: { Row: { id: number @@ -1370,6 +1511,7 @@ test('typegen w/ one-to-one relationships', async () => { id: number previous_value: Json | null user_id: number | null + created_ago: number | null } Insert: { created_at?: string | null @@ -1583,6 +1725,18 @@ test('typegen w/ one-to-one relationships', async () => { error: true } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + created_ago: { + Args: { "": Database["public"]["Tables"]["users_audit"]["Row"] } + Returns: { + error: true + } & "the function public.created_ago with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + days_since_event: { + Args: { 
"": Database["public"]["Tables"]["events"]["Row"] } + Returns: { + error: true + } & "the function public.days_since_event with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } details_is_long: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } Returns: { @@ -2232,6 +2386,70 @@ test('typegen: typescript w/ one-to-one relationships', async () => { Update: {} Relationships: [] } + events: { + Row: { + created_at: string + data: Json | null + event_type: string | null + id: number + days_since_event: number | null + } + Insert: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Update: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Relationships: [] + } + events_2024: { + Row: { + created_at: string + data: Json | null + event_type: string | null + id: number + } + Insert: { + created_at?: string + data?: Json | null + event_type?: string | null + id: number + } + Update: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Relationships: [] + } + events_2025: { + Row: { + created_at: string + data: Json | null + event_type: string | null + id: number + } + Insert: { + created_at?: string + data?: Json | null + event_type?: string | null + id: number + } + Update: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Relationships: [] + } foreign_table: { Row: { id: number @@ -2480,6 +2698,7 @@ test('typegen: typescript w/ one-to-one relationships', async () => { id: number previous_value: Json | null user_id: number | null + created_ago: number | null } Insert: { created_at?: string | null @@ -2693,6 +2912,18 @@ test('typegen: typescript w/ one-to-one relationships', async () => { error: true } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema 
cache" } + created_ago: { + Args: { "": Database["public"]["Tables"]["users_audit"]["Row"] } + Returns: { + error: true + } & "the function public.created_ago with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + days_since_event: { + Args: { "": Database["public"]["Tables"]["events"]["Row"] } + Returns: { + error: true + } & "the function public.days_since_event with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } details_is_long: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } Returns: { @@ -3347,6 +3578,70 @@ test('typegen: typescript w/ postgrestVersion', async () => { Update: {} Relationships: [] } + events: { + Row: { + created_at: string + data: Json | null + event_type: string | null + id: number + days_since_event: number | null + } + Insert: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Update: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Relationships: [] + } + events_2024: { + Row: { + created_at: string + data: Json | null + event_type: string | null + id: number + } + Insert: { + created_at?: string + data?: Json | null + event_type?: string | null + id: number + } + Update: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Relationships: [] + } + events_2025: { + Row: { + created_at: string + data: Json | null + event_type: string | null + id: number + } + Insert: { + created_at?: string + data?: Json | null + event_type?: string | null + id: number + } + Update: { + created_at?: string + data?: Json | null + event_type?: string | null + id?: number + } + Relationships: [] + } foreign_table: { Row: { id: number @@ -3595,6 +3890,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { id: number previous_value: Json | null user_id: number | null + created_ago: number | 
null } Insert: { created_at?: string | null @@ -3808,6 +4104,18 @@ test('typegen: typescript w/ postgrestVersion', async () => { error: true } & "the function public.blurb_varchar with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + created_ago: { + Args: { "": Database["public"]["Tables"]["users_audit"]["Row"] } + Returns: { + error: true + } & "the function public.created_ago with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } + days_since_event: { + Args: { "": Database["public"]["Tables"]["events"]["Row"] } + Returns: { + error: true + } & "the function public.days_since_event with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" + } details_is_long: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } Returns: { @@ -4806,6 +5114,69 @@ test('typegen: go', async () => { OtherId *int64 \`json:"other_id"\` } + type PublicEventsSelect struct { + CreatedAt string \`json:"created_at"\` + Data interface{} \`json:"data"\` + EventType *string \`json:"event_type"\` + Id int64 \`json:"id"\` + } + + type PublicEventsInsert struct { + CreatedAt *string \`json:"created_at"\` + Data interface{} \`json:"data"\` + EventType *string \`json:"event_type"\` + Id *int64 \`json:"id"\` + } + + type PublicEventsUpdate struct { + CreatedAt *string \`json:"created_at"\` + Data interface{} \`json:"data"\` + EventType *string \`json:"event_type"\` + Id *int64 \`json:"id"\` + } + + type PublicEvents2024Select struct { + CreatedAt string \`json:"created_at"\` + Data interface{} \`json:"data"\` + EventType *string \`json:"event_type"\` + Id int64 \`json:"id"\` + } + + type PublicEvents2024Insert struct { + CreatedAt *string \`json:"created_at"\` + Data interface{} \`json:"data"\` + EventType *string \`json:"event_type"\` + Id int64 \`json:"id"\` + } + + type PublicEvents2024Update struct { + CreatedAt *string 
\`json:"created_at"\` + Data interface{} \`json:"data"\` + EventType *string \`json:"event_type"\` + Id *int64 \`json:"id"\` + } + + type PublicEvents2025Select struct { + CreatedAt string \`json:"created_at"\` + Data interface{} \`json:"data"\` + EventType *string \`json:"event_type"\` + Id int64 \`json:"id"\` + } + + type PublicEvents2025Insert struct { + CreatedAt *string \`json:"created_at"\` + Data interface{} \`json:"data"\` + EventType *string \`json:"event_type"\` + Id int64 \`json:"id"\` + } + + type PublicEvents2025Update struct { + CreatedAt *string \`json:"created_at"\` + Data interface{} \`json:"data"\` + EventType *string \`json:"event_type"\` + Id *int64 \`json:"id"\` + } + type PublicCategorySelect struct { Id int32 \`json:"id"\` Name string \`json:"name"\` @@ -4942,6 +5313,114 @@ test('typegen: swift', async () => { } internal struct EmptyUpdate: Codable, Hashable, Sendable { } + internal struct EventsSelect: Codable, Hashable, Sendable, Identifiable { + internal let createdAt: String + internal let data: AnyJSON? + internal let eventType: String? + internal let id: Int64 + internal enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + internal struct EventsInsert: Codable, Hashable, Sendable, Identifiable { + internal let createdAt: String? + internal let data: AnyJSON? + internal let eventType: String? + internal let id: Int64? + internal enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + internal struct EventsUpdate: Codable, Hashable, Sendable, Identifiable { + internal let createdAt: String? + internal let data: AnyJSON? + internal let eventType: String? + internal let id: Int64? 
+ internal enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + internal struct Events2024Select: Codable, Hashable, Sendable { + internal let createdAt: String + internal let data: AnyJSON? + internal let eventType: String? + internal let id: Int64 + internal enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + internal struct Events2024Insert: Codable, Hashable, Sendable { + internal let createdAt: String? + internal let data: AnyJSON? + internal let eventType: String? + internal let id: Int64 + internal enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + internal struct Events2024Update: Codable, Hashable, Sendable { + internal let createdAt: String? + internal let data: AnyJSON? + internal let eventType: String? + internal let id: Int64? + internal enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + internal struct Events2025Select: Codable, Hashable, Sendable { + internal let createdAt: String + internal let data: AnyJSON? + internal let eventType: String? + internal let id: Int64 + internal enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + internal struct Events2025Insert: Codable, Hashable, Sendable { + internal let createdAt: String? + internal let data: AnyJSON? + internal let eventType: String? + internal let id: Int64 + internal enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + internal struct Events2025Update: Codable, Hashable, Sendable { + internal let createdAt: String? 
+ internal let data: AnyJSON? + internal let eventType: String? + internal let id: Int64? + internal enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } internal struct ForeignTableSelect: Codable, Hashable, Sendable { internal let id: Int64 internal let name: String? @@ -5327,6 +5806,114 @@ test('typegen: swift w/ public access control', async () => { } public struct EmptyUpdate: Codable, Hashable, Sendable { } + public struct EventsSelect: Codable, Hashable, Sendable, Identifiable { + public let createdAt: String + public let data: AnyJSON? + public let eventType: String? + public let id: Int64 + public enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + public struct EventsInsert: Codable, Hashable, Sendable, Identifiable { + public let createdAt: String? + public let data: AnyJSON? + public let eventType: String? + public let id: Int64? + public enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + public struct EventsUpdate: Codable, Hashable, Sendable, Identifiable { + public let createdAt: String? + public let data: AnyJSON? + public let eventType: String? + public let id: Int64? + public enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + public struct Events2024Select: Codable, Hashable, Sendable { + public let createdAt: String + public let data: AnyJSON? + public let eventType: String? + public let id: Int64 + public enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + public struct Events2024Insert: Codable, Hashable, Sendable { + public let createdAt: String? 
+ public let data: AnyJSON? + public let eventType: String? + public let id: Int64 + public enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + public struct Events2024Update: Codable, Hashable, Sendable { + public let createdAt: String? + public let data: AnyJSON? + public let eventType: String? + public let id: Int64? + public enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + public struct Events2025Select: Codable, Hashable, Sendable { + public let createdAt: String + public let data: AnyJSON? + public let eventType: String? + public let id: Int64 + public enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + public struct Events2025Insert: Codable, Hashable, Sendable { + public let createdAt: String? + public let data: AnyJSON? + public let eventType: String? + public let id: Int64 + public enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } + public struct Events2025Update: Codable, Hashable, Sendable { + public let createdAt: String? + public let data: AnyJSON? + public let eventType: String? + public let id: Int64? + public enum CodingKeys: String, CodingKey { + case createdAt = "created_at" + case data = "data" + case eventType = "event_type" + case id = "id" + } + } public struct ForeignTableSelect: Codable, Hashable, Sendable { public let id: Int64 public let name: String? 
From 747025e25c91fbccd4cb1f8844bda01b61d0f9b9 Mon Sep 17 00:00:00 2001 From: Etienne Stalmans Date: Tue, 28 Oct 2025 12:13:12 +0100 Subject: [PATCH 44/72] chore: switch to trusted publishing NPM package publishing should happen through trusted publisher rather than npm token --- .github/workflows/release.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 5afcd6ee..39280fba 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -18,6 +18,7 @@ jobs: new-release-version: ${{ steps.semantic-release.outputs.new_release_version }} permissions: contents: write + id-token: write steps: - uses: actions/checkout@v5 @@ -30,12 +31,11 @@ jobs: npm run build - id: semantic-release - uses: cycjimmy/semantic-release-action@v4 + uses: cycjimmy/semantic-release-action@v5 with: - semantic_version: 21 + semantic_version: 25.0.1 # version with latest npm and support for trusted publishing env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - NPM_TOKEN: ${{ secrets.NPM_TOKEN }} docker-hub: name: Release on Docker Hub From 16a2a49d885099586e78bc48bb363d0c9caedc4f Mon Sep 17 00:00:00 2001 From: Bobbie Soedirgo Date: Mon, 10 Nov 2025 13:13:35 -0500 Subject: [PATCH 45/72] chore: update CODEOWNERS Repo has been actively developed by @avallete & @soedirgo, and we're pulling in the SDK team to reduce bus factor & add typegen for Python. API & FE teams have not been actively working on this repo. 
--- .github/CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f27f863b..8ce78c0b 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @supabase/api @supabase/frontend +* @avallete @soedirgo @supabase/sdk From 2b11c29964e41765353dbeae27b4727a45871be5 Mon Sep 17 00:00:00 2001 From: Bobbie Soedirgo Date: Mon, 10 Nov 2025 12:08:53 -0500 Subject: [PATCH 46/72] chore: bump pgsql-parser The latest version doesn't require node-gyp (for C compilation), so it should be less brittle on less common systems (e.g. NixOS) --- package-lock.json | 1170 ++---------------------------------- package.json | 2 +- src/lib/Parser.ts | 4 +- src/server/routes/query.ts | 2 +- test/server/query.ts | 113 ++-- 5 files changed, 125 insertions(+), 1166 deletions(-) diff --git a/package-lock.json b/package-lock.json index b19d3a9c..bb25a75e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -23,7 +23,7 @@ "pg-connection-string": "^2.7.0", "pg-format": "^1.0.4", "pg-protocol": "npm:@supabase/pg-protocol@0.0.2", - "pgsql-parser": "^13.16.0", + "pgsql-parser": "^17.8.2", "pino": "^9.5.0", "postgres-array": "^3.0.1", "prettier": "^3.3.3", @@ -639,12 +639,6 @@ "@sinclair/typebox": ">=0.26 <=0.32" } }, - "node_modules/@gar/promisify": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@gar/promisify/-/promisify-1.1.3.tgz", - "integrity": "sha512-k2Ty1JcVojjJFwrg/ThKi2ujJ7XNLYaFGNB/bWT9wGR+oSMJHMa5w+CUq6p/pVrKeNNgA7pCqEcjSnHVoqJQFw==", - "license": "MIT" - }, "node_modules/@isaacs/cliui": { "version": "8.0.2", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", @@ -780,66 +774,6 @@ "@jridgewell/sourcemap-codec": "^1.4.14" } }, - "node_modules/@mapbox/node-pre-gyp": { - "version": "1.0.11", - "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", - "integrity": 
"sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", - "license": "BSD-3-Clause", - "dependencies": { - "detect-libc": "^2.0.0", - "https-proxy-agent": "^5.0.0", - "make-dir": "^3.1.0", - "node-fetch": "^2.6.7", - "nopt": "^5.0.0", - "npmlog": "^5.0.1", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.11" - }, - "bin": { - "node-pre-gyp": "bin/node-pre-gyp" - } - }, - "node_modules/@mapbox/node-pre-gyp/node_modules/make-dir": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", - "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", - "license": "MIT", - "dependencies": { - "semver": "^6.0.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@mapbox/node-pre-gyp/node_modules/make-dir/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@mapbox/node-pre-gyp/node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "deprecated": "Rimraf versions prior to v4 are no longer supported", - "license": "ISC", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", @@ -878,51 +812,12 @@ "node": ">= 8" } }, - "node_modules/@npmcli/fs": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/@npmcli/fs/-/fs-1.1.1.tgz", - "integrity": "sha512-8KG5RD0GVP4ydEzRn/I4BNDuxDtqVbOdm8675T49OIG/NGhaK0pjPX7ZcDlvKYbA+ulvVK3ztfcF4uBdOxuJbQ==", - "license": "ISC", - "dependencies": { - "@gar/promisify": "^1.0.1", - "semver": "^7.3.5" - } - }, - "node_modules/@npmcli/move-file": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@npmcli/move-file/-/move-file-1.1.2.tgz", - "integrity": "sha512-1SUf/Cg2GzGDyaf15aR9St9TWlb+XvbZXWpDx8YKs7MLzMH/BCeopv+y9vzrzgkfykCGuWOlSu3mZhj2+FQcrg==", - "deprecated": "This functionality has been moved to @npmcli/fs", - "license": "MIT", - "dependencies": { - "mkdirp": "^1.0.4", - "rimraf": "^3.0.2" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@npmcli/move-file/node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "deprecated": "Rimraf versions prior to v4 are no longer supported", - "license": "ISC", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/@opentelemetry/api": { "version": "1.9.0", "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.0.tgz", "integrity": "sha512-3giAOQvZiH5F9bMlMiv8+GSPMeqg0dbaeo58/0SlA9sxSqZhnUtxzX9/2FzyhS9sWQf5S0GJE0AKBrFqjpeYcg==", "license": "Apache-2.0", + "peer": true, "engines": { "node": ">=8.0.0" } @@ -944,6 +839,7 @@ "resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-1.30.1.tgz", "integrity": "sha512-s5vvxXPVdjqS3kTLKMeBMvop9hbWkwzBpu+mUO2M7sZtlkyDJGwFe33wRKnbaYDo8ExRVBIIdwIGrqpxHuKttA==", "license": "Apache-2.0", + "peer": true, "engines": { "node": ">=14" }, @@ -980,6 +876,7 @@ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.57.2.tgz", "integrity": 
"sha512-BdBGhQBh8IjZ2oIIX6F2/Q3LKm/FDDKi6ccYKcBTeilh6SNdNKveDOLk73BkSJjQLJk6qe4Yh+hHw1UPhCDdrg==", "license": "Apache-2.0", + "peer": true, "dependencies": { "@opentelemetry/api-logs": "0.57.2", "@types/shimmer": "^1.2.0", @@ -1519,6 +1416,7 @@ "resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.32.0.tgz", "integrity": "sha512-s0OpmpQFSfMrmedAn9Lhg4KWJELHCU6uU9dtIJ28N8UGhf9Y55im5X8fEzwhwDwiSqN+ZPSNrDJF7ivf/AuRPQ==", "license": "Apache-2.0", + "peer": true, "engines": { "node": ">=14" } @@ -1539,9 +1437,9 @@ } }, "node_modules/@pgsql/types": { - "version": "13.9.0", - "resolved": "https://registry.npmjs.org/@pgsql/types/-/types-13.9.0.tgz", - "integrity": "sha512-R26mn0zMkwfR8imEQ1Q4NedHwG9gTUfgVnLJUBqPn33JyhOUi2H6iEVTcC9kHAm7gQGpwSBKfuCItWgenAlm9g==", + "version": "17.6.1", + "resolved": "https://registry.npmjs.org/@pgsql/types/-/types-17.6.1.tgz", + "integrity": "sha512-Hk51+nyOxS7Dy5oySWywyNZxo5HndX1VDXT4ZEBD+p+vvMFM2Vc+sKSuByCiI8banou4edbgdnOC251IOuq7QQ==", "license": "SEE LICENSE IN LICENSE" }, "node_modules/@pkgjs/parseargs": { @@ -1956,16 +1854,8 @@ "version": "0.31.28", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.31.28.tgz", "integrity": "sha512-/s55Jujywdw/Jpan+vsy6JZs1z2ZTGxTmbZTPiuSL2wz9mfzA2gN1zzaqmvfi4pq+uOt7Du85fkiwv5ymW84aQ==", - "license": "MIT" - }, - "node_modules/@tootallnate/once": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/@tootallnate/once/-/once-1.1.2.tgz", - "integrity": "sha512-RbzJvlNzmRq5c3O09UipeuXno4tA1FE6ikOjxZK0tuxVv3412l64l5t1W5pj4+rJq9vpkm/kwiR07aZXnsKPxw==", "license": "MIT", - "engines": { - "node": ">= 6" - } + "peer": true }, "node_modules/@tsconfig/node10": { "version": "1.0.11", @@ -2032,6 +1922,7 @@ "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.28.tgz", "integrity": "sha512-DHlH/fNL6Mho38jTy7/JT7sn2wnXI+wULR6PV4gy4VHLVvnrV/d3pHAMQHhc4gjdLmK2ZiPoMxzp6B3yRajLSQ==", "license": "MIT", + "peer": true, "dependencies": 
{ "undici-types": "~6.19.2" } @@ -2224,12 +2115,6 @@ "url": "https://opencollective.com/vitest" } }, - "node_modules/abbrev": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", - "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==", - "license": "ISC" - }, "node_modules/abstract-logging": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/abstract-logging/-/abstract-logging-2.0.1.tgz", @@ -2241,6 +2126,7 @@ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.1.tgz", "integrity": "sha512-OvQ/2pUDKmgfCg++xsTX1wGxfTaszcHVcTctW4UJB4hibJx2HXxxO5UmVgyjMa+ZDsiaf5wWLXYpRWMmBI0QHg==", "license": "MIT", + "peer": true, "bin": { "acorn": "bin/acorn" }, @@ -2270,43 +2156,6 @@ "node": ">=0.4.0" } }, - "node_modules/agent-base": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", - "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", - "license": "MIT", - "dependencies": { - "debug": "4" - }, - "engines": { - "node": ">= 6.0.0" - } - }, - "node_modules/agentkeepalive": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/agentkeepalive/-/agentkeepalive-4.6.0.tgz", - "integrity": "sha512-kja8j7PjmncONqaTsB8fQ+wE2mSU2DJ9D4XKoJ5PFWIdRMa6SLSN1ff4mOr4jCbfRSsxR4keIiySJU0N9T5hIQ==", - "license": "MIT", - "dependencies": { - "humanize-ms": "^1.2.1" - }, - "engines": { - "node": ">= 8.0.0" - } - }, - "node_modules/aggregate-error": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz", - "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==", - "license": "MIT", - "dependencies": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/ajv": { "version": "8.17.1", "resolved": 
"https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", @@ -2360,6 +2209,7 @@ "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, "license": "MIT", "engines": { "node": ">=8" @@ -2392,26 +2242,6 @@ "node": ">= 8" } }, - "node_modules/aproba": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", - "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==", - "license": "ISC" - }, - "node_modules/are-we-there-yet": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", - "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", - "deprecated": "This package is no longer supported.", - "license": "ISC", - "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/arg": { "version": "4.1.3", "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz", @@ -2536,6 +2366,7 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true, "license": "MIT" }, "node_modules/big-integer": { @@ -2570,6 +2401,7 @@ "version": "1.1.12", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "dev": true, "license": "MIT", "dependencies": { "balanced-match": "^1.0.0", @@ -2599,66 +2431,6 @@ "node": ">=8" } }, - "node_modules/cacache": { - "version": "15.3.0", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-15.3.0.tgz", - 
"integrity": "sha512-VVdYzXEn+cnbXpFgWs5hTT7OScegHVmLhJIR8Ufqk3iFD6A6j5iSX1KuBTfNEv4tdJWE2PzA6IVFtcLC7fN9wQ==", - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^1.0.0", - "@npmcli/move-file": "^1.0.1", - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "glob": "^7.1.4", - "infer-owner": "^1.0.4", - "lru-cache": "^6.0.0", - "minipass": "^3.1.1", - "minipass-collect": "^1.0.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.2", - "mkdirp": "^1.0.3", - "p-map": "^4.0.0", - "promise-inflight": "^1.0.1", - "rimraf": "^3.0.2", - "ssri": "^8.0.1", - "tar": "^6.0.2", - "unique-filename": "^1.1.1" - }, - "engines": { - "node": ">= 10" - } - }, - "node_modules/cacache/node_modules/p-map": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz", - "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==", - "license": "MIT", - "dependencies": { - "aggregate-error": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cacache/node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "deprecated": "Rimraf versions prior to v4 are no longer supported", - "license": "ISC", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/call-bind": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", @@ -2799,30 +2571,12 @@ "fsevents": "~2.3.2" } }, - "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": 
"sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "license": "ISC", - "engines": { - "node": ">=10" - } - }, "node_modules/cjs-module-lexer": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.3.tgz", "integrity": "sha512-9z8TZaGM1pfswYeXrUpzPrkx8UnWYdhJclsiYMm6x/w5+nN+8Tf/LnAgfLGQCm59qAOxU8WwHEq2vNwF6i4j+Q==", "license": "MIT" }, - "node_modules/clean-stack": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz", - "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, "node_modules/close-with-grace": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/close-with-grace/-/close-with-grace-2.2.0.tgz", @@ -2846,15 +2600,6 @@ "dev": true, "license": "MIT" }, - "node_modules/color-support": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", - "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", - "license": "ISC", - "bin": { - "color-support": "bin.js" - } - }, "node_modules/colorette": { "version": "2.0.20", "resolved": "https://registry.npmjs.org/colorette/-/colorette-2.0.20.tgz", @@ -2872,14 +2617,9 @@ "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true, "license": "MIT" }, - "node_modules/console-control-strings": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", - "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==", - "license": "ISC" - }, "node_modules/cookie": { "version": 
"0.7.2", "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", @@ -3117,12 +2857,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/delegates": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", - "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==", - "license": "MIT" - }, "node_modules/detect-libc": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", @@ -3161,12 +2895,6 @@ "integrity": "sha512-c68LpLbO+7kP/b1Hr1qs8/BJ09F5khZGTxqxZuhzxpmwJKOgRFHJWIb9/KmqnqHhLdO55aOxFH/EGBvUQbL/RQ==", "license": "MIT" }, - "node_modules/dotty": { - "version": "0.1.2", - "resolved": "https://registry.npmjs.org/dotty/-/dotty-0.1.2.tgz", - "integrity": "sha512-V0EWmKeH3DEhMwAZ+8ZB2Ao4OK6p++Z0hsDtZq3N0+0ZMVqkzrcEGROvOnZpLnvBg5PTNG23JEDLAm64gPaotQ==", - "license": "BSD-3-Clause" - }, "node_modules/dunder-proto": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", @@ -3193,18 +2921,9 @@ "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true, "license": "MIT" }, - "node_modules/encoding": { - "version": "0.1.13", - "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz", - "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==", - "license": "MIT", - "optional": true, - "dependencies": { - "iconv-lite": "^0.6.2" - } - }, "node_modules/end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", @@ -3215,21 +2934,6 @@ "once": "^1.4.0" } }, - "node_modules/env-paths": { - "version": "2.2.1", - "resolved": 
"https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz", - "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==", - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/err-code": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", - "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", - "license": "MIT" - }, "node_modules/error-ex": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", @@ -3571,6 +3275,7 @@ } ], "license": "MIT", + "peer": true, "dependencies": { "@fastify/ajv-compiler": "^3.5.0", "@fastify/error": "^3.4.0", @@ -3770,24 +3475,6 @@ "integrity": "sha512-alTFZZQDKMporBH77856pXgzhEzaUVmLCDk+egLgIgHst3Tpndzz8MnKe+GzRJRfvVdn69HhpW7cmXzvtLvJAw==", "license": "MIT" }, - "node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", - "license": "ISC" - }, "node_modules/fsevents": { "version": "2.3.3", "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", @@ -3843,27 +3530,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/gauge": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", - "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", - "deprecated": "This package is 
no longer supported.", - "license": "ISC", - "dependencies": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.2", - "console-control-strings": "^1.0.0", - "has-unicode": "^2.0.1", - "object-assign": "^4.1.1", - "signal-exit": "^3.0.0", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.2" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/get-intrinsic": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", @@ -3933,27 +3599,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", @@ -4021,6 +3666,7 @@ "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true, "license": "ISC" }, "node_modules/has-bigints": { @@ -4104,12 +3750,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/has-unicode": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", - "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==", - "license": "ISC" - }, "node_modules/hasown": { "version": "2.0.2", "resolved": 
"https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", @@ -4143,61 +3783,6 @@ "dev": true, "license": "MIT" }, - "node_modules/http-cache-semantics": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", - "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", - "license": "BSD-2-Clause" - }, - "node_modules/http-proxy-agent": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-4.0.1.tgz", - "integrity": "sha512-k0zdNgqWTGA6aeIRVpvfVob4fL52dTfaehylg0Y4UvSySvOq/Y+BOyPrgpUrA7HylqvU8vIZGsRuXmspskV0Tg==", - "license": "MIT", - "dependencies": { - "@tootallnate/once": "1", - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/https-proxy-agent": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", - "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", - "license": "MIT", - "dependencies": { - "agent-base": "6", - "debug": "4" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/humanize-ms": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/humanize-ms/-/humanize-ms-1.2.1.tgz", - "integrity": "sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==", - "license": "MIT", - "dependencies": { - "ms": "^2.0.0" - } - }, - "node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "license": "MIT", - "optional": true, - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/ignore": { "version": "5.3.2", "resolved": 
"https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", @@ -4227,79 +3812,25 @@ "module-details-from-path": "^1.0.3" } }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", - "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "node_modules/internal-slot": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", + "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", + "dev": true, "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "hasown": "^2.0.2", + "side-channel": "^1.1.0" + }, "engines": { - "node": ">=0.8.19" + "node": ">= 0.4" } }, - "node_modules/indent-string": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz", - "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==", - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/infer-owner": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/infer-owner/-/infer-owner-1.0.4.tgz", - "integrity": "sha512-IClj+Xz94+d7irH5qRyfJonOdfTzuDaifE6ZPWfx0N0+/ATZCbuTPq2prFl526urkQd90WyUKIh1DfBQ2hMz9A==", - "license": "ISC" - }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", - "license": "ISC", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "license": "ISC" - }, - "node_modules/internal-slot": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz", - "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "hasown": "^2.0.2", - "side-channel": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/ip-address": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz", - "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==", - "license": "MIT", - "dependencies": { - "jsbn": "1.1.0", - "sprintf-js": "^1.1.3" - }, - "engines": { - "node": ">= 12" - } - }, - "node_modules/ipaddr.js": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", - "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "node_modules/ipaddr.js": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", + "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", "license": "MIT", "engines": { "node": ">= 0.10" @@ -4489,6 +4020,7 @@ "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, "license": "MIT", "engines": { "node": ">=8" @@ -4526,12 +4058,6 @@ "node": ">=0.10.0" } }, - "node_modules/is-lambda": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz", - "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==", - "license": "MIT" - }, "node_modules/is-map": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz", @@ -4728,6 +4254,7 @@ "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true, "license": "ISC" }, "node_modules/istanbul-lib-coverage": { @@ -4810,12 +4337,6 @@ "node": ">=10" } }, - "node_modules/jsbn": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz", - "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==", - "license": "MIT" - }, "node_modules/json-parse-better-errors": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz", @@ -4878,15 +4399,12 @@ } }, "node_modules/libpg-query": { - "version": "13.3.2", - "resolved": "https://registry.npmjs.org/libpg-query/-/libpg-query-13.3.2.tgz", - "integrity": "sha512-6ft2qyk+LO1hdmPU389RvN7inRGLU0T8Ge4RG+q4usE+dAA4nl+WVp4HVpBC+1Ku4lgxM38PkoW7OzAw8VDebA==", - "hasInstallScript": true, + "version": "17.6.0", + "resolved": "https://registry.npmjs.org/libpg-query/-/libpg-query-17.6.0.tgz", + "integrity": "sha512-r4zOTcLTGYS5PlLQAicJ6Yi/tvZFag42YUuNEO8pi8bwt/ZZ4kj514J4QV5bOx0mZzPLF6agbfNXQVxGgmHR8g==", "license": "LICENSE IN LICENSE", "dependencies": { - "@mapbox/node-pre-gyp": "^1.0.8", - "node-addon-api": "^1.6.3", - "node-gyp": 
"^8.0.0" + "@pgsql/types": "^17.6.0" } }, "node_modules/light-my-request": { @@ -4923,18 +4441,6 @@ "dev": true, "license": "MIT" }, - "node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/magic-string": { "version": "0.30.17", "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.17.tgz", @@ -4980,33 +4486,6 @@ "dev": true, "license": "ISC" }, - "node_modules/make-fetch-happen": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-9.1.0.tgz", - "integrity": "sha512-+zopwDy7DNknmwPQplem5lAZX/eCOzSvSNNcSKm5eVwTkOBzoktEfXsa9L23J/GIRhxRsaxzkPEhrJEpE2F4Gg==", - "license": "ISC", - "dependencies": { - "agentkeepalive": "^4.1.3", - "cacache": "^15.2.0", - "http-cache-semantics": "^4.1.0", - "http-proxy-agent": "^4.0.1", - "https-proxy-agent": "^5.0.0", - "is-lambda": "^1.0.1", - "lru-cache": "^6.0.0", - "minipass": "^3.1.3", - "minipass-collect": "^1.0.2", - "minipass-fetch": "^1.3.2", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.2", - "promise-retry": "^2.0.1", - "socks-proxy-agent": "^6.0.0", - "ssri": "^8.0.0" - }, - "engines": { - "node": ">= 10" - } - }, "node_modules/math-intrinsics": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", @@ -5067,6 +4546,7 @@ "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, "license": "ISC", "dependencies": { "brace-expansion": "^1.1.7" @@ -5079,113 +4559,12 @@ "version": "1.2.8", "resolved": 
"https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-collect": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-1.0.2.tgz", - "integrity": "sha512-6T6lH0H8OG9kITm/Jm6tdooIbogG9e0tLgpY6mphXSm/A9u8Nq1ryBG+Qspiub9LjWlBPsPS3tWQ/Botq4FdxA==", - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minipass-fetch": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-1.4.1.tgz", - "integrity": "sha512-CGH1eblLq26Y15+Azk7ey4xh0J/XfJfrCox5LDJiKqI2Q2iwOLOKrlmIaODiSQS8d18jalF6y2K2ePUm0CmShw==", - "license": "MIT", - "dependencies": { - "minipass": "^3.1.0", - "minipass-sized": "^1.0.3", - "minizlib": "^2.0.0" - }, - "engines": { - "node": ">=8" - }, - "optionalDependencies": { - "encoding": "^0.1.12" - } - }, - "node_modules/minipass-flush": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz", - "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==", - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minipass-pipeline": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz", - "integrity": 
"sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==", - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-sized": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", - "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "license": "MIT", - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "license": "MIT", - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/mnemonist": { "version": "0.39.6", "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.39.6.tgz", @@ -5254,15 +4633,6 @@ "url": "https://nearley.js.org/#give-to-nearley" } }, - "node_modules/negotiator": { - "version": "0.6.4", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz", - "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==", - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, "node_modules/nested-error-stacks": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/nested-error-stacks/-/nested-error-stacks-2.1.1.tgz", @@ -5289,137 +4659,6 @@ "node": ">=10" } }, - 
"node_modules/node-addon-api": { - "version": "1.7.2", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-1.7.2.tgz", - "integrity": "sha512-ibPK3iA+vaY1eEjESkQkM0BbCqFOaZMiXRTtdB0u7b4djtY6JnsjvPdUHVMg6xQt3B8fpTTWHI9A+ADjM9frzg==", - "license": "MIT" - }, - "node_modules/node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "license": "MIT", - "dependencies": { - "whatwg-url": "^5.0.0" - }, - "engines": { - "node": "4.x || >=6.0.0" - }, - "peerDependencies": { - "encoding": "^0.1.0" - }, - "peerDependenciesMeta": { - "encoding": { - "optional": true - } - } - }, - "node_modules/node-gyp": { - "version": "8.4.1", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-8.4.1.tgz", - "integrity": "sha512-olTJRgUtAb/hOXG0E93wZDs5YiJlgbXxTwQAFHyNlRsXQnYzUaF2aGgujZbw+hR8aF4ZG/rST57bWMWD16jr9w==", - "license": "MIT", - "dependencies": { - "env-paths": "^2.2.0", - "glob": "^7.1.4", - "graceful-fs": "^4.2.6", - "make-fetch-happen": "^9.1.0", - "nopt": "^5.0.0", - "npmlog": "^6.0.0", - "rimraf": "^3.0.2", - "semver": "^7.3.5", - "tar": "^6.1.2", - "which": "^2.0.2" - }, - "bin": { - "node-gyp": "bin/node-gyp.js" - }, - "engines": { - "node": ">= 10.12.0" - } - }, - "node_modules/node-gyp/node_modules/are-we-there-yet": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-3.0.1.tgz", - "integrity": "sha512-QZW4EDmGwlYur0Yyf/b2uGucHQMa8aFUP7eu9ddR73vvhFyt4V0Vl3QHPcTNJ8l6qYOBdxgXdnBXQrHilfRQBg==", - "deprecated": "This package is no longer supported.", - "license": "ISC", - "dependencies": { - "delegates": "^1.0.0", - "readable-stream": "^3.6.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/node-gyp/node_modules/gauge": { - "version": "4.0.4", - "resolved": 
"https://registry.npmjs.org/gauge/-/gauge-4.0.4.tgz", - "integrity": "sha512-f9m+BEN5jkg6a0fZjleidjN51VE1X+mPFQ2DJ0uv1V39oCLCbsGe6yjbBnp7eK7z/+GAon99a3nHuqbuuthyPg==", - "deprecated": "This package is no longer supported.", - "license": "ISC", - "dependencies": { - "aproba": "^1.0.3 || ^2.0.0", - "color-support": "^1.1.3", - "console-control-strings": "^1.1.0", - "has-unicode": "^2.0.1", - "signal-exit": "^3.0.7", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wide-align": "^1.1.5" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/node-gyp/node_modules/npmlog": { - "version": "6.0.2", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-6.0.2.tgz", - "integrity": "sha512-/vBvz5Jfr9dT/aFWd0FIRf+T/Q2WBsLENygUaFUqstqsycmZAP/t5BvFJTK0viFmSUxiUKTUplWy5vt+rvKIxg==", - "deprecated": "This package is no longer supported.", - "license": "ISC", - "dependencies": { - "are-we-there-yet": "^3.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^4.0.3", - "set-blocking": "^2.0.0" - }, - "engines": { - "node": "^12.13.0 || ^14.15.0 || >=16.0.0" - } - }, - "node_modules/node-gyp/node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "deprecated": "Rimraf versions prior to v4 are no longer supported", - "license": "ISC", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/node-gyp/node_modules/which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - 
"node": ">= 8" - } - }, "node_modules/node-sql-parser": { "version": "4.18.0", "resolved": "https://registry.npmjs.org/node-sql-parser/-/node-sql-parser-4.18.0.tgz", @@ -5484,21 +4723,6 @@ "node": ">=4" } }, - "node_modules/nopt": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", - "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", - "license": "ISC", - "dependencies": { - "abbrev": "1" - }, - "bin": { - "nopt": "bin/nopt.js" - }, - "engines": { - "node": ">=6" - } - }, "node_modules/normalize-package-data": { "version": "2.5.0", "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz", @@ -5558,28 +4782,6 @@ "node": ">= 4" } }, - "node_modules/npmlog": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", - "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", - "deprecated": "This package is no longer supported.", - "license": "ISC", - "dependencies": { - "are-we-there-yet": "^2.0.0", - "console-control-strings": "^1.1.0", - "gauge": "^3.0.0", - "set-blocking": "^2.0.0" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", - "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/object-inspect": { "version": "1.13.4", "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", @@ -5649,6 +4851,7 @@ "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "dev": true, "license": "ISC", "dependencies": { "wrappy": "1" @@ -5832,15 +5035,6 
@@ "node": ">=4" } }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, "node_modules/path-key": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", @@ -6019,7 +5213,6 @@ "resolved": "https://registry.npmjs.org/pg/-/pg-8.14.1.tgz", "integrity": "sha512-0TdbqfjwIun9Fm/r89oB7RFQ0bLgduAhiIqIXOsyKoiC/L54DbuAAzIEN/9Op0f1Po9X7iCPXGoa/Ah+2aI8Xw==", "license": "MIT", - "peer": true, "dependencies": { "pg-connection-string": "^2.7.0", "pg-pool": "^3.8.0", @@ -6071,8 +5264,7 @@ "version": "1.8.0", "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.8.0.tgz", "integrity": "sha512-jvuYlEkL03NRvOoyoRktBK7+qU5kOvlAwvmrH8sr3wbLrOdVWsRxQfz8mMy9sZFsqJ1hEWNfdWKI4SAmoL+j7g==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/pg/node_modules/postgres-array": { "version": "2.0.0", @@ -6123,35 +5315,23 @@ } }, "node_modules/pgsql-deparser": { - "version": "13.15.0", - "resolved": "https://registry.npmjs.org/pgsql-deparser/-/pgsql-deparser-13.15.0.tgz", - "integrity": "sha512-6d4YeDE/y+AZ/C4tlzTrFwbOqDW4ma/jvYlXRgXYVdPU2WF5IQISksIQ8uhNMXW7QxL/4gw0bzLhRNwckf3t/Q==", + "version": "17.11.1", + "resolved": "https://registry.npmjs.org/pgsql-deparser/-/pgsql-deparser-17.11.1.tgz", + "integrity": "sha512-BGKgwC4qs+FPcG8Ai989LO6i4E8KF5HEvlTnI8uhS4qUyu6P1xCyP9pJDky95ZL8DolaGUDFAJtxteDBw33OCg==", "license": "SEE LICENSE IN LICENSE", "dependencies": { - "@pgsql/types": "^13.9.0", - "dotty": "^0.1.0", - "pgsql-enums": "^13.10.0" + "@pgsql/types": "^17.6.1" } }, - "node_modules/pgsql-enums": { - "version": "13.10.0", - "resolved": "https://registry.npmjs.org/pgsql-enums/-/pgsql-enums-13.10.0.tgz", - "integrity": 
"sha512-L0vO9RwwPENvB07YlIVTnRu3JMnmjHQhxWR2NQbHOUPIpfF6khhfv+OC51By2ATts3jfZRSi8TLjNf9O6rP9iA==", - "license": "SEE LICENSE IN LICENSE" - }, "node_modules/pgsql-parser": { - "version": "13.16.0", - "resolved": "https://registry.npmjs.org/pgsql-parser/-/pgsql-parser-13.16.0.tgz", - "integrity": "sha512-LdHFWjotgN7y2rEAb2K/LeLZrMJvpLy0Qe+1+8ZByf5C2pmKTo98VXiVfGpxC6vkfWgP9VsT4vYQ4ZlQexHcHw==", - "license": "SEE LICENSE IN LICENSE", + "version": "17.8.2", + "resolved": "https://registry.npmjs.org/pgsql-parser/-/pgsql-parser-17.8.2.tgz", + "integrity": "sha512-/uHZL7mq3Bj23v/nDShb8gN8LwUKdejFii6IFBxRYRXxWlRrbsdky1KtevIxiVGasWZfI+E5t1//Wq+D3cVmAg==", + "license": "MIT", "dependencies": { - "libpg-query": "13.3.2", - "minimist": "^1.2.6", - "pgsql-deparser": "^13.15.0", - "pgsql-enums": "^13.10.0" - }, - "bin": { - "pgsql-parser": "main/cli.js" + "@pgsql/types": "17.6.1", + "libpg-query": "17.6.0", + "pgsql-deparser": "17.11.1" } }, "node_modules/picocolors": { @@ -6381,6 +5561,7 @@ "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", "integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", "license": "MIT", + "peer": true, "bin": { "prettier": "bin/prettier.cjs" }, @@ -6430,25 +5611,6 @@ "node": ">=10" } }, - "node_modules/promise-inflight": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz", - "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==", - "license": "ISC" - }, - "node_modules/promise-retry": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", - "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", - "license": "MIT", - "dependencies": { - "err-code": "^2.0.2", - "retry": "^0.12.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/proxy-addr": { 
"version": "2.0.7", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", @@ -6572,20 +5734,6 @@ "node": ">=4" } }, - "node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "license": "MIT", - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, "node_modules/readdirp": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", @@ -6704,15 +5852,6 @@ "node": ">=10" } }, - "node_modules/retry": { - "version": "0.12.0", - "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", - "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, "node_modules/reusify": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", @@ -6900,26 +6039,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, "node_modules/safe-push-apply": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz", @@ -6973,13 +6092,6 @@ "node": ">=10" } }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "resolved": 
"https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", - "license": "MIT", - "optional": true - }, "node_modules/secure-json-parse": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/secure-json-parse/-/secure-json-parse-2.7.0.tgz", @@ -6998,12 +6110,6 @@ "node": ">=10" } }, - "node_modules/set-blocking": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", - "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==", - "license": "ISC" - }, "node_modules/set-cookie-parser": { "version": "2.7.1", "resolved": "https://registry.npmjs.org/set-cookie-parser/-/set-cookie-parser-2.7.1.tgz", @@ -7184,12 +6290,6 @@ "dev": true, "license": "ISC" }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "license": "ISC" - }, "node_modules/simple-update-notifier": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/simple-update-notifier/-/simple-update-notifier-2.0.0.tgz", @@ -7216,44 +6316,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/smart-buffer": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz", - "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==", - "license": "MIT", - "engines": { - "node": ">= 6.0.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/socks": { - "version": "2.8.4", - "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.4.tgz", - "integrity": "sha512-D3YaD0aRxR3mEcqnidIs7ReYJFVzWdd6fXJYUM8ixcQcJRGTka/b3saV0KflYhyVJXKhb947GndU35SxYNResQ==", - "license": "MIT", - 
"dependencies": { - "ip-address": "^9.0.5", - "smart-buffer": "^4.2.0" - }, - "engines": { - "node": ">= 10.0.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/socks-proxy-agent": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-6.2.1.tgz", - "integrity": "sha512-a6KW9G+6B3nWZ1yB8G7pJwL3ggLy1uTzKAgCb7ttblwqdz9fMGJUuTy3uFzEP48FAs9FLILlmzDlE2JJhVQaXQ==", - "license": "MIT", - "dependencies": { - "agent-base": "^6.0.2", - "debug": "^4.3.3", - "socks": "^2.6.2" - }, - "engines": { - "node": ">= 10" - } - }, "node_modules/sonic-boom": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/sonic-boom/-/sonic-boom-4.2.0.tgz", @@ -7318,12 +6380,6 @@ "node": ">= 10.x" } }, - "node_modules/sprintf-js": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", - "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", - "license": "BSD-3-Clause" - }, "node_modules/sql-formatter": { "version": "14.0.0", "resolved": "https://registry.npmjs.org/sql-formatter/-/sql-formatter-14.0.0.tgz", @@ -7338,18 +6394,6 @@ "sql-formatter": "bin/sql-formatter-cli.cjs" } }, - "node_modules/ssri": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-8.0.1.tgz", - "integrity": "sha512-97qShzy1AiyxvPNIkLWoGua7xoQzzPjQ0HAH4B0rWKo7SZ6USuPcrUiAFrws0UH8RrbWmgq3LMTObhPIHbbBeQ==", - "license": "ISC", - "dependencies": { - "minipass": "^3.1.1" - }, - "engines": { - "node": ">= 8" - } - }, "node_modules/stackback": { "version": "0.0.2", "resolved": "https://registry.npmjs.org/stackback/-/stackback-0.0.2.tgz", @@ -7364,19 +6408,11 @@ "dev": true, "license": "MIT" }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - 
"license": "MIT", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, "node_modules/string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, "license": "MIT", "dependencies": { "emoji-regex": "^8.0.0", @@ -7485,6 +6521,7 @@ "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, "license": "MIT", "dependencies": { "ansi-regex": "^5.0.1" @@ -7555,32 +6592,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "license": "ISC", - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/tar/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "license": "ISC", - "engines": { - "node": ">=8" - } - }, "node_modules/tdigest": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.2.tgz", @@ -7763,6 +6774,7 @@ "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -7832,12 +6844,6 @@ "nodetouch": "bin/nodetouch.js" } }, - "node_modules/tr46": { - "version": "0.0.3", - "resolved": 
"https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==", - "license": "MIT" - }, "node_modules/ts-node": { "version": "10.9.2", "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz", @@ -7972,6 +6978,7 @@ "integrity": "sha512-aJn6wq13/afZp/jT9QZmwEjDqqvSGp1VT5GVg+f/t6/oVyrgXM6BY1h9BRh/O5p3PlUPAe+WuiEZOmb/49RqoQ==", "dev": true, "license": "Apache-2.0", + "peer": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -8012,24 +7019,6 @@ "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", "license": "MIT" }, - "node_modules/unique-filename": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-1.1.1.tgz", - "integrity": "sha512-Vmp0jIp2ln35UTXuryvjzkjGdRyf9b2lTXuSYUiPmzRcl3FDtYqAwOnTJkAngD9SWhnoJzDbTKwaOrZ+STtxNQ==", - "license": "ISC", - "dependencies": { - "unique-slug": "^2.0.0" - } - }, - "node_modules/unique-slug": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-2.0.2.tgz", - "integrity": "sha512-zoWr9ObaxALD3DOPfjPSqxt4fnZiWblxHIgeWqW8x7UqDzEtHEQLzji2cuJYQFCU6KmoJikOYAZlrTHHebjx2w==", - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4" - } - }, "node_modules/uri-js": { "version": "4.4.1", "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", @@ -8039,12 +7028,6 @@ "punycode": "^2.1.0" } }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "license": "MIT" - }, "node_modules/v8-compile-cache-lib": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", @@ -8069,6 +7052,7 @@ "integrity": 
"sha512-+Oxm7q9hDoLMyJOYfUYBuHQo+dkAloi33apOPP56pzj+vsdJDzr+j1NISE5pyaAuKL4A3UD34qd0lx5+kfKp2g==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "esbuild": "^0.25.0", "fdir": "^6.4.4", @@ -8182,6 +7166,7 @@ "integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==", "dev": true, "license": "MIT", + "peer": true, "engines": { "node": ">=12" }, @@ -8195,6 +7180,7 @@ "integrity": "sha512-BbcFDqNyBlfSpATmTtXOAOj71RNKDDvjBM/uPfnxxVGrG+FSH2RQIwgeEngTaTkuU/h0ScFvf+tRcKfYXzBybQ==", "dev": true, "license": "MIT", + "peer": true, "dependencies": { "@vitest/expect": "3.0.9", "@vitest/mocker": "3.0.9", @@ -8259,22 +7245,6 @@ } } }, - "node_modules/webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==", - "license": "BSD-2-Clause" - }, - "node_modules/whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "license": "MIT", - "dependencies": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, "node_modules/which": { "version": "1.3.1", "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", @@ -8394,15 +7364,6 @@ "node": ">=8" } }, - "node_modules/wide-align": { - "version": "1.1.5", - "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", - "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", - "license": "ISC", - "dependencies": { - "string-width": "^1.0.2 || 2 || 3 || 4" - } - }, "node_modules/wrap-ansi": { "version": "8.1.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", @@ -8547,6 +7508,7 @@ "version": "1.0.2", 
"resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", + "dev": true, "license": "ISC" }, "node_modules/xtend": { @@ -8558,12 +7520,6 @@ "node": ">=0.4" } }, - "node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "license": "ISC" - }, "node_modules/yaml": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.7.0.tgz", diff --git a/package.json b/package.json index e903e455..b00d0ae6 100644 --- a/package.json +++ b/package.json @@ -53,7 +53,7 @@ "pg-connection-string": "^2.7.0", "pg-format": "^1.0.4", "pg-protocol": "npm:@supabase/pg-protocol@0.0.2", - "pgsql-parser": "^13.16.0", + "pgsql-parser": "^17.8.2", "pino": "^9.5.0", "postgres-array": "^3.0.1", "prettier": "^3.3.3", diff --git a/src/lib/Parser.ts b/src/lib/Parser.ts index abfc269d..ae1741ff 100644 --- a/src/lib/Parser.ts +++ b/src/lib/Parser.ts @@ -31,9 +31,9 @@ interface ParseReturnValues { /** * Deparses an AST into SQL string. 
*/ -export function Deparse(parsedSql: object): DeparseReturnValues { +export async function Deparse(parsedSql: object): Promise { try { - const data = deparse(parsedSql, {}) + const data = await deparse(parsedSql, {}) return { data, error: null } } catch (error) { return { data: null, error: error as Error } diff --git a/src/server/routes/query.ts b/src/server/routes/query.ts index 2cc6ad94..c6bea0c6 100644 --- a/src/server/routes/query.ts +++ b/src/server/routes/query.ts @@ -74,7 +74,7 @@ export default async (fastify: FastifyInstance) => { Headers: { pg: string; 'x-pg-application-name'?: string } Body: { ast: object } }>('/deparse', async (request, reply) => { - const { data, error } = Parser.Deparse(request.body.ast) + const { data, error } = await Parser.Deparse(request.body.ast) if (error) { request.log.error({ error, request: extractRequestForLogging(request) }) diff --git a/test/server/query.ts b/test/server/query.ts index 2cd86f52..9d6c0e1b 100644 --- a/test/server/query.ts +++ b/test/server/query.ts @@ -56,9 +56,9 @@ test('parser select statements', async () => { payload: { query: 'SELECT id, name FROM users where user_id = 1234' }, }) expect(res.json()).toMatchInlineSnapshot(` - [ - { - "RawStmt": { + { + "stmts": [ + { "stmt": { "SelectStmt": { "fromClause": [ @@ -82,7 +82,7 @@ test('parser select statements', async () => { "fields": [ { "String": { - "str": "id", + "sval": "id", }, }, ], @@ -99,7 +99,7 @@ test('parser select statements', async () => { "fields": [ { "String": { - "str": "name", + "sval": "name", }, }, ], @@ -117,7 +117,7 @@ test('parser select statements', async () => { "fields": [ { "String": { - "str": "user_id", + "sval": "user_id", }, }, ], @@ -128,28 +128,26 @@ test('parser select statements', async () => { "name": [ { "String": { - "str": "=", + "sval": "=", }, }, ], "rexpr": { "A_Const": { - "location": 43, - "val": { - "Integer": { - "ival": 1234, - }, + "ival": { + "ival": 1234, }, + "location": 43, }, }, }, }, }, }, - 
"stmt_location": 0, }, - }, - ] + ], + "version": 170004, + } `) }) @@ -163,7 +161,12 @@ test('parser comments', async () => { `, }, }) - expect(res.json()).toMatchInlineSnapshot(`[]`) + expect(res.json()).toMatchInlineSnapshot(` + { + "stmts": [], + "version": 170004, + } + `) }) test('parser create schema', async () => { @@ -177,9 +180,9 @@ create schema if not exists test_schema; }, }) expect(res.json()).toMatchInlineSnapshot(` - [ - { - "RawStmt": { + { + "stmts": [ + { "stmt": { "CreateSchemaStmt": { "if_not_exists": true, @@ -187,10 +190,10 @@ create schema if not exists test_schema; }, }, "stmt_len": 40, - "stmt_location": 0, }, - }, - ] + ], + "version": 170004, + } `) }) @@ -211,9 +214,9 @@ CREATE TABLE table_name ( }, }) expect(res.json()).toMatchInlineSnapshot(` - [ - { - "RawStmt": { + { + "stmts": [ + { "stmt": { "CreateStmt": { "oncommit": "ONCOMMIT_NOOP", @@ -249,12 +252,12 @@ CREATE TABLE table_name ( "names": [ { "String": { - "str": "pg_catalog", + "sval": "pg_catalog", }, }, { "String": { - "str": "int8", + "sval": "int8", }, }, ], @@ -278,10 +281,8 @@ CREATE TABLE table_name ( "arg": { "A_Const": { "location": 141, - "val": { - "String": { - "str": "utc", - }, + "sval": { + "sval": "utc", }, }, }, @@ -291,7 +292,7 @@ CREATE TABLE table_name ( "names": [ { "String": { - "str": "text", + "sval": "text", }, }, ], @@ -301,10 +302,11 @@ CREATE TABLE table_name ( }, { "FuncCall": { + "funcformat": "COERCE_EXPLICIT_CALL", "funcname": [ { "String": { - "str": "now", + "sval": "now", }, }, ], @@ -312,10 +314,11 @@ CREATE TABLE table_name ( }, }, ], + "funcformat": "COERCE_EXPLICIT_CALL", "funcname": [ { "String": { - "str": "timezone", + "sval": "timezone", }, }, ], @@ -338,12 +341,12 @@ CREATE TABLE table_name ( "names": [ { "String": { - "str": "pg_catalog", + "sval": "pg_catalog", }, }, { "String": { - "str": "timestamptz", + "sval": "timestamptz", }, }, ], @@ -367,10 +370,8 @@ CREATE TABLE table_name ( "arg": { "A_Const": { "location": 226, - "val": 
{ - "String": { - "str": "utc", - }, + "sval": { + "sval": "utc", }, }, }, @@ -380,7 +381,7 @@ CREATE TABLE table_name ( "names": [ { "String": { - "str": "text", + "sval": "text", }, }, ], @@ -390,10 +391,11 @@ CREATE TABLE table_name ( }, { "FuncCall": { + "funcformat": "COERCE_EXPLICIT_CALL", "funcname": [ { "String": { - "str": "now", + "sval": "now", }, }, ], @@ -401,10 +403,11 @@ CREATE TABLE table_name ( }, }, ], + "funcformat": "COERCE_EXPLICIT_CALL", "funcname": [ { "String": { - "str": "timezone", + "sval": "timezone", }, }, ], @@ -427,12 +430,12 @@ CREATE TABLE table_name ( "names": [ { "String": { - "str": "pg_catalog", + "sval": "pg_catalog", }, }, { "String": { - "str": "timestamptz", + "sval": "timestamptz", }, }, ], @@ -450,7 +453,7 @@ CREATE TABLE table_name ( "names": [ { "String": { - "str": "jsonb", + "sval": "jsonb", }, }, ], @@ -468,7 +471,7 @@ CREATE TABLE table_name ( "names": [ { "String": { - "str": "text", + "sval": "text", }, }, ], @@ -480,10 +483,10 @@ CREATE TABLE table_name ( }, }, "stmt_len": 283, - "stmt_location": 0, }, - }, - ] + ], + "version": 170004, + } `) const deparse = await app.inject({ @@ -493,11 +496,11 @@ CREATE TABLE table_name ( }) expect(deparse.body).toMatchInlineSnapshot(` "CREATE TABLE table_name ( - id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - inserted_at pg_catalog.timestamptz DEFAULT ( timezone('utc'::text, now()) ) NOT NULL, - updated_at pg_catalog.timestamptz DEFAULT ( timezone('utc'::text, now()) ) NOT NULL, - data jsonb, - name text + id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + inserted_at timestamp with time zone DEFAULT timezone('utc'::text, now()) NOT NULL, + updated_at timestamp with time zone DEFAULT timezone('utc'::text, now()) NOT NULL, + data jsonb, + name text );" `) }) From 5dfd66ebcdf90a3a25372085113737a1043a0b51 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Wed, 12 Nov 2025 15:58:50 -0300 Subject: [PATCH 47/72] feat: add python generator --- 
src/server/routes/generators/python.ts | 35 +++ src/server/routes/index.ts | 2 + src/server/templates/python.ts | 364 +++++++++++++++++++++++++ 3 files changed, 401 insertions(+) create mode 100644 src/server/routes/generators/python.ts create mode 100644 src/server/templates/python.ts diff --git a/src/server/routes/generators/python.ts b/src/server/routes/generators/python.ts new file mode 100644 index 00000000..51385373 --- /dev/null +++ b/src/server/routes/generators/python.ts @@ -0,0 +1,35 @@ +import type { FastifyInstance } from 'fastify' +import { PostgresMeta } from '../../../lib/index.js' +import { DEFAULT_POOL_CONFIG } from '../../constants.js' +import { extractRequestForLogging } from '../../utils.js' +import { apply as applyPyTemplate } from '../../templates/python.js' +import { getGeneratorMetadata } from '../../../lib/generators.js' + +export default async (fastify: FastifyInstance) => { + fastify.get<{ + Headers: { pg: string } + Querystring: { + excluded_schemas?: string + included_schemas?: string + } + }>('/', async (request, reply) => { + const connectionString = request.headers.pg + const excludedSchemas = + request.query.excluded_schemas?.split(',').map((schema) => schema.trim()) ?? [] + const includedSchemas = + request.query.included_schemas?.split(',').map((schema) => schema.trim()) ?? 
[] + + const pgMeta: PostgresMeta = new PostgresMeta({ ...DEFAULT_POOL_CONFIG, connectionString }) + const { data: generatorMeta, error: generatorMetaError } = await getGeneratorMetadata(pgMeta, { + includedSchemas, + excludedSchemas, + }) + if (generatorMetaError) { + request.log.error({ error: generatorMetaError, request: extractRequestForLogging(request) }) + reply.code(500) + return { error: generatorMetaError.message } + } + + return applyPyTemplate(generatorMeta) + }) +} diff --git a/src/server/routes/index.ts b/src/server/routes/index.ts index 1532c4ea..46ffba0f 100644 --- a/src/server/routes/index.ts +++ b/src/server/routes/index.ts @@ -21,6 +21,7 @@ import ViewsRoute from './views.js' import TypeScriptTypeGenRoute from './generators/typescript.js' import GoTypeGenRoute from './generators/go.js' import SwiftTypeGenRoute from './generators/swift.js' +import PythonTypeGenRoute from './generators/python.js' import { PG_CONNECTION, CRYPTO_KEY } from '../constants.js' export default async (fastify: FastifyInstance) => { @@ -82,4 +83,5 @@ export default async (fastify: FastifyInstance) => { fastify.register(TypeScriptTypeGenRoute, { prefix: '/generators/typescript' }) fastify.register(GoTypeGenRoute, { prefix: '/generators/go' }) fastify.register(SwiftTypeGenRoute, { prefix: '/generators/swift' }) + fastify.register(PythonTypeGenRoute, { prefix: '/generators/python' }) } diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts new file mode 100644 index 00000000..7a782bac --- /dev/null +++ b/src/server/templates/python.ts @@ -0,0 +1,364 @@ +import type { + PostgresColumn, + PostgresMaterializedView, + PostgresSchema, + PostgresTable, + PostgresType, + PostgresView, +} from '../../lib/index.js' +import type { GeneratorMetadata } from '../../lib/generators.js' +import { console } from 'inspector/promises'; + +type Operation = 'Select' | 'Insert' | 'Update' + +interface Serializable { + serialize(): string +} + +class PythonContext { + types: { 
[k: string]: PostgresType }; + user_enums: { [k: string]: PythonEnum }; + columns: Record; + schemas: { [k: string]: PostgresSchema }; + + constructor(types: PostgresType[], columns: PostgresColumn[], schemas: PostgresSchema[]) { + this.schemas = Object.fromEntries(schemas.map((schema) => [schema.name, schema])); + this.types = Object.fromEntries(types.map((type) => [type.name, type])); + this.columns = columns + .sort(({ name: a }, { name: b }) => a.localeCompare(b)) + .reduce( + (acc, curr) => { + acc[curr.table_id] ??= [] + acc[curr.table_id].push(curr) + return acc + }, + {} as Record + ); + this.user_enums = Object.fromEntries(types + .filter((type) => type.enums.length > 0) + .map((type) => [type.name, new PythonEnum(type)])); + } + + resolveTypeName(name: string) : string { + if (name in this.user_enums) { + return this.user_enums[name].name; + } + if (name in PY_TYPE_MAP) { + return PY_TYPE_MAP[name] + } + if (name in this.types) { + return formatForPyClassName(this.types[name].name); + } + throw new TypeError(`Unknown row type: ${name}`); + } + + parsePgType(pg_type: string) : PythonType { + if (pg_type.endsWith('[]')) { + const inner_str = pg_type.slice(0, -2); + const inner = this.parsePgType(inner_str); + return new PythonListType(inner); + } else { + const type_name = this.resolveTypeName(pg_type); + return new PythonSimpleType(type_name); + } + } + + tableToClass(table: PostgresTable) : PythonClass { + const attributes: PythonClassAttribute[] = (this.columns[table.id] ?? 
[]) + .map((col) => { + const type = new PythonConcreteType(this, col.format, col.is_nullable); + return new PythonClassAttribute(col.name, type); + }); + return new PythonClass(table.name, this.schemas[table.schema], attributes) + } + + typeToClass(type: PostgresType) : PythonClass { + const types = Object.values(this.types); + const attributes = type.attributes.map((attribute) => { + const type = types.find((type) => type.id === attribute.type_id) + return { + ...attribute, + type, + } + }); + const attributeEntries: PythonClassAttribute[] = attributes + .map((attribute) => { + const type = new PythonConcreteType(this, attribute.type!.format, false); + return new PythonClassAttribute(attribute.name, type); + }); + const schema = this.schemas[type.schema]; + return new PythonClass(type.name, schema, attributeEntries); + } +} + + +class PythonEnum implements Serializable { + name: string; + variants: string[]; + constructor(type: PostgresType) { + this.name = formatForPyClassName(type.name); + this.variants = type.enums.map(formatForPyAttributeName); + } + serialize(): string { + const variants = this.variants.map((item) => `"${item}"`).join(', '); + return `${this.name}: TypeAlias = Literal[${variants}]`; + } +} + +type PythonType = PythonListType | PythonSimpleType; + +class PythonSimpleType implements Serializable { + name: string; + constructor(name: string) { + this.name = name; + } + serialize() : string { + return this.name; + } +} + +class PythonListType implements Serializable { + inner: PythonType; + constructor(inner: PythonType) { + this.inner = inner; + } + serialize() : string { + return `List[${this.inner.serialize()}]`; + } +} + +class PythonConcreteType implements Serializable { + py_type: PythonType; + pg_name: string; + nullable: boolean; + default_value: string | null; + constructor(ctx: PythonContext, pg_name: string, nullable: boolean) { + const py_type = ctx.parsePgType(pg_name); + + this.py_type = py_type; + this.pg_name = pg_name; + 
this.nullable = nullable; + this.default_value = null; + } + + serialize() : string { + return this.nullable + ? `Optional[${this.py_type.serialize()}]` + : this.py_type.serialize(); + } +} + +class PythonClassAttribute implements Serializable { + name: string; + pg_name: string; + py_type: PythonConcreteType; + constructor(name: string, py_type: PythonConcreteType) { + this.name = formatForPyAttributeName(name); + this.pg_name = name; + this.py_type = py_type; + } + serialize(): string { + return ` ${this.name}: Annotated[${this.py_type.serialize()}, Field(alias="${this.pg_name}")]` + } +} + +class PythonClass implements Serializable { + name: string; + schema: PostgresSchema; + class_attributes: PythonClassAttribute[]; + + + constructor(name: string, schema: PostgresSchema, class_attributes: PythonClassAttribute[]) { + this.schema = schema; + this.class_attributes = class_attributes; + this.name = `${formatForPyClassName(schema.name)}${formatForPyClassName(name)}`; + } + serialize(): string { + const attributes = this.class_attributes.length > 0 + ? 
this.class_attributes.map((attr) => attr.serialize()).join('\n') + : " pass"; + return `class ${this.name}(BaseModel):\n${attributes}`.trim(); + } +} + +function concatLines(items: Serializable[]): string { + return items.map((item) => item.serialize()).join('\n\n'); +} + +const PY_TYPE_MAP: Record = { + // Bool + bool: 'bool', + + // Numbers + int2: 'int', + int4: 'int', + int8: 'int', + float4: 'float', + float8: 'float', + numeric: 'float', + + // Strings + bytea: 'bytes', + bpchar: 'str', + varchar: 'str', + string: 'str', + date: 'datetime.date', + text: 'str', + citext: 'str', + time: 'datetime.time', + timetz: 'datetime.time', + timestamp: 'datetime.datetime', + timestamptz: 'datetime.datetime', + uuid: 'uuid.UUID', + vector: 'list[Any]', + + // JSON + json: 'Json[Any]', + jsonb: 'Json[Any]', + + // Range types (can be adjusted to more complex types if needed) + int4range: 'str', + int4multirange: 'str', + int8range: 'str', + int8multirange: 'str', + numrange: 'str', + nummultirange: 'str', + tsrange: 'str', + tsmultirange: 'str', + tstzrange: 'str', + tstzmultirange: 'str', + daterange: 'str', + datemultirange: 'str', + + // Miscellaneous types + void: 'None', + record: 'dict[str, Any]', +} as const + +export const apply = ({ + schemas, + tables, + views, + materializedViews, + columns, + types, +}: GeneratorMetadata): string => { + const ctx = new PythonContext(types, columns, schemas); + const py_tables = tables + .filter((table) => schemas.some((schema) => schema.name === table.schema)) + .map((table) => ctx.tableToClass(table)); + console.log('composite_types'); + const composite_types = types.filter((type) => type.attributes.length > 0).map((type) => ctx.typeToClass(type)); + + let output = ` +from pydantic import BaseModel, Json, Field +from typing import Any, Annotated, Literal, Optional, TypeAlias +import datetime + +${concatLines(Object.values(ctx.user_enums))} + +${concatLines(py_tables)} + +${concatLines(composite_types)} + +`.trim() + +// 
${views +// .filter((view) => schemas.some((schema) => schema.name === view.schema)) +// .flatMap((view) => +// generateTableStructsForOperations( +// schemas.find((schema) => schema.name === view.schema)!, +// view, +// columnsByTableId[view.id], +// types, +// ['Select'] +// ) +// ) +// .join('\n\n')} + +// ${materializedViews +// .filter((materializedView) => schemas.some((schema) => schema.name === materializedView.schema)) +// .flatMap((materializedView) => +// generateTableStructsForOperations( +// schemas.find((schema) => schema.name === materializedView.schema)!, +// materializedView, +// columnsByTableId[materializedView.id], +// types, +// ['Select'] +// ) +// ) +// .join('\n\n')} + +// ${compositeTypes +// .filter((compositeType) => schemas.some((schema) => schema.name === compositeType.schema)) +// .map((compositeType) => +// generateCompositeTypeStruct( +// schemas.find((schema) => schema.name === compositeType.schema)!, +// compositeType, +// types +// ) +// ) +// .join('\n\n')} +// `.trim() + + return output +} + +/** + * Converts a Postgres name to PascalCase. + * + * @example + * ```ts + * formatForPyTypeName('pokedex') // Pokedex + * formatForPyTypeName('pokemon_center') // PokemonCenter + * formatForPyTypeName('victory-road') // VictoryRoad + * formatForPyTypeName('pokemon league') // PokemonLeague + * ``` + */ +function formatForPyClassName(name: string): string { + console.log(name) + return name + .split(/[^a-zA-Z0-9]/) + .map((word) => `${word[0].toUpperCase()}${word.slice(1)}`) + .join('') +} + +/** + * Converts a Postgres name to snake_case. 
+ * + * @example + * ```ts + * formatForPyTypeName('Pokedex') // pokedex + * formatForPyTypeName('PokemonCenter') // pokemon_enter + * formatForPyTypeName('victory-road') // victory_road + * formatForPyTypeName('pokemon league') // pokemon_league + * ``` + */ +function formatForPyAttributeName(name: string): string { + return name + .split(/[^a-zA-Z0-9]+/) // Split on non-alphanumeric characters (like spaces, dashes, etc.) + .map(word => word.toLowerCase()) // Convert each word to lowercase + .join('_'); // Join with underscores +} + +function pgTypeToPythonType(pgType: string, nullable: boolean, types: PostgresType[] = []): string { + let pythonType: string | undefined = undefined + + if (pgType in PY_TYPE_MAP) { + pythonType = PY_TYPE_MAP[pgType as keyof typeof PY_TYPE_MAP] + } + + // Enums + const enumType = types.find((type) => type.name === pgType && type.enums.length > 0) + if (enumType) { + pythonType = formatForPyClassName(String(pgType)) + } + + if (pythonType) { + // If the type is nullable, append "| None" to the type + return nullable ? `${pythonType} | None` : pythonType + } + + // Fallback + return nullable ? 
String(pgType)+' | None' : String(pgType) +} From 3bcc3e63b6f60c41449c36e0e36154549a06af9a Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Thu, 13 Nov 2025 10:06:58 -0300 Subject: [PATCH 48/72] fix: use schema in type name --- src/server/templates/python.ts | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index 7a782bac..d31b15b4 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -47,7 +47,9 @@ class PythonContext { return PY_TYPE_MAP[name] } if (name in this.types) { - return formatForPyClassName(this.types[name].name); + const type = this.types[name]; + const schema = type!.schema; + return `${formatForPyClassName(schema)}${formatForPyClassName(name)}`; } throw new TypeError(`Unknown row type: ${name}`); } @@ -251,9 +253,10 @@ export const apply = ({ const composite_types = types.filter((type) => type.attributes.length > 0).map((type) => ctx.typeToClass(type)); let output = ` -from pydantic import BaseModel, Json, Field -from typing import Any, Annotated, Literal, Optional, TypeAlias import datetime +from typing import Annotated, Any, List, Literal, Optional, TypeAlias + +from pydantic import BaseModel, Field, Json ${concatLines(Object.values(ctx.user_enums))} From 5f48036d2f58f00f386a92ce87a733b9b8a8f67e Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Thu, 13 Nov 2025 10:17:13 -0300 Subject: [PATCH 49/72] fix: add views and materialized views --- src/server/templates/python.ts | 67 ++++++++++++++-------------------- 1 file changed, 27 insertions(+), 40 deletions(-) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index d31b15b4..f68ddb20 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -91,6 +91,24 @@ class PythonContext { const schema = this.schemas[type.schema]; return new PythonClass(type.name, schema, attributeEntries); } + + viewToClass(view: PostgresView) : 
PythonClass { + const attributes: PythonClassAttribute[] = (this.columns[view.id] ?? []) + .map((col) => { + const type = new PythonConcreteType(this, col.format, col.is_nullable); + return new PythonClassAttribute(col.name, type); + }); + return new PythonClass(view.name, this.schemas[view.schema], attributes) + } + + matViewToClass(matview: PostgresMaterializedView) : PythonClass { + const attributes: PythonClassAttribute[] = (this.columns[matview.id] ?? []) + .map((col) => { + const type = new PythonConcreteType(this, col.format, col.is_nullable); + return new PythonClassAttribute(col.name, type); + }); + return new PythonClass(matview.name, this.schemas[matview.schema], attributes) + } } @@ -249,8 +267,11 @@ export const apply = ({ const py_tables = tables .filter((table) => schemas.some((schema) => schema.name === table.schema)) .map((table) => ctx.tableToClass(table)); - console.log('composite_types'); + const composite_types = types.filter((type) => type.attributes.length > 0).map((type) => ctx.typeToClass(type)); + console.log(views); + const py_views = views.map((view) => ctx.viewToClass(view)); + const py_matviews = materializedViews.map((matview) => ctx.matViewToClass(matview)); let output = ` import datetime @@ -260,49 +281,15 @@ from pydantic import BaseModel, Field, Json ${concatLines(Object.values(ctx.user_enums))} +${concatLines(composite_types)} + ${concatLines(py_tables)} -${concatLines(composite_types)} +${concatLines(py_views)} -`.trim() +${concatLines(py_matviews)} -// ${views -// .filter((view) => schemas.some((schema) => schema.name === view.schema)) -// .flatMap((view) => -// generateTableStructsForOperations( -// schemas.find((schema) => schema.name === view.schema)!, -// view, -// columnsByTableId[view.id], -// types, -// ['Select'] -// ) -// ) -// .join('\n\n')} - -// ${materializedViews -// .filter((materializedView) => schemas.some((schema) => schema.name === materializedView.schema)) -// .flatMap((materializedView) => -// 
generateTableStructsForOperations( -// schemas.find((schema) => schema.name === materializedView.schema)!, -// materializedView, -// columnsByTableId[materializedView.id], -// types, -// ['Select'] -// ) -// ) -// .join('\n\n')} - -// ${compositeTypes -// .filter((compositeType) => schemas.some((schema) => schema.name === compositeType.schema)) -// .map((compositeType) => -// generateCompositeTypeStruct( -// schemas.find((schema) => schema.name === compositeType.schema)!, -// compositeType, -// types -// ) -// ) -// .join('\n\n')} -// `.trim() +`.trim() return output } From 5a514cc7b8da26014374e59d454426b1012ab833 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Thu, 13 Nov 2025 16:34:37 -0300 Subject: [PATCH 50/72] fix: add views and materialized_views, add insert and update methods too --- src/server/templates/python.ts | 146 ++++++++++++++++++--------------- 1 file changed, 82 insertions(+), 64 deletions(-) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index f68ddb20..bded9688 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -7,9 +7,6 @@ import type { PostgresView, } from '../../lib/index.js' import type { GeneratorMetadata } from '../../lib/generators.js' -import { console } from 'inspector/promises'; - -type Operation = 'Select' | 'Insert' | 'Update' interface Serializable { serialize(): string @@ -18,7 +15,7 @@ interface Serializable { class PythonContext { types: { [k: string]: PostgresType }; user_enums: { [k: string]: PythonEnum }; - columns: Record; + columns: Record; schemas: { [k: string]: PostgresSchema }; constructor(types: PostgresType[], columns: PostgresColumn[], schemas: PostgresSchema[]) { @@ -32,7 +29,7 @@ class PythonContext { acc[curr.table_id].push(curr) return acc }, - {} as Record + {} as Record ); this.user_enums = Object.fromEntries(types .filter((type) => type.enums.length > 0) @@ -49,14 +46,15 @@ class PythonContext { if (name in this.types) { const type = 
this.types[name]; const schema = type!.schema; - return `${formatForPyClassName(schema)}${formatForPyClassName(name)}`; + return `${formatForPyClassName(schema)}${formatForPyClassName(type.name)}`; } - throw new TypeError(`Unknown row type: ${name}`); + console.log(`Unknown recognized row type ${name}`); + return 'Any'; } parsePgType(pg_type: string) : PythonType { - if (pg_type.endsWith('[]')) { - const inner_str = pg_type.slice(0, -2); + if (pg_type.startsWith('_')) { + const inner_str = pg_type.slice(1); const inner = this.parsePgType(inner_str); return new PythonListType(inner); } else { @@ -65,15 +63,6 @@ class PythonContext { } } - tableToClass(table: PostgresTable) : PythonClass { - const attributes: PythonClassAttribute[] = (this.columns[table.id] ?? []) - .map((col) => { - const type = new PythonConcreteType(this, col.format, col.is_nullable); - return new PythonClassAttribute(col.name, type); - }); - return new PythonClass(table.name, this.schemas[table.schema], attributes) - } - typeToClass(type: PostgresType) : PythonClass { const types = Object.values(this.types); const attributes = type.attributes.map((attribute) => { @@ -85,28 +74,38 @@ class PythonContext { }); const attributeEntries: PythonClassAttribute[] = attributes .map((attribute) => { - const type = new PythonConcreteType(this, attribute.type!.format, false); - return new PythonClassAttribute(attribute.name, type); + const type = this.parsePgType(attribute.type!.name); + return new PythonClassAttribute(attribute.name, type, false, false, false, false); }); const schema = this.schemas[type.schema]; return new PythonClass(type.name, schema, attributeEntries); } + columnsToClassAttrs(table_id: number) : PythonClassAttribute[] { + const attrs = this.columns[table_id] ?? 
[]; + return attrs.map((col) => { + const type = this.parsePgType(col.format); + return new PythonClassAttribute(col.name, type, + col.is_nullable, + col.is_updatable, + col.is_generated || !!col.default_value, + col.is_identity); + }); + } + + tableToClass(table: PostgresTable) : PythonClass { + const attributes = this.columnsToClassAttrs(table.id); + return new PythonClass(table.name, this.schemas[table.schema], attributes) + } + + viewToClass(view: PostgresView) : PythonClass { - const attributes: PythonClassAttribute[] = (this.columns[view.id] ?? []) - .map((col) => { - const type = new PythonConcreteType(this, col.format, col.is_nullable); - return new PythonClassAttribute(col.name, type); - }); + const attributes = this.columnsToClassAttrs(view.id); return new PythonClass(view.name, this.schemas[view.schema], attributes) } matViewToClass(matview: PostgresMaterializedView) : PythonClass { - const attributes: PythonClassAttribute[] = (this.columns[matview.id] ?? []) - .map((col) => { - const type = new PythonConcreteType(this, col.format, col.is_nullable); - return new PythonClassAttribute(col.name, type); - }); + const attributes = this.columnsToClassAttrs(matview.id); return new PythonClass(matview.name, this.schemas[matview.schema], attributes) } } @@ -116,7 +115,7 @@ class PythonEnum implements Serializable { name: string; variants: string[]; constructor(type: PostgresType) { - this.name = formatForPyClassName(type.name); + this.name = `${formatForPyClassName(type.schema)}${formatForPyClassName(type.name)}`; this.variants = type.enums.map(formatForPyAttributeName); } serialize(): string { @@ -147,58 +146,71 @@ class PythonListType implements Serializable { } } -class PythonConcreteType implements Serializable { - py_type: PythonType; +class PythonClassAttribute implements Serializable { + name: string; pg_name: string; + py_type: PythonType; nullable: boolean; - default_value: string | null; - constructor(ctx: PythonContext, pg_name: string, nullable: 
boolean) { - const py_type = ctx.parsePgType(pg_name); + mutable: boolean; + has_default: boolean; + is_identity: boolean; + + constructor(name: string, py_type: PythonType, nullable: boolean, mutable: boolean, has_default: boolean, is_identity: boolean) { + this.name = formatForPyAttributeName(name); + this.pg_name = name; this.py_type = py_type; - this.pg_name = pg_name; this.nullable = nullable; - this.default_value = null; + this.mutable = mutable; + this.has_default = has_default; + this.is_identity = is_identity; } - - serialize() : string { - return this.nullable + + serialize(): string { + const py_type = this.nullable ? `Optional[${this.py_type.serialize()}]` : this.py_type.serialize(); + return ` ${this.name}: Annotated[${py_type}, Field(alias="${this.pg_name}")]` } -} -class PythonClassAttribute implements Serializable { - name: string; - pg_name: string; - py_type: PythonConcreteType; - constructor(name: string, py_type: PythonConcreteType) { - this.name = formatForPyAttributeName(name); - this.pg_name = name; - this.py_type = py_type; - } - serialize(): string { - return ` ${this.name}: Annotated[${this.py_type.serialize()}, Field(alias="${this.pg_name}")]` - } } class PythonClass implements Serializable { name: string; + table_name: string; + parent_class: string; schema: PostgresSchema; class_attributes: PythonClassAttribute[]; - - constructor(name: string, schema: PostgresSchema, class_attributes: PythonClassAttribute[]) { + constructor(name: string, schema: PostgresSchema, class_attributes: PythonClassAttribute[], parent_class: string="BaseModel") { this.schema = schema; this.class_attributes = class_attributes; + this.table_name = name; this.name = `${formatForPyClassName(schema.name)}${formatForPyClassName(name)}`; + this.parent_class = parent_class; } serialize(): string { const attributes = this.class_attributes.length > 0 ? 
this.class_attributes.map((attr) => attr.serialize()).join('\n') : " pass"; - return `class ${this.name}(BaseModel):\n${attributes}`.trim(); + return `class ${this.name}(${this.parent_class}):\n${attributes}`; } + + update() : PythonClass { + // Converts all attributes to nullable + const attrs = this.class_attributes + .filter((attr) => attr.mutable || attr.is_identity) + .map((attr) => new PythonClassAttribute(attr.name, attr.py_type, true, attr.mutable, attr.has_default, attr.is_identity)) + return new PythonClass(`${this.table_name}_update`, this.schema, attrs, "TypedDict") + } + + insert() : PythonClass { + // Converts all attributes that have a default to nullable. + const attrs = this.class_attributes + .map((attr) => new PythonClassAttribute(attr.name, attr.py_type, attr.has_default || attr.nullable, attr.mutable, attr.has_default, attr.is_identity)); + return new PythonClass(`${this.table_name}_insert`, this.schema, attrs, "TypedDict") + } + } function concatLines(items: Serializable[]): string { @@ -266,29 +278,36 @@ export const apply = ({ const ctx = new PythonContext(types, columns, schemas); const py_tables = tables .filter((table) => schemas.some((schema) => schema.name === table.schema)) - .map((table) => ctx.tableToClass(table)); + .flatMap((table) => { + const py_class = ctx.tableToClass(table); + return [py_class, py_class.insert(), py_class.update()]; + }); + + const composite_types = types + .filter((type) => type.attributes.length > 0) + .map((type) => ctx.typeToClass(type)); - const composite_types = types.filter((type) => type.attributes.length > 0).map((type) => ctx.typeToClass(type)); - console.log(views); const py_views = views.map((view) => ctx.viewToClass(view)); const py_matviews = materializedViews.map((matview) => ctx.matViewToClass(matview)); let output = ` +from __future__ import annotations + import datetime -from typing import Annotated, Any, List, Literal, Optional, TypeAlias +from typing import Annotated, Any, List, Literal, 
Optional, TypeAlias, TypedDict from pydantic import BaseModel, Field, Json ${concatLines(Object.values(ctx.user_enums))} -${concatLines(composite_types)} - ${concatLines(py_tables)} ${concatLines(py_views)} ${concatLines(py_matviews)} +${concatLines(composite_types)} + `.trim() return output @@ -306,7 +325,6 @@ ${concatLines(py_matviews)} * ``` */ function formatForPyClassName(name: string): string { - console.log(name) return name .split(/[^a-zA-Z0-9]/) .map((word) => `${word[0].toUpperCase()}${word.slice(1)}`) From 74507d6523a8eb1d63fb1a5d185f585f7d9e461f Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Fri, 14 Nov 2025 10:31:15 -0300 Subject: [PATCH 51/72] chore: add TypedDict for Insert and Update new class is needed because typeddict uses NonRequired for missing attributes --- src/server/templates/python.ts | 245 ++++++++++++++++--------------- test/server/typegen.ts | 253 +++++++++++++++++++++++++++++++++ 2 files changed, 384 insertions(+), 114 deletions(-) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index bded9688..8e8b2955 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -8,6 +8,60 @@ import type { } from '../../lib/index.js' import type { GeneratorMetadata } from '../../lib/generators.js' +export const apply = ({ + schemas, + tables, + views, + materializedViews, + columns, + types, +}: GeneratorMetadata): string => { + const ctx = new PythonContext(types, columns, schemas); + const py_tables = tables + .filter((table) => schemas.some((schema) => schema.name === table.schema)) + .flatMap((table) => { + const py_class_and_methods = ctx.tableToClass(table); + return py_class_and_methods; + }); + const composite_types = types + .filter((type) => type.attributes.length > 0) + .map((type) => ctx.typeToClass(type)); + const py_views = views.map((view) => ctx.viewToClass(view)); + const py_matviews = materializedViews.map((matview) => ctx.matViewToClass(matview)); + + let output = ` +from 
__future__ import annotations + +import datetime +from typing import ( + Annotated, + Any, + List, + Literal, + NotRequired, + Optional, + TypeAlias, + TypedDict, +) + +from pydantic import BaseModel, Field, Json + +${concatLines(Object.values(ctx.user_enums))} + +${concatLines(py_tables)} + +${concatLines(py_views)} + +${concatLines(py_matviews)} + +${concatLines(composite_types)} + +`.trim() + + return output +} + + interface Serializable { serialize(): string } @@ -63,7 +117,7 @@ class PythonContext { } } - typeToClass(type: PostgresType) : PythonClass { + typeToClass(type: PostgresType) : PythonBaseModel { const types = Object.values(this.types); const attributes = type.attributes.map((attribute) => { const type = types.find((type) => type.id === attribute.type_id) @@ -72,41 +126,49 @@ class PythonContext { type, } }); - const attributeEntries: PythonClassAttribute[] = attributes + const attributeEntries: PythonBaseModelAttr[] = attributes .map((attribute) => { const type = this.parsePgType(attribute.type!.name); - return new PythonClassAttribute(attribute.name, type, false, false, false, false); + return new PythonBaseModelAttr(attribute.name, type, false); }); + const schema = this.schemas[type.schema]; - return new PythonClass(type.name, schema, attributeEntries); + return new PythonBaseModel(type.name, schema, attributeEntries); + } + + columnsToClassAttrs(table_id: number) : PythonBaseModelAttr[] { + const attrs = this.columns[table_id] ?? []; + return attrs.map((col) => { + const type = this.parsePgType(col.format); + return new PythonBaseModelAttr(col.name, type, col.is_nullable); + }); } - columnsToClassAttrs(table_id: number) : PythonClassAttribute[] { + columnsToDictAttrs(table_id: number, not_required: boolean) : PythonTypedDictAttr[] { const attrs = this.columns[table_id] ?? 
[]; return attrs.map((col) => { const type = this.parsePgType(col.format); - return new PythonClassAttribute(col.name, type, - col.is_nullable, - col.is_updatable, - col.is_generated || !!col.default_value, - col.is_identity); + return new PythonTypedDictAttr(col.name, type, col.is_nullable, not_required || col.is_nullable || col.is_identity || (col.default_value !== null)); }); } - tableToClass(table: PostgresTable) : PythonClass { - const attributes = this.columnsToClassAttrs(table.id); - return new PythonClass(table.name, this.schemas[table.schema], attributes) + tableToClass(table: PostgresTable) : [PythonBaseModel, PythonTypedDict, PythonTypedDict] { + const schema = this.schemas[table.schema]; + const select = new PythonBaseModel(table.name, schema, this.columnsToClassAttrs(table.id)); + const insert = new PythonTypedDict(table.name, "Insert", schema, this.columnsToDictAttrs(table.id, false)); + const update = new PythonTypedDict(table.name, "Update", schema, this.columnsToDictAttrs(table.id, true)); + return [select, insert, update]; } - viewToClass(view: PostgresView) : PythonClass { + viewToClass(view: PostgresView) : PythonBaseModel { const attributes = this.columnsToClassAttrs(view.id); - return new PythonClass(view.name, this.schemas[view.schema], attributes) + return new PythonBaseModel(view.name, this.schemas[view.schema], attributes) } - matViewToClass(matview: PostgresMaterializedView) : PythonClass { + matViewToClass(matview: PostgresMaterializedView) : PythonBaseModel { const attributes = this.columnsToClassAttrs(matview.id); - return new PythonClass(matview.name, this.schemas[matview.schema], attributes) + return new PythonBaseModel(matview.name, this.schemas[matview.schema], attributes) } } @@ -116,7 +178,7 @@ class PythonEnum implements Serializable { variants: string[]; constructor(type: PostgresType) { this.name = `${formatForPyClassName(type.schema)}${formatForPyClassName(type.name)}`; - this.variants = 
type.enums.map(formatForPyAttributeName); + this.variants = type.enums; } serialize(): string { const variants = this.variants.map((item) => `"${item}"`).join(', '); @@ -146,71 +208,93 @@ class PythonListType implements Serializable { } } -class PythonClassAttribute implements Serializable { +class PythonBaseModelAttr implements Serializable { name: string; pg_name: string; py_type: PythonType; nullable: boolean; - mutable: boolean; - has_default: boolean; - is_identity: boolean; - - constructor(name: string, py_type: PythonType, nullable: boolean, mutable: boolean, has_default: boolean, is_identity: boolean) { + constructor(name: string, py_type: PythonType, nullable: boolean) { this.name = formatForPyAttributeName(name); this.pg_name = name; this.py_type = py_type; this.nullable = nullable; - this.mutable = mutable; - this.has_default = has_default; - this.is_identity = is_identity; } - + serialize(): string { const py_type = this.nullable ? `Optional[${this.py_type.serialize()}]` : this.py_type.serialize(); - return ` ${this.name}: Annotated[${py_type}, Field(alias="${this.pg_name}")]` + return ` ${this.name}: ${py_type} = Field(alias="${this.pg_name}")` } - } -class PythonClass implements Serializable { +class PythonBaseModel implements Serializable { name: string; table_name: string; - parent_class: string; schema: PostgresSchema; - class_attributes: PythonClassAttribute[]; + class_attributes: PythonBaseModelAttr[]; - constructor(name: string, schema: PostgresSchema, class_attributes: PythonClassAttribute[], parent_class: string="BaseModel") { + constructor(name: string, schema: PostgresSchema, class_attributes: PythonBaseModelAttr[]) { this.schema = schema; this.class_attributes = class_attributes; this.table_name = name; this.name = `${formatForPyClassName(schema.name)}${formatForPyClassName(name)}`; - this.parent_class = parent_class; } serialize(): string { const attributes = this.class_attributes.length > 0 ? 
this.class_attributes.map((attr) => attr.serialize()).join('\n') : " pass"; - return `class ${this.name}(${this.parent_class}):\n${attributes}`; + return `class ${this.name}(BaseModel):\n${attributes}`; } +} + +class PythonTypedDictAttr implements Serializable { + name: string; + pg_name: string; + py_type: PythonType; + nullable: boolean; + not_required: boolean; - update() : PythonClass { - // Converts all attributes to nullable - const attrs = this.class_attributes - .filter((attr) => attr.mutable || attr.is_identity) - .map((attr) => new PythonClassAttribute(attr.name, attr.py_type, true, attr.mutable, attr.has_default, attr.is_identity)) - return new PythonClass(`${this.table_name}_update`, this.schema, attrs, "TypedDict") + constructor(name: string, py_type: PythonType, nullable: boolean, required: boolean) { + this.name = formatForPyAttributeName(name); + this.pg_name = name; + this.py_type = py_type; + this.nullable = nullable; + this.not_required = required; } - insert() : PythonClass { - // Converts all attributes that have a default to nullable. - const attrs = this.class_attributes - .map((attr) => new PythonClassAttribute(attr.name, attr.py_type, attr.has_default || attr.nullable, attr.mutable, attr.has_default, attr.is_identity)); - return new PythonClass(`${this.table_name}_insert`, this.schema, attrs, "TypedDict") + serialize(): string { + const annotation = `Annotated[${this.py_type.serialize()}, Field(alias="${this.pg_name}")]`; + const rhs = this.not_required + ? 
`NotRequired[${annotation}]` + : annotation; + return ` ${this.name}: ${rhs}`; } +} + +class PythonTypedDict implements Serializable { + name: string; + table_name: string; + parent_class: string; + schema: PostgresSchema; + dict_attributes: PythonTypedDictAttr[]; + operation: "Insert" | "Update"; + constructor(name: string, operation: "Insert" | "Update", schema: PostgresSchema, dict_attributes: PythonTypedDictAttr[], parent_class: string="BaseModel") { + this.schema = schema; + this.dict_attributes = dict_attributes; + this.table_name = name; + this.name = `${formatForPyClassName(schema.name)}${formatForPyClassName(name)}`; + this.parent_class = parent_class; + this.operation = operation; + } + serialize(): string { + const attributes = this.dict_attributes.length > 0 + ? this.dict_attributes.map((attr) => attr.serialize()).join('\n') + : " pass"; + return `class ${this.name}${this.operation}(TypedDict):\n${attributes}`; + } } function concatLines(items: Serializable[]): string { @@ -267,52 +351,6 @@ const PY_TYPE_MAP: Record = { record: 'dict[str, Any]', } as const -export const apply = ({ - schemas, - tables, - views, - materializedViews, - columns, - types, -}: GeneratorMetadata): string => { - const ctx = new PythonContext(types, columns, schemas); - const py_tables = tables - .filter((table) => schemas.some((schema) => schema.name === table.schema)) - .flatMap((table) => { - const py_class = ctx.tableToClass(table); - return [py_class, py_class.insert(), py_class.update()]; - }); - - const composite_types = types - .filter((type) => type.attributes.length > 0) - .map((type) => ctx.typeToClass(type)); - - const py_views = views.map((view) => ctx.viewToClass(view)); - const py_matviews = materializedViews.map((matview) => ctx.matViewToClass(matview)); - - let output = ` -from __future__ import annotations - -import datetime -from typing import Annotated, Any, List, Literal, Optional, TypeAlias, TypedDict - -from pydantic import BaseModel, Field, Json - 
-${concatLines(Object.values(ctx.user_enums))} - -${concatLines(py_tables)} - -${concatLines(py_views)} - -${concatLines(py_matviews)} - -${concatLines(composite_types)} - -`.trim() - - return output -} - /** * Converts a Postgres name to PascalCase. * @@ -349,24 +387,3 @@ function formatForPyAttributeName(name: string): string { .join('_'); // Join with underscores } -function pgTypeToPythonType(pgType: string, nullable: boolean, types: PostgresType[] = []): string { - let pythonType: string | undefined = undefined - - if (pgType in PY_TYPE_MAP) { - pythonType = PY_TYPE_MAP[pgType as keyof typeof PY_TYPE_MAP] - } - - // Enums - const enumType = types.find((type) => type.name === pgType && type.enums.length > 0) - if (enumType) { - pythonType = formatForPyClassName(String(pgType)) - } - - if (pythonType) { - // If the type is nullable, append "| None" to the type - return nullable ? `${pythonType} | None` : pythonType - } - - // Fallback - return nullable ? String(pgType)+' | None' : String(pgType) -} diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 4bb83d94..240a62a3 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -6248,3 +6248,256 @@ test('typegen: swift w/ public access control', async () => { }" `) }) + + +test('typegen: python', async () => { + const { body } = await app.inject({ + method: 'GET', + path: '/generators/python', + query: { access_control: 'public' }, + }) + expect(body).toMatchInlineSnapshot(` +"from __future__ import annotations + +import datetime +from typing import ( + Annotated, + Any, + List, + Literal, + NotRequired, + Optional, + TypeAlias, + TypedDict, +) + +from pydantic import BaseModel, Field, Json + +PublicUserStatus: TypeAlias = Literal["ACTIVE", "INACTIVE"] + +PublicMemeStatus: TypeAlias = Literal["new", "old", "retired"] + +class PublicUsers(BaseModel): + decimal: Optional[float] = Field(alias="decimal") + id: int = Field(alias="id") + name: Optional[str] = Field(alias="name") + status: 
Optional[PublicUserStatus] = Field(alias="status") + +class PublicUsersInsert(TypedDict): + decimal: NotRequired[Annotated[float, Field(alias="decimal")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + name: NotRequired[Annotated[str, Field(alias="name")]] + status: NotRequired[Annotated[PublicUserStatus, Field(alias="status")]] + +class PublicUsersUpdate(TypedDict): + decimal: NotRequired[Annotated[float, Field(alias="decimal")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + name: NotRequired[Annotated[str, Field(alias="name")]] + status: NotRequired[Annotated[PublicUserStatus, Field(alias="status")]] + +class PublicTodos(BaseModel): + details: Optional[str] = Field(alias="details") + id: int = Field(alias="id") + user_id: int = Field(alias="user-id") + +class PublicTodosInsert(TypedDict): + details: NotRequired[Annotated[str, Field(alias="details")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + user_id: Annotated[int, Field(alias="user-id")] + +class PublicTodosUpdate(TypedDict): + details: NotRequired[Annotated[str, Field(alias="details")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + user_id: NotRequired[Annotated[int, Field(alias="user-id")]] + +class PublicUsersAudit(BaseModel): + created_at: Optional[datetime.datetime] = Field(alias="created_at") + id: int = Field(alias="id") + previous_value: Optional[Json[Any]] = Field(alias="previous_value") + user_id: Optional[int] = Field(alias="user_id") + +class PublicUsersAuditInsert(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + previous_value: NotRequired[Annotated[Json[Any], Field(alias="previous_value")]] + user_id: NotRequired[Annotated[int, Field(alias="user_id")]] + +class PublicUsersAuditUpdate(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + previous_value: 
NotRequired[Annotated[Json[Any], Field(alias="previous_value")]] + user_id: NotRequired[Annotated[int, Field(alias="user_id")]] + +class PublicUserDetails(BaseModel): + details: Optional[str] = Field(alias="details") + user_id: int = Field(alias="user_id") + +class PublicUserDetailsInsert(TypedDict): + details: NotRequired[Annotated[str, Field(alias="details")]] + user_id: Annotated[int, Field(alias="user_id")] + +class PublicUserDetailsUpdate(TypedDict): + details: NotRequired[Annotated[str, Field(alias="details")]] + user_id: NotRequired[Annotated[int, Field(alias="user_id")]] + +class PublicEmpty(BaseModel): + pass + +class PublicEmptyInsert(TypedDict): + pass + +class PublicEmptyUpdate(TypedDict): + pass + +class PublicTableWithOtherTablesRowType(BaseModel): + col1: Optional[PublicUserDetails] = Field(alias="col1") + col2: Optional[PublicAView] = Field(alias="col2") + +class PublicTableWithOtherTablesRowTypeInsert(TypedDict): + col1: NotRequired[Annotated[PublicUserDetails, Field(alias="col1")]] + col2: NotRequired[Annotated[PublicAView, Field(alias="col2")]] + +class PublicTableWithOtherTablesRowTypeUpdate(TypedDict): + col1: NotRequired[Annotated[PublicUserDetails, Field(alias="col1")]] + col2: NotRequired[Annotated[PublicAView, Field(alias="col2")]] + +class PublicTableWithPrimaryKeyOtherThanId(BaseModel): + name: Optional[str] = Field(alias="name") + other_id: int = Field(alias="other_id") + +class PublicTableWithPrimaryKeyOtherThanIdInsert(TypedDict): + name: NotRequired[Annotated[str, Field(alias="name")]] + other_id: NotRequired[Annotated[int, Field(alias="other_id")]] + +class PublicTableWithPrimaryKeyOtherThanIdUpdate(TypedDict): + name: NotRequired[Annotated[str, Field(alias="name")]] + other_id: NotRequired[Annotated[int, Field(alias="other_id")]] + +class PublicEvents(BaseModel): + created_at: datetime.datetime = Field(alias="created_at") + data: Optional[Json[Any]] = Field(alias="data") + event_type: Optional[str] = Field(alias="event_type") + id: 
int = Field(alias="id") + +class PublicEventsInsert(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + data: NotRequired[Annotated[Json[Any], Field(alias="data")]] + event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + +class PublicEventsUpdate(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + data: NotRequired[Annotated[Json[Any], Field(alias="data")]] + event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + +class PublicEvents2024(BaseModel): + created_at: datetime.datetime = Field(alias="created_at") + data: Optional[Json[Any]] = Field(alias="data") + event_type: Optional[str] = Field(alias="event_type") + id: int = Field(alias="id") + +class PublicEvents2024Insert(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + data: NotRequired[Annotated[Json[Any], Field(alias="data")]] + event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + id: Annotated[int, Field(alias="id")] + +class PublicEvents2024Update(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + data: NotRequired[Annotated[Json[Any], Field(alias="data")]] + event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + +class PublicEvents2025(BaseModel): + created_at: datetime.datetime = Field(alias="created_at") + data: Optional[Json[Any]] = Field(alias="data") + event_type: Optional[str] = Field(alias="event_type") + id: int = Field(alias="id") + +class PublicEvents2025Insert(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + data: NotRequired[Annotated[Json[Any], Field(alias="data")]] + event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + id: Annotated[int, 
Field(alias="id")] + +class PublicEvents2025Update(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + data: NotRequired[Annotated[Json[Any], Field(alias="data")]] + event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + +class PublicCategory(BaseModel): + id: int = Field(alias="id") + name: str = Field(alias="name") + +class PublicCategoryInsert(TypedDict): + id: NotRequired[Annotated[int, Field(alias="id")]] + name: Annotated[str, Field(alias="name")] + +class PublicCategoryUpdate(TypedDict): + id: NotRequired[Annotated[int, Field(alias="id")]] + name: NotRequired[Annotated[str, Field(alias="name")]] + +class PublicMemes(BaseModel): + category: Optional[int] = Field(alias="category") + created_at: datetime.datetime = Field(alias="created_at") + id: int = Field(alias="id") + metadata: Optional[Json[Any]] = Field(alias="metadata") + name: str = Field(alias="name") + status: Optional[PublicMemeStatus] = Field(alias="status") + +class PublicMemesInsert(TypedDict): + category: NotRequired[Annotated[int, Field(alias="category")]] + created_at: Annotated[datetime.datetime, Field(alias="created_at")] + id: NotRequired[Annotated[int, Field(alias="id")]] + metadata: NotRequired[Annotated[Json[Any], Field(alias="metadata")]] + name: Annotated[str, Field(alias="name")] + status: NotRequired[Annotated[PublicMemeStatus, Field(alias="status")]] + +class PublicMemesUpdate(TypedDict): + category: NotRequired[Annotated[int, Field(alias="category")]] + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + metadata: NotRequired[Annotated[Json[Any], Field(alias="metadata")]] + name: NotRequired[Annotated[str, Field(alias="name")]] + status: NotRequired[Annotated[PublicMemeStatus, Field(alias="status")]] + +class PublicAView(BaseModel): + id: Optional[int] = Field(alias="id") + +class 
PublicTodosView(BaseModel): + details: Optional[str] = Field(alias="details") + id: Optional[int] = Field(alias="id") + user_id: Optional[int] = Field(alias="user-id") + +class PublicUsersView(BaseModel): + decimal: Optional[float] = Field(alias="decimal") + id: Optional[int] = Field(alias="id") + name: Optional[str] = Field(alias="name") + status: Optional[PublicUserStatus] = Field(alias="status") + +class PublicUserTodosSummaryView(BaseModel): + todo_count: Optional[int] = Field(alias="todo_count") + todo_details: Optional[List[str]] = Field(alias="todo_details") + user_id: Optional[int] = Field(alias="user_id") + user_name: Optional[str] = Field(alias="user_name") + user_status: Optional[PublicUserStatus] = Field(alias="user_status") + +class PublicUsersViewWithMultipleRefsToUsers(BaseModel): + initial_id: Optional[int] = Field(alias="initial_id") + initial_name: Optional[str] = Field(alias="initial_name") + second_id: Optional[int] = Field(alias="second_id") + second_name: Optional[str] = Field(alias="second_name") + +class PublicTodosMatview(BaseModel): + details: Optional[str] = Field(alias="details") + id: Optional[int] = Field(alias="id") + user_id: Optional[int] = Field(alias="user-id") + +class PublicCompositeTypeWithArrayAttribute(BaseModel): + my_text_array: List[str] = Field(alias="my_text_array") + +class PublicCompositeTypeWithRecordAttribute(BaseModel): + todo: PublicTodos = Field(alias="todo")" +`)}) From a73dd6e7fb2a695a30096494c7fa2d38628f57ea Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Fri, 14 Nov 2025 10:58:14 -0300 Subject: [PATCH 52/72] format: run npx prettier --write --- src/server/templates/python.ts | 298 +++++++++++++++++---------------- test/server/typegen.ts | 4 +- 2 files changed, 159 insertions(+), 143 deletions(-) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index 8e8b2955..2ac80996 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -16,18 +16,18 @@ export 
const apply = ({ columns, types, }: GeneratorMetadata): string => { - const ctx = new PythonContext(types, columns, schemas); + const ctx = new PythonContext(types, columns, schemas) const py_tables = tables .filter((table) => schemas.some((schema) => schema.name === table.schema)) .flatMap((table) => { - const py_class_and_methods = ctx.tableToClass(table); - return py_class_and_methods; - }); + const py_class_and_methods = ctx.tableToClass(table) + return py_class_and_methods + }) const composite_types = types .filter((type) => type.attributes.length > 0) - .map((type) => ctx.typeToClass(type)); - const py_views = views.map((view) => ctx.viewToClass(view)); - const py_matviews = materializedViews.map((matview) => ctx.matViewToClass(matview)); + .map((type) => ctx.typeToClass(type)) + const py_views = views.map((view) => ctx.viewToClass(view)) + const py_matviews = materializedViews.map((matview) => ctx.matViewToClass(matview)) let output = ` from __future__ import annotations @@ -61,20 +61,19 @@ ${concatLines(composite_types)} return output } - interface Serializable { serialize(): string } class PythonContext { - types: { [k: string]: PostgresType }; - user_enums: { [k: string]: PythonEnum }; - columns: Record; - schemas: { [k: string]: PostgresSchema }; + types: { [k: string]: PostgresType } + user_enums: { [k: string]: PythonEnum } + columns: Record + schemas: { [k: string]: PostgresSchema } constructor(types: PostgresType[], columns: PostgresColumn[], schemas: PostgresSchema[]) { - this.schemas = Object.fromEntries(schemas.map((schema) => [schema.name, schema])); - this.types = Object.fromEntries(types.map((type) => [type.name, type])); + this.schemas = Object.fromEntries(schemas.map((schema) => [schema.name, schema])) + this.types = Object.fromEntries(types.map((type) => [type.name, type])) this.columns = columns .sort(({ name: a }, { name: b }) => a.localeCompare(b)) .reduce( @@ -84,221 +83,239 @@ class PythonContext { return acc }, {} as Record - ); - 
this.user_enums = Object.fromEntries(types - .filter((type) => type.enums.length > 0) - .map((type) => [type.name, new PythonEnum(type)])); + ) + this.user_enums = Object.fromEntries( + types.filter((type) => type.enums.length > 0).map((type) => [type.name, new PythonEnum(type)]) + ) } - resolveTypeName(name: string) : string { + resolveTypeName(name: string): string { if (name in this.user_enums) { - return this.user_enums[name].name; + return this.user_enums[name].name } if (name in PY_TYPE_MAP) { return PY_TYPE_MAP[name] } if (name in this.types) { - const type = this.types[name]; - const schema = type!.schema; - return `${formatForPyClassName(schema)}${formatForPyClassName(type.name)}`; + const type = this.types[name] + const schema = type!.schema + return `${formatForPyClassName(schema)}${formatForPyClassName(type.name)}` } - console.log(`Unknown recognized row type ${name}`); - return 'Any'; + console.log(`Unknown recognized row type ${name}`) + return 'Any' } - parsePgType(pg_type: string) : PythonType { + parsePgType(pg_type: string): PythonType { if (pg_type.startsWith('_')) { - const inner_str = pg_type.slice(1); - const inner = this.parsePgType(inner_str); - return new PythonListType(inner); + const inner_str = pg_type.slice(1) + const inner = this.parsePgType(inner_str) + return new PythonListType(inner) } else { - const type_name = this.resolveTypeName(pg_type); - return new PythonSimpleType(type_name); + const type_name = this.resolveTypeName(pg_type) + return new PythonSimpleType(type_name) } } - typeToClass(type: PostgresType) : PythonBaseModel { - const types = Object.values(this.types); + typeToClass(type: PostgresType): PythonBaseModel { + const types = Object.values(this.types) const attributes = type.attributes.map((attribute) => { const type = types.find((type) => type.id === attribute.type_id) return { ...attribute, type, } - }); - const attributeEntries: PythonBaseModelAttr[] = attributes - .map((attribute) => { - const type = 
this.parsePgType(attribute.type!.name); - return new PythonBaseModelAttr(attribute.name, type, false); - }); - - const schema = this.schemas[type.schema]; - return new PythonBaseModel(type.name, schema, attributeEntries); + }) + const attributeEntries: PythonBaseModelAttr[] = attributes.map((attribute) => { + const type = this.parsePgType(attribute.type!.name) + return new PythonBaseModelAttr(attribute.name, type, false) + }) + + const schema = this.schemas[type.schema] + return new PythonBaseModel(type.name, schema, attributeEntries) } - columnsToClassAttrs(table_id: number) : PythonBaseModelAttr[] { - const attrs = this.columns[table_id] ?? []; + columnsToClassAttrs(table_id: number): PythonBaseModelAttr[] { + const attrs = this.columns[table_id] ?? [] return attrs.map((col) => { - const type = this.parsePgType(col.format); - return new PythonBaseModelAttr(col.name, type, col.is_nullable); - }); + const type = this.parsePgType(col.format) + return new PythonBaseModelAttr(col.name, type, col.is_nullable) + }) } - columnsToDictAttrs(table_id: number, not_required: boolean) : PythonTypedDictAttr[] { - const attrs = this.columns[table_id] ?? []; + columnsToDictAttrs(table_id: number, not_required: boolean): PythonTypedDictAttr[] { + const attrs = this.columns[table_id] ?? 
[] return attrs.map((col) => { - const type = this.parsePgType(col.format); - return new PythonTypedDictAttr(col.name, type, col.is_nullable, not_required || col.is_nullable || col.is_identity || (col.default_value !== null)); - }); + const type = this.parsePgType(col.format) + return new PythonTypedDictAttr( + col.name, + type, + col.is_nullable, + not_required || col.is_nullable || col.is_identity || col.default_value !== null + ) + }) } - tableToClass(table: PostgresTable) : [PythonBaseModel, PythonTypedDict, PythonTypedDict] { - const schema = this.schemas[table.schema]; - const select = new PythonBaseModel(table.name, schema, this.columnsToClassAttrs(table.id)); - const insert = new PythonTypedDict(table.name, "Insert", schema, this.columnsToDictAttrs(table.id, false)); - const update = new PythonTypedDict(table.name, "Update", schema, this.columnsToDictAttrs(table.id, true)); - return [select, insert, update]; + tableToClass(table: PostgresTable): [PythonBaseModel, PythonTypedDict, PythonTypedDict] { + const schema = this.schemas[table.schema] + const select = new PythonBaseModel(table.name, schema, this.columnsToClassAttrs(table.id)) + const insert = new PythonTypedDict( + table.name, + 'Insert', + schema, + this.columnsToDictAttrs(table.id, false) + ) + const update = new PythonTypedDict( + table.name, + 'Update', + schema, + this.columnsToDictAttrs(table.id, true) + ) + return [select, insert, update] } - - viewToClass(view: PostgresView) : PythonBaseModel { - const attributes = this.columnsToClassAttrs(view.id); + viewToClass(view: PostgresView): PythonBaseModel { + const attributes = this.columnsToClassAttrs(view.id) return new PythonBaseModel(view.name, this.schemas[view.schema], attributes) } - matViewToClass(matview: PostgresMaterializedView) : PythonBaseModel { - const attributes = this.columnsToClassAttrs(matview.id); + matViewToClass(matview: PostgresMaterializedView): PythonBaseModel { + const attributes = this.columnsToClassAttrs(matview.id) 
return new PythonBaseModel(matview.name, this.schemas[matview.schema], attributes) } } - class PythonEnum implements Serializable { - name: string; - variants: string[]; + name: string + variants: string[] constructor(type: PostgresType) { - this.name = `${formatForPyClassName(type.schema)}${formatForPyClassName(type.name)}`; - this.variants = type.enums; + this.name = `${formatForPyClassName(type.schema)}${formatForPyClassName(type.name)}` + this.variants = type.enums } serialize(): string { - const variants = this.variants.map((item) => `"${item}"`).join(', '); - return `${this.name}: TypeAlias = Literal[${variants}]`; + const variants = this.variants.map((item) => `"${item}"`).join(', ') + return `${this.name}: TypeAlias = Literal[${variants}]` } } -type PythonType = PythonListType | PythonSimpleType; +type PythonType = PythonListType | PythonSimpleType class PythonSimpleType implements Serializable { - name: string; + name: string constructor(name: string) { - this.name = name; + this.name = name } - serialize() : string { - return this.name; + serialize(): string { + return this.name } } class PythonListType implements Serializable { - inner: PythonType; + inner: PythonType constructor(inner: PythonType) { - this.inner = inner; + this.inner = inner + } + serialize(): string { + return `List[${this.inner.serialize()}]` } - serialize() : string { - return `List[${this.inner.serialize()}]`; - } } class PythonBaseModelAttr implements Serializable { - name: string; - pg_name: string; - py_type: PythonType; - nullable: boolean; - + name: string + pg_name: string + py_type: PythonType + nullable: boolean + constructor(name: string, py_type: PythonType, nullable: boolean) { - this.name = formatForPyAttributeName(name); - this.pg_name = name; - this.py_type = py_type; - this.nullable = nullable; + this.name = formatForPyAttributeName(name) + this.pg_name = name + this.py_type = py_type + this.nullable = nullable } serialize(): string { const py_type = this.nullable ? 
`Optional[${this.py_type.serialize()}]` - : this.py_type.serialize(); + : this.py_type.serialize() return ` ${this.name}: ${py_type} = Field(alias="${this.pg_name}")` } } class PythonBaseModel implements Serializable { - name: string; - table_name: string; - schema: PostgresSchema; - class_attributes: PythonBaseModelAttr[]; - + name: string + table_name: string + schema: PostgresSchema + class_attributes: PythonBaseModelAttr[] + constructor(name: string, schema: PostgresSchema, class_attributes: PythonBaseModelAttr[]) { - this.schema = schema; - this.class_attributes = class_attributes; - this.table_name = name; - this.name = `${formatForPyClassName(schema.name)}${formatForPyClassName(name)}`; + this.schema = schema + this.class_attributes = class_attributes + this.table_name = name + this.name = `${formatForPyClassName(schema.name)}${formatForPyClassName(name)}` } serialize(): string { - const attributes = this.class_attributes.length > 0 - ? this.class_attributes.map((attr) => attr.serialize()).join('\n') - : " pass"; - return `class ${this.name}(BaseModel):\n${attributes}`; + const attributes = + this.class_attributes.length > 0 + ? 
this.class_attributes.map((attr) => attr.serialize()).join('\n') + : ' pass' + return `class ${this.name}(BaseModel):\n${attributes}` } } class PythonTypedDictAttr implements Serializable { - name: string; - pg_name: string; - py_type: PythonType; - nullable: boolean; - not_required: boolean; + name: string + pg_name: string + py_type: PythonType + nullable: boolean + not_required: boolean constructor(name: string, py_type: PythonType, nullable: boolean, required: boolean) { - this.name = formatForPyAttributeName(name); - this.pg_name = name; - this.py_type = py_type; - this.nullable = nullable; - this.not_required = required; + this.name = formatForPyAttributeName(name) + this.pg_name = name + this.py_type = py_type + this.nullable = nullable + this.not_required = required } serialize(): string { - const annotation = `Annotated[${this.py_type.serialize()}, Field(alias="${this.pg_name}")]`; - const rhs = this.not_required - ? `NotRequired[${annotation}]` - : annotation; - return ` ${this.name}: ${rhs}`; + const annotation = `Annotated[${this.py_type.serialize()}, Field(alias="${this.pg_name}")]` + const rhs = this.not_required ? 
`NotRequired[${annotation}]` : annotation + return ` ${this.name}: ${rhs}` } } class PythonTypedDict implements Serializable { - name: string; - table_name: string; - parent_class: string; - schema: PostgresSchema; - dict_attributes: PythonTypedDictAttr[]; - operation: "Insert" | "Update"; - - constructor(name: string, operation: "Insert" | "Update", schema: PostgresSchema, dict_attributes: PythonTypedDictAttr[], parent_class: string="BaseModel") { - this.schema = schema; - this.dict_attributes = dict_attributes; - this.table_name = name; - this.name = `${formatForPyClassName(schema.name)}${formatForPyClassName(name)}`; - this.parent_class = parent_class; - this.operation = operation; + name: string + table_name: string + parent_class: string + schema: PostgresSchema + dict_attributes: PythonTypedDictAttr[] + operation: 'Insert' | 'Update' + + constructor( + name: string, + operation: 'Insert' | 'Update', + schema: PostgresSchema, + dict_attributes: PythonTypedDictAttr[], + parent_class: string = 'BaseModel' + ) { + this.schema = schema + this.dict_attributes = dict_attributes + this.table_name = name + this.name = `${formatForPyClassName(schema.name)}${formatForPyClassName(name)}` + this.parent_class = parent_class + this.operation = operation } serialize(): string { - const attributes = this.dict_attributes.length > 0 - ? this.dict_attributes.map((attr) => attr.serialize()).join('\n') - : " pass"; - return `class ${this.name}${this.operation}(TypedDict):\n${attributes}`; + const attributes = + this.dict_attributes.length > 0 + ? 
this.dict_attributes.map((attr) => attr.serialize()).join('\n') + : ' pass' + return `class ${this.name}${this.operation}(TypedDict):\n${attributes}` } } function concatLines(items: Serializable[]): string { - return items.map((item) => item.serialize()).join('\n\n'); + return items.map((item) => item.serialize()).join('\n\n') } const PY_TYPE_MAP: Record = { @@ -381,9 +398,8 @@ function formatForPyClassName(name: string): string { * ``` */ function formatForPyAttributeName(name: string): string { - return name + return name .split(/[^a-zA-Z0-9]+/) // Split on non-alphanumeric characters (like spaces, dashes, etc.) - .map(word => word.toLowerCase()) // Convert each word to lowercase - .join('_'); // Join with underscores + .map((word) => word.toLowerCase()) // Convert each word to lowercase + .join('_') // Join with underscores } - diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 240a62a3..46079de0 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -6249,7 +6249,6 @@ test('typegen: swift w/ public access control', async () => { `) }) - test('typegen: python', async () => { const { body } = await app.inject({ method: 'GET', @@ -6500,4 +6499,5 @@ class PublicCompositeTypeWithArrayAttribute(BaseModel): class PublicCompositeTypeWithRecordAttribute(BaseModel): todo: PublicTodos = Field(alias="todo")" -`)}) +`) +}) From 859a5d646f9911fa15d554788bcc47c92d2cd94f Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Mon, 17 Nov 2025 11:49:32 -0300 Subject: [PATCH 53/72] fix: filter non-included schemas --- src/server/templates/python.ts | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index 2ac80996..31243b66 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -24,10 +24,14 @@ export const apply = ({ return py_class_and_methods }) const composite_types = types - .filter((type) => type.attributes.length > 0) + 
.filter((type) => type.attributes.length > 0 && schemas.some((schema) => type.schema == schema.name)) .map((type) => ctx.typeToClass(type)) - const py_views = views.map((view) => ctx.viewToClass(view)) - const py_matviews = materializedViews.map((matview) => ctx.matViewToClass(matview)) + const py_views = views + .filter((view) => schemas.some((schema) => schema.name === view.schema)) + .map((view) => ctx.viewToClass(view)) + const py_matviews = materializedViews + .filter((matview) => schemas.some((schema) => schema.name === matview.schema)) + .map((matview) => ctx.matViewToClass(matview)) let output = ` from __future__ import annotations From 55592859693ca7e2afea41e59c87b8c6602b9415 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Mon, 17 Nov 2025 11:52:21 -0300 Subject: [PATCH 54/72] fix: add x-application-name header, add python to server.ts --- src/server/routes/generators/python.ts | 10 ++++------ src/server/server.ts | 3 +++ 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/server/routes/generators/python.ts b/src/server/routes/generators/python.ts index 51385373..706d9dd4 100644 --- a/src/server/routes/generators/python.ts +++ b/src/server/routes/generators/python.ts @@ -1,25 +1,23 @@ import type { FastifyInstance } from 'fastify' import { PostgresMeta } from '../../../lib/index.js' -import { DEFAULT_POOL_CONFIG } from '../../constants.js' -import { extractRequestForLogging } from '../../utils.js' +import { createConnectionConfig, extractRequestForLogging } from '../../utils.js' import { apply as applyPyTemplate } from '../../templates/python.js' import { getGeneratorMetadata } from '../../../lib/generators.js' export default async (fastify: FastifyInstance) => { fastify.get<{ - Headers: { pg: string } + Headers: { pg: string; 'x-pg-application-name'?: string } Querystring: { excluded_schemas?: string included_schemas?: string } }>('/', async (request, reply) => { - const connectionString = request.headers.pg + const config = 
createConnectionConfig(request) const excludedSchemas = request.query.excluded_schemas?.split(',').map((schema) => schema.trim()) ?? [] const includedSchemas = request.query.included_schemas?.split(',').map((schema) => schema.trim()) ?? [] - - const pgMeta: PostgresMeta = new PostgresMeta({ ...DEFAULT_POOL_CONFIG, connectionString }) + const pgMeta: PostgresMeta = new PostgresMeta(config) const { data: generatorMeta, error: generatorMetaError } = await getGeneratorMetadata(pgMeta, { includedSchemas, excludedSchemas, diff --git a/src/server/server.ts b/src/server/server.ts index 8b7c1c10..68fbb54c 100644 --- a/src/server/server.ts +++ b/src/server/server.ts @@ -18,6 +18,7 @@ import { import { apply as applyTypescriptTemplate } from './templates/typescript.js' import { apply as applyGoTemplate } from './templates/go.js' import { apply as applySwiftTemplate } from './templates/swift.js' +import { apply as applyPythonTemplate } from './templates/python.js' const logger = pino({ formatters: { @@ -143,6 +144,8 @@ async function getTypeOutput(): Promise { }) case 'go': return applyGoTemplate(config) + case 'python': + return applyPythonTemplate(config) default: throw new Error(`Unsupported language for GENERATE_TYPES: ${GENERATE_TYPES}`) } From 2f6ad575b38fd7aff709e250b7a867d6942122b1 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Mon, 17 Nov 2025 11:54:22 -0300 Subject: [PATCH 55/72] chore: remove console.log --- src/server/templates/python.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index 31243b66..5d980db7 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -105,7 +105,6 @@ class PythonContext { const schema = type!.schema return `${formatForPyClassName(schema)}${formatForPyClassName(type.name)}` } - console.log(`Unknown recognized row type ${name}`) return 'Any' } From 0404a45c5509bc29d715509f9256fb775397dad7 Mon Sep 17 00:00:00 2001 From: Leonardo Santiago 
Date: Mon, 17 Nov 2025 12:00:24 -0300 Subject: [PATCH 56/72] chore: run prettier --- src/server/templates/python.ts | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index 5d980db7..dcfdcd0d 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -24,7 +24,9 @@ export const apply = ({ return py_class_and_methods }) const composite_types = types - .filter((type) => type.attributes.length > 0 && schemas.some((schema) => type.schema == schema.name)) + .filter( + (type) => type.attributes.length > 0 && schemas.some((schema) => type.schema == schema.name) + ) .map((type) => ctx.typeToClass(type)) const py_views = views .filter((view) => schemas.some((schema) => schema.name === view.schema)) From 418b37175a190d246413f32ba6e0b0d6e3920afd Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Mon, 17 Nov 2025 13:32:45 -0300 Subject: [PATCH 57/72] feat: add gen python types command to package.json --- package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/package.json b/package.json index b00d0ae6..b39c62c0 100644 --- a/package.json +++ b/package.json @@ -25,6 +25,7 @@ "gen:types:typescript": "PG_META_GENERATE_TYPES=typescript node --loader ts-node/esm src/server/server.ts", "gen:types:go": "PG_META_GENERATE_TYPES=go node --loader ts-node/esm src/server/server.ts", "gen:types:swift": "PG_META_GENERATE_TYPES=swift node --loader ts-node/esm src/server/server.ts", + "gen:types:python": "PG_META_GENERATE_TYPES=python node --loader ts-node/esm src/server/server.ts", "start": "node dist/server/server.js", "dev": "trap 'npm run db:clean' INT && run-s db:clean db:run && run-s dev:code", "dev:code": "nodemon --exec node --loader ts-node/esm src/server/server.ts | pino-pretty --colorize", From edc495dead688cfa3d777ec5bb385b4bc54b595a Mon Sep 17 00:00:00 2001 From: Leonardo Santiago Date: Mon, 17 Nov 2025 15:59:28 -0300 Subject: [PATCH 58/72] fix: accept table names 
starting with _ --- src/server/templates/python.ts | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index dcfdcd0d..100870b4 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -384,13 +384,19 @@ const PY_TYPE_MAP: Record = { * formatForPyTypeName('pokemon league') // PokemonLeague * ``` */ + function formatForPyClassName(name: string): string { return name .split(/[^a-zA-Z0-9]/) - .map((word) => `${word[0].toUpperCase()}${word.slice(1)}`) + .map((word) => { + if (word) { + return `${word[0].toUpperCase()}${word.slice(1)}` + } else { + return '' + } + }) .join('') } - /** * Converts a Postgres name to snake_case. * From af17401cb94cb54e1ebf6732d42c53cd84a59ae2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 18 Nov 2025 16:44:44 +0000 Subject: [PATCH 59/72] chore(deps): bump fastify from 4.29.0 to 4.29.1 (#1015) Bumps [fastify](https://github.com/fastify/fastify) from 4.29.0 to 4.29.1. - [Release notes](https://github.com/fastify/fastify/releases) - [Commits](https://github.com/fastify/fastify/compare/v4.29.0...v4.29.1) --- updated-dependencies: - dependency-name: fastify dependency-version: 4.29.1 dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- package-lock.json | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index bb25a75e..2aa40233 100644 --- a/package-lock.json +++ b/package-lock.json @@ -3261,9 +3261,9 @@ "license": "MIT" }, "node_modules/fastify": { - "version": "4.29.0", - "resolved": "https://registry.npmjs.org/fastify/-/fastify-4.29.0.tgz", - "integrity": "sha512-MaaUHUGcCgC8fXQDsDtioaCcag1fmPJ9j64vAKunqZF4aSub040ZGi/ag8NGE2714yREPOKZuHCfpPzuUD3UQQ==", + "version": "4.29.1", + "resolved": "https://registry.npmjs.org/fastify/-/fastify-4.29.1.tgz", + "integrity": "sha512-m2kMNHIG92tSNWv+Z3UeTR9AWLLuo7KctC7mlFPtMEVrfjIhmQhkQnT9v15qA/BfVq3vvj134Y0jl9SBje3jXQ==", "funding": [ { "type": "github", From b0478c3968a14b924d57da861fd87074a8616bf1 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Fri, 21 Nov 2025 12:22:42 +0100 Subject: [PATCH 60/72] fix(typegen): schemas filtering (#1016) --- src/server/templates/python.ts | 25 +++++------- test/server/typegen.ts | 74 ++++++++++++++++++++++++++++++++++ 2 files changed, 84 insertions(+), 15 deletions(-) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index 100870b4..fa2d5eef 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -17,23 +17,18 @@ export const apply = ({ types, }: GeneratorMetadata): string => { const ctx = new PythonContext(types, columns, schemas) - const py_tables = tables - .filter((table) => schemas.some((schema) => schema.name === table.schema)) - .flatMap((table) => { - const py_class_and_methods = ctx.tableToClass(table) - return py_class_and_methods - }) + // Used for efficient lookup of types by schema name + const schemasNames = new Set(schemas.map((schema) => schema.name)) + const py_tables = tables.flatMap((table) => { + const py_class_and_methods = ctx.tableToClass(table) + return 
py_class_and_methods + }) const composite_types = types - .filter( - (type) => type.attributes.length > 0 && schemas.some((schema) => type.schema == schema.name) - ) + // We always include system schemas, so we need to filter out types that are not in the included schemas + .filter((type) => type.attributes.length > 0 && schemasNames.has(type.schema)) .map((type) => ctx.typeToClass(type)) - const py_views = views - .filter((view) => schemas.some((schema) => schema.name === view.schema)) - .map((view) => ctx.viewToClass(view)) - const py_matviews = materializedViews - .filter((matview) => schemas.some((schema) => schema.name === matview.schema)) - .map((matview) => ctx.matViewToClass(matview)) + const py_views = views.map((view) => ctx.viewToClass(view)) + const py_matviews = materializedViews.map((matview) => ctx.matViewToClass(matview)) let output = ` from __future__ import annotations diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 46079de0..2005339e 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -6501,3 +6501,77 @@ class PublicCompositeTypeWithRecordAttribute(BaseModel): todo: PublicTodos = Field(alias="todo")" `) }) + +test('typegen: python w/ excluded/included schemas', async () => { + // Create a test schema with some tables + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + CREATE SCHEMA IF NOT EXISTS test_schema; + CREATE TABLE IF NOT EXISTS test_schema.test_table ( + id serial PRIMARY KEY, + name text + ); + CREATE TABLE IF NOT EXISTS test_schema.another_table ( + id serial PRIMARY KEY, + value text + ); + `, + }, + }) + + try { + // Test excluded_schemas - should exclude test_schema + const { body: excludedBody } = await app.inject({ + method: 'GET', + path: '/generators/python', + query: { access_control: 'public', excluded_schemas: 'test_schema' }, + }) + expect(excludedBody).not.toContain('TestSchemaTestTable') + expect(excludedBody).not.toContain('TestSchemaAnotherTable') + 
expect(excludedBody).toContain('PublicUsers') + expect(excludedBody).toContain('PublicTodos') + + // Test included_schemas - should only include test_schema + const { body: includedBody } = await app.inject({ + method: 'GET', + path: '/generators/python', + query: { access_control: 'public', included_schemas: 'test_schema' }, + }) + expect(includedBody).toContain('TestSchemaTestTable') + expect(includedBody).toContain('TestSchemaAnotherTable') + expect(includedBody).not.toContain('PublicUsers') + expect(includedBody).not.toContain('PublicTodos') + + // Test multiple excluded schemas + const { body: multipleExcludedBody } = await app.inject({ + method: 'GET', + path: '/generators/python', + query: { access_control: 'public', excluded_schemas: 'test_schema,public' }, + }) + expect(multipleExcludedBody).not.toContain('TestSchemaTestTable') + expect(multipleExcludedBody).not.toContain('PublicUsers') + + // // Test multiple included schemas + const { body: multipleIncludedBody } = await app.inject({ + method: 'GET', + path: '/generators/python', + query: { access_control: 'public', included_schemas: 'public,test_schema' }, + }) + expect(multipleIncludedBody).toContain('TestSchemaTestTable') + expect(multipleIncludedBody).toContain('PublicUsers') + } finally { + // Clean up test schema + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + DROP SCHEMA IF EXISTS test_schema CASCADE; + `, + }, + }) + } +}) From 0aad295131295c0613210522bf9595fb3ea99650 Mon Sep 17 00:00:00 2001 From: Bobbie Soedirgo Date: Tue, 2 Dec 2025 17:35:41 +0800 Subject: [PATCH 61/72] chore(deps): upgrade deps --- package-lock.json | 165 ++++++++++++++++++++++------------------------ 1 file changed, 79 insertions(+), 86 deletions(-) diff --git a/package-lock.json b/package-lock.json index 2aa40233..e8fe798a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -639,6 +639,29 @@ "@sinclair/typebox": ">=0.26 <=0.32" } }, + "node_modules/@isaacs/balanced-match": { + 
"version": "4.0.1", + "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz", + "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@isaacs/brace-expansion": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz", + "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@isaacs/balanced-match": "^4.0.1" + }, + "engines": { + "node": "20 || >=22" + } + }, "node_modules/@isaacs/cliui": { "version": "8.0.2", "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", @@ -3599,6 +3622,30 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/glob": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-11.1.0.tgz", + "integrity": "sha512-vuNwKSaKiqm7g0THUBu2x7ckSs3XJLXE+2ssL7/MfTGPLLcrJQ/4Uq1CjPTtO5cCIiRxqvN6Twy1qOwhL0Xjcw==", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "foreground-child": "^3.3.1", + "jackspeak": "^4.1.1", + "minimatch": "^10.1.1", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^2.0.0" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/glob-parent": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", @@ -3612,6 +3659,22 @@ "node": ">= 6" } }, + "node_modules/glob/node_modules/minimatch": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz", + "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==", + "dev": 
true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/brace-expansion": "^5.0.0" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/globalthis": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz", @@ -4312,9 +4375,9 @@ } }, "node_modules/jackspeak": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.0.tgz", - "integrity": "sha512-9DDdhb5j6cpeitCbvLO7n7J4IxnbM6hoF6O1g4HQ5TfhvvKN8ywDM7668ZhMHRqVmxqhps/F6syWK2KcPxYlkw==", + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz", + "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==", "dev": true, "license": "BlueOak-1.0.0", "dependencies": { @@ -4565,6 +4628,16 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, "node_modules/mnemonist": { "version": "0.39.6", "resolved": "https://registry.npmjs.org/mnemonist/-/mnemonist-0.39.6.tgz", @@ -5078,16 +5151,6 @@ "node": "20 || >=22" } }, - "node_modules/path-scurry/node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/path-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/path-type/-/path-type-4.0.0.tgz", @@ -5888,66 +5951,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - 
"node_modules/rimraf/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/rimraf/node_modules/glob": { - "version": "11.0.1", - "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.1.tgz", - "integrity": "sha512-zrQDm8XPnYEKawJScsnM0QzobJxlT/kHOOlRTio8IH/GrmxRE5fjllkzdaHclIuNjUQTJYH2xHNIGfdpJkDJUw==", - "dev": true, - "license": "ISC", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^4.0.1", - "minimatch": "^10.0.0", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^2.0.0" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/rimraf/node_modules/minimatch": { - "version": "10.0.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.1.tgz", - "integrity": "sha512-ethXTt3SGGR+95gudmqJ1eNhRO7eGEGIgYA9vnPatK4/etz2MEVDno5GMCibdMTuBMyElzIlgxMna3K94XDIDQ==", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/rimraf/node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/rollup": { "version": "4.37.0", "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.37.0.tgz", @@ -6627,9 +6630,9 @@ } }, 
"node_modules/test-exclude/node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "version": "10.5.0", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", + "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", "dev": true, "license": "ISC", "dependencies": { @@ -6686,16 +6689,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/test-exclude/node_modules/minipass": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", - "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, "node_modules/test-exclude/node_modules/path-scurry": { "version": "1.11.1", "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", From 9854d5ab4d815fc7a35215d2688c2297fd7c08ff Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Wed, 3 Dec 2025 13:13:12 +0100 Subject: [PATCH 62/72] fix(typegen): add better order stability for functions override (#1019) * fix(typegen): add better order stability for functions override * chore: update tests snapshots --- src/server/templates/typescript.ts | 6 +- test/server/typegen.ts | 391 +++++++++++++++++++---------- 2 files changed, 259 insertions(+), 138 deletions(-) diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 1b527686..75ca7de4 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -681,8 +681,10 @@ export type Database = { {} as Record ) for (const fnName in schemaFunctionsGroupedByName) { - schemaFunctionsGroupedByName[fnName].sort((a, b) => - b.fn.definition.localeCompare(a.fn.definition) + 
schemaFunctionsGroupedByName[fnName].sort( + (a, b) => + a.fn.argument_types.localeCompare(b.fn.argument_types) || + a.fn.return_type.localeCompare(b.fn.return_type) ) } diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 2005339e..852eddce 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -667,9 +667,7 @@ test('typegen: typescript', async () => { } get_single_user_summary_from_view: | { - Args: { - userview_row: Database["public"]["Views"]["users_view"]["Row"] - } + Args: { search_user_id: number } Returns: { todo_count: number | null todo_details: string[] | null @@ -678,7 +676,7 @@ test('typegen: typescript', async () => { user_status: Database["public"]["Enums"]["user_status"] | null } SetofOptions: { - from: "users_view" + from: "*" to: "user_todos_summary_view" isOneToOne: true isSetofReturn: true @@ -701,7 +699,9 @@ test('typegen: typescript', async () => { } } | { - Args: { search_user_id: number } + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } Returns: { todo_count: number | null todo_details: string[] | null @@ -710,7 +710,7 @@ test('typegen: typescript', async () => { user_status: Database["public"]["Enums"]["user_status"] | null } SetofOptions: { - from: "*" + from: "users_view" to: "user_todos_summary_view" isOneToOne: true isSetofReturn: true @@ -732,16 +732,14 @@ test('typegen: typescript', async () => { } get_todos_from_user: | { - Args: { - userview_row: Database["public"]["Views"]["users_view"]["Row"] - } + Args: { search_user_id: number } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "users_view" + from: "*" to: "todos" isOneToOne: false isSetofReturn: true @@ -762,14 +760,16 @@ test('typegen: typescript', async () => { } } | { - Args: { search_user_id: number } + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "*" + 
from: "users_view" to: "todos" isOneToOne: false isSetofReturn: true @@ -777,28 +777,28 @@ test('typegen: typescript', async () => { } get_todos_setof_rows: | { - Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "users" + from: "todos" to: "todos" isOneToOne: false isSetofReturn: true } } | { - Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "todos" + from: "users" to: "todos" isOneToOne: false isSetofReturn: true @@ -865,32 +865,34 @@ test('typegen: typescript', async () => { } postgres_fdw_handler: { Args: never; Returns: unknown } postgrest_resolvable_with_override_function: + | { Args: never; Returns: undefined } | { Args: { a: string }; Returns: number } + | { Args: { b: number }; Returns: string } | { - Args: { user_id: number } + Args: { completed: boolean; todo_id: number } Returns: { - decimal: number | null + details: string | null id: number - name: string | null - status: Database["public"]["Enums"]["user_status"] | null + "user-id": number }[] SetofOptions: { from: "*" - to: "users" + to: "todos" isOneToOne: false isSetofReturn: true } } | { - Args: { completed: boolean; todo_id: number } + Args: { user_id: number } Returns: { - details: string | null + decimal: number | null id: number - "user-id": number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null }[] SetofOptions: { from: "*" - to: "todos" + to: "users" isOneToOne: false isSetofReturn: true } @@ -909,22 +911,20 @@ test('typegen: typescript', async () => { isSetofReturn: true } } - | { Args: { b: number }; Returns: string } - | { Args: never; Returns: undefined } postgrest_unresolvable_function: + | { Args: never; 
Returns: undefined } | { - Args: { a: string } + Args: { a: number } Returns: { error: true } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" } | { - Args: { a: number } + Args: { a: string } Returns: { error: true } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" } - | { Args: never; Returns: undefined } search_todos_by_details: { Args: { search_details: string } Returns: { @@ -958,28 +958,28 @@ test('typegen: typescript', async () => { } test_unnamed_row_setof: | { - Args: { user_id: number } + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "*" + from: "todos" to: "todos" isOneToOne: false isSetofReturn: true } } | { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Args: { user_id: number } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "todos" + from: "*" to: "todos" isOneToOne: false isSetofReturn: true @@ -1854,9 +1854,7 @@ test('typegen w/ one-to-one relationships', async () => { } get_single_user_summary_from_view: | { - Args: { - userview_row: Database["public"]["Views"]["users_view"]["Row"] - } + Args: { search_user_id: number } Returns: { todo_count: number | null todo_details: string[] | null @@ -1865,7 +1863,7 @@ test('typegen w/ one-to-one relationships', async () => { user_status: Database["public"]["Enums"]["user_status"] | null } SetofOptions: { - from: "users_view" + from: "*" to: "user_todos_summary_view" isOneToOne: true isSetofReturn: true @@ -1888,7 +1886,9 
@@ test('typegen w/ one-to-one relationships', async () => { } } | { - Args: { search_user_id: number } + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } Returns: { todo_count: number | null todo_details: string[] | null @@ -1897,7 +1897,7 @@ test('typegen w/ one-to-one relationships', async () => { user_status: Database["public"]["Enums"]["user_status"] | null } SetofOptions: { - from: "*" + from: "users_view" to: "user_todos_summary_view" isOneToOne: true isSetofReturn: true @@ -1919,16 +1919,14 @@ test('typegen w/ one-to-one relationships', async () => { } get_todos_from_user: | { - Args: { - userview_row: Database["public"]["Views"]["users_view"]["Row"] - } + Args: { search_user_id: number } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "users_view" + from: "*" to: "todos" isOneToOne: false isSetofReturn: true @@ -1949,14 +1947,16 @@ test('typegen w/ one-to-one relationships', async () => { } } | { - Args: { search_user_id: number } + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "*" + from: "users_view" to: "todos" isOneToOne: false isSetofReturn: true @@ -1964,28 +1964,28 @@ test('typegen w/ one-to-one relationships', async () => { } get_todos_setof_rows: | { - Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "users" + from: "todos" to: "todos" isOneToOne: false isSetofReturn: true } } | { - Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "todos" + from: "users" to: "todos" isOneToOne: false isSetofReturn: true @@ -2052,32 
+2052,34 @@ test('typegen w/ one-to-one relationships', async () => { } postgres_fdw_handler: { Args: never; Returns: unknown } postgrest_resolvable_with_override_function: + | { Args: never; Returns: undefined } | { Args: { a: string }; Returns: number } + | { Args: { b: number }; Returns: string } | { - Args: { user_id: number } + Args: { completed: boolean; todo_id: number } Returns: { - decimal: number | null + details: string | null id: number - name: string | null - status: Database["public"]["Enums"]["user_status"] | null + "user-id": number }[] SetofOptions: { from: "*" - to: "users" + to: "todos" isOneToOne: false isSetofReturn: true } } | { - Args: { completed: boolean; todo_id: number } + Args: { user_id: number } Returns: { - details: string | null + decimal: number | null id: number - "user-id": number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null }[] SetofOptions: { from: "*" - to: "todos" + to: "users" isOneToOne: false isSetofReturn: true } @@ -2096,22 +2098,20 @@ test('typegen w/ one-to-one relationships', async () => { isSetofReturn: true } } - | { Args: { b: number }; Returns: string } - | { Args: never; Returns: undefined } postgrest_unresolvable_function: + | { Args: never; Returns: undefined } | { - Args: { a: string } + Args: { a: number } Returns: { error: true } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" } | { - Args: { a: number } + Args: { a: string } Returns: { error: true } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" } - | { Args: never; Returns: undefined } search_todos_by_details: { Args: { search_details: string } Returns: { @@ -2145,28 +2145,28 @@ test('typegen w/ one-to-one relationships', async () => { } test_unnamed_row_setof: | { - Args: { user_id: number } + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "*" + from: "todos" to: "todos" isOneToOne: false isSetofReturn: true } } | { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Args: { user_id: number } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "todos" + from: "*" to: "todos" isOneToOne: false isSetofReturn: true @@ -3041,9 +3041,7 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } get_single_user_summary_from_view: | { - Args: { - userview_row: Database["public"]["Views"]["users_view"]["Row"] - } + Args: { search_user_id: number } Returns: { todo_count: number | null todo_details: string[] | null @@ -3052,7 +3050,7 @@ test('typegen: typescript w/ one-to-one relationships', async () => { user_status: Database["public"]["Enums"]["user_status"] | null } SetofOptions: { - from: "users_view" + from: "*" to: "user_todos_summary_view" isOneToOne: true isSetofReturn: true @@ -3075,7 +3073,9 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } } | { - Args: { search_user_id: number } + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } Returns: { todo_count: number | null todo_details: string[] | null @@ -3084,7 +3084,7 @@ test('typegen: typescript w/ one-to-one relationships', async () => { user_status: Database["public"]["Enums"]["user_status"] | null } SetofOptions: { - from: "*" + from: "users_view" to: "user_todos_summary_view" isOneToOne: true isSetofReturn: true @@ -3106,16 
+3106,14 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } get_todos_from_user: | { - Args: { - userview_row: Database["public"]["Views"]["users_view"]["Row"] - } + Args: { search_user_id: number } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "users_view" + from: "*" to: "todos" isOneToOne: false isSetofReturn: true @@ -3136,14 +3134,16 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } } | { - Args: { search_user_id: number } + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "*" + from: "users_view" to: "todos" isOneToOne: false isSetofReturn: true @@ -3151,28 +3151,28 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } get_todos_setof_rows: | { - Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "users" + from: "todos" to: "todos" isOneToOne: false isSetofReturn: true } } | { - Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "todos" + from: "users" to: "todos" isOneToOne: false isSetofReturn: true @@ -3239,32 +3239,34 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } postgres_fdw_handler: { Args: never; Returns: unknown } postgrest_resolvable_with_override_function: + | { Args: never; Returns: undefined } | { Args: { a: string }; Returns: number } + | { Args: { b: number }; Returns: string } | { - Args: { user_id: number } + Args: { completed: boolean; todo_id: number } Returns: { - decimal: number | null + details: string | null id: number - name: string | null - 
status: Database["public"]["Enums"]["user_status"] | null + "user-id": number }[] SetofOptions: { from: "*" - to: "users" + to: "todos" isOneToOne: false isSetofReturn: true } } | { - Args: { completed: boolean; todo_id: number } + Args: { user_id: number } Returns: { - details: string | null + decimal: number | null id: number - "user-id": number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null }[] SetofOptions: { from: "*" - to: "todos" + to: "users" isOneToOne: false isSetofReturn: true } @@ -3283,22 +3285,20 @@ test('typegen: typescript w/ one-to-one relationships', async () => { isSetofReturn: true } } - | { Args: { b: number }; Returns: string } - | { Args: never; Returns: undefined } postgrest_unresolvable_function: + | { Args: never; Returns: undefined } | { - Args: { a: string } + Args: { a: number } Returns: { error: true } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" } | { - Args: { a: number } + Args: { a: string } Returns: { error: true } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" } - | { Args: never; Returns: undefined } search_todos_by_details: { Args: { search_details: string } Returns: { @@ -3332,28 +3332,28 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } test_unnamed_row_setof: | { - Args: { user_id: number } + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "*" + from: "todos" to: "todos" isOneToOne: false isSetofReturn: true } } | { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Args: { user_id: number } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "todos" + from: "*" to: "todos" isOneToOne: false isSetofReturn: true @@ -4233,9 +4233,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { } get_single_user_summary_from_view: | { - Args: { - userview_row: Database["public"]["Views"]["users_view"]["Row"] - } + Args: { search_user_id: number } Returns: { todo_count: number | null todo_details: string[] | null @@ -4244,7 +4242,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { user_status: Database["public"]["Enums"]["user_status"] | null } SetofOptions: { - from: "users_view" + from: "*" to: "user_todos_summary_view" isOneToOne: true isSetofReturn: true @@ -4267,7 +4265,9 @@ test('typegen: typescript w/ postgrestVersion', async () => { } } | { - Args: { search_user_id: number } + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } Returns: { todo_count: number | null todo_details: string[] | null @@ -4276,7 +4276,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { user_status: Database["public"]["Enums"]["user_status"] | null } SetofOptions: { - from: "*" + from: "users_view" to: "user_todos_summary_view" isOneToOne: true isSetofReturn: true @@ -4298,16 +4298,14 @@ 
test('typegen: typescript w/ postgrestVersion', async () => { } get_todos_from_user: | { - Args: { - userview_row: Database["public"]["Views"]["users_view"]["Row"] - } + Args: { search_user_id: number } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "users_view" + from: "*" to: "todos" isOneToOne: false isSetofReturn: true @@ -4328,14 +4326,16 @@ test('typegen: typescript w/ postgrestVersion', async () => { } } | { - Args: { search_user_id: number } + Args: { + userview_row: Database["public"]["Views"]["users_view"]["Row"] + } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "*" + from: "users_view" to: "todos" isOneToOne: false isSetofReturn: true @@ -4343,28 +4343,28 @@ test('typegen: typescript w/ postgrestVersion', async () => { } get_todos_setof_rows: | { - Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } + Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "users" + from: "todos" to: "todos" isOneToOne: false isSetofReturn: true } } | { - Args: { todo_row: Database["public"]["Tables"]["todos"]["Row"] } + Args: { user_row: Database["public"]["Tables"]["users"]["Row"] } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "todos" + from: "users" to: "todos" isOneToOne: false isSetofReturn: true @@ -4431,32 +4431,34 @@ test('typegen: typescript w/ postgrestVersion', async () => { } postgres_fdw_handler: { Args: never; Returns: unknown } postgrest_resolvable_with_override_function: + | { Args: never; Returns: undefined } | { Args: { a: string }; Returns: number } + | { Args: { b: number }; Returns: string } | { - Args: { user_id: number } + Args: { completed: boolean; todo_id: number } Returns: { - decimal: number | null + details: string | null id: number - name: string | null - status: 
Database["public"]["Enums"]["user_status"] | null + "user-id": number }[] SetofOptions: { from: "*" - to: "users" + to: "todos" isOneToOne: false isSetofReturn: true } } | { - Args: { completed: boolean; todo_id: number } + Args: { user_id: number } Returns: { - details: string | null + decimal: number | null id: number - "user-id": number + name: string | null + status: Database["public"]["Enums"]["user_status"] | null }[] SetofOptions: { from: "*" - to: "todos" + to: "users" isOneToOne: false isSetofReturn: true } @@ -4475,22 +4477,20 @@ test('typegen: typescript w/ postgrestVersion', async () => { isSetofReturn: true } } - | { Args: { b: number }; Returns: string } - | { Args: never; Returns: undefined } postgrest_unresolvable_function: + | { Args: never; Returns: undefined } | { - Args: { a: string } + Args: { a: number } Returns: { error: true } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). Try renaming the parameters or the function itself in the database so function overloading can be resolved" } | { - Args: { a: number } + Args: { a: string } Returns: { error: true } & "Could not choose the best candidate function between: public.postgrest_unresolvable_function(a => int4), public.postgrest_unresolvable_function(a => text). 
Try renaming the parameters or the function itself in the database so function overloading can be resolved" } - | { Args: never; Returns: undefined } search_todos_by_details: { Args: { search_details: string } Returns: { @@ -4524,28 +4524,28 @@ test('typegen: typescript w/ postgrestVersion', async () => { } test_unnamed_row_setof: | { - Args: { user_id: number } + Args: { "": Database["public"]["Tables"]["todos"]["Row"] } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "*" + from: "todos" to: "todos" isOneToOne: false isSetofReturn: true } } | { - Args: { "": Database["public"]["Tables"]["todos"]["Row"] } + Args: { user_id: number } Returns: { details: string | null id: number "user-id": number }[] SetofOptions: { - from: "todos" + from: "*" to: "todos" isOneToOne: false isSetofReturn: true @@ -4992,6 +4992,125 @@ test('typegen: typescript consistent types definitions orders', async () => { expect(firstCall).toEqual(secondCall) }) +test('typegen: typescript function override order stability', async () => { + // Helper function to clean up test entities + const cleanupTestEntities = async () => { + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + -- Drop functions with all possible signatures + DROP FUNCTION IF EXISTS test_func_override(integer, text) CASCADE; + DROP FUNCTION IF EXISTS test_func_override(text, integer) CASCADE; + DROP FUNCTION IF EXISTS test_func_override(boolean, integer, text) CASCADE; + DROP FUNCTION IF EXISTS test_func_override(text, boolean) CASCADE; + `, + }, + }) + } + + // Clean up any existing test entities + await cleanupTestEntities() + + // === FIRST ROUND: Create function overrides in order 1 === + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + -- Create function overrides in specific order + CREATE FUNCTION test_func_override(param_a integer, param_b text) + RETURNS integer AS 'SELECT param_a + 1' LANGUAGE sql IMMUTABLE; + + 
CREATE FUNCTION test_func_override(param_a text, param_b integer) + RETURNS text AS 'SELECT param_a || param_b::text' LANGUAGE sql IMMUTABLE; + + CREATE FUNCTION test_func_override(param_a boolean, param_b integer, param_c text) + RETURNS boolean AS 'SELECT param_a' LANGUAGE sql IMMUTABLE; + + CREATE FUNCTION test_func_override(param_a text, param_b boolean) + RETURNS text AS 'SELECT CASE WHEN param_b THEN param_a ELSE '''' END' LANGUAGE sql IMMUTABLE; + `, + }, + }) + + // Generate types for first configuration + const { body: firstCall } = await app.inject({ + method: 'GET', + path: '/generators/typescript', + query: { detect_one_to_one_relationships: 'true', postgrest_version: '13' }, + }) + + // === SECOND ROUND: Modify function definitions without changing signatures === + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + -- Modify function definitions (using CREATE OR REPLACE) + -- This should preserve the order + CREATE OR REPLACE FUNCTION test_func_override(param_a integer, param_b text) + RETURNS integer AS 'SELECT param_a + 100' LANGUAGE sql IMMUTABLE; + + CREATE OR REPLACE FUNCTION test_func_override(param_a text, param_b integer) + RETURNS text AS 'SELECT param_a || ''_'' || param_b::text' LANGUAGE sql IMMUTABLE; + + CREATE OR REPLACE FUNCTION test_func_override(param_a boolean, param_b integer, param_c text) + RETURNS boolean AS 'SELECT NOT param_a' LANGUAGE sql IMMUTABLE; + + CREATE OR REPLACE FUNCTION test_func_override(param_a text, param_b boolean) + RETURNS text AS 'SELECT CASE WHEN param_b THEN param_a || ''_true'' ELSE ''false'' END' LANGUAGE sql IMMUTABLE; + `, + }, + }) + + // Generate types for second configuration (after modifying definitions) + const { body: secondCall } = await app.inject({ + method: 'GET', + path: '/generators/typescript', + query: { detect_one_to_one_relationships: 'true', postgrest_version: '13' }, + }) + + // === THIRD ROUND: Drop and recreate in different order === + await 
cleanupTestEntities() + + // Create functions in reverse order + await app.inject({ + method: 'POST', + path: '/query', + payload: { + query: ` + -- Create function overrides in reverse order + CREATE FUNCTION test_func_override(param_a text, param_b boolean) + RETURNS text AS 'SELECT CASE WHEN param_b THEN param_a ELSE '''' END' LANGUAGE sql IMMUTABLE; + + CREATE FUNCTION test_func_override(param_a boolean, param_b integer, param_c text) + RETURNS boolean AS 'SELECT param_a' LANGUAGE sql IMMUTABLE; + + CREATE FUNCTION test_func_override(param_a text, param_b integer) + RETURNS text AS 'SELECT param_a || param_b::text' LANGUAGE sql IMMUTABLE; + + CREATE FUNCTION test_func_override(param_a integer, param_b text) + RETURNS integer AS 'SELECT param_a + 1' LANGUAGE sql IMMUTABLE; + `, + }, + }) + + // Generate types for third configuration (recreated in different order) + const { body: thirdCall } = await app.inject({ + method: 'GET', + path: '/generators/typescript', + query: { detect_one_to_one_relationships: 'true', postgrest_version: '13' }, + }) + + // Clean up test entities + await cleanupTestEntities() + + expect(firstCall).toEqual(secondCall) + expect(secondCall).toEqual(thirdCall) +}) + test('typegen: go', async () => { const { body } = await app.inject({ method: 'GET', path: '/generators/go' }) expect(body).toMatchInlineSnapshot(` From 0c0e7c0e41d35ff2ea1ad15ba0c6787695b6fa3b Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Wed, 3 Dec 2025 16:50:20 +0100 Subject: [PATCH 63/72] fix(release): npm release workflow (#1022) --- package.json | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index b39c62c0..58b95839 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,9 @@ "imports": { "#package.json": "./package.json" }, - "repository": "supabase/postgres-meta", + "repository": { + "url": "git+https://github.com/supabase/postgres-meta.git" + }, "scripts": { "check": "tsc -p tsconfig.json --noEmit", "clean": 
"rimraf dist tsconfig.tsbuildinfo", From 6d3e35df3cce97f04c22f1bea3cb551dd895abcf Mon Sep 17 00:00:00 2001 From: Etienne Stalmans Date: Wed, 3 Dec 2025 18:19:43 +0100 Subject: [PATCH 64/72] fix: bump github action versions (#1023) support trusted publishing through using newer npm --- .github/workflows/release.yml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 39280fba..535d009e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -6,9 +6,6 @@ on: - master workflow_dispatch: -permissions: - contents: read - jobs: semantic-release: name: Release @@ -22,7 +19,7 @@ jobs: steps: - uses: actions/checkout@v5 - - uses: actions/setup-node@v4 + - uses: actions/setup-node@v6 with: node-version-file: '.nvmrc' @@ -31,7 +28,7 @@ jobs: npm run build - id: semantic-release - uses: cycjimmy/semantic-release-action@v5 + uses: cycjimmy/semantic-release-action@v6 with: semantic_version: 25.0.1 # version with latest npm and support for trusted publishing env: From 803a77cf456e80bbb8e47f3e8782159410be1320 Mon Sep 17 00:00:00 2001 From: Etienne Stalmans Date: Thu, 4 Dec 2025 08:57:48 +0100 Subject: [PATCH 65/72] feat: bump ci (#1024) --- .github/workflows/release.yml | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 535d009e..71bc8f6c 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -9,7 +9,7 @@ on: jobs: semantic-release: name: Release - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 outputs: new-release-published: ${{ steps.semantic-release.outputs.new_release_published }} new-release-version: ${{ steps.semantic-release.outputs.new_release_version }} @@ -23,6 +23,9 @@ jobs: with: node-version-file: '.nvmrc' + - name: Update npm + run: npm install -g npm@latest + - run: | npm clean-install npm run build From 
2c41dd9410b03a016d7ed5295efcc05b31baa13a Mon Sep 17 00:00:00 2001 From: Andrew Smith Date: Thu, 11 Dec 2025 12:42:37 +0000 Subject: [PATCH 66/72] fix(typegen): python type missing uuid import (#1027) * fix(typegen): python type missing uuid import * chore(tests): update python generator test --- src/server/templates/python.ts | 1 + test/server/typegen.ts | 1 + 2 files changed, 2 insertions(+) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index fa2d5eef..2d9459ca 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -34,6 +34,7 @@ export const apply = ({ from __future__ import annotations import datetime +import uuid from typing import ( Annotated, Any, diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 852eddce..4e720277 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -6378,6 +6378,7 @@ test('typegen: python', async () => { "from __future__ import annotations import datetime +import uuid from typing import ( Annotated, Any, From 185eac0e5dc74d21eec4675068ecaedaeecf7c77 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Fri, 12 Dec 2025 09:20:11 +0100 Subject: [PATCH 67/72] chore(typegen): add canary python types runtime (#1028) * chore(typegen): add canary python types runtime * chore(ci): test workflow on PR * chore: remove cache * Revert "fix(typegen): python type missing uuid import (#1027)" This reverts commit 2c41dd9410b03a016d7ed5295efcc05b31baa13a. * fix: add uuid to db * chore: use mypy for lint check * Reapply "fix(typegen): python type missing uuid import (#1027)" This reverts commit 0d74cdc827d2ab81b7982d12725d4dfedd319c6b. 
* chore: normalize uuid in tests snapshots --- .github/workflows/validate-python-types.yml | 71 +++ scripts/generate-python-types-test.ts | 44 ++ test/db/00-init.sql | 3 +- test/lib/functions.ts | 20 +- test/lib/tables.ts | 18 + test/lib/types.ts | 2 +- test/lib/views.ts | 24 +- test/server/indexes.ts | 6 +- test/server/query.ts | 9 +- test/server/typegen.ts | 586 +++++++++++--------- test/server/utils.ts | 26 + 11 files changed, 518 insertions(+), 291 deletions(-) create mode 100644 .github/workflows/validate-python-types.yml create mode 100644 scripts/generate-python-types-test.ts diff --git a/.github/workflows/validate-python-types.yml b/.github/workflows/validate-python-types.yml new file mode 100644 index 00000000..1e5809b1 --- /dev/null +++ b/.github/workflows/validate-python-types.yml @@ -0,0 +1,71 @@ +name: Validate Python Type Generation + +on: + push: + branches: [master] + pull_request: + branches: [master] + +jobs: + validate-python-types: + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Node.js + uses: actions/setup-node@v4 + with: + node-version: '20' + cache: 'npm' + + - name: Install dependencies + run: npm ci + + - name: Build project + run: npm run build + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.11' + + - name: Install Python dependencies + run: | + pip install pydantic mypy + + - name: Start test database + working-directory: test/db + run: | + docker compose up -d --wait + + - name: Wait for database to be ready + run: | + # Install PostgreSQL client for health check + sudo apt-get update && sudo apt-get install -y postgresql-client + until pg_isready -h localhost -p 5432 -U postgres; do + echo "Waiting for database..." + sleep 1 + done + echo "Database is ready!" 
+ + - name: Generate Python types + id: generate-types + run: | + node --loader ts-node/esm scripts/generate-python-types-test.ts > generated_types.py + echo "Generated Python types (first 30 lines):" + head -30 generated_types.py + + - name: Validate Python types runtime + run: | + python -c "import generated_types; print('✓ Generated Python types are valid and can be imported')" + + - name: Validate Python types with mypy + run: | + mypy generated_types.py --strict + + - name: Cleanup + if: always() + working-directory: test/db + run: docker compose down diff --git a/scripts/generate-python-types-test.ts b/scripts/generate-python-types-test.ts new file mode 100644 index 00000000..45111f56 --- /dev/null +++ b/scripts/generate-python-types-test.ts @@ -0,0 +1,44 @@ +#!/usr/bin/env node + +/** + * Script to generate Python types for CI validation + * This script uses the test database setup to generate Python types + */ + +import { build } from '../src/server/app.js' + +const TEST_CONNECTION_STRING = 'postgresql://postgres:postgres@localhost:5432' + +async function generatePythonTypes() { + const app = build() + + try { + const response = await app.inject({ + method: 'GET', + url: '/generators/python', + headers: { + pg: TEST_CONNECTION_STRING, + }, + query: { + access_control: 'public', + }, + }) + + if (response.statusCode !== 200) { + console.error(`Failed to generate types: ${response.statusCode}`) + console.error(response.body) + process.exit(1) + } + + // Write to stdout so it can be captured + process.stdout.write(response.body) + } catch (error) { + console.error('Error generating Python types:', error) + process.exit(1) + } finally { + await app.close() + } +} + +generatePythonTypes() + diff --git a/test/db/00-init.sql b/test/db/00-init.sql index f2161591..8ddc77ba 100644 --- a/test/db/00-init.sql +++ b/test/db/00-init.sql @@ -9,7 +9,8 @@ CREATE TABLE public.users ( id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, name text, status user_status DEFAULT 
'ACTIVE', - decimal numeric + decimal numeric, + user_uuid uuid DEFAULT gen_random_uuid() ); INSERT INTO public.users (name) diff --git a/test/lib/functions.ts b/test/lib/functions.ts index 9d6088b6..ce26e078 100644 --- a/test/lib/functions.ts +++ b/test/lib/functions.ts @@ -75,15 +75,15 @@ test('list set-returning function with single object limit', async () => { "definition": " SELECT * FROM public.users_audit WHERE user_id = user_row.id; ", - "id": 16506, + "id": 16507, "identity_argument_types": "user_row users", "is_set_returning_function": true, "language": "sql", "name": "get_user_audit_setof_single_row", "prorows": 1, "return_type": "SETOF users_audit", - "return_type_id": 16418, - "return_type_relation_id": 16416, + "return_type_id": 16419, + "return_type_relation_id": 16417, "schema": "public", "security_definer": false, }, @@ -118,15 +118,15 @@ test('list set-returning function with multiples definitions', async () => { "definition": " SELECT * FROM public.todos WHERE "user-id" = user_row.id; ", - "id": 16509, + "id": 16510, "identity_argument_types": "user_row users", "is_set_returning_function": true, "language": "sql", "name": "get_todos_setof_rows", "prorows": 1000, "return_type": "SETOF todos", - "return_type_id": 16404, - "return_type_relation_id": 16402, + "return_type_id": 16405, + "return_type_relation_id": 16403, "schema": "public", "security_definer": false, }, @@ -136,7 +136,7 @@ test('list set-returning function with multiples definitions', async () => { "has_default": false, "mode": "in", "name": "todo_row", - "type_id": 16404, + "type_id": 16405, }, ], "argument_types": "todo_row todos", @@ -153,15 +153,15 @@ test('list set-returning function with multiples definitions', async () => { "definition": " SELECT * FROM public.todos WHERE "user-id" = todo_row."user-id"; ", - "id": 16510, + "id": 16511, "identity_argument_types": "todo_row todos", "is_set_returning_function": true, "language": "sql", "name": "get_todos_setof_rows", "prorows": 
1000, "return_type": "SETOF todos", - "return_type_id": 16404, - "return_type_relation_id": 16402, + "return_type_id": 16405, + "return_type_relation_id": 16403, "schema": "public", "security_definer": false, }, diff --git a/test/lib/tables.ts b/test/lib/tables.ts index 677204fc..a0dfbaad 100644 --- a/test/lib/tables.ts +++ b/test/lib/tables.ts @@ -117,6 +117,24 @@ test('list', async () => { "schema": "public", "table": "users", }, + { + "check": null, + "comment": null, + "data_type": "uuid", + "default_value": "gen_random_uuid()", + "enums": [], + "format": "uuid", + "identity_generation": null, + "is_generated": false, + "is_identity": false, + "is_nullable": true, + "is_unique": false, + "is_updatable": true, + "name": "user_uuid", + "ordinal_position": 5, + "schema": "public", + "table": "users", + }, ], "comment": null, "dead_rows_estimate": Any, diff --git a/test/lib/types.ts b/test/lib/types.ts index 349a1b80..b256697e 100644 --- a/test/lib/types.ts +++ b/test/lib/types.ts @@ -74,7 +74,7 @@ test('list types with include Table Types', async () => { "id": Any, "name": "todos", "schema": "public", - "type_relation_id": 16402, + "type_relation_id": 16403, } ` ) diff --git a/test/lib/views.ts b/test/lib/views.ts index 275eb4a3..e623e14b 100644 --- a/test/lib/views.ts +++ b/test/lib/views.ts @@ -15,7 +15,7 @@ test('list', async () => { "default_value": null, "enums": [], "format": "int8", - "id": "16423.1", + "id": "16424.1", "identity_generation": null, "is_generated": false, "is_identity": false, @@ -26,7 +26,7 @@ test('list', async () => { "ordinal_position": 1, "schema": "public", "table": "todos_view", - "table_id": 16423, + "table_id": 16424, }, { "check": null, @@ -35,7 +35,7 @@ test('list', async () => { "default_value": null, "enums": [], "format": "text", - "id": "16423.2", + "id": "16424.2", "identity_generation": null, "is_generated": false, "is_identity": false, @@ -46,7 +46,7 @@ test('list', async () => { "ordinal_position": 2, "schema": "public", 
"table": "todos_view", - "table_id": 16423, + "table_id": 16424, }, { "check": null, @@ -55,7 +55,7 @@ test('list', async () => { "default_value": null, "enums": [], "format": "int8", - "id": "16423.3", + "id": "16424.3", "identity_generation": null, "is_generated": false, "is_identity": false, @@ -66,7 +66,7 @@ test('list', async () => { "ordinal_position": 3, "schema": "public", "table": "todos_view", - "table_id": 16423, + "table_id": 16424, }, ], "comment": null, @@ -112,7 +112,7 @@ test('retrieve', async () => { "default_value": null, "enums": [], "format": "int8", - "id": "16423.1", + "id": "16424.1", "identity_generation": null, "is_generated": false, "is_identity": false, @@ -123,7 +123,7 @@ test('retrieve', async () => { "ordinal_position": 1, "schema": "public", "table": "todos_view", - "table_id": 16423, + "table_id": 16424, }, { "check": null, @@ -132,7 +132,7 @@ test('retrieve', async () => { "default_value": null, "enums": [], "format": "text", - "id": "16423.2", + "id": "16424.2", "identity_generation": null, "is_generated": false, "is_identity": false, @@ -143,7 +143,7 @@ test('retrieve', async () => { "ordinal_position": 2, "schema": "public", "table": "todos_view", - "table_id": 16423, + "table_id": 16424, }, { "check": null, @@ -152,7 +152,7 @@ test('retrieve', async () => { "default_value": null, "enums": [], "format": "int8", - "id": "16423.3", + "id": "16424.3", "identity_generation": null, "is_generated": false, "is_identity": false, @@ -163,7 +163,7 @@ test('retrieve', async () => { "ordinal_position": 3, "schema": "public", "table": "todos_view", - "table_id": 16423, + "table_id": 16424, }, ], "comment": null, diff --git a/test/server/indexes.ts b/test/server/indexes.ts index b3fb7f0c..1ad4d0a2 100644 --- a/test/server/indexes.ts +++ b/test/server/indexes.ts @@ -22,7 +22,7 @@ test('list indexes', async () => { 0, ], "comment": null, - "id": 16399, + "id": 16400, "index_attributes": [ { "attribute_name": "id", @@ -57,7 +57,7 @@ test('list 
indexes', async () => { }) test('retrieve index', async () => { - const res = await app.inject({ method: 'GET', path: '/indexes/16399' }) + const res = await app.inject({ method: 'GET', path: '/indexes/16400' }) const index = res.json() expect(index).toMatchInlineSnapshot( ` @@ -71,7 +71,7 @@ test('retrieve index', async () => { 0, ], "comment": null, - "id": 16399, + "id": 16400, "index_attributes": [ { "attribute_name": "id", diff --git a/test/server/query.ts b/test/server/query.ts index 9d6c0e1b..01f9cc92 100644 --- a/test/server/query.ts +++ b/test/server/query.ts @@ -1,5 +1,5 @@ import { expect, test } from 'vitest' -import { app } from './utils' +import { app, normalizeUuids } from './utils' test('query', async () => { const res = await app.inject({ @@ -7,19 +7,21 @@ test('query', async () => { path: '/query', payload: { query: 'SELECT * FROM users' }, }) - expect(res.json()).toMatchInlineSnapshot(` + expect(normalizeUuids(res.json())).toMatchInlineSnapshot(` [ { "decimal": null, "id": 1, "name": "Joe Bloggs", "status": "ACTIVE", + "user_uuid": "00000000-0000-0000-0000-000000000000", }, { "decimal": null, "id": 2, "name": "Jane Doe", "status": "ACTIVE", + "user_uuid": "00000000-0000-0000-0000-000000000000", }, ] `) @@ -758,13 +760,14 @@ test('parameter binding with positional parameters', async () => { parameters: [1, 'ACTIVE'], }, }) - expect(res.json()).toMatchInlineSnapshot(` + expect(normalizeUuids(res.json())).toMatchInlineSnapshot(` [ { "decimal": null, "id": 1, "name": "Joe Bloggs", "status": "ACTIVE", + "user_uuid": "00000000-0000-0000-0000-000000000000", }, ] `) diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 4e720277..d71b468e 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -307,6 +307,7 @@ test('typegen: typescript', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null test_unnamed_row_composite: | 
Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] | null @@ -321,12 +322,14 @@ test('typegen: typescript', async () => { id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Update: { decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Relationships: [] } @@ -493,18 +496,21 @@ test('typegen: typescript', async () => { id: number | null name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null } Insert: { decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Update: { decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Relationships: [] } @@ -575,6 +581,7 @@ test('typegen: typescript', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null } SetofOptions: { from: "*" @@ -590,6 +597,7 @@ test('typegen: typescript', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null }[] SetofOptions: { from: "*" @@ -605,6 +613,7 @@ test('typegen: typescript', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null } SetofOptions: { from: "todos" @@ -889,6 +898,7 @@ test('typegen: typescript', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null }[] SetofOptions: { from: "*" @@ -1482,6 +1492,7 @@ test('typegen w/ one-to-one relationships', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: 
string | null test_unnamed_row_composite: | Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] | null @@ -1496,12 +1507,14 @@ test('typegen w/ one-to-one relationships', async () => { id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Update: { decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Relationships: [] } @@ -1680,18 +1693,21 @@ test('typegen w/ one-to-one relationships', async () => { id: number | null name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null } Insert: { decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Update: { decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Relationships: [] } @@ -1762,6 +1778,7 @@ test('typegen w/ one-to-one relationships', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null } SetofOptions: { from: "*" @@ -1777,6 +1794,7 @@ test('typegen w/ one-to-one relationships', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null }[] SetofOptions: { from: "*" @@ -1792,6 +1810,7 @@ test('typegen w/ one-to-one relationships', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null } SetofOptions: { from: "todos" @@ -2076,6 +2095,7 @@ test('typegen w/ one-to-one relationships', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null }[] SetofOptions: { from: "*" @@ -2669,6 +2689,7 @@ 
test('typegen: typescript w/ one-to-one relationships', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null test_unnamed_row_composite: | Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] | null @@ -2683,12 +2704,14 @@ test('typegen: typescript w/ one-to-one relationships', async () => { id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Update: { decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Relationships: [] } @@ -2867,18 +2890,21 @@ test('typegen: typescript w/ one-to-one relationships', async () => { id: number | null name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null } Insert: { decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Update: { decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Relationships: [] } @@ -2949,6 +2975,7 @@ test('typegen: typescript w/ one-to-one relationships', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null } SetofOptions: { from: "*" @@ -2964,6 +2991,7 @@ test('typegen: typescript w/ one-to-one relationships', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null }[] SetofOptions: { from: "*" @@ -2979,6 +3007,7 @@ test('typegen: typescript w/ one-to-one relationships', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null } SetofOptions: { from: "todos" @@ -3263,6 +3292,7 @@ 
test('typegen: typescript w/ one-to-one relationships', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null }[] SetofOptions: { from: "*" @@ -3861,6 +3891,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null test_unnamed_row_composite: | Database["public"]["CompositeTypes"]["composite_type_with_array_attribute"] | null @@ -3875,12 +3906,14 @@ test('typegen: typescript w/ postgrestVersion', async () => { id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Update: { decimal?: number | null id?: number name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Relationships: [] } @@ -4059,18 +4092,21 @@ test('typegen: typescript w/ postgrestVersion', async () => { id: number | null name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null } Insert: { decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Update: { decimal?: number | null id?: number | null name?: string | null status?: Database["public"]["Enums"]["user_status"] | null + user_uuid?: string | null } Relationships: [] } @@ -4141,6 +4177,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null } SetofOptions: { from: "*" @@ -4156,6 +4193,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null }[] SetofOptions: { from: "*" @@ -4171,6 +4209,7 @@ test('typegen: typescript w/ postgrestVersion', 
async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null } SetofOptions: { from: "todos" @@ -4455,6 +4494,7 @@ test('typegen: typescript w/ postgrestVersion', async () => { id: number name: string | null status: Database["public"]["Enums"]["user_status"] | null + user_uuid: string | null }[] SetofOptions: { from: "*" @@ -5117,24 +5157,27 @@ test('typegen: go', async () => { "package database type PublicUsersSelect struct { - Decimal *float64 \`json:"decimal"\` - Id int64 \`json:"id"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` + Decimal *float64 \`json:"decimal"\` + Id int64 \`json:"id"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + UserUuid *string \`json:"user_uuid"\` } type PublicUsersInsert struct { - Decimal *float64 \`json:"decimal"\` - Id *int64 \`json:"id"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` + Decimal *float64 \`json:"decimal"\` + Id *int64 \`json:"id"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + UserUuid *string \`json:"user_uuid"\` } type PublicUsersUpdate struct { - Decimal *float64 \`json:"decimal"\` - Id *int64 \`json:"id"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` + Decimal *float64 \`json:"decimal"\` + Id *int64 \`json:"id"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + UserUuid *string \`json:"user_uuid"\` } type PublicTodosSelect struct { @@ -5349,10 +5392,11 @@ test('typegen: go', async () => { } type PublicUsersViewSelect struct { - Decimal *float64 \`json:"decimal"\` - Id *int64 \`json:"id"\` - Name *string \`json:"name"\` - Status *string \`json:"status"\` + Decimal *float64 \`json:"decimal"\` + Id *int64 \`json:"id"\` + Name *string \`json:"name"\` + Status *string \`json:"status"\` + UserUuid *string \`json:"user_uuid"\` } type PublicUserTodosSummaryViewSelect struct { @@ -5728,11 +5772,13 @@ test('typegen: swift', async 
() => { internal let id: Int64 internal let name: String? internal let status: UserStatus? + internal let userUuid: UUID? internal enum CodingKeys: String, CodingKey { case decimal = "decimal" case id = "id" case name = "name" case status = "status" + case userUuid = "user_uuid" } } internal struct UsersInsert: Codable, Hashable, Sendable, Identifiable { @@ -5740,11 +5786,13 @@ test('typegen: swift', async () => { internal let id: Int64? internal let name: String? internal let status: UserStatus? + internal let userUuid: UUID? internal enum CodingKeys: String, CodingKey { case decimal = "decimal" case id = "id" case name = "name" case status = "status" + case userUuid = "user_uuid" } } internal struct UsersUpdate: Codable, Hashable, Sendable, Identifiable { @@ -5752,11 +5800,13 @@ test('typegen: swift', async () => { internal let id: Int64? internal let name: String? internal let status: UserStatus? + internal let userUuid: UUID? internal enum CodingKeys: String, CodingKey { case decimal = "decimal" case id = "id" case name = "name" case status = "status" + case userUuid = "user_uuid" } } internal struct UsersAuditSelect: Codable, Hashable, Sendable, Identifiable { @@ -5840,11 +5890,13 @@ test('typegen: swift', async () => { internal let id: Int64? internal let name: String? internal let status: UserStatus? + internal let userUuid: UUID? internal enum CodingKeys: String, CodingKey { case decimal = "decimal" case id = "id" case name = "name" case status = "status" + case userUuid = "user_uuid" } } internal struct UsersViewWithMultipleRefsToUsersSelect: Codable, Hashable, Sendable { @@ -6221,11 +6273,13 @@ test('typegen: swift w/ public access control', async () => { public let id: Int64 public let name: String? public let status: UserStatus? + public let userUuid: UUID? 
public enum CodingKeys: String, CodingKey { case decimal = "decimal" case id = "id" case name = "name" case status = "status" + case userUuid = "user_uuid" } } public struct UsersInsert: Codable, Hashable, Sendable, Identifiable { @@ -6233,11 +6287,13 @@ test('typegen: swift w/ public access control', async () => { public let id: Int64? public let name: String? public let status: UserStatus? + public let userUuid: UUID? public enum CodingKeys: String, CodingKey { case decimal = "decimal" case id = "id" case name = "name" case status = "status" + case userUuid = "user_uuid" } } public struct UsersUpdate: Codable, Hashable, Sendable, Identifiable { @@ -6245,11 +6301,13 @@ test('typegen: swift w/ public access control', async () => { public let id: Int64? public let name: String? public let status: UserStatus? + public let userUuid: UUID? public enum CodingKeys: String, CodingKey { case decimal = "decimal" case id = "id" case name = "name" case status = "status" + case userUuid = "user_uuid" } } public struct UsersAuditSelect: Codable, Hashable, Sendable, Identifiable { @@ -6333,11 +6391,13 @@ test('typegen: swift w/ public access control', async () => { public let id: Int64? public let name: String? public let status: UserStatus? + public let userUuid: UUID? 
public enum CodingKeys: String, CodingKey { case decimal = "decimal" case id = "id" case name = "name" case status = "status" + case userUuid = "user_uuid" } } public struct UsersViewWithMultipleRefsToUsersSelect: Codable, Hashable, Sendable { @@ -6375,251 +6435,255 @@ test('typegen: python', async () => { query: { access_control: 'public' }, }) expect(body).toMatchInlineSnapshot(` -"from __future__ import annotations - -import datetime -import uuid -from typing import ( - Annotated, - Any, - List, - Literal, - NotRequired, - Optional, - TypeAlias, - TypedDict, -) - -from pydantic import BaseModel, Field, Json - -PublicUserStatus: TypeAlias = Literal["ACTIVE", "INACTIVE"] - -PublicMemeStatus: TypeAlias = Literal["new", "old", "retired"] - -class PublicUsers(BaseModel): - decimal: Optional[float] = Field(alias="decimal") - id: int = Field(alias="id") - name: Optional[str] = Field(alias="name") - status: Optional[PublicUserStatus] = Field(alias="status") - -class PublicUsersInsert(TypedDict): - decimal: NotRequired[Annotated[float, Field(alias="decimal")]] - id: NotRequired[Annotated[int, Field(alias="id")]] - name: NotRequired[Annotated[str, Field(alias="name")]] - status: NotRequired[Annotated[PublicUserStatus, Field(alias="status")]] - -class PublicUsersUpdate(TypedDict): - decimal: NotRequired[Annotated[float, Field(alias="decimal")]] - id: NotRequired[Annotated[int, Field(alias="id")]] - name: NotRequired[Annotated[str, Field(alias="name")]] - status: NotRequired[Annotated[PublicUserStatus, Field(alias="status")]] - -class PublicTodos(BaseModel): - details: Optional[str] = Field(alias="details") - id: int = Field(alias="id") - user_id: int = Field(alias="user-id") - -class PublicTodosInsert(TypedDict): - details: NotRequired[Annotated[str, Field(alias="details")]] - id: NotRequired[Annotated[int, Field(alias="id")]] - user_id: Annotated[int, Field(alias="user-id")] - -class PublicTodosUpdate(TypedDict): - details: NotRequired[Annotated[str, 
Field(alias="details")]] - id: NotRequired[Annotated[int, Field(alias="id")]] - user_id: NotRequired[Annotated[int, Field(alias="user-id")]] - -class PublicUsersAudit(BaseModel): - created_at: Optional[datetime.datetime] = Field(alias="created_at") - id: int = Field(alias="id") - previous_value: Optional[Json[Any]] = Field(alias="previous_value") - user_id: Optional[int] = Field(alias="user_id") - -class PublicUsersAuditInsert(TypedDict): - created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - id: NotRequired[Annotated[int, Field(alias="id")]] - previous_value: NotRequired[Annotated[Json[Any], Field(alias="previous_value")]] - user_id: NotRequired[Annotated[int, Field(alias="user_id")]] - -class PublicUsersAuditUpdate(TypedDict): - created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - id: NotRequired[Annotated[int, Field(alias="id")]] - previous_value: NotRequired[Annotated[Json[Any], Field(alias="previous_value")]] - user_id: NotRequired[Annotated[int, Field(alias="user_id")]] - -class PublicUserDetails(BaseModel): - details: Optional[str] = Field(alias="details") - user_id: int = Field(alias="user_id") - -class PublicUserDetailsInsert(TypedDict): - details: NotRequired[Annotated[str, Field(alias="details")]] - user_id: Annotated[int, Field(alias="user_id")] - -class PublicUserDetailsUpdate(TypedDict): - details: NotRequired[Annotated[str, Field(alias="details")]] - user_id: NotRequired[Annotated[int, Field(alias="user_id")]] - -class PublicEmpty(BaseModel): - pass - -class PublicEmptyInsert(TypedDict): - pass - -class PublicEmptyUpdate(TypedDict): - pass - -class PublicTableWithOtherTablesRowType(BaseModel): - col1: Optional[PublicUserDetails] = Field(alias="col1") - col2: Optional[PublicAView] = Field(alias="col2") - -class PublicTableWithOtherTablesRowTypeInsert(TypedDict): - col1: NotRequired[Annotated[PublicUserDetails, Field(alias="col1")]] - col2: NotRequired[Annotated[PublicAView, 
Field(alias="col2")]] - -class PublicTableWithOtherTablesRowTypeUpdate(TypedDict): - col1: NotRequired[Annotated[PublicUserDetails, Field(alias="col1")]] - col2: NotRequired[Annotated[PublicAView, Field(alias="col2")]] - -class PublicTableWithPrimaryKeyOtherThanId(BaseModel): - name: Optional[str] = Field(alias="name") - other_id: int = Field(alias="other_id") - -class PublicTableWithPrimaryKeyOtherThanIdInsert(TypedDict): - name: NotRequired[Annotated[str, Field(alias="name")]] - other_id: NotRequired[Annotated[int, Field(alias="other_id")]] - -class PublicTableWithPrimaryKeyOtherThanIdUpdate(TypedDict): - name: NotRequired[Annotated[str, Field(alias="name")]] - other_id: NotRequired[Annotated[int, Field(alias="other_id")]] - -class PublicEvents(BaseModel): - created_at: datetime.datetime = Field(alias="created_at") - data: Optional[Json[Any]] = Field(alias="data") - event_type: Optional[str] = Field(alias="event_type") - id: int = Field(alias="id") - -class PublicEventsInsert(TypedDict): - created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - data: NotRequired[Annotated[Json[Any], Field(alias="data")]] - event_type: NotRequired[Annotated[str, Field(alias="event_type")]] - id: NotRequired[Annotated[int, Field(alias="id")]] - -class PublicEventsUpdate(TypedDict): - created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - data: NotRequired[Annotated[Json[Any], Field(alias="data")]] - event_type: NotRequired[Annotated[str, Field(alias="event_type")]] - id: NotRequired[Annotated[int, Field(alias="id")]] - -class PublicEvents2024(BaseModel): - created_at: datetime.datetime = Field(alias="created_at") - data: Optional[Json[Any]] = Field(alias="data") - event_type: Optional[str] = Field(alias="event_type") - id: int = Field(alias="id") - -class PublicEvents2024Insert(TypedDict): - created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - data: NotRequired[Annotated[Json[Any], 
Field(alias="data")]] - event_type: NotRequired[Annotated[str, Field(alias="event_type")]] - id: Annotated[int, Field(alias="id")] - -class PublicEvents2024Update(TypedDict): - created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - data: NotRequired[Annotated[Json[Any], Field(alias="data")]] - event_type: NotRequired[Annotated[str, Field(alias="event_type")]] - id: NotRequired[Annotated[int, Field(alias="id")]] - -class PublicEvents2025(BaseModel): - created_at: datetime.datetime = Field(alias="created_at") - data: Optional[Json[Any]] = Field(alias="data") - event_type: Optional[str] = Field(alias="event_type") - id: int = Field(alias="id") - -class PublicEvents2025Insert(TypedDict): - created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - data: NotRequired[Annotated[Json[Any], Field(alias="data")]] - event_type: NotRequired[Annotated[str, Field(alias="event_type")]] - id: Annotated[int, Field(alias="id")] - -class PublicEvents2025Update(TypedDict): - created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - data: NotRequired[Annotated[Json[Any], Field(alias="data")]] - event_type: NotRequired[Annotated[str, Field(alias="event_type")]] - id: NotRequired[Annotated[int, Field(alias="id")]] - -class PublicCategory(BaseModel): - id: int = Field(alias="id") - name: str = Field(alias="name") - -class PublicCategoryInsert(TypedDict): - id: NotRequired[Annotated[int, Field(alias="id")]] - name: Annotated[str, Field(alias="name")] - -class PublicCategoryUpdate(TypedDict): - id: NotRequired[Annotated[int, Field(alias="id")]] - name: NotRequired[Annotated[str, Field(alias="name")]] - -class PublicMemes(BaseModel): - category: Optional[int] = Field(alias="category") - created_at: datetime.datetime = Field(alias="created_at") - id: int = Field(alias="id") - metadata: Optional[Json[Any]] = Field(alias="metadata") - name: str = Field(alias="name") - status: Optional[PublicMemeStatus] = 
Field(alias="status") - -class PublicMemesInsert(TypedDict): - category: NotRequired[Annotated[int, Field(alias="category")]] - created_at: Annotated[datetime.datetime, Field(alias="created_at")] - id: NotRequired[Annotated[int, Field(alias="id")]] - metadata: NotRequired[Annotated[Json[Any], Field(alias="metadata")]] - name: Annotated[str, Field(alias="name")] - status: NotRequired[Annotated[PublicMemeStatus, Field(alias="status")]] - -class PublicMemesUpdate(TypedDict): - category: NotRequired[Annotated[int, Field(alias="category")]] - created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - id: NotRequired[Annotated[int, Field(alias="id")]] - metadata: NotRequired[Annotated[Json[Any], Field(alias="metadata")]] - name: NotRequired[Annotated[str, Field(alias="name")]] - status: NotRequired[Annotated[PublicMemeStatus, Field(alias="status")]] - -class PublicAView(BaseModel): - id: Optional[int] = Field(alias="id") - -class PublicTodosView(BaseModel): - details: Optional[str] = Field(alias="details") - id: Optional[int] = Field(alias="id") - user_id: Optional[int] = Field(alias="user-id") - -class PublicUsersView(BaseModel): - decimal: Optional[float] = Field(alias="decimal") - id: Optional[int] = Field(alias="id") - name: Optional[str] = Field(alias="name") - status: Optional[PublicUserStatus] = Field(alias="status") - -class PublicUserTodosSummaryView(BaseModel): - todo_count: Optional[int] = Field(alias="todo_count") - todo_details: Optional[List[str]] = Field(alias="todo_details") - user_id: Optional[int] = Field(alias="user_id") - user_name: Optional[str] = Field(alias="user_name") - user_status: Optional[PublicUserStatus] = Field(alias="user_status") - -class PublicUsersViewWithMultipleRefsToUsers(BaseModel): - initial_id: Optional[int] = Field(alias="initial_id") - initial_name: Optional[str] = Field(alias="initial_name") - second_id: Optional[int] = Field(alias="second_id") - second_name: Optional[str] = Field(alias="second_name") - 
-class PublicTodosMatview(BaseModel): - details: Optional[str] = Field(alias="details") - id: Optional[int] = Field(alias="id") - user_id: Optional[int] = Field(alias="user-id") - -class PublicCompositeTypeWithArrayAttribute(BaseModel): - my_text_array: List[str] = Field(alias="my_text_array") - -class PublicCompositeTypeWithRecordAttribute(BaseModel): - todo: PublicTodos = Field(alias="todo")" -`) + "from __future__ import annotations + + import datetime + import uuid + from typing import ( + Annotated, + Any, + List, + Literal, + NotRequired, + Optional, + TypeAlias, + TypedDict, + ) + + from pydantic import BaseModel, Field, Json + + PublicUserStatus: TypeAlias = Literal["ACTIVE", "INACTIVE"] + + PublicMemeStatus: TypeAlias = Literal["new", "old", "retired"] + + class PublicUsers(BaseModel): + decimal: Optional[float] = Field(alias="decimal") + id: int = Field(alias="id") + name: Optional[str] = Field(alias="name") + status: Optional[PublicUserStatus] = Field(alias="status") + user_uuid: Optional[uuid.UUID] = Field(alias="user_uuid") + + class PublicUsersInsert(TypedDict): + decimal: NotRequired[Annotated[float, Field(alias="decimal")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + name: NotRequired[Annotated[str, Field(alias="name")]] + status: NotRequired[Annotated[PublicUserStatus, Field(alias="status")]] + user_uuid: NotRequired[Annotated[uuid.UUID, Field(alias="user_uuid")]] + + class PublicUsersUpdate(TypedDict): + decimal: NotRequired[Annotated[float, Field(alias="decimal")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + name: NotRequired[Annotated[str, Field(alias="name")]] + status: NotRequired[Annotated[PublicUserStatus, Field(alias="status")]] + user_uuid: NotRequired[Annotated[uuid.UUID, Field(alias="user_uuid")]] + + class PublicTodos(BaseModel): + details: Optional[str] = Field(alias="details") + id: int = Field(alias="id") + user_id: int = Field(alias="user-id") + + class PublicTodosInsert(TypedDict): + details: 
NotRequired[Annotated[str, Field(alias="details")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + user_id: Annotated[int, Field(alias="user-id")] + + class PublicTodosUpdate(TypedDict): + details: NotRequired[Annotated[str, Field(alias="details")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + user_id: NotRequired[Annotated[int, Field(alias="user-id")]] + + class PublicUsersAudit(BaseModel): + created_at: Optional[datetime.datetime] = Field(alias="created_at") + id: int = Field(alias="id") + previous_value: Optional[Json[Any]] = Field(alias="previous_value") + user_id: Optional[int] = Field(alias="user_id") + + class PublicUsersAuditInsert(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + previous_value: NotRequired[Annotated[Json[Any], Field(alias="previous_value")]] + user_id: NotRequired[Annotated[int, Field(alias="user_id")]] + + class PublicUsersAuditUpdate(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + previous_value: NotRequired[Annotated[Json[Any], Field(alias="previous_value")]] + user_id: NotRequired[Annotated[int, Field(alias="user_id")]] + + class PublicUserDetails(BaseModel): + details: Optional[str] = Field(alias="details") + user_id: int = Field(alias="user_id") + + class PublicUserDetailsInsert(TypedDict): + details: NotRequired[Annotated[str, Field(alias="details")]] + user_id: Annotated[int, Field(alias="user_id")] + + class PublicUserDetailsUpdate(TypedDict): + details: NotRequired[Annotated[str, Field(alias="details")]] + user_id: NotRequired[Annotated[int, Field(alias="user_id")]] + + class PublicEmpty(BaseModel): + pass + + class PublicEmptyInsert(TypedDict): + pass + + class PublicEmptyUpdate(TypedDict): + pass + + class PublicTableWithOtherTablesRowType(BaseModel): + col1: Optional[PublicUserDetails] = Field(alias="col1") + 
col2: Optional[PublicAView] = Field(alias="col2") + + class PublicTableWithOtherTablesRowTypeInsert(TypedDict): + col1: NotRequired[Annotated[PublicUserDetails, Field(alias="col1")]] + col2: NotRequired[Annotated[PublicAView, Field(alias="col2")]] + + class PublicTableWithOtherTablesRowTypeUpdate(TypedDict): + col1: NotRequired[Annotated[PublicUserDetails, Field(alias="col1")]] + col2: NotRequired[Annotated[PublicAView, Field(alias="col2")]] + + class PublicTableWithPrimaryKeyOtherThanId(BaseModel): + name: Optional[str] = Field(alias="name") + other_id: int = Field(alias="other_id") + + class PublicTableWithPrimaryKeyOtherThanIdInsert(TypedDict): + name: NotRequired[Annotated[str, Field(alias="name")]] + other_id: NotRequired[Annotated[int, Field(alias="other_id")]] + + class PublicTableWithPrimaryKeyOtherThanIdUpdate(TypedDict): + name: NotRequired[Annotated[str, Field(alias="name")]] + other_id: NotRequired[Annotated[int, Field(alias="other_id")]] + + class PublicEvents(BaseModel): + created_at: datetime.datetime = Field(alias="created_at") + data: Optional[Json[Any]] = Field(alias="data") + event_type: Optional[str] = Field(alias="event_type") + id: int = Field(alias="id") + + class PublicEventsInsert(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + data: NotRequired[Annotated[Json[Any], Field(alias="data")]] + event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + + class PublicEventsUpdate(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + data: NotRequired[Annotated[Json[Any], Field(alias="data")]] + event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + + class PublicEvents2024(BaseModel): + created_at: datetime.datetime = Field(alias="created_at") + data: Optional[Json[Any]] = Field(alias="data") + event_type: Optional[str] = 
Field(alias="event_type") + id: int = Field(alias="id") + + class PublicEvents2024Insert(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + data: NotRequired[Annotated[Json[Any], Field(alias="data")]] + event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + id: Annotated[int, Field(alias="id")] + + class PublicEvents2024Update(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + data: NotRequired[Annotated[Json[Any], Field(alias="data")]] + event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + + class PublicEvents2025(BaseModel): + created_at: datetime.datetime = Field(alias="created_at") + data: Optional[Json[Any]] = Field(alias="data") + event_type: Optional[str] = Field(alias="event_type") + id: int = Field(alias="id") + + class PublicEvents2025Insert(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + data: NotRequired[Annotated[Json[Any], Field(alias="data")]] + event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + id: Annotated[int, Field(alias="id")] + + class PublicEvents2025Update(TypedDict): + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + data: NotRequired[Annotated[Json[Any], Field(alias="data")]] + event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + + class PublicCategory(BaseModel): + id: int = Field(alias="id") + name: str = Field(alias="name") + + class PublicCategoryInsert(TypedDict): + id: NotRequired[Annotated[int, Field(alias="id")]] + name: Annotated[str, Field(alias="name")] + + class PublicCategoryUpdate(TypedDict): + id: NotRequired[Annotated[int, Field(alias="id")]] + name: NotRequired[Annotated[str, Field(alias="name")]] + + class PublicMemes(BaseModel): + category: Optional[int] = Field(alias="category") + 
created_at: datetime.datetime = Field(alias="created_at") + id: int = Field(alias="id") + metadata: Optional[Json[Any]] = Field(alias="metadata") + name: str = Field(alias="name") + status: Optional[PublicMemeStatus] = Field(alias="status") + + class PublicMemesInsert(TypedDict): + category: NotRequired[Annotated[int, Field(alias="category")]] + created_at: Annotated[datetime.datetime, Field(alias="created_at")] + id: NotRequired[Annotated[int, Field(alias="id")]] + metadata: NotRequired[Annotated[Json[Any], Field(alias="metadata")]] + name: Annotated[str, Field(alias="name")] + status: NotRequired[Annotated[PublicMemeStatus, Field(alias="status")]] + + class PublicMemesUpdate(TypedDict): + category: NotRequired[Annotated[int, Field(alias="category")]] + created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + metadata: NotRequired[Annotated[Json[Any], Field(alias="metadata")]] + name: NotRequired[Annotated[str, Field(alias="name")]] + status: NotRequired[Annotated[PublicMemeStatus, Field(alias="status")]] + + class PublicAView(BaseModel): + id: Optional[int] = Field(alias="id") + + class PublicTodosView(BaseModel): + details: Optional[str] = Field(alias="details") + id: Optional[int] = Field(alias="id") + user_id: Optional[int] = Field(alias="user-id") + + class PublicUsersView(BaseModel): + decimal: Optional[float] = Field(alias="decimal") + id: Optional[int] = Field(alias="id") + name: Optional[str] = Field(alias="name") + status: Optional[PublicUserStatus] = Field(alias="status") + user_uuid: Optional[uuid.UUID] = Field(alias="user_uuid") + + class PublicUserTodosSummaryView(BaseModel): + todo_count: Optional[int] = Field(alias="todo_count") + todo_details: Optional[List[str]] = Field(alias="todo_details") + user_id: Optional[int] = Field(alias="user_id") + user_name: Optional[str] = Field(alias="user_name") + user_status: Optional[PublicUserStatus] = Field(alias="user_status") + + 
class PublicUsersViewWithMultipleRefsToUsers(BaseModel): + initial_id: Optional[int] = Field(alias="initial_id") + initial_name: Optional[str] = Field(alias="initial_name") + second_id: Optional[int] = Field(alias="second_id") + second_name: Optional[str] = Field(alias="second_name") + + class PublicTodosMatview(BaseModel): + details: Optional[str] = Field(alias="details") + id: Optional[int] = Field(alias="id") + user_id: Optional[int] = Field(alias="user-id") + + class PublicCompositeTypeWithArrayAttribute(BaseModel): + my_text_array: List[str] = Field(alias="my_text_array") + + class PublicCompositeTypeWithRecordAttribute(BaseModel): + todo: PublicTodos = Field(alias="todo")" + `) }) test('typegen: python w/ excluded/included schemas', async () => { diff --git a/test/server/utils.ts b/test/server/utils.ts index 0222fcf3..63e1e53d 100644 --- a/test/server/utils.ts +++ b/test/server/utils.ts @@ -1,3 +1,29 @@ import { build as buildApp } from '../../src/server/app' export const app = buildApp() + +/** + * Normalizes UUIDs in test data to make snapshots resilient to UUID changes. + * Replaces all UUID strings with a consistent placeholder. 
+ */ +export function normalizeUuids(data: unknown): unknown { + const uuidRegex = /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i + + if (typeof data === 'string' && uuidRegex.test(data)) { + return '00000000-0000-0000-0000-000000000000' + } + + if (Array.isArray(data)) { + return data.map(normalizeUuids) + } + + if (data !== null && typeof data === 'object') { + const normalized: Record = {} + for (const [key, value] of Object.entries(data)) { + normalized[key] = normalizeUuids(value) + } + return normalized + } + + return data +} From a56f296b79a47b8c695ec0769456399fc15db63b Mon Sep 17 00:00:00 2001 From: Muzzaiyyan Hussain Date: Fri, 9 Jan 2026 06:37:04 +0530 Subject: [PATCH 68/72] fix(typescript): infer nullable INTERVAL columns as string | null (#1031) * fix(types): infer nullable INTERVAL as string | null * test: add e2e test with null and required interval * fix: interval types mapping for python --------- Co-authored-by: avallete --- src/server/templates/python.ts | 1 + src/server/templates/typescript.ts | 3 +- test/db/00-init.sql | 37 +++++- test/server/typegen.ts | 205 +++++++++++++++++++++++++++++ test/types.test.ts | 12 ++ 5 files changed, 256 insertions(+), 2 deletions(-) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index 2d9459ca..b112f089 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -345,6 +345,7 @@ const PY_TYPE_MAP: Record = { timestamptz: 'datetime.datetime', uuid: 'uuid.UUID', vector: 'list[Any]', + interval: 'str', // JSON json: 'Json[Any]', diff --git a/src/server/templates/typescript.ts b/src/server/templates/typescript.ts index 75ca7de4..352c4ddc 100644 --- a/src/server/templates/typescript.ts +++ b/src/server/templates/typescript.ts @@ -869,7 +869,7 @@ export const Constants = { } // TODO: Make this more robust. Currently doesn't handle range types - returns them as unknown. 
-const pgTypeToTsType = ( +export const pgTypeToTsType = ( schema: PostgresSchema, pgType: string, { @@ -902,6 +902,7 @@ const pgTypeToTsType = ( 'timestamptz', 'uuid', 'vector', + 'interval', ].includes(pgType) ) { return 'string' diff --git a/test/db/00-init.sql b/test/db/00-init.sql index 8ddc77ba..c30e1f4a 100644 --- a/test/db/00-init.sql +++ b/test/db/00-init.sql @@ -465,4 +465,39 @@ CREATE OR REPLACE FUNCTION "public"."days_since_event" ("public"."events") RETUR SET "search_path" TO '' AS $_$ SELECT ROUND(EXTRACT(EPOCH FROM (NOW() - $1.created_at)) / 86400); -$_$; \ No newline at end of file +$_$; + +-- Table with interval columns for testing interval type (nullable and not nullable) +CREATE TABLE public.interval_test ( + id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, + duration_required interval NOT NULL, + duration_optional interval +); + +-- Insert test data with interval values +INSERT INTO public.interval_test (duration_required, duration_optional) +VALUES + ('1 day 2 hours 30 minutes', '3 days 5 hours'), + ('1 week', NULL), + ('2 hours 15 minutes', '45 minutes'); + +-- Function that takes interval parameter and returns interval +CREATE OR REPLACE FUNCTION public.add_interval_to_duration( + base_duration interval, + additional_interval interval +) +RETURNS interval +LANGUAGE SQL +STABLE +AS $$ + SELECT base_duration + additional_interval; +$$; + +-- Function that takes a table row with interval and returns interval +CREATE OR REPLACE FUNCTION public.double_duration(interval_test_row public.interval_test) +RETURNS interval +LANGUAGE SQL +STABLE +AS $$ + SELECT interval_test_row.duration_required * 2; +$$; \ No newline at end of file diff --git a/test/server/typegen.ts b/test/server/typegen.ts index d71b468e..7bb7013b 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -119,6 +119,24 @@ test('typegen: typescript', async () => { } Relationships: [] } + interval_test: { + Row: { + duration_optional: string | null + duration_required: 
string + id: number + } + Insert: { + duration_optional?: string | null + duration_required: string + id?: number + } + Update: { + duration_optional?: string | null + duration_required?: string + id?: number + } + Relationships: [] + } memes: { Row: { category: number | null @@ -525,6 +543,10 @@ test('typegen: typescript', async () => { } } Functions: { + add_interval_to_duration: { + Args: { additional_interval: string; base_duration: string } + Returns: string + } blurb: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } Returns: { @@ -574,6 +596,12 @@ test('typegen: typescript', async () => { error: true } & "the function public.details_words with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + double_duration: { + Args: { + interval_test_row: Database["public"]["Tables"]["interval_test"]["Row"] + } + Returns: string + } function_returning_row: { Args: never Returns: { @@ -1291,6 +1319,24 @@ test('typegen w/ one-to-one relationships', async () => { } Relationships: [] } + interval_test: { + Row: { + duration_optional: string | null + duration_required: string + id: number + } + Insert: { + duration_optional?: string | null + duration_required: string + id?: number + } + Update: { + duration_optional?: string | null + duration_required?: string + id?: number + } + Relationships: [] + } memes: { Row: { category: number | null @@ -1722,6 +1768,10 @@ test('typegen w/ one-to-one relationships', async () => { } } Functions: { + add_interval_to_duration: { + Args: { additional_interval: string; base_duration: string } + Returns: string + } blurb: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } Returns: { @@ -1771,6 +1821,12 @@ test('typegen w/ one-to-one relationships', async () => { error: true } & "the function public.details_words with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + double_duration: { + Args: { + 
interval_test_row: Database["public"]["Tables"]["interval_test"]["Row"] + } + Returns: string + } function_returning_row: { Args: never Returns: { @@ -2488,6 +2544,24 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } Relationships: [] } + interval_test: { + Row: { + duration_optional: string | null + duration_required: string + id: number + } + Insert: { + duration_optional?: string | null + duration_required: string + id?: number + } + Update: { + duration_optional?: string | null + duration_required?: string + id?: number + } + Relationships: [] + } memes: { Row: { category: number | null @@ -2919,6 +2993,10 @@ test('typegen: typescript w/ one-to-one relationships', async () => { } } Functions: { + add_interval_to_duration: { + Args: { additional_interval: string; base_duration: string } + Returns: string + } blurb: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } Returns: { @@ -2968,6 +3046,12 @@ test('typegen: typescript w/ one-to-one relationships', async () => { error: true } & "the function public.details_words with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + double_duration: { + Args: { + interval_test_row: Database["public"]["Tables"]["interval_test"]["Row"] + } + Returns: string + } function_returning_row: { Args: never Returns: { @@ -3690,6 +3774,24 @@ test('typegen: typescript w/ postgrestVersion', async () => { } Relationships: [] } + interval_test: { + Row: { + duration_optional: string | null + duration_required: string + id: number + } + Insert: { + duration_optional?: string | null + duration_required: string + id?: number + } + Update: { + duration_optional?: string | null + duration_required?: string + id?: number + } + Relationships: [] + } memes: { Row: { category: number | null @@ -4121,6 +4223,10 @@ test('typegen: typescript w/ postgrestVersion', async () => { } } Functions: { + add_interval_to_duration: { + Args: { additional_interval: 
string; base_duration: string } + Returns: string + } blurb: { Args: { "": Database["public"]["Tables"]["todos"]["Row"] } Returns: { @@ -4170,6 +4276,12 @@ test('typegen: typescript w/ postgrestVersion', async () => { error: true } & "the function public.details_words with parameter or with a single unnamed json/jsonb parameter, but no matches were found in the schema cache" } + double_duration: { + Args: { + interval_test_row: Database["public"]["Tables"]["interval_test"]["Row"] + } + Returns: string + } function_returning_row: { Args: never Returns: { @@ -5339,6 +5451,24 @@ test('typegen: go', async () => { Id *int64 \`json:"id"\` } + type PublicIntervalTestSelect struct { + DurationOptional interface{} \`json:"duration_optional"\` + DurationRequired interface{} \`json:"duration_required"\` + Id int64 \`json:"id"\` + } + + type PublicIntervalTestInsert struct { + DurationOptional interface{} \`json:"duration_optional"\` + DurationRequired interface{} \`json:"duration_required"\` + Id *int64 \`json:"id"\` + } + + type PublicIntervalTestUpdate struct { + DurationOptional interface{} \`json:"duration_optional"\` + DurationRequired interface{} \`json:"duration_required"\` + Id *int64 \`json:"id"\` + } + type PublicCategorySelect struct { Id int32 \`json:"id"\` Name string \`json:"name"\` @@ -5614,6 +5744,36 @@ test('typegen: swift', async () => { case status = "status" } } + internal struct IntervalTestSelect: Codable, Hashable, Sendable, Identifiable { + internal let durationOptional: IntervalSelect? + internal let durationRequired: IntervalSelect + internal let id: Int64 + internal enum CodingKeys: String, CodingKey { + case durationOptional = "duration_optional" + case durationRequired = "duration_required" + case id = "id" + } + } + internal struct IntervalTestInsert: Codable, Hashable, Sendable, Identifiable { + internal let durationOptional: IntervalSelect? + internal let durationRequired: IntervalSelect + internal let id: Int64? 
+ internal enum CodingKeys: String, CodingKey { + case durationOptional = "duration_optional" + case durationRequired = "duration_required" + case id = "id" + } + } + internal struct IntervalTestUpdate: Codable, Hashable, Sendable, Identifiable { + internal let durationOptional: IntervalSelect? + internal let durationRequired: IntervalSelect? + internal let id: Int64? + internal enum CodingKeys: String, CodingKey { + case durationOptional = "duration_optional" + case durationRequired = "duration_required" + case id = "id" + } + } internal struct MemesSelect: Codable, Hashable, Sendable { internal let category: Int32? internal let createdAt: String @@ -6115,6 +6275,36 @@ test('typegen: swift w/ public access control', async () => { case status = "status" } } + public struct IntervalTestSelect: Codable, Hashable, Sendable, Identifiable { + public let durationOptional: IntervalSelect? + public let durationRequired: IntervalSelect + public let id: Int64 + public enum CodingKeys: String, CodingKey { + case durationOptional = "duration_optional" + case durationRequired = "duration_required" + case id = "id" + } + } + public struct IntervalTestInsert: Codable, Hashable, Sendable, Identifiable { + public let durationOptional: IntervalSelect? + public let durationRequired: IntervalSelect + public let id: Int64? + public enum CodingKeys: String, CodingKey { + case durationOptional = "duration_optional" + case durationRequired = "duration_required" + case id = "id" + } + } + public struct IntervalTestUpdate: Codable, Hashable, Sendable, Identifiable { + public let durationOptional: IntervalSelect? + public let durationRequired: IntervalSelect? + public let id: Int64? + public enum CodingKeys: String, CodingKey { + case durationOptional = "duration_optional" + case durationRequired = "duration_required" + case id = "id" + } + } public struct MemesSelect: Codable, Hashable, Sendable { public let category: Int32? 
public let createdAt: String @@ -6609,6 +6799,21 @@ test('typegen: python', async () => { event_type: NotRequired[Annotated[str, Field(alias="event_type")]] id: NotRequired[Annotated[int, Field(alias="id")]] + class PublicIntervalTest(BaseModel): + duration_optional: Optional[str] = Field(alias="duration_optional") + duration_required: str = Field(alias="duration_required") + id: int = Field(alias="id") + + class PublicIntervalTestInsert(TypedDict): + duration_optional: NotRequired[Annotated[str, Field(alias="duration_optional")]] + duration_required: Annotated[str, Field(alias="duration_required")] + id: NotRequired[Annotated[int, Field(alias="id")]] + + class PublicIntervalTestUpdate(TypedDict): + duration_optional: NotRequired[Annotated[str, Field(alias="duration_optional")]] + duration_required: NotRequired[Annotated[str, Field(alias="duration_required")]] + id: NotRequired[Annotated[int, Field(alias="id")]] + class PublicCategory(BaseModel): id: int = Field(alias="id") name: str = Field(alias="name") diff --git a/test/types.test.ts b/test/types.test.ts index df2af697..8a213902 100644 --- a/test/types.test.ts +++ b/test/types.test.ts @@ -1,6 +1,7 @@ import { expect, test, describe } from 'vitest' import { build } from '../src/server/app.js' import { TEST_CONNECTION_STRING } from './lib/utils.js' +import { pgTypeToTsType } from '../src/server/templates/typescript' describe('server/routes/types', () => { test('should list types', async () => { @@ -43,4 +44,15 @@ describe('server/routes/types', () => { expect(response.statusCode).toBe(404) await app.close() }) + + test('nullable interval column maps to string | null', () => { + const result = pgTypeToTsType({ name: 'public' } as any, 'interval', { + types: [], + schemas: [], + tables: [], + views: [], + }) + + expect(result).toBe('string') + }) }) From 9ba38a309afbd8d5eda4c17039363e748df186d7 Mon Sep 17 00:00:00 2001 From: Salman Chishti Date: Wed, 28 Jan 2026 10:07:11 +0000 Subject: [PATCH 69/72] chore(deps): 
Upgrade GitHub Actions for Node 24 compatibility (#1032) Signed-off-by: Salman Muin Kayser Chishti <13schishti@gmail.com> --- .github/workflows/canary-comment.yml | 4 ++-- .github/workflows/canary-deploy.yml | 4 ++-- .github/workflows/ci.yml | 10 +++++----- .github/workflows/docs.yml | 4 ++-- .github/workflows/publish-deps.yml | 4 ++-- .github/workflows/release.yml | 2 +- .github/workflows/validate-python-types.yml | 6 +++--- 7 files changed, 17 insertions(+), 17 deletions(-) diff --git a/.github/workflows/canary-comment.yml b/.github/workflows/canary-comment.yml index 69cebc03..f950dda1 100644 --- a/.github/workflows/canary-comment.yml +++ b/.github/workflows/canary-comment.yml @@ -19,7 +19,7 @@ jobs: # Get PR number from the workflow run - name: Get PR info id: pr-info - uses: actions/github-script@v7 + uses: actions/github-script@v8 with: script: | // Get the workflow run details @@ -60,7 +60,7 @@ jobs: - name: Extract canary info if: ${{ steps.pr-info.outputs.found == 'true' && steps.pr-info.outputs.has_canary_label == 'true' && github.event.workflow_run.conclusion == 'success' }} id: canary-info - uses: actions/github-script@v7 + uses: actions/github-script@v8 with: script: | const workflowRun = context.payload.workflow_run; diff --git a/.github/workflows/canary-deploy.yml b/.github/workflows/canary-deploy.yml index f40f7a0f..df832acf 100644 --- a/.github/workflows/canary-deploy.yml +++ b/.github/workflows/canary-deploy.yml @@ -31,7 +31,7 @@ jobs: steps: # Checkout fork code - safe because no secrets are available for building - name: Checkout code - uses: actions/checkout@v5 + uses: actions/checkout@v6 # Log PR author for auditing - name: Log PR author @@ -40,7 +40,7 @@ jobs: echo "PR #${{ github.event.pull_request.number }} from fork: ${{ github.event.pull_request.head.repo.full_name }}" - name: Setup Node.js - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 with: node-version-file: '.nvmrc' cache: 'npm' diff --git a/.github/workflows/ci.yml 
b/.github/workflows/ci.yml index c531213f..8b1d523c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -20,9 +20,9 @@ jobs: name: Test runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - - uses: actions/setup-node@v4 + - uses: actions/setup-node@v6 with: node-version-file: '.nvmrc' @@ -40,10 +40,10 @@ jobs: name: Prettier check runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - name: Setup node - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 with: node-version-file: '.nvmrc' @@ -64,7 +64,7 @@ jobs: contents: read packages: write steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 name: Checkout Repo - uses: docker/setup-buildx-action@v3 diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 3630904a..02787215 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -20,9 +20,9 @@ jobs: name: Publish docs runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - - uses: actions/setup-node@v4 + - uses: actions/setup-node@v6 with: node-version-file: '.nvmrc' diff --git a/.github/workflows/publish-deps.yml b/.github/workflows/publish-deps.yml index 693a3edd..fe57f8cb 100644 --- a/.github/workflows/publish-deps.yml +++ b/.github/workflows/publish-deps.yml @@ -13,12 +13,12 @@ jobs: # Must match glibc verison in node:20 runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 with: repository: 'pyramation/libpg-query-node' ref: 'v15' - - uses: actions/setup-node@v4 + - uses: actions/setup-node@v6 with: node-version-file: '.nvmrc' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 71bc8f6c..25fd50e5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -17,7 +17,7 @@ jobs: contents: write id-token: write steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: 
actions/setup-node@v6 with: diff --git a/.github/workflows/validate-python-types.yml b/.github/workflows/validate-python-types.yml index 1e5809b1..c71ef4e7 100644 --- a/.github/workflows/validate-python-types.yml +++ b/.github/workflows/validate-python-types.yml @@ -12,10 +12,10 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@v6 - name: Set up Node.js - uses: actions/setup-node@v4 + uses: actions/setup-node@v6 with: node-version: '20' cache: 'npm' @@ -27,7 +27,7 @@ jobs: run: npm run build - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version: '3.11' From be1e9de1a4eb0e51980deff95ec5767042eaab91 Mon Sep 17 00:00:00 2001 From: Salman Chishti Date: Wed, 28 Jan 2026 10:11:38 +0000 Subject: [PATCH 70/72] chore(deps): Upgrade GitHub Actions to latest versions (#1033) Signed-off-by: Salman Muin Kayser Chishti <13schishti@gmail.com> Co-authored-by: Andrew Valleteau --- .github/workflows/canary-comment.yml | 4 ++-- .github/workflows/canary-deploy.yml | 2 +- .github/workflows/ci.yml | 2 +- .github/workflows/mirror.yml | 2 +- .github/workflows/publish-deps.yml | 2 +- .github/workflows/release.yml | 2 +- 6 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/canary-comment.yml b/.github/workflows/canary-comment.yml index f950dda1..56147599 100644 --- a/.github/workflows/canary-comment.yml +++ b/.github/workflows/canary-comment.yml @@ -81,7 +81,7 @@ jobs: # Find existing comment - name: Find existing comment if: ${{ steps.pr-info.outputs.found == 'true' && steps.pr-info.outputs.has_canary_label == 'true' }} - uses: peter-evans/find-comment@v3 + uses: peter-evans/find-comment@v4 id: find-comment with: issue-number: ${{ steps.pr-info.outputs.pr_number }} @@ -91,7 +91,7 @@ jobs: # Create or update comment based on workflow status - name: Create or update canary comment if: ${{ steps.pr-info.outputs.found == 'true' && steps.pr-info.outputs.has_canary_label == 
'true' }} - uses: peter-evans/create-or-update-comment@v4 + uses: peter-evans/create-or-update-comment@v5 with: comment-id: ${{ steps.find-comment.outputs.comment-id }} issue-number: ${{ steps.pr-info.outputs.pr_number }} diff --git a/.github/workflows/canary-deploy.yml b/.github/workflows/canary-deploy.yml index df832acf..932dda35 100644 --- a/.github/workflows/canary-deploy.yml +++ b/.github/workflows/canary-deploy.yml @@ -74,7 +74,7 @@ jobs: password: ${{ secrets.DOCKER_PASSWORD }} - name: configure aws credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@v5 with: role-to-assume: ${{ secrets.PROD_AWS_ROLE }} aws-region: us-east-1 diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 8b1d523c..8dd628f2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -70,7 +70,7 @@ jobs: - uses: docker/setup-buildx-action@v3 name: Set up Docker Buildx - - uses: docker/build-push-action@v5 + - uses: docker/build-push-action@v6 with: push: false tags: pg-meta:test diff --git a/.github/workflows/mirror.yml b/.github/workflows/mirror.yml index 5c56a23b..0fee1473 100644 --- a/.github/workflows/mirror.yml +++ b/.github/workflows/mirror.yml @@ -19,7 +19,7 @@ jobs: packages: write steps: - name: configure aws credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@v5 with: role-to-assume: ${{ secrets.PROD_AWS_ROLE }} aws-region: us-east-1 diff --git a/.github/workflows/publish-deps.yml b/.github/workflows/publish-deps.yml index fe57f8cb..607eb757 100644 --- a/.github/workflows/publish-deps.yml +++ b/.github/workflows/publish-deps.yml @@ -25,7 +25,7 @@ jobs: - run: npm i - run: npm run binary:build - - uses: aws-actions/configure-aws-credentials@v4 + - uses: aws-actions/configure-aws-credentials@v5 with: role-to-assume: ${{ secrets.PROD_AWS_ROLE }} aws-region: us-east-1 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml 
index 25fd50e5..718372a5 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -70,7 +70,7 @@ jobs: password: ${{ secrets.DOCKER_PASSWORD }} - name: configure aws credentials - uses: aws-actions/configure-aws-credentials@v4 + uses: aws-actions/configure-aws-credentials@v5 with: role-to-assume: ${{ secrets.PROD_AWS_ROLE }} aws-region: us-east-1 From 5b6537d76ec387d95455a9c754341b4c2c23ff53 Mon Sep 17 00:00:00 2001 From: Andrew Valleteau Date: Sat, 28 Feb 2026 10:23:27 +0100 Subject: [PATCH 71/72] feat(query): add queryTimeoutSecs param for pool timeout override (#1045) * feat(query): add queryTimeoutSecs param for pool timeout override Allow callers to set or disable pool-level query_timeout via ?queryTimeoutSecs=0|N. Absent keeps default (backwards compatible). Enables long-running migrations to avoid 408 timeouts. * fix: review * fix: setStatementTimeout=0 --- src/lib/db.ts | 7 +-- src/server/routes/query.ts | 4 +- src/server/utils.ts | 14 +++++- test/server/query-timeout.ts | 85 +++++++++++++++++++++++++++++------- 4 files changed, 87 insertions(+), 23 deletions(-) diff --git a/src/lib/db.ts b/src/lib/db.ts index d43ef8f5..900e0fb8 100644 --- a/src/lib/db.ts +++ b/src/lib/db.ts @@ -124,9 +124,10 @@ export const init: (config: PoolConfig) => { async () => { // Use statement_timeout AND idle_session_timeout to ensure the connection will be killed even if idle after // timeout time. - const statementTimeoutQueryPrefix = statementQueryTimeout - ? `SET statement_timeout='${statementQueryTimeout}s'; SET idle_session_timeout='${statementQueryTimeout}s';` - : '' + const statementTimeoutQueryPrefix = + statementQueryTimeout !== undefined + ? 
`SET statement_timeout='${statementQueryTimeout}s'; SET idle_session_timeout='${statementQueryTimeout}s';` + : '' // node-postgres need a statement_timeout to kill the connection when timeout is reached // otherwise the query will keep running on the database even if query timeout was reached // This need to be added at query and not connection level because poolers (pgbouncer) doesn't diff --git a/src/server/routes/query.ts b/src/server/routes/query.ts index c6bea0c6..b7e41133 100644 --- a/src/server/routes/query.ts +++ b/src/server/routes/query.ts @@ -17,11 +17,11 @@ export default async (fastify: FastifyInstance) => { fastify.post<{ Headers: { pg: string; 'x-pg-application-name'?: string } Body: { query: string; parameters?: unknown[] } - Querystring: { statementTimeoutSecs?: number } + Querystring: { statementTimeoutSecs?: number; queryTimeoutSecs?: number } }>('/', async (request, reply) => { const statementTimeoutSecs = request.query.statementTimeoutSecs errorOnEmptyQuery(request) - const config = createConnectionConfig(request) + const config = createConnectionConfig(request, request.query.queryTimeoutSecs) const pgMeta = new PostgresMeta(config) const { data, error } = await pgMeta.query(request.body.query, { trackQueryInSentry: true, diff --git a/src/server/utils.ts b/src/server/utils.ts index ebb8ec90..920fe96f 100644 --- a/src/server/utils.ts +++ b/src/server/utils.ts @@ -23,9 +23,19 @@ export const extractRequestForLogging = (request: FastifyRequest) => { } } -export function createConnectionConfig(request: FastifyRequest): PoolConfig { +export function createConnectionConfig( + request: FastifyRequest, + queryTimeoutSecs?: number | string +): PoolConfig { const connectionString = request.headers.pg as string - const config = { ...DEFAULT_POOL_CONFIG, connectionString } + const timeout = queryTimeoutSecs !== undefined ? 
Number(queryTimeoutSecs) : undefined + const config = { + ...DEFAULT_POOL_CONFIG, + connectionString, + ...(timeout !== undefined && { + query_timeout: timeout === 0 ? undefined : timeout * 1000, + }), + } // Override application_name if custom one provided in header if (request.headers['x-pg-application-name']) { diff --git a/test/server/query-timeout.ts b/test/server/query-timeout.ts index 47554afc..cf9f8eb6 100644 --- a/test/server/query-timeout.ts +++ b/test/server/query-timeout.ts @@ -2,15 +2,16 @@ import { expect, test, describe } from 'vitest' import { app } from './utils' import { pgMeta } from '../lib/utils' -const TIMEOUT = (Number(process.env.PG_QUERY_TIMEOUT_SECS) ?? 10) + 2 -const STATEMENT_TIMEOUT = (Number(process.env.PG_QUERY_TIMEOUT_SECS) ?? 10) + 1 +const PG_QUERY_TIMEOUT = Number(process.env.PG_QUERY_TIMEOUT_SECS) ?? 10 +const TIMEOUT = PG_QUERY_TIMEOUT + 2 +const STATEMENT_TIMEOUT = PG_QUERY_TIMEOUT + 1 +const CUSTOM_QUERY_TIMEOUT = 2 describe('test query timeout', () => { test( - `query timeout after ${TIMEOUT}s and connection cleanup`, + `pool timeout after ${TIMEOUT}s with statementTimeoutSecs and connection cleanup`, async () => { const query = `SELECT pg_sleep(${TIMEOUT + 10});` - // Execute a query that will sleep for 10 seconds const res = await app.inject({ method: 'POST', path: '/query', @@ -20,30 +21,25 @@ describe('test query timeout', () => { }, }) - // Check that we get the proper timeout error response - expect(res.statusCode).toBe(408) // Request Timeout + expect(res.statusCode).toBe(408) expect(res.json()).toMatchObject({ error: expect.stringContaining('Query read timeout'), }) - // wait one second for the statement timeout to take effect await new Promise((resolve) => setTimeout(resolve, 1000)) - // Verify that the connection has been cleaned up by checking active connections const connectionsRes = await pgMeta.query(` SELECT * FROM pg_stat_activity where application_name = 'postgres-meta 0.0.0-automated' and query ILIKE 
'%${query}%'; `) - // Should have no active connections except for our current query expect(connectionsRes.data).toHaveLength(0) }, TIMEOUT * 1000 ) test( - 'query without timeout parameter should not have timeout', + 'absent queryTimeoutSecs uses default pool timeout', async () => { const query = `SELECT pg_sleep(${TIMEOUT + 10});` - // Execute a query that will sleep for 10 seconds without specifying timeout const res = await app.inject({ method: 'POST', path: '/query', @@ -52,22 +48,79 @@ describe('test query timeout', () => { }, }) - // Check that we get the proper timeout error response - expect(res.statusCode).toBe(408) // Request Timeout + expect(res.statusCode).toBe(408) expect(res.json()).toMatchObject({ error: expect.stringContaining('Query read timeout'), }) - // wait one second await new Promise((resolve) => setTimeout(resolve, 1000)) - // Verify that the connection has not been cleaned up sinice there is no statementTimetout + // No statementTimeout was set, so the PG-side query is still running const connectionsRes = await pgMeta.query(` SELECT * FROM pg_stat_activity where application_name = 'postgres-meta 0.0.0-automated' and query ILIKE '%${query}%'; `) - // Should have no active connections except for our current query expect(connectionsRes.data).toHaveLength(1) }, TIMEOUT * 1000 ) + + test( + 'queryTimeoutSecs=0 disables pool-level timeout', + async () => { + const sleepSecs = PG_QUERY_TIMEOUT + 1 + const res = await app.inject({ + method: 'POST', + path: '/query', + query: 'queryTimeoutSecs=0', + payload: { + query: `SELECT pg_sleep(${sleepSecs});`, + }, + }) + + expect(res.statusCode).toBe(200) + expect(res.json()).toEqual([{ pg_sleep: '' }]) + }, + (PG_QUERY_TIMEOUT + 5) * 1000 + ) + + test( + 'custom queryTimeoutSecs overrides default pool timeout', + async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', + query: `queryTimeoutSecs=${CUSTOM_QUERY_TIMEOUT}`, + payload: { + query: `SELECT pg_sleep(${PG_QUERY_TIMEOUT 
+ 1});`, + }, + }) + + expect(res.statusCode).toBe(408) + expect(res.json()).toMatchObject({ + error: expect.stringContaining('Query read timeout'), + }) + }, + (CUSTOM_QUERY_TIMEOUT + 5) * 1000 + ) + + test( + 'queryTimeoutSecs=0 with statementTimeoutSecs still enforces statement timeout', + async () => { + const res = await app.inject({ + method: 'POST', + path: '/query', + query: `queryTimeoutSecs=0&statementTimeoutSecs=${STATEMENT_TIMEOUT}`, + payload: { + query: `SELECT pg_sleep(${STATEMENT_TIMEOUT + 5});`, + }, + }) + + // Statement timeout fires (not pool timeout), producing a DatabaseError + expect(res.statusCode).toBe(400) + expect(res.json()).toMatchObject({ + error: expect.stringContaining('canceling statement due to statement timeout'), + }) + }, + (STATEMENT_TIMEOUT + 5) * 1000 + ) }) From 66066edaa0b2058def43b12b34440d7f756f3dce Mon Sep 17 00:00:00 2001 From: Nancy Sangani <9d.24.nancy.sangani@gmail.com> Date: Thu, 5 Mar 2026 23:37:28 +0530 Subject: [PATCH 72/72] fix: add Optional to nullable columns in Python Insert/Update TypedDict (#1046) Co-authored-by: Andrew Valleteau --- src/server/templates/python.ts | 5 +- test/server/typegen.ts | 88 +++++++++++++++++----------------- 2 files changed, 48 insertions(+), 45 deletions(-) diff --git a/src/server/templates/python.ts b/src/server/templates/python.ts index b112f089..0d00f475 100644 --- a/src/server/templates/python.ts +++ b/src/server/templates/python.ts @@ -278,7 +278,10 @@ class PythonTypedDictAttr implements Serializable { } serialize(): string { - const annotation = `Annotated[${this.py_type.serialize()}, Field(alias="${this.pg_name}")]` + const py_type = this.nullable + ? `Optional[${this.py_type.serialize()}]` + : this.py_type.serialize() + const annotation = `Annotated[${py_type}, Field(alias="${this.pg_name}")]` const rhs = this.not_required ? 
`NotRequired[${annotation}]` : annotation return ` ${this.name}: ${rhs}` } diff --git a/test/server/typegen.ts b/test/server/typegen.ts index 7bb7013b..ae693f50 100644 --- a/test/server/typegen.ts +++ b/test/server/typegen.ts @@ -6654,18 +6654,18 @@ test('typegen: python', async () => { user_uuid: Optional[uuid.UUID] = Field(alias="user_uuid") class PublicUsersInsert(TypedDict): - decimal: NotRequired[Annotated[float, Field(alias="decimal")]] + decimal: NotRequired[Annotated[Optional[float], Field(alias="decimal")]] id: NotRequired[Annotated[int, Field(alias="id")]] - name: NotRequired[Annotated[str, Field(alias="name")]] - status: NotRequired[Annotated[PublicUserStatus, Field(alias="status")]] - user_uuid: NotRequired[Annotated[uuid.UUID, Field(alias="user_uuid")]] + name: NotRequired[Annotated[Optional[str], Field(alias="name")]] + status: NotRequired[Annotated[Optional[PublicUserStatus], Field(alias="status")]] + user_uuid: NotRequired[Annotated[Optional[uuid.UUID], Field(alias="user_uuid")]] class PublicUsersUpdate(TypedDict): - decimal: NotRequired[Annotated[float, Field(alias="decimal")]] + decimal: NotRequired[Annotated[Optional[float], Field(alias="decimal")]] id: NotRequired[Annotated[int, Field(alias="id")]] - name: NotRequired[Annotated[str, Field(alias="name")]] - status: NotRequired[Annotated[PublicUserStatus, Field(alias="status")]] - user_uuid: NotRequired[Annotated[uuid.UUID, Field(alias="user_uuid")]] + name: NotRequired[Annotated[Optional[str], Field(alias="name")]] + status: NotRequired[Annotated[Optional[PublicUserStatus], Field(alias="status")]] + user_uuid: NotRequired[Annotated[Optional[uuid.UUID], Field(alias="user_uuid")]] class PublicTodos(BaseModel): details: Optional[str] = Field(alias="details") @@ -6673,12 +6673,12 @@ test('typegen: python', async () => { user_id: int = Field(alias="user-id") class PublicTodosInsert(TypedDict): - details: NotRequired[Annotated[str, Field(alias="details")]] + details: 
NotRequired[Annotated[Optional[str], Field(alias="details")]] id: NotRequired[Annotated[int, Field(alias="id")]] user_id: Annotated[int, Field(alias="user-id")] class PublicTodosUpdate(TypedDict): - details: NotRequired[Annotated[str, Field(alias="details")]] + details: NotRequired[Annotated[Optional[str], Field(alias="details")]] id: NotRequired[Annotated[int, Field(alias="id")]] user_id: NotRequired[Annotated[int, Field(alias="user-id")]] @@ -6689,27 +6689,27 @@ test('typegen: python', async () => { user_id: Optional[int] = Field(alias="user_id") class PublicUsersAuditInsert(TypedDict): - created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + created_at: NotRequired[Annotated[Optional[datetime.datetime], Field(alias="created_at")]] id: NotRequired[Annotated[int, Field(alias="id")]] - previous_value: NotRequired[Annotated[Json[Any], Field(alias="previous_value")]] - user_id: NotRequired[Annotated[int, Field(alias="user_id")]] + previous_value: NotRequired[Annotated[Optional[Json[Any]], Field(alias="previous_value")]] + user_id: NotRequired[Annotated[Optional[int], Field(alias="user_id")]] class PublicUsersAuditUpdate(TypedDict): - created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] + created_at: NotRequired[Annotated[Optional[datetime.datetime], Field(alias="created_at")]] id: NotRequired[Annotated[int, Field(alias="id")]] - previous_value: NotRequired[Annotated[Json[Any], Field(alias="previous_value")]] - user_id: NotRequired[Annotated[int, Field(alias="user_id")]] + previous_value: NotRequired[Annotated[Optional[Json[Any]], Field(alias="previous_value")]] + user_id: NotRequired[Annotated[Optional[int], Field(alias="user_id")]] class PublicUserDetails(BaseModel): details: Optional[str] = Field(alias="details") user_id: int = Field(alias="user_id") class PublicUserDetailsInsert(TypedDict): - details: NotRequired[Annotated[str, Field(alias="details")]] + details: NotRequired[Annotated[Optional[str], 
Field(alias="details")]] user_id: Annotated[int, Field(alias="user_id")] class PublicUserDetailsUpdate(TypedDict): - details: NotRequired[Annotated[str, Field(alias="details")]] + details: NotRequired[Annotated[Optional[str], Field(alias="details")]] user_id: NotRequired[Annotated[int, Field(alias="user_id")]] class PublicEmpty(BaseModel): @@ -6726,23 +6726,23 @@ test('typegen: python', async () => { col2: Optional[PublicAView] = Field(alias="col2") class PublicTableWithOtherTablesRowTypeInsert(TypedDict): - col1: NotRequired[Annotated[PublicUserDetails, Field(alias="col1")]] - col2: NotRequired[Annotated[PublicAView, Field(alias="col2")]] + col1: NotRequired[Annotated[Optional[PublicUserDetails], Field(alias="col1")]] + col2: NotRequired[Annotated[Optional[PublicAView], Field(alias="col2")]] class PublicTableWithOtherTablesRowTypeUpdate(TypedDict): - col1: NotRequired[Annotated[PublicUserDetails, Field(alias="col1")]] - col2: NotRequired[Annotated[PublicAView, Field(alias="col2")]] + col1: NotRequired[Annotated[Optional[PublicUserDetails], Field(alias="col1")]] + col2: NotRequired[Annotated[Optional[PublicAView], Field(alias="col2")]] class PublicTableWithPrimaryKeyOtherThanId(BaseModel): name: Optional[str] = Field(alias="name") other_id: int = Field(alias="other_id") class PublicTableWithPrimaryKeyOtherThanIdInsert(TypedDict): - name: NotRequired[Annotated[str, Field(alias="name")]] + name: NotRequired[Annotated[Optional[str], Field(alias="name")]] other_id: NotRequired[Annotated[int, Field(alias="other_id")]] class PublicTableWithPrimaryKeyOtherThanIdUpdate(TypedDict): - name: NotRequired[Annotated[str, Field(alias="name")]] + name: NotRequired[Annotated[Optional[str], Field(alias="name")]] other_id: NotRequired[Annotated[int, Field(alias="other_id")]] class PublicEvents(BaseModel): @@ -6753,14 +6753,14 @@ test('typegen: python', async () => { class PublicEventsInsert(TypedDict): created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - 
data: NotRequired[Annotated[Json[Any], Field(alias="data")]] - event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + data: NotRequired[Annotated[Optional[Json[Any]], Field(alias="data")]] + event_type: NotRequired[Annotated[Optional[str], Field(alias="event_type")]] id: NotRequired[Annotated[int, Field(alias="id")]] class PublicEventsUpdate(TypedDict): created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - data: NotRequired[Annotated[Json[Any], Field(alias="data")]] - event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + data: NotRequired[Annotated[Optional[Json[Any]], Field(alias="data")]] + event_type: NotRequired[Annotated[Optional[str], Field(alias="event_type")]] id: NotRequired[Annotated[int, Field(alias="id")]] class PublicEvents2024(BaseModel): @@ -6771,14 +6771,14 @@ test('typegen: python', async () => { class PublicEvents2024Insert(TypedDict): created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - data: NotRequired[Annotated[Json[Any], Field(alias="data")]] - event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + data: NotRequired[Annotated[Optional[Json[Any]], Field(alias="data")]] + event_type: NotRequired[Annotated[Optional[str], Field(alias="event_type")]] id: Annotated[int, Field(alias="id")] class PublicEvents2024Update(TypedDict): created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - data: NotRequired[Annotated[Json[Any], Field(alias="data")]] - event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + data: NotRequired[Annotated[Optional[Json[Any]], Field(alias="data")]] + event_type: NotRequired[Annotated[Optional[str], Field(alias="event_type")]] id: NotRequired[Annotated[int, Field(alias="id")]] class PublicEvents2025(BaseModel): @@ -6789,14 +6789,14 @@ test('typegen: python', async () => { class PublicEvents2025Insert(TypedDict): created_at: NotRequired[Annotated[datetime.datetime, 
Field(alias="created_at")]] - data: NotRequired[Annotated[Json[Any], Field(alias="data")]] - event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + data: NotRequired[Annotated[Optional[Json[Any]], Field(alias="data")]] + event_type: NotRequired[Annotated[Optional[str], Field(alias="event_type")]] id: Annotated[int, Field(alias="id")] class PublicEvents2025Update(TypedDict): created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] - data: NotRequired[Annotated[Json[Any], Field(alias="data")]] - event_type: NotRequired[Annotated[str, Field(alias="event_type")]] + data: NotRequired[Annotated[Optional[Json[Any]], Field(alias="data")]] + event_type: NotRequired[Annotated[Optional[str], Field(alias="event_type")]] id: NotRequired[Annotated[int, Field(alias="id")]] class PublicIntervalTest(BaseModel): @@ -6805,12 +6805,12 @@ test('typegen: python', async () => { id: int = Field(alias="id") class PublicIntervalTestInsert(TypedDict): - duration_optional: NotRequired[Annotated[str, Field(alias="duration_optional")]] + duration_optional: NotRequired[Annotated[Optional[str], Field(alias="duration_optional")]] duration_required: Annotated[str, Field(alias="duration_required")] id: NotRequired[Annotated[int, Field(alias="id")]] class PublicIntervalTestUpdate(TypedDict): - duration_optional: NotRequired[Annotated[str, Field(alias="duration_optional")]] + duration_optional: NotRequired[Annotated[Optional[str], Field(alias="duration_optional")]] duration_required: NotRequired[Annotated[str, Field(alias="duration_required")]] id: NotRequired[Annotated[int, Field(alias="id")]] @@ -6835,20 +6835,20 @@ test('typegen: python', async () => { status: Optional[PublicMemeStatus] = Field(alias="status") class PublicMemesInsert(TypedDict): - category: NotRequired[Annotated[int, Field(alias="category")]] + category: NotRequired[Annotated[Optional[int], Field(alias="category")]] created_at: Annotated[datetime.datetime, Field(alias="created_at")] id: 
NotRequired[Annotated[int, Field(alias="id")]] - metadata: NotRequired[Annotated[Json[Any], Field(alias="metadata")]] + metadata: NotRequired[Annotated[Optional[Json[Any]], Field(alias="metadata")]] name: Annotated[str, Field(alias="name")] - status: NotRequired[Annotated[PublicMemeStatus, Field(alias="status")]] + status: NotRequired[Annotated[Optional[PublicMemeStatus], Field(alias="status")]] class PublicMemesUpdate(TypedDict): - category: NotRequired[Annotated[int, Field(alias="category")]] + category: NotRequired[Annotated[Optional[int], Field(alias="category")]] created_at: NotRequired[Annotated[datetime.datetime, Field(alias="created_at")]] id: NotRequired[Annotated[int, Field(alias="id")]] - metadata: NotRequired[Annotated[Json[Any], Field(alias="metadata")]] + metadata: NotRequired[Annotated[Optional[Json[Any]], Field(alias="metadata")]] name: NotRequired[Annotated[str, Field(alias="name")]] - status: NotRequired[Annotated[PublicMemeStatus, Field(alias="status")]] + status: NotRequired[Annotated[Optional[PublicMemeStatus], Field(alias="status")]] class PublicAView(BaseModel): id: Optional[int] = Field(alias="id")