diff --git a/changelogs/drizzle-kit/0.30.3.md b/changelogs/drizzle-kit/0.30.3.md new file mode 100644 index 000000000..de64c7324 --- /dev/null +++ b/changelogs/drizzle-kit/0.30.3.md @@ -0,0 +1,7 @@ +# SingleStore `push` and `generate` improvements + +As SingleStore did not support certain DDL statements before this release, you might encounter an error indicating that some schema changes cannot be applied due to a database issue. Starting from this version, drizzle-kit will detect such cases and initiate table recreation with data transfer between the tables + +# Bug fixes + +- [[BUG] If the index name is the same as the generated name, it will be empty and a type error will occur](https://github.com/drizzle-team/drizzle-orm/issues/3420) \ No newline at end of file diff --git a/changelogs/drizzle-orm/0.39.0.md b/changelogs/drizzle-orm/0.39.0.md new file mode 100644 index 000000000..b22df9b4e --- /dev/null +++ b/changelogs/drizzle-orm/0.39.0.md @@ -0,0 +1,154 @@ +# New features + +## Bun SQL driver support +You can now use the new Bun SQL driver released in Bun v1.2.0 with Drizzle + +```ts +import { drizzle } from 'drizzle-orm/bun-sql'; + +const db = drizzle(process.env.PG_DB_URL!); + +const result = await db.select().from(...); +``` + +or you can use a Bun SQL instance + +```ts +import { drizzle } from 'drizzle-orm/bun-sql'; +import { SQL } from 'bun'; + +const client = new SQL(process.env.PG_DB_URL!); +const db = drizzle({ client }); + +const result = await db.select().from(...); +``` + +Current Limitations: + +- `json` and `jsonb` inserts and selects currently perform an additional `JSON.stringify` on the Bun SQL side. Once this is removed, they should work properly. You can always use custom types and redefine the mappers to and from the database. +- `datetime`, `date`, and `timestamp` will not work properly when using `mode: string` in Drizzle. This is due to Bun's API limitations, which prevent custom parsers for queries. 
As a result, Drizzle cannot control the response sent from Bun SQL to Drizzle. Once this feature is added to Bun SQL, it should work as expected. +- `array` types currently have issues in Bun SQL. + +> You can check more in [Bun docs](https://bun.sh/docs/api/sql) +> +> You can check more getting started examples in [Drizzle docs](https://orm.drizzle.team/docs/get-started/bun-sql-new) + +## WITH now supports INSERT, UPDATE, DELETE and raw sql template + +**`with` and `insert`** + +```ts +const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const sq = db.$with('sq').as( + db.insert(users).values({ name: 'John' }).returning(), +); + +const result = await db.with(sq).select().from(sq); +``` + +**`with` and `update`** + +```ts +const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const sq = db.$with('sq').as( + db.update(users).set({ age: 25 }).where(eq(users.name, 'John')).returning(), +); +const result = await db.with(sq).select().from(sq); +``` + +**`with` and `delete`** + +```ts +const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const sq = db.$with('sq').as( + db.delete(users).where(eq(users.name, 'John')).returning(), +); + +const result = await db.with(sq).select().from(sq); +``` + +**`with` and `sql`** + +```ts +const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const sq = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, +}).as(sql`select * from ${users} where ${users.name} = 'John'`); + +const result = await db.with(sq).select().from(sq); +``` + +## New tables in `/neon` import + +In this release you can use `neon_identity` schema and `users_sync` table inside this schema by just importing it from `/neon` + +```ts +// "drizzle-orm/neon" +const neonIdentitySchema = pgSchema('neon_identity'); + +/** + * Table schema of the 
`users_sync` table used by Neon Identity. + * This table automatically synchronizes and stores user data from external authentication providers. + * + * @schema neon_identity + * @table users_sync + */ +export const usersSync = neonIdentitySchema.table('users_sync', { + rawJson: jsonb('raw_json').notNull(), + id: text().primaryKey().notNull(), + name: text(), + email: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }), + deletedAt: timestamp('deleted_at', { withTimezone: true, mode: 'string' }), +}); +``` + +# Utils and small improvements + +## `getViewName` util function + +```ts +import { getViewName } from 'drizzle-orm/sql' + +export const user = pgTable("user", { + id: serial(), + name: text(), + email: text(), +}); + +export const userView = pgView("user_view").as((qb) => qb.select().from(user)); + +const viewName = getViewName(userView) +``` + +# Bug fixed and GitHub issue closed + +- [[FEATURE]: allow INSERT in CTEs (WITH clauses)](https://github.com/drizzle-team/drizzle-orm/issues/2078) +- [[FEATURE]: Support Raw SQL in CTE Query Builder](https://github.com/drizzle-team/drizzle-orm/issues/2168) +- [[FEATURE]: include pre-defined database objects related to Neon Identity in drizzle-orm](https://github.com/drizzle-team/drizzle-orm/issues/3959) +- [[BUG]: $count is undefined on withReplicas](https://github.com/drizzle-team/drizzle-orm/issues/3951) +- [[FEATURE]: get[Materialized]ViewName, ie getTableName but for (materialized) views.](https://github.com/drizzle-team/drizzle-orm/issues/3946) +- [[BUG]: $count API error with vercel-postgres](https://github.com/drizzle-team/drizzle-orm/issues/3710) +- [[BUG]: Cannot use schema.coerce on refining drizzle-zod types](https://github.com/drizzle-team/drizzle-orm/issues/3842) +- [[FEATURE]: Type Coercion in drizzle-zod](https://github.com/drizzle-team/drizzle-orm/issues/776) +- [[BUG]: The inferred type of X cannot be named without a reference to 
../../../../../node_modules/drizzle-zod/schema.types.internal.mjs](https://github.com/drizzle-team/drizzle-orm/issues/3732) +- [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869) \ No newline at end of file diff --git a/changelogs/drizzle-typebox/0.3.0.md b/changelogs/drizzle-typebox/0.3.0.md new file mode 100644 index 000000000..880923032 --- /dev/null +++ b/changelogs/drizzle-typebox/0.3.0.md @@ -0,0 +1,4 @@ +# Bug fixed and GitHub issue closed + +- [[BUG]: The inferred type of X cannot be named without a reference to ../../../../../node_modules/drizzle-zod/schema.types.internal.mjs](https://github.com/drizzle-team/drizzle-orm/issues/3732) +- [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869) \ No newline at end of file diff --git a/changelogs/drizzle-valibot/0.4.0.md b/changelogs/drizzle-valibot/0.4.0.md new file mode 100644 index 000000000..880923032 --- /dev/null +++ b/changelogs/drizzle-valibot/0.4.0.md @@ -0,0 +1,4 @@ +# Bug fixed and GitHub issue closed + +- [[BUG]: The inferred type of X cannot be named without a reference to ../../../../../node_modules/drizzle-zod/schema.types.internal.mjs](https://github.com/drizzle-team/drizzle-orm/issues/3732) +- [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869) \ No newline at end of file diff --git a/changelogs/drizzle-zod/0.7.0.md b/changelogs/drizzle-zod/0.7.0.md new file mode 100644 index 000000000..b1d1738b4 --- /dev/null +++ b/changelogs/drizzle-zod/0.7.0.md @@ -0,0 +1,37 @@ +# Improvements + +## Added type coercion support + +**Use case: Type coercion** + +```ts copy +import { pgTable, timestamp } from 'drizzle-orm/pg-core'; +import { createSchemaFactory } from 'drizzle-zod'; +import { z } from 'zod'; + +const users = pgTable('users', { + ..., + createdAt: timestamp().notNull() +}); + +const { 
createInsertSchema } = createSchemaFactory({ + // This configuration will only coerce dates. Set `coerce` to `true` to coerce all data types or specify others + coerce: { + date: true + } +}); + +const userInsertSchema = createInsertSchema(users); +// The above is the same as this: +const userInsertSchema = z.object({ + ..., + createdAt: z.coerce.date() +}); +``` + +# Bug fixed and GitHub issue closed + +- [[BUG]: Cannot use schema.coerce on refining drizzle-zod types](https://github.com/drizzle-team/drizzle-orm/issues/3842) +- [[FEATURE]: Type Coercion in drizzle-zod](https://github.com/drizzle-team/drizzle-orm/issues/776) +- [[BUG]: The inferred type of X cannot be named without a reference to ../../../../../node_modules/drizzle-zod/schema.types.internal.mjs](https://github.com/drizzle-team/drizzle-orm/issues/3732) +- [[BUG]: drizzle-zod excessively deep and possibly infinite types](https://github.com/drizzle-team/drizzle-orm/issues/3869) diff --git a/drizzle-kit/package.json b/drizzle-kit/package.json index 6e2fec181..9d4dd3824 100644 --- a/drizzle-kit/package.json +++ b/drizzle-kit/package.json @@ -1,6 +1,6 @@ { "name": "drizzle-kit", - "version": "0.30.2", + "version": "0.30.3", "homepage": "https://orm.drizzle.team", "keywords": [ "drizzle", diff --git a/drizzle-kit/src/api.ts b/drizzle-kit/src/api.ts index 18107bd34..3af67a042 100644 --- a/drizzle-kit/src/api.ts +++ b/drizzle-kit/src/api.ts @@ -474,6 +474,7 @@ export const pushSingleStoreSchema = async ( db, statements, validatedCur, + validatedPrev, ); return { diff --git a/drizzle-kit/src/cli/commands/push.ts b/drizzle-kit/src/cli/commands/push.ts index 0c82fe026..e517e4b0d 100644 --- a/drizzle-kit/src/cli/commands/push.ts +++ b/drizzle-kit/src/cli/commands/push.ts @@ -208,34 +208,16 @@ export const singlestorePush = async ( db, filteredStatements, statements.validatedCur, + statements.validatedPrev, ); - const filteredSqlStatements = fromJson(filteredStatements, 'singlestore'); - - const 
uniqueSqlStatementsToExecute: string[] = []; - statementsToExecute.forEach((ss) => { - if (!uniqueSqlStatementsToExecute.includes(ss)) { - uniqueSqlStatementsToExecute.push(ss); - } - }); - const uniqueFilteredSqlStatements: string[] = []; - filteredSqlStatements.forEach((ss) => { - if (!uniqueFilteredSqlStatements.includes(ss)) { - uniqueFilteredSqlStatements.push(ss); - } - }); - if (verbose) { console.log(); console.log( withStyle.warning('You are about to execute current statements:'), ); console.log(); - console.log( - [...uniqueSqlStatementsToExecute, ...uniqueFilteredSqlStatements] - .map((s) => chalk.blue(s)) - .join('\n'), - ); + console.log(statementsToExecute.map((s) => chalk.blue(s)).join('\n')); console.log(); } @@ -289,13 +271,10 @@ export const singlestorePush = async ( } } - for (const dStmnt of uniqueSqlStatementsToExecute) { + for (const dStmnt of statementsToExecute) { await db.query(dStmnt); } - for (const statement of uniqueFilteredSqlStatements) { - await db.query(statement); - } if (filteredStatements.length > 0) { render(`[${chalk.green('✓')}] Changes applied`); } else { diff --git a/drizzle-kit/src/cli/commands/singlestorePushUtils.ts b/drizzle-kit/src/cli/commands/singlestorePushUtils.ts index 80fad9b2d..5a550a239 100644 --- a/drizzle-kit/src/cli/commands/singlestorePushUtils.ts +++ b/drizzle-kit/src/cli/commands/singlestorePushUtils.ts @@ -1,9 +1,11 @@ import chalk from 'chalk'; import { render } from 'hanji'; +import { fromJson } from 'src/sqlgenerator'; import { TypeOf } from 'zod'; import { JsonAlterColumnTypeStatement, JsonStatement } from '../../jsonStatements'; -import { singlestoreSchema, SingleStoreSquasher } from '../../serializer/singlestoreSchema'; -import type { DB } from '../../utils'; +import { Column, SingleStoreSchemaSquashed, SingleStoreSquasher } from '../../serializer/singlestoreSchema'; +import { singlestoreSchema } from '../../serializer/singlestoreSchema'; +import { type DB, findAddedAndRemoved } from '../../utils'; 
import { Select } from '../selector-ui'; import { withStyle } from '../validations/outputs'; @@ -104,10 +106,30 @@ export const filterStatements = ( }); }; +export function findColumnTypeAlternations( + columns1: Record, + columns2: Record, +): string[] { + const changes: string[] = []; + + for (const key in columns1) { + if (columns1.hasOwnProperty(key) && columns2.hasOwnProperty(key)) { + const col1 = columns1[key]; + const col2 = columns2[key]; + if (col1.type !== col2.type) { + changes.push(col2.name); + } + } + } + + return changes; +} + export const logSuggestionsAndReturn = async ( db: DB, statements: JsonStatement[], json2: TypeOf, + json1: TypeOf, ) => { let shouldAskForApprove = false; const statementsToExecute: string[] = []; @@ -337,6 +359,88 @@ export const logSuggestionsAndReturn = async ( shouldAskForApprove = true; } } + } else if (statement.type === 'singlestore_recreate_table') { + const tableName = statement.tableName; + + const prevColumns = json1.tables[tableName].columns; + const currentColumns = json2.tables[tableName].columns; + const { removedColumns, addedColumns } = findAddedAndRemoved( + Object.keys(prevColumns), + Object.keys(currentColumns), + ); + + if (removedColumns.length) { + for (const removedColumn of removedColumns) { + const res = await db.query<{ count: string }>( + `select count(\`${tableName}\`.\`${removedColumn}\`) as count from \`${tableName}\``, + ); + + const count = Number(res[0].count); + if (count > 0) { + infoToPrint.push( + `· You're about to delete ${ + chalk.underline( + removedColumn, + ) + } column in ${tableName} table with ${count} items`, + ); + columnsToRemove.push(removedColumn); + shouldAskForApprove = true; + } + } + } + + if (addedColumns.length) { + for (const addedColumn of addedColumns) { + const [res] = await db.query<{ count: string }>( + `select count(*) as count from \`${tableName}\``, + ); + + const columnConf = json2.tables[tableName].columns[addedColumn]; + + const count = Number(res.count); + 
if (count > 0 && columnConf.notNull && !columnConf.default) { + infoToPrint.push( + `· You're about to add not-null ${ + chalk.underline( + addedColumn, + ) + } column without default value to table, which contains ${count} items`, + ); + shouldAskForApprove = true; + tablesToTruncate.push(tableName); + + statementsToExecute.push(`TRUNCATE TABLE \`${tableName}\`;`); + } + } + } + + const columnWithChangedType = findColumnTypeAlternations(prevColumns, currentColumns); + for (const column of columnWithChangedType) { + const [res] = await db.query<{ count: string }>( + `select count(*) as count from \`${tableName}\` WHERE \`${tableName}\`.\`${column}\` IS NOT NULL;`, + ); + + const count = Number(res.count); + if (count > 0) { + infoToPrint.push( + `· You're about to recreate ${chalk.underline(tableName)} table with data type changing for ${ + chalk.underline( + column, + ) + } column, which contains ${count} items`, + ); + shouldAskForApprove = true; + tablesToTruncate.push(tableName); + + statementsToExecute.push(`TRUNCATE TABLE \`${tableName}\`;`); + } + } + } + + const stmnt = fromJson([statement], 'singlestore', 'push'); + if (typeof stmnt !== 'undefined') { + statementsToExecute.push(...stmnt); } } diff --git a/drizzle-kit/src/introspect-mysql.ts b/drizzle-kit/src/introspect-mysql.ts index 005a2af42..72d94a6ac 100644 --- a/drizzle-kit/src/introspect-mysql.ts +++ b/drizzle-kit/src/introspect-mysql.ts @@ -13,7 +13,6 @@ import { PrimaryKey, UniqueConstraint, } from './serializer/mysqlSchema'; -import { indexName } from './serializer/mysqlSerializer'; import { unescapeSingleQuotes } from './utils'; const mysqlImportsList = new Set([ @@ -924,12 +923,9 @@ const createTableIndexes = ( idxKey = casing(idxKey); - const indexGeneratedName = indexName(tableName, it.columns); - const escapedIndexName = indexGeneratedName === it.name ? '' : `"${it.name}"`; - - statement += `\n\t`; + statement += `\t\t${idxKey}: `; statement += it.isUnique ? 
'uniqueIndex(' : 'index('; - statement += `${escapedIndexName})`; + statement += `"${it.name}")`; statement += `.on(${ it.columns .map((it) => `table.${casing(it)}`) diff --git a/drizzle-kit/src/jsonStatements.ts b/drizzle-kit/src/jsonStatements.ts index f64020f5a..b70d01b99 100644 --- a/drizzle-kit/src/jsonStatements.ts +++ b/drizzle-kit/src/jsonStatements.ts @@ -73,6 +73,14 @@ export interface JsonRecreateTableStatement { checkConstraints: string[]; } +export interface JsonRecreateSingleStoreTableStatement { + type: 'singlestore_recreate_table'; + tableName: string; + columns: Column[]; + compositePKs: string[]; + uniqueConstraints?: string[]; +} + export interface JsonDropTableStatement { type: 'drop_table'; tableName: string; @@ -794,6 +802,7 @@ export type JsonAlterColumnStatement = | JsonAlterColumnDropIdentityStatement; export type JsonStatement = + | JsonRecreateSingleStoreTableStatement | JsonRecreateTableStatement | JsonAlterColumnStatement | JsonCreateTableStatement diff --git a/drizzle-kit/src/snapshotsDiffer.ts b/drizzle-kit/src/snapshotsDiffer.ts index 2db4ad02c..0fd803288 100644 --- a/drizzle-kit/src/snapshotsDiffer.ts +++ b/drizzle-kit/src/snapshotsDiffer.ts @@ -141,7 +141,7 @@ import { } from './serializer/pgSchema'; import { SingleStoreSchema, SingleStoreSchemaSquashed, SingleStoreSquasher } from './serializer/singlestoreSchema'; import { SQLiteSchema, SQLiteSchemaSquashed, SQLiteSquasher, View as SqliteView } from './serializer/sqliteSchema'; -import { libSQLCombineStatements, sqliteCombineStatements } from './statementCombiner'; +import { libSQLCombineStatements, singleStoreCombineStatements, sqliteCombineStatements } from './statementCombiner'; import { copy, prepareMigrationMeta } from './utils'; const makeChanged = (schema: T) => { @@ -2875,9 +2875,8 @@ export const applySingleStoreSnapshotsDiff = async ( return [viewKey, viewValue]; }, ); - */ - const diffResult = applyJsonDiff(tablesPatchedSnap1, json2); // replace tablesPatchedSnap1 with 
viewsPatchedSnap1 + const diffResult = applyJsonDiff(columnsPatchedSnap1, json2); // replace columnsPatchedSnap1 with viewsPatchedSnap1 const typedResult: DiffResultSingleStore = diffResultSchemeSingleStore.parse(diffResult); @@ -3177,7 +3176,8 @@ export const applySingleStoreSnapshotsDiff = async ( jsonStatements.push(...jsonAlteredUniqueConstraints); - const sqlStatements = fromJson(jsonStatements, 'singlestore'); + const combinedJsonStatements = singleStoreCombineStatements(jsonStatements, json2); + const sqlStatements = fromJson(combinedJsonStatements, 'singlestore'); const uniqueSqlStatements: string[] = []; sqlStatements.forEach((ss) => { @@ -3193,7 +3193,7 @@ export const applySingleStoreSnapshotsDiff = async ( const _meta = prepareMigrationMeta([], rTables, rColumns); return { - statements: jsonStatements, + statements: combinedJsonStatements, sqlStatements: uniqueSqlStatements, _meta, }; diff --git a/drizzle-kit/src/sqlgenerator.ts b/drizzle-kit/src/sqlgenerator.ts index 6d3034b61..4843c6c0c 100644 --- a/drizzle-kit/src/sqlgenerator.ts +++ b/drizzle-kit/src/sqlgenerator.ts @@ -69,6 +69,7 @@ import { JsonMoveEnumStatement, JsonMoveSequenceStatement, JsonPgCreateIndexStatement, + JsonRecreateSingleStoreTableStatement, JsonRecreateTableStatement, JsonRenameColumnStatement, JsonRenameEnumStatement, @@ -574,7 +575,7 @@ class MySqlCreateTableConvertor extends Convertor { return statement; } } -class SingleStoreCreateTableConvertor extends Convertor { +export class SingleStoreCreateTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_table' && dialect === 'singlestore'; } @@ -618,7 +619,7 @@ class SingleStoreCreateTableConvertor extends Convertor { if (typeof compositePKs !== 'undefined' && compositePKs.length > 0) { statement += ',\n'; const compositePK = SingleStoreSquasher.unsquashPK(compositePKs[0]); - statement += `\tCONSTRAINT \`${st.compositePkName}\` PRIMARY 
KEY(\`${compositePK.columns.join(`\`,\``)}\`)`; + statement += `\tCONSTRAINT \`${compositePK.name}\` PRIMARY KEY(\`${compositePK.columns.join(`\`,\``)}\`)`; } if ( @@ -1531,7 +1532,7 @@ class MySQLDropTableConvertor extends Convertor { } } -class SingleStoreDropTableConvertor extends Convertor { +export class SingleStoreDropTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'drop_table' && dialect === 'singlestore'; } @@ -1590,14 +1591,14 @@ class MySqlRenameTableConvertor extends Convertor { } } -class SingleStoreRenameTableConvertor extends Convertor { +export class SingleStoreRenameTableConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'rename_table' && dialect === 'singlestore'; } convert(statement: JsonRenameTableStatement) { const { tableNameFrom, tableNameTo } = statement; - return `RENAME TABLE \`${tableNameFrom}\` TO \`${tableNameTo}\`;`; + return `ALTER TABLE \`${tableNameFrom}\` RENAME TO \`${tableNameTo}\`;`; } } @@ -1641,7 +1642,7 @@ class SingleStoreAlterTableRenameColumnConvertor extends Convertor { convert(statement: JsonRenameColumnStatement) { const { tableName, oldColumnName, newColumnName } = statement; - return `ALTER TABLE \`${tableName}\` RENAME COLUMN \`${oldColumnName}\` TO \`${newColumnName}\`;`; + return `ALTER TABLE \`${tableName}\` CHANGE \`${oldColumnName}\` \`${newColumnName}\`;`; } } @@ -3499,7 +3500,7 @@ class CreateMySqlIndexConvertor extends Convertor { } } -class CreateSingleStoreIndexConvertor extends Convertor { +export class CreateSingleStoreIndexConvertor extends Convertor { can(statement: JsonStatement, dialect: Dialect): boolean { return statement.type === 'create_index' && dialect === 'singlestore'; } @@ -3816,10 +3817,68 @@ class LibSQLRecreateTableConvertor extends Convertor { } } +class SingleStoreRecreateTableConvertor extends Convertor { + can(statement: JsonStatement, dialect: 
Dialect): boolean { + return ( + statement.type === 'singlestore_recreate_table' + && dialect === 'singlestore' + ); + } + + convert(statement: JsonRecreateSingleStoreTableStatement): string[] { + const { tableName, columns, compositePKs, uniqueConstraints } = statement; + + const columnNames = columns.map((it) => `\`${it.name}\``).join(', '); + const newTableName = `__new_${tableName}`; + + const sqlStatements: string[] = []; + + // create new table + sqlStatements.push( + new SingleStoreCreateTableConvertor().convert({ + type: 'create_table', + tableName: newTableName, + columns, + compositePKs, + uniqueConstraints, + schema: '', + }), + ); + + // migrate data + sqlStatements.push( + `INSERT INTO \`${newTableName}\`(${columnNames}) SELECT ${columnNames} FROM \`${tableName}\`;`, + ); + + // drop table + sqlStatements.push( + new SingleStoreDropTableConvertor().convert({ + type: 'drop_table', + tableName: tableName, + schema: '', + }), + ); + + // rename table + sqlStatements.push( + new SingleStoreRenameTableConvertor().convert({ + fromSchema: '', + tableNameFrom: newTableName, + tableNameTo: tableName, + toSchema: '', + type: 'rename_table', + }), + ); + + return sqlStatements; + } +} + const convertors: Convertor[] = []; convertors.push(new PgCreateTableConvertor()); convertors.push(new MySqlCreateTableConvertor()); convertors.push(new SingleStoreCreateTableConvertor()); +convertors.push(new SingleStoreRecreateTableConvertor()); convertors.push(new SQLiteCreateTableConvertor()); convertors.push(new SQLiteRecreateTableConvertor()); convertors.push(new LibSQLRecreateTableConvertor()); diff --git a/drizzle-kit/src/statementCombiner.ts b/drizzle-kit/src/statementCombiner.ts index f3ca9789c..7d84a2aa8 100644 --- a/drizzle-kit/src/statementCombiner.ts +++ b/drizzle-kit/src/statementCombiner.ts @@ -4,6 +4,7 @@ import { JsonStatement, prepareCreateIndexesJson, } from './jsonStatements'; +import { SingleStoreSchemaSquashed } from './serializer/singlestoreSchema'; import 
{ SQLiteSchemaSquashed, SQLiteSquasher } from './serializer/sqliteSchema'; export const prepareLibSQLRecreateTable = ( @@ -444,3 +445,153 @@ export const sqliteCombineStatements = ( return [...renamedTables, ...renamedColumns, ...rest]; }; + +export const prepareSingleStoreRecreateTable = ( + table: SingleStoreSchemaSquashed['tables'][keyof SingleStoreSchemaSquashed['tables']], +): JsonStatement[] => { + const { name, columns, uniqueConstraints, indexes, compositePrimaryKeys } = table; + + const composites: string[] = Object.values(compositePrimaryKeys); + + const statements: JsonStatement[] = [ + { + type: 'singlestore_recreate_table', + tableName: name, + columns: Object.values(columns), + compositePKs: composites, + uniqueConstraints: Object.values(uniqueConstraints), + }, + ]; + + if (Object.keys(indexes).length) { + statements.push(...prepareCreateIndexesJson(name, '', indexes)); + } + return statements; +}; + +export const singleStoreCombineStatements = ( + statements: JsonStatement[], + json2: SingleStoreSchemaSquashed, +) => { + const newStatements: Record = {}; + + for (const statement of statements) { + if ( + statement.type === 'alter_table_alter_column_set_type' + || statement.type === 'alter_table_alter_column_set_notnull' + || statement.type === 'alter_table_alter_column_drop_notnull' + || statement.type === 'alter_table_alter_column_drop_autoincrement' + || statement.type === 'alter_table_alter_column_set_autoincrement' + || statement.type === 'alter_table_alter_column_drop_pk' + || statement.type === 'alter_table_alter_column_set_pk' + || statement.type === 'create_composite_pk' + || statement.type === 'alter_composite_pk' + || statement.type === 'delete_composite_pk' + ) { + const tableName = statement.tableName; + + const statementsForTable = newStatements[tableName]; + + if (!statementsForTable) { + newStatements[tableName] = prepareSingleStoreRecreateTable(json2.tables[tableName]); + continue; + } + + if (!statementsForTable.some(({ type }) => 
type === 'recreate_table')) { + const wasRename = statementsForTable.some(({ type }) => + type === 'rename_table' || type === 'alter_table_rename_column' + ); + const preparedStatements = prepareSingleStoreRecreateTable(json2.tables[tableName]); + + if (wasRename) { + newStatements[tableName].push(...preparedStatements); + } else { + newStatements[tableName] = preparedStatements; + } + + continue; + } + + continue; + } + + if ( + (statement.type === 'alter_table_alter_column_drop_default' + || statement.type === 'alter_table_alter_column_set_default') && statement.columnNotNull + ) { + const tableName = statement.tableName; + + const statementsForTable = newStatements[tableName]; + + if (!statementsForTable) { + newStatements[tableName] = prepareSingleStoreRecreateTable(json2.tables[tableName]); + continue; + } + + if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { + const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); + const preparedStatements = prepareSingleStoreRecreateTable(json2.tables[tableName]); + + if (wasRename) { + newStatements[tableName].push(...preparedStatements); + } else { + newStatements[tableName] = preparedStatements; + } + + continue; + } + + continue; + } + + if (statement.type === 'alter_table_add_column' && statement.column.primaryKey) { + const tableName = statement.tableName; + + const statementsForTable = newStatements[tableName]; + + if (!statementsForTable) { + newStatements[tableName] = prepareSingleStoreRecreateTable(json2.tables[tableName]); + continue; + } + + if (!statementsForTable.some(({ type }) => type === 'recreate_table')) { + const wasRename = statementsForTable.some(({ type }) => type === 'rename_table'); + const preparedStatements = prepareSingleStoreRecreateTable(json2.tables[tableName]); + + if (wasRename) { + newStatements[tableName].push(...preparedStatements); + } else { + newStatements[tableName] = preparedStatements; + } + + continue; + } + + continue; + } + + 
const tableName = statement.type === 'rename_table' + ? statement.tableNameTo + : (statement as { tableName: string }).tableName; + + const statementsForTable = newStatements[tableName]; + + if (!statementsForTable) { + newStatements[tableName] = [statement]; + continue; + } + + if (!statementsForTable.some(({ type }) => type === 'singlestore_recreate_table')) { + newStatements[tableName].push(statement); + } + } + + const combinedStatements = Object.values(newStatements).flat(); + + const renamedTables = combinedStatements.filter((it) => it.type === 'rename_table'); + const renamedColumns = combinedStatements.filter((it) => it.type === 'alter_table_rename_column'); + + const rest = combinedStatements.filter((it) => it.type !== 'rename_table' && it.type !== 'alter_table_rename_column'); + + return [...renamedTables, ...renamedColumns, ...rest]; +}; diff --git a/drizzle-kit/src/utils.ts b/drizzle-kit/src/utils.ts index 2638ca4ef..93eb044e0 100644 --- a/drizzle-kit/src/utils.ts +++ b/drizzle-kit/src/utils.ts @@ -4,7 +4,6 @@ import { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from import { join } from 'path'; import { parse } from 'url'; import type { NamedWithSchema } from './cli/commands/migrate'; -import { CasingType } from './cli/validations/common'; import { info } from './cli/views'; import { assertUnreachable, snapshotVersion } from './global'; import type { Dialect } from './schemaValidator'; diff --git a/drizzle-kit/tests/push/singlestore-push.test.ts b/drizzle-kit/tests/push/singlestore-push.test.ts index 4ad3c6c0e..0bafd5956 100644 --- a/drizzle-kit/tests/push/singlestore-push.test.ts +++ b/drizzle-kit/tests/push/singlestore-push.test.ts @@ -1,5 +1,6 @@ +import chalk from 'chalk'; import Docker from 'dockerode'; -import { int, singlestoreTable } from 'drizzle-orm/singlestore-core'; +import { getTableConfig, index, int, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import fs from 'fs'; import getPort from 'get-port'; 
import { Connection, createConnection } from 'mysql2/promise'; @@ -264,3 +265,630 @@ VIEW \`view\` AS (select \`id\` from \`test\`);`, await client.query(`DROP TABLE \`test\`;`); }); */ + +test('added column not null and without default to table with data', async (t) => { + const schema1 = { + companies: singlestoreTable('companies', { + id: int('id'), + name: text('name'), + }), + }; + + const schema2 = { + companies: singlestoreTable('companies', { + id: int('id'), + name: text('name'), + age: int('age').notNull(), + }), + }; + + const table = getTableConfig(schema1.companies); + + const seedStatements = [ + `INSERT INTO \`${table.name}\` (\`${schema1.companies.name.name}\`) VALUES ('drizzle');`, + `INSERT INTO \`${table.name}\` (\`${schema1.companies.name.name}\`) VALUES ('turso');`, + ]; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + undefined, + { + after: seedStatements, + }, + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'companies', + column: { + name: 'age', + type: 'int', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + schema: '', + }); + + expect(sqlStatements.length).toBe(2); + expect(sqlStatements[0]).toBe(`truncate table companies;`); + expect(sqlStatements[1]).toBe( + `ALTER TABLE \`companies\` ADD \`age\` int NOT NULL;`, + ); + + expect(columnsToRemove!.length).toBe(0); + expect(infoToPrint!.length).toBe(1); + expect(infoToPrint![0]).toBe( + `· You're about to add not-null ${ + chalk.underline( + 'age', + ) + } column without default value, which contains 2 items`, + ); + expect(shouldAskForApprove).toBe(true); + expect(tablesToRemove!.length).toBe(0); + expect(tablesToTruncate!.length).toBe(1); + expect(tablesToTruncate![0]).toBe('companies'); + + 
await client.query(`DROP TABLE \`companies\`;`); +}); + +test('added column not null and without default to table without data', async (t) => { + const schema1 = { + companies: singlestoreTable('companies', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }), + }; + + const schema2 = { + companies: singlestoreTable('companies', { + id: int('id').primaryKey(), + name: text('name').notNull(), + age: int('age').notNull(), + }), + }; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + undefined, + ); + + expect(statements.length).toBe(1); + expect(statements[0]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'companies', + column: { + name: 'age', + type: 'int', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + schema: '', + }); + + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + `ALTER TABLE \`companies\` ADD \`age\` int NOT NULL;`, + ); + + expect(infoToPrint!.length).toBe(0); + expect(columnsToRemove!.length).toBe(0); + expect(shouldAskForApprove).toBe(false); + expect(tablesToRemove!.length).toBe(0); + expect(tablesToTruncate!.length).toBe(0); + + await client.query(`DROP TABLE \`companies\`;`); +}); + +test('drop not null, add not null', async (t) => { + const schema1 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }), + posts: singlestoreTable( + 'posts', + { + id: int('id').primaryKey(), + name: text('name'), + userId: int('user_id'), + }, + ), + }; + + const schema2 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name'), + }), + posts: singlestoreTable( + 'posts', + { + id: int('id').primaryKey(), + name: text('name').notNull(), + userId: int('user_id'), + }, + ), + }; + const { + statements, + 
sqlStatements, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + undefined, + ); + + expect(statements!.length).toBe(2); + expect(statements![0]).toStrictEqual({ + columns: [ + { + autoincrement: false, + generated: undefined, + name: 'id', + notNull: true, + onUpdate: undefined, + primaryKey: false, + type: 'int', + }, + { + autoincrement: false, + generated: undefined, + name: 'name', + notNull: true, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + { + autoincrement: false, + generated: undefined, + name: 'user_id', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'int', + }, + ], + compositePKs: [ + 'posts_id;id', + ], + tableName: 'posts', + type: 'singlestore_recreate_table', + uniqueConstraints: [], + }); + expect(statements![1]).toStrictEqual({ + columns: [ + { + autoincrement: false, + generated: undefined, + name: 'id', + notNull: true, + onUpdate: undefined, + primaryKey: false, + type: 'int', + }, + { + autoincrement: false, + generated: undefined, + name: 'name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + ], + compositePKs: [ + 'users_id;id', + ], + tableName: 'users', + type: 'singlestore_recreate_table', + uniqueConstraints: [], + }); + expect(sqlStatements!.length).toBe(8); + expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_posts\` ( +\t\`id\` int NOT NULL, +\t\`name\` text NOT NULL, +\t\`user_id\` int, +\tCONSTRAINT \`posts_id\` PRIMARY KEY(\`id\`) +);\n`); + expect(sqlStatements![1]).toBe( + `INSERT INTO \`__new_posts\`(\`id\`, \`name\`, \`user_id\`) SELECT \`id\`, \`name\`, \`user_id\` FROM \`posts\`;`, + ); + expect(sqlStatements![2]).toBe(`DROP TABLE \`posts\`;`); + expect(sqlStatements![3]).toBe(`ALTER TABLE \`__new_posts\` RENAME TO \`posts\`;`); + expect(sqlStatements![4]).toBe(`CREATE TABLE \`__new_users\` ( 
+\t\`id\` int NOT NULL, +\t\`name\` text, +\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) +);\n`); + expect(sqlStatements![5]).toBe( + `INSERT INTO \`__new_users\`(\`id\`, \`name\`) SELECT \`id\`, \`name\` FROM \`users\`;`, + ); + expect(sqlStatements![6]).toBe( + `DROP TABLE \`users\`;`, + ); + expect(sqlStatements![7]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); + expect(columnsToRemove!.length).toBe(0); + expect(infoToPrint!.length).toBe(0); + expect(shouldAskForApprove).toBe(false); + expect(tablesToRemove!.length).toBe(0); + expect(tablesToTruncate!.length).toBe(0); + + await client.query(`DROP TABLE \`users\`;`); + await client.query(`DROP TABLE \`posts\`;`); +}); + +test('drop table with data', async (t) => { + const schema1 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }), + posts: singlestoreTable( + 'posts', + { + id: int('id').primaryKey(), + name: text('name'), + userId: int('user_id'), + }, + ), + }; + + const schema2 = { + posts: singlestoreTable( + 'posts', + { + id: int('id').primaryKey(), + name: text('name'), + userId: int('user_id'), + }, + ), + }; + + const seedStatements = [ + `INSERT INTO \`users\` (\`id\`, \`name\`) VALUES (1, 'drizzle')`, + ]; + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + undefined, + { after: seedStatements }, + ); + + expect(statements!.length).toBe(1); + expect(statements![0]).toStrictEqual({ + policies: [], + schema: undefined, + tableName: 'users', + type: 'drop_table', + }); + + expect(sqlStatements!.length).toBe(1); + expect(sqlStatements![0]).toBe(`DROP TABLE \`users\`;`); + expect(columnsToRemove!.length).toBe(0); + expect(infoToPrint!.length).toBe(1); + expect(infoToPrint![0]).toBe(`· You're about to delete ${chalk.underline('users')} table with 1 
items`); + expect(shouldAskForApprove).toBe(true); + expect(tablesToRemove!.length).toBe(1); + expect(tablesToRemove![0]).toBe('users'); + expect(tablesToTruncate!.length).toBe(0); + + await client.query(`DROP TABLE \`users\`;`); + await client.query(`DROP TABLE \`posts\`;`); +}); + +test('change data type. db has indexes. table does not have values', async (t) => { + const schema1 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: int('name').notNull(), + }, (table) => [index('index').on(table.name)]), + }; + + const schema2 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name').notNull(), + }, (table) => [index('index').on(table.name)]), + }; + + const seedStatements = [`INSERT INTO users VALUES (1, 12)`]; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + undefined, + ); + + expect(statements!.length).toBe(2); + expect(statements![0]).toStrictEqual({ + columns: [ + { + autoincrement: false, + generated: undefined, + name: 'id', + notNull: true, + onUpdate: undefined, + primaryKey: false, + type: 'int', + }, + { + autoincrement: false, + generated: undefined, + name: 'name', + notNull: true, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + ], + compositePKs: [ + 'users_id;id', + ], + tableName: 'users', + type: 'singlestore_recreate_table', + uniqueConstraints: [], + }); + expect(statements![1]).toStrictEqual({ + data: 'index;name;false;;;', + internal: undefined, + schema: '', + tableName: 'users', + type: 'create_index', + }); + + expect(sqlStatements!.length).toBe(5); + expect(sqlStatements![0]).toBe(`CREATE TABLE \`__new_users\` ( +\t\`id\` int NOT NULL, +\t\`name\` text NOT NULL, +\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) +);\n`); + expect(sqlStatements![1]).toBe( + `INSERT INTO 
\`__new_users\`(\`id\`, \`name\`) SELECT \`id\`, \`name\` FROM \`users\`;`, + ); + expect(sqlStatements![2]).toBe(`DROP TABLE \`users\`;`); + expect(sqlStatements![3]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); + expect(sqlStatements![4]).toBe(`CREATE INDEX \`index\` ON \`users\` (\`name\`);`); + expect(columnsToRemove!.length).toBe(0); + expect(infoToPrint!.length).toBe(0); + expect(shouldAskForApprove).toBe(false); + expect(tablesToRemove!.length).toBe(0); + expect(tablesToTruncate!.length).toBe(0); + + await client.query(`DROP TABLE \`users\`;`); +}); + +test('change data type. db has indexes. table has values', async (t) => { + const schema1 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: int('name'), + }, (table) => [index('index').on(table.name)]), + }; + + const schema2 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name'), + }, (table) => [index('index').on(table.name)]), + }; + + const seedStatements = [`INSERT INTO users VALUES (1, 12);`, `INSERT INTO users (id) VALUES (2);`]; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + undefined, + { after: seedStatements }, + ); + + expect(statements!.length).toBe(2); + expect(statements![0]).toStrictEqual({ + columns: [ + { + autoincrement: false, + generated: undefined, + name: 'id', + notNull: true, + onUpdate: undefined, + primaryKey: false, + type: 'int', + }, + { + autoincrement: false, + generated: undefined, + name: 'name', + notNull: false, + onUpdate: undefined, + primaryKey: false, + type: 'text', + }, + ], + compositePKs: [ + 'users_id;id', + ], + tableName: 'users', + type: 'singlestore_recreate_table', + uniqueConstraints: [], + }); + expect(statements![1]).toStrictEqual({ + data: 'index;name;false;;;', + internal: undefined, + 
schema: '', + tableName: 'users', + type: 'create_index', + }); + + expect(sqlStatements!.length).toBe(6); + expect(sqlStatements![0]).toBe(`TRUNCATE TABLE \`users\`;`); + expect(sqlStatements![1]).toBe(`CREATE TABLE \`__new_users\` ( +\t\`id\` int NOT NULL, +\t\`name\` text, +\tCONSTRAINT \`users_id\` PRIMARY KEY(\`id\`) +);\n`); + expect(sqlStatements![2]).toBe( + `INSERT INTO \`__new_users\`(\`id\`, \`name\`) SELECT \`id\`, \`name\` FROM \`users\`;`, + ); + expect(sqlStatements![3]).toBe(`DROP TABLE \`users\`;`); + expect(sqlStatements![4]).toBe(`ALTER TABLE \`__new_users\` RENAME TO \`users\`;`); + expect(sqlStatements![5]).toBe(`CREATE INDEX \`index\` ON \`users\` (\`name\`);`); + expect(columnsToRemove!.length).toBe(0); + expect(infoToPrint!.length).toBe(1); + expect(infoToPrint![0]).toBe( + `· You're about recreate ${chalk.underline('users')} table with data type changing for ${ + chalk.underline('name') + } column, which contains 1 items`, + ); + expect(shouldAskForApprove).toBe(true); + expect(tablesToRemove!.length).toBe(0); + expect(tablesToTruncate!.length).toBe(1); + expect(tablesToTruncate![0]).toBe(`users`); + + await client.query(`DROP TABLE \`users\`;`); +}); + +test('add column. 
add default to column without not null', async (t) => { + const schema1 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name'), + }), + }; + + const schema2 = { + users: singlestoreTable('users', { + id: int('id').primaryKey(), + name: text('name').default('drizzle'), + age: int('age'), + }), + }; + + const { + statements, + sqlStatements, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await diffTestSchemasPushSingleStore( + client, + schema1, + schema2, + [], + 'drizzle', + false, + undefined, + ); + + expect(statements!.length).toBe(2); + expect(statements![0]).toStrictEqual({ + columnAutoIncrement: false, + columnName: 'name', + columnNotNull: false, + columnOnUpdate: undefined, + columnPk: false, + newDataType: 'text', + newDefaultValue: "'drizzle'", + schema: '', + tableName: 'users', + type: 'alter_table_alter_column_set_default', + }); + expect(statements![1]).toStrictEqual({ + type: 'alter_table_add_column', + tableName: 'users', + schema: '', + column: { + notNull: false, + primaryKey: false, + autoincrement: false, + name: 'age', + type: 'int', + }, + }); + expect(sqlStatements!.length).toBe(2); + expect(sqlStatements![0]).toBe(`ALTER TABLE \`users\` MODIFY COLUMN \`name\` text DEFAULT 'drizzle';`); + expect(sqlStatements![1]).toBe(`ALTER TABLE \`users\` ADD \`age\` int;`); + expect(columnsToRemove!.length).toBe(0); + expect(infoToPrint!.length).toBe(0); + expect(shouldAskForApprove).toBe(false); + expect(tablesToRemove!.length).toBe(0); + expect(tablesToTruncate!.length).toBe(0); + + await client.query(`DROP TABLE \`users\`;`); +}); diff --git a/drizzle-kit/tests/push/singlestore.test.ts b/drizzle-kit/tests/push/singlestore.test.ts index dea28759c..6f58e8ddd 100644 --- a/drizzle-kit/tests/push/singlestore.test.ts +++ b/drizzle-kit/tests/push/singlestore.test.ts @@ -5,15 +5,12 @@ import { binary, char, date, - datetime, decimal, double, float, int, - json, 
mediumint, primaryKey, - serial, singlestoreEnum, singlestoreTable, smallint, @@ -400,7 +397,7 @@ const singlestoreSuite: DialectSuite = { // It's not possible to create/alter/drop primary keys in SingleStore expect(sqlStatements).toStrictEqual([ - 'RENAME TABLE `products_categories` TO `products_to_categories`;', + 'ALTER TABLE `products_categories` RENAME TO `products_to_categories`;', ]); await context.client.query(`DROP TABLE \`products_categories\``); diff --git a/drizzle-kit/tests/schemaDiffer.ts b/drizzle-kit/tests/schemaDiffer.ts index 9c7f212aa..256288c24 100644 --- a/drizzle-kit/tests/schemaDiffer.ts +++ b/drizzle-kit/tests/schemaDiffer.ts @@ -38,6 +38,7 @@ import { viewsResolver, } from 'src/cli/commands/migrate'; import { pgSuggestions } from 'src/cli/commands/pgPushUtils'; +import { logSuggestionsAndReturn as singleStoreLogSuggestionsAndReturn } from 'src/cli/commands/singlestorePushUtils'; import { logSuggestionsAndReturn } from 'src/cli/commands/sqlitePushUtils'; import { Entities } from 'src/cli/validations/cli'; import { CasingType } from 'src/cli/validations/common'; @@ -1624,11 +1625,35 @@ export const diffTestSchemasPushSingleStore = async ( schema: string, cli: boolean = false, casing?: CasingType | undefined, + sqlStatementsToRun: { + before?: string[]; + after?: string[]; + runApply?: boolean; + } = { + before: [], + after: [], + runApply: true, + }, ) => { - const { sqlStatements } = await applySingleStoreDiffs(left, casing); - for (const st of sqlStatements) { + const shouldRunApply = sqlStatementsToRun.runApply === undefined + ? true + : sqlStatementsToRun.runApply; + + for (const st of sqlStatementsToRun.before ?? []) { await client.query(st); } + + if (shouldRunApply) { + const res = await applySingleStoreDiffs(left, casing); + for (const st of res.sqlStatements) { + await client.query(st); + } + } + + for (const st of sqlStatementsToRun.after ?? 
[]) { + await client.query(st); + } + // do introspect into PgSchemaInternal const introspectedSchema = await fromSingleStoreDatabase( { @@ -1688,7 +1713,35 @@ export const diffTestSchemasPushSingleStore = async ( validatedCur, 'push', ); - return { sqlStatements, statements }; + + const { + statementsToExecute, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + } = await singleStoreLogSuggestionsAndReturn( + { + query: async (sql: string, params?: any[]) => { + const res = await client.execute(sql, params); + return res[0] as T[]; + }, + }, + statements, + sn1, + sn2, + ); + + return { + sqlStatements: statementsToExecute, + statements, + columnsToRemove, + infoToPrint, + shouldAskForApprove, + tablesToRemove, + tablesToTruncate, + }; } else { const { sqlStatements, statements } = await applySingleStoreSnapshotsDiff( sn1, diff --git a/drizzle-kit/tests/singlestore.test.ts b/drizzle-kit/tests/singlestore.test.ts index 3bdccab81..dca99ad2d 100644 --- a/drizzle-kit/tests/singlestore.test.ts +++ b/drizzle-kit/tests/singlestore.test.ts @@ -1,6 +1,7 @@ import { sql } from 'drizzle-orm'; import { index, + int, json, primaryKey, serial, @@ -214,6 +215,13 @@ test('add table #7', async () => { expect(statements.length).toBe(2); expect(statements[0]).toStrictEqual({ + type: 'rename_table', + tableNameFrom: 'users1', + tableNameTo: 'users2', + fromSchema: undefined, + toSchema: undefined, + }); + expect(statements[1]).toStrictEqual({ type: 'create_table', tableName: 'users', schema: undefined, @@ -226,13 +234,6 @@ test('add table #7', async () => { }, compositePkName: '', }); - expect(statements[1]).toStrictEqual({ - type: 'rename_table', - tableNameFrom: 'users1', - tableNameTo: 'users2', - fromSchema: undefined, - toSchema: undefined, - }); }); test('add schema + table #1', async () => { @@ -578,3 +579,400 @@ test('add table with indexes', async () => { 'CREATE INDEX `indexColExpr` ON `users` ((lower(`email`)),`email`);', ]); 
}); + +test('rename table', async () => { + const from = { + table: singlestoreTable('table', { + json: json('json').default([]), + }), + }; + + const to = { + table1: singlestoreTable('table1', { + json1: json('json').default([]), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, [`public.table->public.table1`]); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 'ALTER TABLE `table` RENAME TO `table1`;', + ); +}); + +test('rename column', async () => { + const from = { + users: singlestoreTable('table', { + json: json('json').default([]), + }), + }; + + const to = { + users: singlestoreTable('table', { + json1: json('json1').default([]), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, [`public.table.json->public.table.json1`]); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 'ALTER TABLE `table` CHANGE `json` `json1`;', + ); +}); + +test('change data type', async () => { + const from = { + table: singlestoreTable('table', { + id: int(), + age: text(), + }), + }; + + const to = { + table: singlestoreTable('table', { + id: int(), + age: int(), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); + expect(sqlStatements.length).toBe(4); + expect(sqlStatements[0]).toBe( + `CREATE TABLE \`__new_table\` ( +\t\`id\` int, +\t\`age\` int +);\n`, + ); + expect(sqlStatements[1]).toBe( + 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', + ); + expect(sqlStatements[2]).toBe( + 'DROP TABLE `table`;', + ); + expect(sqlStatements[3]).toBe( + 'ALTER TABLE `__new_table` RENAME TO `table`;', + ); +}); + +test('drop not null', async () => { + const from = { + table: singlestoreTable('table', { + id: int().notNull(), + age: int(), + }), + }; + + const to = { + table: singlestoreTable('table', { + id: int(), + age: int(), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, 
[]); + expect(sqlStatements.length).toBe(4); + expect(sqlStatements[0]).toBe( + `CREATE TABLE \`__new_table\` ( +\t\`id\` int, +\t\`age\` int +);\n`, + ); + expect(sqlStatements[1]).toBe( + 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', + ); + expect(sqlStatements[2]).toBe( + 'DROP TABLE `table`;', + ); + expect(sqlStatements[3]).toBe( + 'ALTER TABLE `__new_table` RENAME TO `table`;', + ); +}); + +test('set not null', async () => { + const from = { + table: singlestoreTable('table', { + id: int(), + age: int(), + }), + }; + + const to = { + table: singlestoreTable('table', { + id: int().notNull(), + age: int(), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); + expect(sqlStatements.length).toBe(4); + expect(sqlStatements[0]).toBe( + `CREATE TABLE \`__new_table\` ( +\t\`id\` int NOT NULL, +\t\`age\` int +);\n`, + ); + expect(sqlStatements[1]).toBe( + 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', + ); + expect(sqlStatements[2]).toBe( + 'DROP TABLE `table`;', + ); + expect(sqlStatements[3]).toBe( + 'ALTER TABLE `__new_table` RENAME TO `table`;', + ); +}); + +test('set default with not null column', async () => { + const from = { + table: singlestoreTable('table', { + id: int().notNull(), + age: int(), + }), + }; + + const to = { + table: singlestoreTable('table', { + id: int().notNull().default(1), + age: int(), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); + expect(sqlStatements.length).toBe(4); + expect(sqlStatements[0]).toBe( + `CREATE TABLE \`__new_table\` ( +\t\`id\` int NOT NULL DEFAULT 1, +\t\`age\` int +);\n`, + ); + expect(sqlStatements[1]).toBe( + 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', + ); + expect(sqlStatements[2]).toBe( + 'DROP TABLE `table`;', + ); + expect(sqlStatements[3]).toBe( + 'ALTER TABLE `__new_table` RENAME TO `table`;', + ); +}); + +test('drop default with not null 
column', async () => { + const from = { + table: singlestoreTable('table', { + id: int().notNull().default(1), + age: int(), + }), + }; + + const to = { + table: singlestoreTable('table', { + id: int().notNull(), + age: int(), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); + expect(sqlStatements.length).toBe(4); + expect(sqlStatements[0]).toBe( + `CREATE TABLE \`__new_table\` ( +\t\`id\` int NOT NULL, +\t\`age\` int +);\n`, + ); + expect(sqlStatements[1]).toBe( + 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', + ); + expect(sqlStatements[2]).toBe( + 'DROP TABLE `table`;', + ); + expect(sqlStatements[3]).toBe( + 'ALTER TABLE `__new_table` RENAME TO `table`;', + ); +}); + +test('set default', async () => { + const from = { + table: singlestoreTable('table', { + id: int(), + age: int(), + }), + }; + + const to = { + table: singlestoreTable('table', { + id: int().default(1), + age: int(), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 'ALTER TABLE `table` MODIFY COLUMN `id` int DEFAULT 1;', + ); +}); + +test('drop default', async () => { + const from = { + table: singlestoreTable('table', { + id: int().default(1), + age: int(), + }), + }; + + const to = { + table: singlestoreTable('table', { + id: int(), + age: int(), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); + expect(sqlStatements.length).toBe(1); + expect(sqlStatements[0]).toBe( + 'ALTER TABLE `table` MODIFY COLUMN `id` int;', + ); +}); + +test('set pk', async () => { + const from = { + table: singlestoreTable('table', { + id: int(), + age: int(), + }), + }; + + const to = { + table: singlestoreTable('table', { + id: int().primaryKey(), + age: int(), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); + expect(sqlStatements.length).toBe(4); + 
expect(sqlStatements[0]).toBe( + `CREATE TABLE \`__new_table\` ( +\t\`id\` int NOT NULL, +\t\`age\` int, +\tCONSTRAINT \`table_id\` PRIMARY KEY(\`id\`) +);\n`, + ); + expect(sqlStatements[1]).toBe( + 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', + ); + expect(sqlStatements[2]).toBe( + 'DROP TABLE `table`;', + ); + expect(sqlStatements[3]).toBe( + 'ALTER TABLE `__new_table` RENAME TO `table`;', + ); +}); + +test('drop pk', async () => { + const from = { + table: singlestoreTable('table', { + id: int().primaryKey(), + age: int(), + }), + }; + + const to = { + table: singlestoreTable('table', { + id: int(), + age: int(), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, []); + expect(sqlStatements.length).toBe(4); + expect(sqlStatements[0]).toBe( + `CREATE TABLE \`__new_table\` ( +\t\`id\` int, +\t\`age\` int +);\n`, + ); + expect(sqlStatements[1]).toBe( + 'INSERT INTO `__new_table`(`id`, `age`) SELECT `id`, `age` FROM `table`;', + ); + expect(sqlStatements[2]).toBe( + 'DROP TABLE `table`;', + ); + expect(sqlStatements[3]).toBe( + 'ALTER TABLE `__new_table` RENAME TO `table`;', + ); +}); + +test('set not null + rename column on table with indexes', async () => { + const from = { + table: singlestoreTable('table', { + id: int('id').default(1), + age: int(), + }), + }; + + const to = { + table: singlestoreTable('table', { + id3: int('id3').notNull().default(1), + age: int(), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, [`public.table.id->public.table.id3`]); + expect(sqlStatements.length).toBe(5); + expect(sqlStatements[0]).toBe( + 'ALTER TABLE \`table\` CHANGE `id` `id3`;', + ); + expect(sqlStatements[1]).toBe( + `CREATE TABLE \`__new_table\` ( +\t\`id3\` int NOT NULL DEFAULT 1, +\t\`age\` int +);\n`, + ); + expect(sqlStatements[2]).toBe( + 'INSERT INTO `__new_table`(`id3`, `age`) SELECT `id3`, `age` FROM `table`;', + ); + expect(sqlStatements[3]).toBe( + 'DROP TABLE 
`table`;', + ); + expect(sqlStatements[4]).toBe( + 'ALTER TABLE `__new_table` RENAME TO `table`;', + ); +}); + +test('set not null + rename table on table with indexes', async () => { + const from = { + table: singlestoreTable('table', { + id: int('id').default(1), + age: int(), + }), + }; + + const to = { + table1: singlestoreTable('table1', { + id: int('id').notNull().default(1), + age: int(), + }), + }; + + const { sqlStatements } = await diffTestSchemasSingleStore(from, to, [`public.table->public.table1`]); + expect(sqlStatements.length).toBe(5); + expect(sqlStatements[0]).toBe( + 'ALTER TABLE `table` RENAME TO `table1`;', + ); + expect(sqlStatements[1]).toBe( + `CREATE TABLE \`__new_table1\` ( +\t\`id\` int NOT NULL DEFAULT 1, +\t\`age\` int +);\n`, + ); + expect(sqlStatements[2]).toBe( + 'INSERT INTO `__new_table1`(\`id\`, \`age\`) SELECT \`id\`, \`age\` FROM `table1`;', + ); + expect(sqlStatements[3]).toBe( + 'DROP TABLE `table1`;', + ); + expect(sqlStatements[4]).toBe( + 'ALTER TABLE `__new_table1` RENAME TO `table1`;', + ); +}); diff --git a/drizzle-kit/tests/statements-combiner/singlestore-statements-combiner.test.ts b/drizzle-kit/tests/statements-combiner/singlestore-statements-combiner.test.ts new file mode 100644 index 000000000..0ba6cf278 --- /dev/null +++ b/drizzle-kit/tests/statements-combiner/singlestore-statements-combiner.test.ts @@ -0,0 +1,882 @@ +import { JsonStatement } from 'src/jsonStatements'; +import { SingleStoreSchemaSquashed } from 'src/serializer/singlestoreSchema'; +import { singleStoreCombineStatements } from 'src/statementCombiner'; +import { expect, test } from 'vitest'; + +test(`change column data type`, async (t) => { + const statements: JsonStatement[] = [ + { + type: 'alter_table_rename_column', + tableName: 'user', + oldColumnName: 'lastName', + newColumnName: 'lastName123', + schema: '', + }, + { + type: 'alter_table_alter_column_set_type', + tableName: 'user', + columnName: 'lastName123', + newDataType: 'int', + oldDataType: 
'text', + schema: '', + columnDefault: undefined, + columnOnUpdate: undefined, + columnNotNull: false, + columnAutoIncrement: false, + columnPk: false, + columnIsUnique: false, + } as unknown as JsonStatement, + ]; + const json1: SingleStoreSchemaSquashed = { + version: '1', + dialect: 'singlestore', + tables: { + user: { + name: 'user', + columns: { + firstName: { + name: 'firstName', + type: 'int', + primaryKey: true, + notNull: true, + autoincrement: false, + }, + lastName: { + name: 'lastName', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + test: { + name: 'test', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + }, + indexes: {}, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }, + }, + }; + const json2: SingleStoreSchemaSquashed = { + version: '1', + dialect: 'singlestore', + tables: { + user: { + name: 'user', + columns: { + firstName: { + name: 'firstName', + type: 'int', + primaryKey: true, + notNull: true, + autoincrement: false, + }, + lastName: { + name: 'lastName123', + type: 'int', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + test: { + name: 'test', + type: 'int', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + }, + indexes: {}, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }, + }, + }; + + const newJsonStatements = [ + { + type: 'alter_table_rename_column', + tableName: 'user', + oldColumnName: 'lastName', + newColumnName: 'lastName123', + schema: '', + }, + { + type: 'singlestore_recreate_table', + tableName: 'user', + columns: [ + { + name: 'firstName', + type: 'int', + primaryKey: true, + notNull: true, + autoincrement: false, + }, + { + name: 'lastName123', + type: 'int', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + { + name: 'test', + type: 'int', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + ], + compositePKs: [], + uniqueConstraints: [], + }, + ]; + 
expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( + newJsonStatements, + ); +}); + +test(`set autoincrement`, async (t) => { + const statements: JsonStatement[] = [ + { + type: 'alter_table_alter_column_set_autoincrement', + tableName: 'users', + columnName: 'id', + schema: '', + newDataType: 'int', + columnDefault: undefined, + columnOnUpdate: undefined, + columnNotNull: true, + columnAutoIncrement: true, + columnPk: false, + } as unknown as JsonStatement, + ]; + + const json2: SingleStoreSchemaSquashed = { + version: '1', + dialect: 'singlestore', + tables: { + users: { + name: 'users', + columns: { + new_id: { + name: 'id', + type: 'int', + primaryKey: false, + notNull: true, + autoincrement: true, + }, + name: { + name: 'name', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + email: { + name: 'email', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + }, + indexes: {}, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }, + }, + }; + const newJsonStatements = [ + { + type: 'singlestore_recreate_table', + tableName: 'users', + columns: [ + { + name: 'id', + type: 'int', + primaryKey: false, + notNull: true, + autoincrement: true, + }, + { + name: 'name', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + { + name: 'email', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + ], + compositePKs: [], + uniqueConstraints: [], + }, + ]; + expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( + newJsonStatements, + ); +}); + +test(`drop autoincrement`, async (t) => { + const statements: JsonStatement[] = [ + { + type: 'alter_table_alter_column_drop_autoincrement', + tableName: 'users', + columnName: 'id', + schema: '', + newDataType: 'int', + columnDefault: undefined, + columnOnUpdate: undefined, + columnNotNull: true, + columnAutoIncrement: true, + columnPk: false, + } as unknown as 
JsonStatement, + ]; + + const json2: SingleStoreSchemaSquashed = { + version: '1', + dialect: 'singlestore', + tables: { + users: { + name: 'users', + columns: { + new_id: { + name: 'id', + type: 'int', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + name: { + name: 'name', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + email: { + name: 'email', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + }, + indexes: {}, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }, + }, + }; + const newJsonStatements = [ + { + type: 'singlestore_recreate_table', + tableName: 'users', + columns: [ + { + name: 'id', + type: 'int', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + { + name: 'name', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + { + name: 'email', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + ], + compositePKs: [], + uniqueConstraints: [], + }, + ]; + expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( + newJsonStatements, + ); +}); + +test(`drop autoincrement`, async (t) => { + const statements: JsonStatement[] = [ + { + type: 'alter_table_alter_column_drop_autoincrement', + tableName: 'users', + columnName: 'id', + schema: '', + newDataType: 'int', + columnDefault: undefined, + columnOnUpdate: undefined, + columnNotNull: true, + columnAutoIncrement: true, + columnPk: false, + } as unknown as JsonStatement, + ]; + + const json2: SingleStoreSchemaSquashed = { + version: '1', + dialect: 'singlestore', + tables: { + users: { + name: 'users', + columns: { + new_id: { + name: 'id', + type: 'int', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + name: { + name: 'name', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + email: { + name: 'email', + type: 'text', + primaryKey: false, + notNull: true, + 
autoincrement: false, + }, + }, + indexes: {}, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }, + }, + }; + const newJsonStatements = [ + { + type: 'singlestore_recreate_table', + tableName: 'users', + columns: [ + { + name: 'id', + type: 'int', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + { + name: 'name', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + { + name: 'email', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + ], + compositePKs: [], + uniqueConstraints: [], + }, + ]; + expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( + newJsonStatements, + ); +}); + +test(`set not null`, async (t) => { + const statements: JsonStatement[] = [ + { + type: 'alter_table_alter_column_set_notnull', + tableName: 'users', + columnName: 'name', + schema: '', + newDataType: 'text', + columnDefault: undefined, + columnOnUpdate: undefined, + columnNotNull: true, + columnAutoIncrement: false, + columnPk: false, + } as unknown as JsonStatement, + ]; + + const json2: SingleStoreSchemaSquashed = { + version: '1', + dialect: 'singlestore', + tables: { + users: { + name: 'users', + columns: { + new_id: { + name: 'id', + type: 'int', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + name: { + name: 'name', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + email: { + name: 'email', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + }, + indexes: {}, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }, + }, + }; + const newJsonStatements = [ + { + type: 'singlestore_recreate_table', + tableName: 'users', + columns: [ + { + name: 'id', + type: 'int', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + { + name: 'name', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + { + name: 'email', + type: 'text', + primaryKey: false, 
+ notNull: true, + autoincrement: false, + }, + ], + compositePKs: [], + uniqueConstraints: [], + }, + ]; + expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( + newJsonStatements, + ); +}); + +test(`drop not null`, async (t) => { + const statements: JsonStatement[] = [ + { + type: 'alter_table_alter_column_drop_notnull', + tableName: 'users', + columnName: 'name', + schema: '', + newDataType: 'text', + columnDefault: undefined, + columnOnUpdate: undefined, + columnNotNull: false, + columnAutoIncrement: false, + columnPk: false, + } as unknown as JsonStatement, + ]; + + const json2: SingleStoreSchemaSquashed = { + version: '1', + dialect: 'singlestore', + tables: { + users: { + name: 'users', + columns: { + new_id: { + name: 'id', + type: 'int', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + name: { + name: 'name', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + email: { + name: 'email', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + }, + indexes: {}, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }, + }, + }; + const newJsonStatements = [ + { + type: 'singlestore_recreate_table', + tableName: 'users', + columns: [ + { + name: 'id', + type: 'int', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + { + name: 'name', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + { + name: 'email', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + ], + compositePKs: [], + uniqueConstraints: [], + }, + ]; + expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( + newJsonStatements, + ); +}); + +test(`renamed column and droped column "test"`, async (t) => { + const statements: JsonStatement[] = [ + { + type: 'alter_table_rename_column', + tableName: 'user', + oldColumnName: 'lastName', + newColumnName: 'lastName123', + schema: '', + }, + { + type: 
'alter_table_drop_column', + tableName: 'user', + columnName: 'test', + schema: '', + }, + ]; + const json1: SingleStoreSchemaSquashed = { + version: '1', + dialect: 'singlestore', + tables: { + user: { + name: 'user', + columns: { + firstName: { + name: 'firstName', + type: 'int', + primaryKey: true, + notNull: true, + autoincrement: false, + }, + lastName: { + name: 'lastName', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + test: { + name: 'test', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + }, + indexes: {}, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }, + }, + }; + const json2: SingleStoreSchemaSquashed = { + version: '1', + dialect: 'singlestore', + tables: { + user: { + name: 'user', + columns: { + firstName: { + name: 'firstName', + type: 'int', + primaryKey: true, + notNull: true, + autoincrement: false, + }, + lastName: { + name: 'lastName123', + type: 'int', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + test: { + name: 'test', + type: 'int', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + }, + indexes: {}, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }, + }, + }; + + const newJsonStatements: JsonStatement[] = [ + { + type: 'alter_table_rename_column', + tableName: 'user', + oldColumnName: 'lastName', + newColumnName: 'lastName123', + schema: '', + }, + { + type: 'alter_table_drop_column', + tableName: 'user', + columnName: 'test', + schema: '', + }, + ]; + expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( + newJsonStatements, + ); +}); + +test(`droped column that is part of composite pk`, async (t) => { + const statements: JsonStatement[] = [ + { type: 'delete_composite_pk', tableName: 'user', data: 'id,iq' }, + { + type: 'alter_table_alter_column_set_pk', + tableName: 'user', + schema: '', + columnName: 'id', + }, + { + type: 'alter_table_drop_column', + tableName: 'user', + columnName: 
'iq', + schema: '', + }, + ]; + const json1: SingleStoreSchemaSquashed = { + version: '1', + dialect: 'singlestore', + tables: { + user: { + name: 'user', + columns: { + id: { + name: 'id', + type: 'int', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + first_nam: { + name: 'first_nam', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + iq: { + name: 'iq', + type: 'int', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + }, + indexes: {}, + compositePrimaryKeys: { + user_id_iq_pk: 'id,iq', + }, + uniqueConstraints: {}, + }, + }, + }; + const json2: SingleStoreSchemaSquashed = { + version: '1', + dialect: 'singlestore', + tables: { + user: { + name: 'user', + columns: { + id: { + name: 'id', + type: 'int', + primaryKey: true, + notNull: false, + autoincrement: false, + }, + first_nam: { + name: 'first_name', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + }, + indexes: {}, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }, + }, + }; + + const newJsonStatements: JsonStatement[] = [ + { + type: 'singlestore_recreate_table', + tableName: 'user', + columns: [ + { + name: 'id', + type: 'int', + primaryKey: true, + notNull: false, + autoincrement: false, + }, + { + name: 'first_name', + type: 'text', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + ], + compositePKs: [], + uniqueConstraints: [], + }, + ]; + expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( + newJsonStatements, + ); +}); + +test(`add column with pk`, async (t) => { + const statements: JsonStatement[] = [ + { + type: 'alter_table_add_column', + tableName: 'table', + column: { + name: 'test', + type: 'integer', + primaryKey: true, + notNull: false, + autoincrement: false, + }, + schema: '', + }, + ]; + const json2: SingleStoreSchemaSquashed = { + version: '1', + dialect: 'singlestore', + tables: { + table: { + name: 'table', + columns: { + id1: { + 
name: 'id1', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + new_age: { + name: 'new_age', + type: 'integer', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + test: { + name: 'test', + type: 'integer', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + }, + indexes: {}, + compositePrimaryKeys: {}, + uniqueConstraints: {}, + }, + }, + }; + + const newJsonStatements = [ + { + columns: [ + { + name: 'id1', + type: 'text', + primaryKey: false, + notNull: true, + autoincrement: false, + }, + { + name: 'new_age', + type: 'integer', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + { + name: 'test', + type: 'integer', + primaryKey: false, + notNull: false, + autoincrement: false, + }, + ], + compositePKs: [], + tableName: 'table', + type: 'singlestore_recreate_table', + uniqueConstraints: [], + }, + ]; + expect(singleStoreCombineStatements(statements, json2)).toStrictEqual( + newJsonStatements, + ); +}); diff --git a/drizzle-orm/package.json b/drizzle-orm/package.json index 7e49ec522..e376281f8 100644 --- a/drizzle-orm/package.json +++ b/drizzle-orm/package.json @@ -1,6 +1,6 @@ { "name": "drizzle-orm", - "version": "0.38.4", + "version": "0.39.0", "description": "Drizzle ORM package for SQL databases", "type": "module", "scripts": { @@ -185,7 +185,7 @@ "@vercel/postgres": "^0.8.0", "@xata.io/client": "^0.29.3", "better-sqlite3": "^8.4.0", - "bun-types": "^0.6.6", + "bun-types": "^1.2.0", "cpy": "^10.1.0", "expo-sqlite": "^14.0.0", "knex": "^2.4.2", diff --git a/drizzle-orm/src/bun-sql/driver.ts b/drizzle-orm/src/bun-sql/driver.ts new file mode 100644 index 000000000..1b2c42c4f --- /dev/null +++ b/drizzle-orm/src/bun-sql/driver.ts @@ -0,0 +1,122 @@ +/// + +import type { SQLOptions } from 'bun'; +import { SQL } from 'bun'; +import { entityKind } from '~/entity.ts'; +import { DefaultLogger } from '~/logger.ts'; +import { PgDatabase } from '~/pg-core/db.ts'; +import { PgDialect } 
from '~/pg-core/dialect.ts'; +import { + createTableRelationsHelpers, + extractTablesRelationalConfig, + type RelationalSchemaConfig, + type TablesRelationalConfig, +} from '~/relations.ts'; +import { type DrizzleConfig, isConfig } from '~/utils.ts'; +import type { BunSQLQueryResultHKT } from './session.ts'; +import { BunSQLSession } from './session.ts'; + +export class BunSQLDatabase< + TSchema extends Record = Record, +> extends PgDatabase { + static override readonly [entityKind]: string = 'BunSQLDatabase'; +} + +function construct = Record>( + client: SQL, + config: DrizzleConfig = {}, +): BunSQLDatabase & { + $client: SQL; +} { + const dialect = new PgDialect({ casing: config.casing }); + let logger; + if (config.logger === true) { + logger = new DefaultLogger(); + } else if (config.logger !== false) { + logger = config.logger; + } + + let schema: RelationalSchemaConfig | undefined; + if (config.schema) { + const tablesConfig = extractTablesRelationalConfig( + config.schema, + createTableRelationsHelpers, + ); + schema = { + fullSchema: config.schema, + schema: tablesConfig.tables, + tableNamesMap: tablesConfig.tableNamesMap, + }; + } + + const session = new BunSQLSession(client, dialect, schema, { logger }); + const db = new BunSQLDatabase(dialect, session, schema as any) as BunSQLDatabase; + ( db).$client = client; + + return db as any; +} + +export function drizzle< + TSchema extends Record = Record, + TClient extends SQL = SQL, +>( + ...params: [ + TClient | string, + ] | [ + TClient | string, + DrizzleConfig, + ] | [ + ( + & DrizzleConfig + & ({ + connection: string | ({ url?: string } & SQLOptions); + } | { + client: TClient; + }) + ), + ] +): BunSQLDatabase & { + $client: TClient; +} { + if (typeof params[0] === 'string') { + const instance = new SQL(params[0]); + + return construct(instance, params[1]) as any; + } + + if (isConfig(params[0])) { + const { connection, client, ...drizzleConfig } = params[0] as { + connection?: { url?: string } & 
SQLOptions; + client?: TClient; + } & DrizzleConfig; + + if (client) return construct(client, drizzleConfig) as any; + + if (typeof connection === 'object' && connection.url !== undefined) { + const { url, ...config } = connection; + + const instance = new SQL({ url, ...config }); + return construct(instance, drizzleConfig) as any; + } + + const instance = new SQL(connection); + return construct(instance, drizzleConfig) as any; + } + + return construct(params[0] as TClient, params[1] as DrizzleConfig | undefined) as any; +} + +export namespace drizzle { + export function mock = Record>( + config?: DrizzleConfig, + ): BunSQLDatabase & { + $client: '$client is not available on drizzle.mock()'; + } { + return construct({ + options: { + parsers: {}, + serializers: {}, + }, + } as any, config) as any; + } +} diff --git a/drizzle-orm/src/bun-sql/index.ts b/drizzle-orm/src/bun-sql/index.ts new file mode 100644 index 000000000..b1b6a52e7 --- /dev/null +++ b/drizzle-orm/src/bun-sql/index.ts @@ -0,0 +1,2 @@ +export * from './driver.ts'; +export * from './session.ts'; diff --git a/drizzle-orm/src/bun-sql/migrator.ts b/drizzle-orm/src/bun-sql/migrator.ts new file mode 100644 index 000000000..48be01318 --- /dev/null +++ b/drizzle-orm/src/bun-sql/migrator.ts @@ -0,0 +1,11 @@ +import type { MigrationConfig } from '~/migrator.ts'; +import { readMigrationFiles } from '~/migrator.ts'; +import type { BunSQLDatabase } from './driver.ts'; + +export async function migrate>( + db: BunSQLDatabase, + config: MigrationConfig, +) { + const migrations = readMigrationFiles(config); + await db.dialect.migrate(migrations, db.session, config); +} diff --git a/drizzle-orm/src/bun-sql/session.ts b/drizzle-orm/src/bun-sql/session.ts new file mode 100644 index 000000000..17fe520c4 --- /dev/null +++ b/drizzle-orm/src/bun-sql/session.ts @@ -0,0 +1,199 @@ +/// + +import type { SavepointSQL, SQL, TransactionSQL } from 'bun'; +import { entityKind } from '~/entity.ts'; +import type { Logger } from 
'~/logger.ts'; +import { NoopLogger } from '~/logger.ts'; +import type { PgDialect } from '~/pg-core/dialect.ts'; +import { PgTransaction } from '~/pg-core/index.ts'; +import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.types.ts'; +import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; +import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; +import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; +import { fillPlaceholders, type Query } from '~/sql/sql.ts'; +import { tracer } from '~/tracing.ts'; +import { type Assume, mapResultRow } from '~/utils.ts'; + +export class BunSQLPreparedQuery extends PgPreparedQuery { + static override readonly [entityKind]: string = 'BunSQLPreparedQuery'; + + constructor( + private client: SQL, + private queryString: string, + private params: unknown[], + private logger: Logger, + private fields: SelectedFieldsOrdered | undefined, + private _isResponseInArrayMode: boolean, + private customResultMapper?: (rows: unknown[][]) => T['execute'], + ) { + super({ sql: queryString, params }); + } + + async execute(placeholderValues: Record | undefined = {}): Promise { + return tracer.startActiveSpan('drizzle.execute', async (span) => { + const params = fillPlaceholders(this.params, placeholderValues); + + span?.setAttributes({ + 'drizzle.query.text': this.queryString, + 'drizzle.query.params': JSON.stringify(params), + }); + + this.logger.logQuery(this.queryString, params); + + const { fields, queryString: query, client, joinsNotNullableMap, customResultMapper } = this; + if (!fields && !customResultMapper) { + return tracer.startActiveSpan('drizzle.driver.execute', () => { + return client.unsafe(query, params as any[]); + }); + } + + const rows: any[] = await tracer.startActiveSpan('drizzle.driver.execute', () => { + span?.setAttributes({ + 'drizzle.query.text': query, + 'drizzle.query.params': JSON.stringify(params), + }); + + 
return client.unsafe(query, params as any[]).values(); + }); + + return tracer.startActiveSpan('drizzle.mapResponse', () => { + return customResultMapper + ? customResultMapper(rows) + : rows.map((row) => mapResultRow(fields!, row, joinsNotNullableMap)); + }); + }); + } + + all(placeholderValues: Record | undefined = {}): Promise { + return tracer.startActiveSpan('drizzle.execute', async (span) => { + const params = fillPlaceholders(this.params, placeholderValues); + span?.setAttributes({ + 'drizzle.query.text': this.queryString, + 'drizzle.query.params': JSON.stringify(params), + }); + this.logger.logQuery(this.queryString, params); + return tracer.startActiveSpan('drizzle.driver.execute', () => { + span?.setAttributes({ + 'drizzle.query.text': this.queryString, + 'drizzle.query.params': JSON.stringify(params), + }); + return this.client.unsafe(this.queryString, params as any[]); + }); + }); + } + + /** @internal */ + isResponseInArrayMode(): boolean { + return this._isResponseInArrayMode; + } +} + +export interface BunSQLSessionOptions { + logger?: Logger; +} + +export class BunSQLSession< + TSQL extends SQL, + TFullSchema extends Record, + TSchema extends TablesRelationalConfig, +> extends PgSession { + static override readonly [entityKind]: string = 'BunSQLSession'; + + logger: Logger; + + constructor( + public client: TSQL, + dialect: PgDialect, + private schema: RelationalSchemaConfig | undefined, + /** @internal */ + readonly options: BunSQLSessionOptions = {}, + ) { + super(dialect); + this.logger = options.logger ?? 
new NoopLogger(); + } + + prepareQuery( + query: Query, + fields: SelectedFieldsOrdered | undefined, + name: string | undefined, + isResponseInArrayMode: boolean, + customResultMapper?: (rows: unknown[][]) => T['execute'], + ): PgPreparedQuery { + return new BunSQLPreparedQuery( + this.client, + query.sql, + query.params, + this.logger, + fields, + isResponseInArrayMode, + customResultMapper, + ); + } + + query(query: string, params: unknown[]): Promise { + this.logger.logQuery(query, params); + return this.client.unsafe(query, params as any[]).values(); + } + + queryObjects( + query: string, + params: unknown[], + ): Promise { + return this.client.unsafe(query, params as any[]); + } + + override transaction( + transaction: (tx: BunSQLTransaction) => Promise, + config?: PgTransactionConfig, + ): Promise { + return this.client.begin(async (client) => { + const session = new BunSQLSession( + client, + this.dialect, + this.schema, + this.options, + ); + const tx = new BunSQLTransaction(this.dialect, session, this.schema); + if (config) { + await tx.setTransaction(config); + } + return transaction(tx); + }) as Promise; + } +} + +export class BunSQLTransaction< + TFullSchema extends Record, + TSchema extends TablesRelationalConfig, +> extends PgTransaction { + static override readonly [entityKind]: string = 'BunSQLTransaction'; + + constructor( + dialect: PgDialect, + /** @internal */ + override readonly session: BunSQLSession, + schema: RelationalSchemaConfig | undefined, + nestedIndex = 0, + ) { + super(dialect, session, schema, nestedIndex); + } + + override transaction( + transaction: (tx: BunSQLTransaction) => Promise, + ): Promise { + return (this.session.client as TransactionSQL).savepoint((client) => { + const session = new BunSQLSession( + client, + this.dialect, + this.schema, + this.session.options, + ); + const tx = new BunSQLTransaction(this.dialect, session, this.schema); + return transaction(tx); + }) as Promise; + } +} + +export interface 
BunSQLQueryResultHKT extends PgQueryResultHKT { + type: Assume[]>; +} diff --git a/drizzle-orm/src/bun-sqlite/session.ts b/drizzle-orm/src/bun-sqlite/session.ts index fd02e4f00..88d736461 100644 --- a/drizzle-orm/src/bun-sqlite/session.ts +++ b/drizzle-orm/src/bun-sqlite/session.ts @@ -117,7 +117,7 @@ export class PreparedQuery super('sync', executeMethod, query); } - run(placeholderValues?: Record): void { + run(placeholderValues?: Record) { const params = fillPlaceholders(this.query.params, placeholderValues ?? {}); this.logger.logQuery(this.query.sql, params); return this.stmt.run(...params); diff --git a/drizzle-orm/src/mysql-core/db.ts b/drizzle-orm/src/mysql-core/db.ts index 39ca8c77a..6f7948838 100644 --- a/drizzle-orm/src/mysql-core/db.ts +++ b/drizzle-orm/src/mysql-core/db.ts @@ -26,7 +26,7 @@ import type { MySqlTransactionConfig, PreparedQueryHKTBase, } from './session.ts'; -import type { WithSubqueryWithSelection } from './subquery.ts'; +import type { WithBuilder } from './subquery.ts'; import type { MySqlTable } from './table.ts'; import type { MySqlViewBase } from './view-base.ts'; @@ -119,23 +119,30 @@ export class MySqlDatabase< * const result = await db.with(sq).select({ name: sq.name }).from(sq); * ``` */ - $with(alias: TAlias) { + $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const self = this; - return { - as( - qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), - ): WithSubqueryWithSelection { - if (typeof qb === 'function') { - qb = qb(new QueryBuilder(self.dialect)); - } + const as = ( + qb: + | TypedQueryBuilder + | SQL + | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), + ) => { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder(self.dialect)); + } - return new Proxy( - new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), - new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), - ) as WithSubqueryWithSelection; - }, + 
return new Proxy( + new WithSubquery( + qb.getSQL(), + selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, + alias, + true, + ), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ); }; - } + return { as }; + }; $count( source: MySqlTable | MySqlViewBase | SQL | SQLWrapper, @@ -497,6 +504,7 @@ export const withReplicas = < ): MySQLWithReplicas => { const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); + const $count: Q['$count'] = (...args: [any]) => getReplica(replicas).$count(...args); const $with: Q['with'] = (...args: []) => getReplica(replicas).with(...args); const update: Q['update'] = (...args: [any]) => primary.update(...args); @@ -515,6 +523,7 @@ export const withReplicas = < $primary: primary, select, selectDistinct, + $count, with: $with, get query() { return getReplica(replicas).query; diff --git a/drizzle-orm/src/mysql-core/query-builders/query-builder.ts b/drizzle-orm/src/mysql-core/query-builders/query-builder.ts index 95b3d6cdd..5c144d48f 100644 --- a/drizzle-orm/src/mysql-core/query-builders/query-builder.ts +++ b/drizzle-orm/src/mysql-core/query-builders/query-builder.ts @@ -1,10 +1,10 @@ import { entityKind, is } from '~/entity.ts'; import type { MySqlDialectConfig } from '~/mysql-core/dialect.ts'; import { MySqlDialect } from '~/mysql-core/dialect.ts'; -import type { WithSubqueryWithSelection } from '~/mysql-core/subquery.ts'; +import type { WithBuilder } from '~/mysql-core/subquery.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; -import type { ColumnsSelection } from '~/sql/sql.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import { WithSubquery } from '~/subquery.ts'; import { MySqlSelectBuilder } from 
'./select.ts'; import type { SelectedFields } from './select.types.ts'; @@ -20,24 +20,30 @@ export class QueryBuilder { this.dialectConfig = is(dialect, MySqlDialect) ? undefined : dialect; } - $with(alias: TAlias) { + $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const queryBuilder = this; + const as = ( + qb: + | TypedQueryBuilder + | SQL + | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), + ) => { + if (typeof qb === 'function') { + qb = qb(queryBuilder); + } - return { - as( - qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), - ): WithSubqueryWithSelection { - if (typeof qb === 'function') { - qb = qb(queryBuilder); - } - - return new Proxy( - new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), - new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), - ) as WithSubqueryWithSelection; - }, + return new Proxy( + new WithSubquery( + qb.getSQL(), + selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? 
{} : {}) as SelectedFields, + alias, + true, + ), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as any; }; - } + return { as }; + }; with(...queries: WithSubquery[]) { const self = this; diff --git a/drizzle-orm/src/mysql-core/subquery.ts b/drizzle-orm/src/mysql-core/subquery.ts index 9d2c1828c..9838cb194 100644 --- a/drizzle-orm/src/mysql-core/subquery.ts +++ b/drizzle-orm/src/mysql-core/subquery.ts @@ -1,6 +1,8 @@ +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; -import type { ColumnsSelection } from '~/sql/sql.ts'; -import type { Subquery, WithSubquery } from '~/subquery.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; +import type { Subquery, WithSubquery, WithSubqueryWithoutSelection } from '~/subquery.ts'; +import type { QueryBuilder } from './query-builders/query-builder.ts'; export type SubqueryWithSelection< TSelection extends ColumnsSelection, @@ -15,3 +17,19 @@ export type WithSubqueryWithSelection< > = & WithSubquery> & AddAliasToSelection; + +export interface WithBuilder { + (alias: TAlias): { + as: { + ( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithSelection; + ( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithoutSelection; + }; + }; + (alias: TAlias, selection: TSelection): { + as: (qb: SQL | ((qb: QueryBuilder) => SQL)) => WithSubqueryWithSelection; + }; +} diff --git a/drizzle-orm/src/neon/index.ts b/drizzle-orm/src/neon/index.ts index ee201ff1c..6def59dee 100644 --- a/drizzle-orm/src/neon/index.ts +++ b/drizzle-orm/src/neon/index.ts @@ -1 +1,2 @@ +export * from './neon-identity.ts'; export * from './rls.ts'; diff --git a/drizzle-orm/src/neon/neon-identity.ts b/drizzle-orm/src/neon/neon-identity.ts new file mode 100644 index 000000000..9c8d5486f --- /dev/null +++ 
b/drizzle-orm/src/neon/neon-identity.ts @@ -0,0 +1,19 @@ +import { jsonb, pgSchema, text, timestamp } from '~/pg-core/index.ts'; + +const neonIdentitySchema = pgSchema('neon_identity'); + +/** + * Table schema of the `users_sync` table used by Neon Identity. + * This table automatically synchronizes and stores user data from external authentication providers. + * + * @schema neon_identity + * @table users_sync + */ +export const usersSync = neonIdentitySchema.table('users_sync', { + rawJson: jsonb('raw_json').notNull(), + id: text().primaryKey().notNull(), + name: text(), + email: text(), + createdAt: timestamp('created_at', { withTimezone: true, mode: 'string' }), + deletedAt: timestamp('deleted_at', { withTimezone: true, mode: 'string' }), +}); diff --git a/drizzle-orm/src/pg-core/db.ts b/drizzle-orm/src/pg-core/db.ts index 29dc4f166..17d882895 100644 --- a/drizzle-orm/src/pg-core/db.ts +++ b/drizzle-orm/src/pg-core/db.ts @@ -28,7 +28,7 @@ import { RelationalQueryBuilder } from './query-builders/query.ts'; import { PgRaw } from './query-builders/raw.ts'; import { PgRefreshMaterializedView } from './query-builders/refresh-materialized-view.ts'; import type { SelectedFields } from './query-builders/select.types.ts'; -import type { WithSubqueryWithSelection } from './subquery.ts'; +import type { WithBuilder } from './subquery.ts'; import type { PgViewBase } from './view-base.ts'; import type { PgMaterializedView } from './view.ts'; @@ -120,23 +120,30 @@ export class PgDatabase< * const result = await db.with(sq).select({ name: sq.name }).from(sq); * ``` */ - $with(alias: TAlias) { + $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const self = this; - return { - as( - qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), - ): WithSubqueryWithSelection { - if (typeof qb === 'function') { - qb = qb(new QueryBuilder(self.dialect)); - } + const as = ( + qb: + | TypedQueryBuilder + | SQL + | ((qb: QueryBuilder) => TypedQueryBuilder | 
SQL), + ) => { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder(self.dialect)); + } - return new Proxy( - new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), - new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), - ) as WithSubqueryWithSelection; - }, + return new Proxy( + new WithSubquery( + qb.getSQL(), + selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, + alias, + true, + ), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ); }; - } + return { as }; + }; $count( source: PgTable | PgViewBase | SQL | SQLWrapper, @@ -205,7 +212,7 @@ export class PgDatabase< */ function select(): PgSelectBuilder; function select(fields: TSelection): PgSelectBuilder; - function select(fields?: SelectedFields): PgSelectBuilder { + function select(fields?: TSelection): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: self.session, @@ -240,7 +247,9 @@ export class PgDatabase< */ function selectDistinct(): PgSelectBuilder; function selectDistinct(fields: TSelection): PgSelectBuilder; - function selectDistinct(fields?: SelectedFields): PgSelectBuilder { + function selectDistinct( + fields?: TSelection, + ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: self.session, @@ -280,10 +289,10 @@ export class PgDatabase< on: (PgColumn | SQLWrapper)[], fields: TSelection, ): PgSelectBuilder; - function selectDistinctOn( + function selectDistinctOn( on: (PgColumn | SQLWrapper)[], - fields?: SelectedFields, - ): PgSelectBuilder { + fields?: TSelection, + ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? 
undefined, session: self.session, @@ -421,7 +430,7 @@ export class PgDatabase< */ select(): PgSelectBuilder; select(fields: TSelection): PgSelectBuilder; - select(fields?: SelectedFields): PgSelectBuilder { + select(fields?: TSelection): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: this.session, @@ -455,7 +464,7 @@ export class PgDatabase< */ selectDistinct(): PgSelectBuilder; selectDistinct(fields: TSelection): PgSelectBuilder; - selectDistinct(fields?: SelectedFields): PgSelectBuilder { + selectDistinct(fields?: TSelection): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: this.session, @@ -494,10 +503,10 @@ export class PgDatabase< on: (PgColumn | SQLWrapper)[], fields: TSelection, ): PgSelectBuilder; - selectDistinctOn( + selectDistinctOn( on: (PgColumn | SQLWrapper)[], - fields?: SelectedFields, - ): PgSelectBuilder { + fields?: TSelection, + ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? 
undefined, session: this.session, @@ -647,8 +656,9 @@ export const withReplicas = < const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); const selectDistinctOn: Q['selectDistinctOn'] = (...args: [any]) => getReplica(replicas).selectDistinctOn(...args); + const $count: Q['$count'] = (...args: [any]) => getReplica(replicas).$count(...args); const _with: Q['with'] = (...args: any) => getReplica(replicas).with(...args); - const $with: Q['$with'] = (arg: any) => getReplica(replicas).$with(arg); + const $with: Q['$with'] = (arg: any) => getReplica(replicas).$with(arg) as any; const update: Q['update'] = (...args: [any]) => primary.update(...args); const insert: Q['insert'] = (...args: [any]) => primary.insert(...args); @@ -670,6 +680,7 @@ export const withReplicas = < select, selectDistinct, selectDistinctOn, + $count, $with, with: _with, get query() { diff --git a/drizzle-orm/src/pg-core/query-builders/delete.ts b/drizzle-orm/src/pg-core/query-builders/delete.ts index 682e52e2d..e37c06038 100644 --- a/drizzle-orm/src/pg-core/query-builders/delete.ts +++ b/drizzle-orm/src/pg-core/query-builders/delete.ts @@ -8,12 +8,14 @@ import type { PreparedQueryConfig, } from '~/pg-core/session.ts'; import type { PgTable } from '~/pg-core/table.ts'; +import { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; -import type { Query, SQL, SQLWrapper } from '~/sql/sql.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { ColumnsSelection, Query, SQL, SQLWrapper } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; -import { Table } from '~/table.ts'; +import { getTableName, Table } from '~/table.ts'; import { tracer 
} from '~/tracing.ts'; import { type NeonAuthToken, orderSelectedFields } from '~/utils.ts'; import type { PgColumn } from '../columns/common.ts'; @@ -28,6 +30,7 @@ export type PgDeleteWithout< PgDeleteBase< T['_']['table'], T['_']['queryResult'], + T['_']['selectedFields'], T['_']['returning'], TDynamic, T['_']['excludedMethods'] | K @@ -38,12 +41,14 @@ export type PgDeleteWithout< export type PgDelete< TTable extends PgTable = PgTable, TQueryResult extends PgQueryResultHKT = PgQueryResultHKT, + TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = Record | undefined, -> = PgDeleteBase; +> = PgDeleteBase; export interface PgDeleteConfig { where?: SQL | undefined; table: PgTable; + returningFields?: SelectedFieldsFlat; returning?: SelectedFieldsOrdered; withList?: Subquery[]; } @@ -55,6 +60,7 @@ export type PgDeleteReturningAll< PgDeleteBase< T['_']['table'], T['_']['queryResult'], + T['_']['table']['_']['columns'], T['_']['table']['$inferSelect'], TDynamic, T['_']['excludedMethods'] @@ -71,6 +77,7 @@ export type PgDeleteReturning< PgDeleteBase< T['_']['table'], T['_']['queryResult'], + TSelectedFields, SelectResultFields, TDynamic, T['_']['excludedMethods'] @@ -89,26 +96,33 @@ export type PgDeletePrepare = PgPreparedQuery< export type PgDeleteDynamic = PgDelete< T['_']['table'], T['_']['queryResult'], + T['_']['selectedFields'], T['_']['returning'] >; -export type AnyPgDeleteBase = PgDeleteBase; +export type AnyPgDeleteBase = PgDeleteBase; export interface PgDeleteBase< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, + TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends + TypedQueryBuilder< + TSelectedFields, + TReturning extends undefined ? 
PgQueryResultKind : TReturning[] + >, QueryPromise : TReturning[]>, RunnableQuery : TReturning[], 'pg'>, SQLWrapper { readonly _: { - dialect: 'pg'; + readonly dialect: 'pg'; readonly table: TTable; readonly queryResult: TQueryResult; + readonly selectedFields: TSelectedFields; readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; @@ -119,12 +133,17 @@ export interface PgDeleteBase< export class PgDeleteBase< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, + TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, // eslint-disable-next-line @typescript-eslint/no-unused-vars TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]> implements + TypedQueryBuilder< + TSelectedFields, + TReturning extends undefined ? PgQueryResultKind : TReturning[] + >, RunnableQuery : TReturning[], 'pg'>, SQLWrapper { @@ -203,6 +222,7 @@ export class PgDeleteBase< returning( fields: SelectedFieldsFlat = this.config.table[Table.Symbol.Columns], ): PgDeleteReturning { + this.config.returningFields = fields; this.config.returning = orderSelectedFields(fields); return this as any; } @@ -245,6 +265,22 @@ export class PgDeleteBase< }); }; + /** @internal */ + getSelectedFields(): this['_']['selectedFields'] { + return ( + this.config.returningFields + ? 
new Proxy( + this.config.returningFields, + new SelectionProxyHandler({ + alias: getTableName(this.config.table), + sqlAliasedBehavior: 'alias', + sqlBehavior: 'error', + }), + ) + : undefined + ) as this['_']['selectedFields']; + } + $dynamic(): PgDeleteDynamic { return this as any; } diff --git a/drizzle-orm/src/pg-core/query-builders/insert.ts b/drizzle-orm/src/pg-core/query-builders/insert.ts index 2cf266be4..5a61e9ed4 100644 --- a/drizzle-orm/src/pg-core/query-builders/insert.ts +++ b/drizzle-orm/src/pg-core/query-builders/insert.ts @@ -13,11 +13,12 @@ import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { SelectResultFields } from '~/query-builders/select.types.ts'; import { QueryPromise } from '~/query-promise.ts'; import type { RunnableQuery } from '~/runnable-query.ts'; -import type { Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; +import { SelectionProxyHandler } from '~/selection-proxy.ts'; +import type { ColumnsSelection, Placeholder, Query, SQLWrapper } from '~/sql/sql.ts'; import { Param, SQL, sql } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import type { InferInsertModel } from '~/table.ts'; -import { Columns, Table } from '~/table.ts'; +import { Columns, getTableName, Table } from '~/table.ts'; import { tracer } from '~/tracing.ts'; import { haveSameKeys, mapUpdateSet, type NeonAuthToken, orderSelectedFields } from '~/utils.ts'; import type { AnyPgColumn, PgColumn } from '../columns/common.ts'; @@ -30,6 +31,7 @@ export interface PgInsertConfig { values: Record[] | PgInsertSelectQueryBuilder | SQL; withList?: Subquery[]; onConflict?: SQL; + returningFields?: SelectedFieldsFlat; returning?: SelectedFieldsOrdered; select?: boolean; overridingSystemValue_?: boolean; @@ -136,6 +138,7 @@ export type PgInsertWithout = PgInsertBase< T['_']['table'], T['_']['queryResult'], + TSelectedFields, SelectResultFields, TDynamic, T['_']['excludedMethods'] @@ -158,6 +162,7 @@ export type 
PgInsertReturning< export type PgInsertReturningAll = PgInsertBase< T['_']['table'], T['_']['queryResult'], + T['_']['table']['_']['columns'], T['_']['table']['$inferSelect'], TDynamic, T['_']['excludedMethods'] @@ -186,21 +191,27 @@ export type PgInsertDynamic = PgInsert< T['_']['returning'] >; -export type AnyPgInsert = PgInsertBase; +export type AnyPgInsert = PgInsertBase; export type PgInsert< TTable extends PgTable = PgTable, TQueryResult extends PgQueryResultHKT = PgQueryResultHKT, + TSelectedFields extends ColumnsSelection | undefined = ColumnsSelection | undefined, TReturning extends Record | undefined = Record | undefined, -> = PgInsertBase; +> = PgInsertBase; export interface PgInsertBase< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, + TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = undefined, TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends + TypedQueryBuilder< + TSelectedFields, + TReturning extends undefined ? 
PgQueryResultKind : TReturning[] + >, QueryPromise : TReturning[]>, RunnableQuery : TReturning[], 'pg'>, SQLWrapper @@ -209,6 +220,7 @@ export interface PgInsertBase< readonly dialect: 'pg'; readonly table: TTable; readonly queryResult: TQueryResult; + readonly selectedFields: TSelectedFields; readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; @@ -219,6 +231,7 @@ export interface PgInsertBase< export class PgInsertBase< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, + TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TDynamic extends boolean = false, @@ -226,6 +239,10 @@ export class PgInsertBase< TExcludedMethods extends string = never, > extends QueryPromise : TReturning[]> implements + TypedQueryBuilder< + TSelectedFields, + TReturning extends undefined ? PgQueryResultKind : TReturning[] + >, RunnableQuery : TReturning[], 'pg'>, SQLWrapper { @@ -273,6 +290,7 @@ export class PgInsertBase< returning( fields: SelectedFieldsFlat = this.config.table[Table.Symbol.Columns], ): PgInsertWithout { + this.config.returningFields = fields; this.config.returning = orderSelectedFields(fields); return this as any; } @@ -405,6 +423,22 @@ export class PgInsertBase< }); }; + /** @internal */ + getSelectedFields(): this['_']['selectedFields'] { + return ( + this.config.returningFields + ? 
new Proxy( + this.config.returningFields, + new SelectionProxyHandler({ + alias: getTableName(this.config.table), + sqlAliasedBehavior: 'alias', + sqlBehavior: 'error', + }), + ) + : undefined + ) as this['_']['selectedFields']; + } + $dynamic(): PgInsertDynamic { return this as any; } diff --git a/drizzle-orm/src/pg-core/query-builders/query-builder.ts b/drizzle-orm/src/pg-core/query-builders/query-builder.ts index 9f08f642d..e6c749add 100644 --- a/drizzle-orm/src/pg-core/query-builders/query-builder.ts +++ b/drizzle-orm/src/pg-core/query-builders/query-builder.ts @@ -3,10 +3,10 @@ import type { PgDialectConfig } from '~/pg-core/dialect.ts'; import { PgDialect } from '~/pg-core/dialect.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; -import type { ColumnsSelection, SQLWrapper } from '~/sql/sql.ts'; +import type { ColumnsSelection, SQL, SQLWrapper } from '~/sql/sql.ts'; import { WithSubquery } from '~/subquery.ts'; import type { PgColumn } from '../columns/index.ts'; -import type { WithSubqueryWithSelection } from '../subquery.ts'; +import type { WithBuilder } from '../subquery.ts'; import { PgSelectBuilder } from './select.ts'; import type { SelectedFields } from './select.types.ts'; @@ -21,24 +21,30 @@ export class QueryBuilder { this.dialectConfig = is(dialect, PgDialect) ? 
undefined : dialect; } - $with(alias: TAlias) { + $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const queryBuilder = this; - - return { - as( - qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), - ): WithSubqueryWithSelection { - if (typeof qb === 'function') { - qb = qb(queryBuilder); - } - - return new Proxy( - new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), - new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), - ) as WithSubqueryWithSelection; - }, + const as = ( + qb: + | TypedQueryBuilder + | SQL + | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), + ) => { + if (typeof qb === 'function') { + qb = qb(queryBuilder); + } + + return new Proxy( + new WithSubquery( + qb.getSQL(), + selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, + alias, + true, + ), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as any; }; - } + return { as }; + }; with(...queries: WithSubquery[]) { const self = this; @@ -58,7 +64,9 @@ export class QueryBuilder { function selectDistinct(): PgSelectBuilder; function selectDistinct(fields: TSelection): PgSelectBuilder; - function selectDistinct(fields?: SelectedFields): PgSelectBuilder { + function selectDistinct( + fields?: TSelection, + ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: undefined, @@ -72,10 +80,10 @@ export class QueryBuilder { on: (PgColumn | SQLWrapper)[], fields: TSelection, ): PgSelectBuilder; - function selectDistinctOn( + function selectDistinctOn( on: (PgColumn | SQLWrapper)[], - fields?: SelectedFields, - ): PgSelectBuilder { + fields?: TSelection, + ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? 
undefined, session: undefined, @@ -99,7 +107,7 @@ export class QueryBuilder { selectDistinct(): PgSelectBuilder; selectDistinct(fields: TSelection): PgSelectBuilder; - selectDistinct(fields?: SelectedFields): PgSelectBuilder { + selectDistinct(fields?: TSelection): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: undefined, @@ -113,10 +121,10 @@ export class QueryBuilder { on: (PgColumn | SQLWrapper)[], fields: TSelection, ): PgSelectBuilder; - selectDistinctOn( + selectDistinctOn( on: (PgColumn | SQLWrapper)[], - fields?: SelectedFields, - ): PgSelectBuilder { + fields?: TSelection, + ): PgSelectBuilder { return new PgSelectBuilder({ fields: fields ?? undefined, session: undefined, diff --git a/drizzle-orm/src/pg-core/query-builders/select.ts b/drizzle-orm/src/pg-core/query-builders/select.ts index 597991f79..46b1bf422 100644 --- a/drizzle-orm/src/pg-core/query-builders/select.ts +++ b/drizzle-orm/src/pg-core/query-builders/select.ts @@ -26,6 +26,7 @@ import { Table } from '~/table.ts'; import { tracer } from '~/tracing.ts'; import { applyMixins, + type DrizzleTypeError, getTableColumns, getTableLikeName, haveSameKeys, @@ -52,6 +53,7 @@ import type { PgSetOperatorWithResult, SelectedFields, SetOperatorRightSelect, + TableLikeHasEmptySelection, } from './select.types.ts'; export class PgSelectBuilder< @@ -102,7 +104,10 @@ export class PgSelectBuilder< * {@link https://www.postgresql.org/docs/current/sql-select.html#SQL-FROM | Postgres from documentation} */ from( - source: TFrom, + source: TableLikeHasEmptySelection extends true ? DrizzleTypeError< + "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" + > + : TFrom, ): CreatePgSelectFromBuilderMode< TBuilderMode, GetSelectTableName, @@ -110,27 +115,28 @@ export class PgSelectBuilder< TSelection extends undefined ? 
'single' : 'partial' > { const isPartialSelect = !!this.fields; + const src = source as TFrom; let fields: SelectedFields; if (this.fields) { fields = this.fields; - } else if (is(source, Subquery)) { + } else if (is(src, Subquery)) { // This is required to use the proxy handler to get the correct field values from the subquery fields = Object.fromEntries( - Object.keys(source._.selectedFields).map(( + Object.keys(src._.selectedFields).map(( key, - ) => [key, source[key as unknown as keyof typeof source] as unknown as SelectedFields[string]]), + ) => [key, src[key as unknown as keyof typeof src] as unknown as SelectedFields[string]]), ); - } else if (is(source, PgViewBase)) { - fields = source[ViewBaseConfig].selectedFields as SelectedFields; - } else if (is(source, SQL)) { + } else if (is(src, PgViewBase)) { + fields = src[ViewBaseConfig].selectedFields as SelectedFields; + } else if (is(src, SQL)) { fields = {}; } else { - fields = getTableColumns(source); + fields = getTableColumns(src); } return (new PgSelectBase({ - table: source, + table: src, fields, isPartialSelect, session: this.session, @@ -209,7 +215,7 @@ export abstract class PgSelectQueryBuilderBase< private createJoin( joinType: TJoinType, ): PgSelectJoinFn { - return ( + return (( table: PgTable | Subquery | PgViewBase | SQL, on: ((aliases: TSelection) => SQL | undefined) | SQL | undefined, ) => { @@ -280,7 +286,7 @@ export abstract class PgSelectQueryBuilderBase< } return this as any; - }; + }) as any; } /** diff --git a/drizzle-orm/src/pg-core/query-builders/select.types.ts b/drizzle-orm/src/pg-core/query-builders/select.types.ts index 9c5a538aa..87f21e526 100644 --- a/drizzle-orm/src/pg-core/query-builders/select.types.ts +++ b/drizzle-orm/src/pg-core/query-builders/select.types.ts @@ -23,7 +23,7 @@ import type { import type { ColumnsSelection, Placeholder, SQL, SQLWrapper, View } from '~/sql/sql.ts'; import type { Subquery } from '~/subquery.ts'; import type { Table, UpdateTableConfig } from 
'~/table.ts'; -import type { Assume, ValidateShape, ValueOrArray } from '~/utils.ts'; +import type { Assume, DrizzleTypeError, Equal, ValidateShape, ValueOrArray } from '~/utils.ts'; import type { PgPreparedQuery, PreparedQueryConfig } from '../session.ts'; import type { PgSelectBase, PgSelectQueryBuilderBase } from './select.ts'; @@ -79,6 +79,10 @@ export interface PgSelectConfig { }[]; } +export type TableLikeHasEmptySelection = T extends Subquery + ? Equal extends true ? true : false + : false; + export type PgSelectJoin< T extends AnyPgSelectQueryBuilder, TDynamic extends boolean, @@ -116,7 +120,10 @@ export type PgSelectJoinFn< TJoinedTable extends PgTable | Subquery | PgViewBase | SQL, TJoinedName extends GetSelectTableName = GetSelectTableName, >( - table: TJoinedTable, + table: TableLikeHasEmptySelection extends true ? DrizzleTypeError< + "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" + > + : TJoinedTable, on: ((aliases: T['_']['selection']) => SQL | undefined) | SQL | undefined, ) => PgSelectJoin; diff --git a/drizzle-orm/src/pg-core/query-builders/update.ts b/drizzle-orm/src/pg-core/query-builders/update.ts index 911916381..419a8aec8 100644 --- a/drizzle-orm/src/pg-core/query-builders/update.ts +++ b/drizzle-orm/src/pg-core/query-builders/update.ts @@ -9,6 +9,7 @@ import type { PreparedQueryConfig, } from '~/pg-core/session.ts'; import { PgTable } from '~/pg-core/table.ts'; +import { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AppendToNullabilityMap, AppendToResult, @@ -24,19 +25,27 @@ import type { RunnableQuery } from '~/runnable-query.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import { type ColumnsSelection, type Query, SQL, type SQLWrapper } from '~/sql/sql.ts'; import { Subquery } from '~/subquery.ts'; -import { Table } from '~/table.ts'; +import { getTableName, Table } from '~/table.ts'; import { type Assume, + DrizzleTypeError, + Equal, 
getTableLikeName, mapUpdateSet, type NeonAuthToken, orderSelectedFields, + Simplify, type UpdateSet, } from '~/utils.ts'; import { ViewBaseConfig } from '~/view-common.ts'; import type { PgColumn } from '../columns/common.ts'; import type { PgViewBase } from '../view-base.ts'; -import type { PgSelectJoinConfig, SelectedFields, SelectedFieldsOrdered } from './select.types.ts'; +import type { + PgSelectJoinConfig, + SelectedFields, + SelectedFieldsOrdered, + TableLikeHasEmptySelection, +} from './select.types.ts'; export interface PgUpdateConfig { where?: SQL | undefined; @@ -44,6 +53,7 @@ export interface PgUpdateConfig { table: PgTable; from?: PgTable | Subquery | PgViewBase | SQL; joins: PgSelectJoinConfig[]; + returningFields?: SelectedFields; returning?: SelectedFieldsOrdered; withList?: Subquery[]; } @@ -100,6 +110,7 @@ export type PgUpdateWithout< T['_']['table'], T['_']['queryResult'], T['_']['from'], + T['_']['selectedFields'], T['_']['returning'], T['_']['nullabilityMap'], T['_']['joins'], @@ -118,6 +129,7 @@ export type PgUpdateWithJoins< T['_']['table'], T['_']['queryResult'], TFrom, + T['_']['selectedFields'], T['_']['returning'], AppendToNullabilityMap, 'inner'>, [...T['_']['joins'], { @@ -138,7 +150,10 @@ export type PgUpdateJoinFn< > = < TJoinedTable extends PgTable | Subquery | PgViewBase | SQL, >( - table: TJoinedTable, + table: TableLikeHasEmptySelection extends true ? DrizzleTypeError< + "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" + > + : TJoinedTable, on: | ( ( @@ -161,6 +176,7 @@ export type PgUpdateJoin< T['_']['table'], T['_']['queryResult'], T['_']['from'], + T['_']['selectedFields'], T['_']['returning'], AppendToNullabilityMap, TJoinType>, [...T['_']['joins'], { @@ -204,6 +220,13 @@ export type PgUpdateReturningAll extends true ? 
T['_']['table']['_']['columns'] : Simplify< + & Record + & { + [K in keyof T['_']['joins'] as T['_']['joins'][K]['table']['_']['name']]: + T['_']['joins'][K]['table']['_']['columns']; + } + >, SelectResult< AccumulateToResult< T, @@ -232,6 +255,7 @@ export type PgUpdateReturning< T['_']['table'], T['_']['queryResult'], T['_']['from'], + TSelectedFields, SelectResult< AccumulateToResult< T, @@ -270,23 +294,29 @@ export type PgUpdate< TTable extends PgTable = PgTable, TQueryResult extends PgQueryResultHKT = PgQueryResultHKT, TFrom extends PgTable | Subquery | PgViewBase | SQL | undefined = undefined, + TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = Record | undefined, TNullabilityMap extends Record = Record, TJoins extends Join[] = [], -> = PgUpdateBase; +> = PgUpdateBase; -export type AnyPgUpdate = PgUpdateBase; +export type AnyPgUpdate = PgUpdateBase; export interface PgUpdateBase< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, TFrom extends PgTable | Subquery | PgViewBase | SQL | undefined = undefined, + TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = undefined, TNullabilityMap extends Record = Record, TJoins extends Join[] = [], TDynamic extends boolean = false, TExcludedMethods extends string = never, > extends + TypedQueryBuilder< + TSelectedFields, + TReturning extends undefined ? 
PgQueryResultKind : TReturning[] + >, QueryPromise : TReturning[]>, RunnableQuery : TReturning[], 'pg'>, SQLWrapper @@ -298,6 +328,7 @@ export interface PgUpdateBase< readonly nullabilityMap: TNullabilityMap; readonly queryResult: TQueryResult; readonly from: TFrom; + readonly selectedFields: TSelectedFields; readonly returning: TReturning; readonly dynamic: TDynamic; readonly excludedMethods: TExcludedMethods; @@ -309,6 +340,7 @@ export class PgUpdateBase< TTable extends PgTable, TQueryResult extends PgQueryResultHKT, TFrom extends PgTable | Subquery | PgViewBase | SQL | undefined = undefined, + TSelectedFields extends ColumnsSelection | undefined = undefined, TReturning extends Record | undefined = undefined, // eslint-disable-next-line @typescript-eslint/no-unused-vars TNullabilityMap extends Record = Record, @@ -343,13 +375,17 @@ export class PgUpdateBase< } from( - source: TFrom, + source: TableLikeHasEmptySelection extends true ? DrizzleTypeError< + "Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause" + > + : TFrom, ): PgUpdateWithJoins { - const tableName = getTableLikeName(source); + const src = source as TFrom; + const tableName = getTableLikeName(src); if (typeof tableName === 'string') { this.joinsNotNullableMap[tableName] = true; } - this.config.from = source; + this.config.from = src; return this as any; } @@ -521,6 +557,7 @@ export class PgUpdateBase< } } + this.config.returningFields = fields; this.config.returning = orderSelectedFields(fields); return this as any; } @@ -559,6 +596,22 @@ export class PgUpdateBase< return this._prepare().execute(placeholderValues, this.authToken); }; + /** @internal */ + getSelectedFields(): this['_']['selectedFields'] { + return ( + this.config.returningFields + ? 
new Proxy( + this.config.returningFields, + new SelectionProxyHandler({ + alias: getTableName(this.config.table), + sqlAliasedBehavior: 'alias', + sqlBehavior: 'error', + }), + ) + : undefined + ) as this['_']['selectedFields']; + } + $dynamic(): PgUpdateDynamic { return this as any; } diff --git a/drizzle-orm/src/pg-core/subquery.ts b/drizzle-orm/src/pg-core/subquery.ts index 02d78dee1..5f92e240a 100644 --- a/drizzle-orm/src/pg-core/subquery.ts +++ b/drizzle-orm/src/pg-core/subquery.ts @@ -1,6 +1,8 @@ +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; -import type { ColumnsSelection } from '~/sql/sql.ts'; -import type { Subquery, WithSubquery } from '~/subquery.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; +import type { Subquery, WithSubquery, WithSubqueryWithoutSelection } from '~/subquery.ts'; +import type { QueryBuilder } from './query-builders/query-builder.ts'; export type SubqueryWithSelection = & Subquery> @@ -9,3 +11,19 @@ export type SubqueryWithSelection = & WithSubquery> & AddAliasToSelection; + +export interface WithBuilder { + (alias: TAlias): { + as: { + ( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithSelection; + ( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithoutSelection; + }; + }; + (alias: TAlias, selection: TSelection): { + as: (qb: SQL | ((qb: QueryBuilder) => SQL)) => WithSubqueryWithSelection; + }; +} diff --git a/drizzle-orm/src/query-builders/select.types.ts b/drizzle-orm/src/query-builders/select.types.ts index e7975af65..53a825565 100644 --- a/drizzle-orm/src/query-builders/select.types.ts +++ b/drizzle-orm/src/query-builders/select.types.ts @@ -166,7 +166,7 @@ export type SelectResultField = T extends Drizz export type SelectResultFields = Simplify< { - [Key in keyof TSelectedFields & string]: SelectResultField; + [Key in keyof 
TSelectedFields]: SelectResultField; } >; diff --git a/drizzle-orm/src/singlestore-core/db.ts b/drizzle-orm/src/singlestore-core/db.ts index 1d64448da..ab8ce7bab 100644 --- a/drizzle-orm/src/singlestore-core/db.ts +++ b/drizzle-orm/src/singlestore-core/db.ts @@ -24,7 +24,7 @@ import type { SingleStoreTransaction, SingleStoreTransactionConfig, } from './session.ts'; -import type { WithSubqueryWithSelection } from './subquery.ts'; +import type { WithBuilder } from './subquery.ts'; import type { SingleStoreTable } from './table.ts'; export class SingleStoreDatabase< @@ -113,23 +113,30 @@ export class SingleStoreDatabase< * const result = await db.with(sq).select({ name: sq.name }).from(sq); * ``` */ - $with(alias: TAlias) { + $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const self = this; - return { - as( - qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), - ): WithSubqueryWithSelection { - if (typeof qb === 'function') { - qb = qb(new QueryBuilder(self.dialect)); - } + const as = ( + qb: + | TypedQueryBuilder + | SQL + | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), + ) => { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder(self.dialect)); + } - return new Proxy( - new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), - new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), - ) as WithSubqueryWithSelection; - }, + return new Proxy( + new WithSubquery( + qb.getSQL(), + selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? 
{} : {}) as SelectedFields, + alias, + true, + ), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ); }; - } + return { as }; + }; $count( source: SingleStoreTable | SQL | SQLWrapper, // SingleStoreViewBase | @@ -490,6 +497,7 @@ export const withReplicas = < ): SingleStoreWithReplicas => { const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); + const $count: Q['$count'] = (...args: [any]) => getReplica(replicas).$count(...args); const $with: Q['with'] = (...args: []) => getReplica(replicas).with(...args); const update: Q['update'] = (...args: [any]) => primary.update(...args); @@ -508,6 +516,7 @@ export const withReplicas = < $primary: primary, select, selectDistinct, + $count, with: $with, get query() { return getReplica(replicas).query; diff --git a/drizzle-orm/src/singlestore-core/query-builders/query-builder.ts b/drizzle-orm/src/singlestore-core/query-builders/query-builder.ts index 29d6c2290..24de14af2 100644 --- a/drizzle-orm/src/singlestore-core/query-builders/query-builder.ts +++ b/drizzle-orm/src/singlestore-core/query-builders/query-builder.ts @@ -3,8 +3,8 @@ import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; import type { SingleStoreDialectConfig } from '~/singlestore-core/dialect.ts'; import { SingleStoreDialect } from '~/singlestore-core/dialect.ts'; -import type { WithSubqueryWithSelection } from '~/singlestore-core/subquery.ts'; -import type { ColumnsSelection } from '~/sql/sql.ts'; +import type { WithBuilder } from '~/singlestore-core/subquery.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import { WithSubquery } from '~/subquery.ts'; import { SingleStoreSelectBuilder } from './select.ts'; import type { SelectedFields } from './select.types.ts'; @@ -20,24 +20,30 @@ 
export class QueryBuilder { this.dialectConfig = is(dialect, SingleStoreDialect) ? undefined : dialect; } - $with(alias: TAlias) { + $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const queryBuilder = this; + const as = ( + qb: + | TypedQueryBuilder + | SQL + | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), + ) => { + if (typeof qb === 'function') { + qb = qb(queryBuilder); + } - return { - as( - qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), - ): WithSubqueryWithSelection { - if (typeof qb === 'function') { - qb = qb(queryBuilder); - } - - return new Proxy( - new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), - new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), - ) as WithSubqueryWithSelection; - }, + return new Proxy( + new WithSubquery( + qb.getSQL(), + selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, + alias, + true, + ), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as any; }; - } + return { as }; + }; with(...queries: WithSubquery[]) { const self = this; diff --git a/drizzle-orm/src/singlestore-core/subquery.ts b/drizzle-orm/src/singlestore-core/subquery.ts index a4605c56d..b34b50e27 100644 --- a/drizzle-orm/src/singlestore-core/subquery.ts +++ b/drizzle-orm/src/singlestore-core/subquery.ts @@ -1,6 +1,8 @@ +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; -import type { ColumnsSelection } from '~/sql/sql.ts'; -import type { Subquery, WithSubquery } from '~/subquery.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; +import type { Subquery, WithSubquery, WithSubqueryWithoutSelection } from '~/subquery.ts'; +import type { QueryBuilder } from './query-builders/query-builder.ts'; export type SubqueryWithSelection< TSelection extends 
ColumnsSelection, @@ -15,3 +17,19 @@ export type WithSubqueryWithSelection< > = & WithSubquery> & AddAliasToSelection; + +export interface WithBuilder { + (alias: TAlias): { + as: { + ( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithSelection; + ( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithoutSelection; + }; + }; + (alias: TAlias, selection: TSelection): { + as: (qb: SQL | ((qb: QueryBuilder) => SQL)) => WithSubqueryWithSelection; + }; +} diff --git a/drizzle-orm/src/singlestore-core/table.ts b/drizzle-orm/src/singlestore-core/table.ts index ffad22d74..4eb01c62e 100644 --- a/drizzle-orm/src/singlestore-core/table.ts +++ b/drizzle-orm/src/singlestore-core/table.ts @@ -103,6 +103,35 @@ export function singlestoreTableWithSchema< } export interface SingleStoreTableFn { + < + TTableName extends string, + TColumnsMap extends Record, + >( + name: TTableName, + columns: TColumnsMap, + extraConfig?: ( + self: BuildColumns, + ) => SingleStoreTableExtraConfigValue[], + ): SingleStoreTableWithColumns<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'singlestore'; + }>; + + < + TTableName extends string, + TColumnsMap extends Record, + >( + name: TTableName, + columns: (columnTypes: SingleStoreColumnBuilders) => TColumnsMap, + extraConfig?: (self: BuildColumns) => SingleStoreTableExtraConfigValue[], + ): SingleStoreTableWithColumns<{ + name: TTableName; + schema: TSchemaName; + columns: BuildColumns; + dialect: 'singlestore'; + }>; /** * @deprecated The third parameter of singlestoreTable is changing and will only accept an array instead of an object * @@ -174,36 +203,6 @@ export interface SingleStoreTableFn; dialect: 'singlestore'; }>; - - < - TTableName extends string, - TColumnsMap extends Record, - >( - name: TTableName, - columns: TColumnsMap, - extraConfig?: ( - self: BuildColumns, - ) => SingleStoreTableExtraConfigValue[], - ): 
SingleStoreTableWithColumns<{ - name: TTableName; - schema: TSchemaName; - columns: BuildColumns; - dialect: 'singlestore'; - }>; - - < - TTableName extends string, - TColumnsMap extends Record, - >( - name: TTableName, - columns: (columnTypes: SingleStoreColumnBuilders) => TColumnsMap, - extraConfig?: (self: BuildColumns) => SingleStoreTableExtraConfigValue[], - ): SingleStoreTableWithColumns<{ - name: TTableName; - schema: TSchemaName; - columns: BuildColumns; - dialect: 'singlestore'; - }>; } export const singlestoreTable: SingleStoreTableFn = (name, columns, extraConfig) => { diff --git a/drizzle-orm/src/sql/sql.ts b/drizzle-orm/src/sql/sql.ts index ba7586fe8..50d4c1557 100644 --- a/drizzle-orm/src/sql/sql.ts +++ b/drizzle-orm/src/sql/sql.ts @@ -679,6 +679,10 @@ export function isView(view: unknown): view is View { return typeof view === 'object' && view !== null && IsDrizzleView in view; } +export function getViewName(view: T): T['_']['name'] { + return view[ViewBaseConfig].name; +} + export type InferSelectViewModel = Equal extends true ? 
{ [x: string]: unknown } : SelectResult< diff --git a/drizzle-orm/src/sqlite-core/db.ts b/drizzle-orm/src/sqlite-core/db.ts index f63384f98..f8593c783 100644 --- a/drizzle-orm/src/sqlite-core/db.ts +++ b/drizzle-orm/src/sqlite-core/db.ts @@ -25,7 +25,7 @@ import { SQLiteCountBuilder } from './query-builders/count.ts'; import { RelationalQueryBuilder } from './query-builders/query.ts'; import { SQLiteRaw } from './query-builders/raw.ts'; import type { SelectedFields } from './query-builders/select.types.ts'; -import type { WithSubqueryWithSelection } from './subquery.ts'; +import type { WithBuilder, WithSubqueryWithSelection } from './subquery.ts'; import type { SQLiteViewBase } from './view-base.ts'; export class BaseSQLiteDatabase< @@ -119,23 +119,30 @@ export class BaseSQLiteDatabase< * const result = await db.with(sq).select({ name: sq.name }).from(sq); * ``` */ - $with(alias: TAlias) { + $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const self = this; - return { - as( - qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), - ): WithSubqueryWithSelection { - if (typeof qb === 'function') { - qb = qb(new QueryBuilder(self.dialect)); - } + const as = ( + qb: + | TypedQueryBuilder + | SQL + | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), + ) => { + if (typeof qb === 'function') { + qb = qb(new QueryBuilder(self.dialect)); + } - return new Proxy( - new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), - new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), - ) as WithSubqueryWithSelection; - }, + return new Proxy( + new WithSubquery( + qb.getSQL(), + selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? 
{} : {}) as SelectedFields, + alias, + true, + ), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ); }; - } + return { as }; + }; $count( source: SQLiteTable | SQLiteViewBase | SQL | SQLWrapper, @@ -603,6 +610,7 @@ export const withReplicas = < ): SQLiteWithReplicas => { const select: Q['select'] = (...args: []) => getReplica(replicas).select(...args); const selectDistinct: Q['selectDistinct'] = (...args: []) => getReplica(replicas).selectDistinct(...args); + const $count: Q['$count'] = (...args: [any]) => getReplica(replicas).$count(...args); const $with: Q['with'] = (...args: []) => getReplica(replicas).with(...args); const update: Q['update'] = (...args: [any]) => primary.update(...args); @@ -627,6 +635,7 @@ export const withReplicas = < $primary: primary, select, selectDistinct, + $count, with: $with, get query() { return getReplica(replicas).query; diff --git a/drizzle-orm/src/sqlite-core/query-builders/query-builder.ts b/drizzle-orm/src/sqlite-core/query-builders/query-builder.ts index d2df0cf99..8aec660f0 100644 --- a/drizzle-orm/src/sqlite-core/query-builders/query-builder.ts +++ b/drizzle-orm/src/sqlite-core/query-builders/query-builder.ts @@ -1,10 +1,10 @@ import { entityKind, is } from '~/entity.ts'; import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import { SelectionProxyHandler } from '~/selection-proxy.ts'; -import type { ColumnsSelection } from '~/sql/sql.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; import type { SQLiteDialectConfig } from '~/sqlite-core/dialect.ts'; import { SQLiteDialect, SQLiteSyncDialect } from '~/sqlite-core/dialect.ts'; -import type { WithSubqueryWithSelection } from '~/sqlite-core/subquery.ts'; +import type { WithBuilder } from '~/sqlite-core/subquery.ts'; import { WithSubquery } from '~/subquery.ts'; import { SQLiteSelectBuilder } from './select.ts'; import type { SelectedFields } from './select.types.ts'; @@ -20,24 +20,30 @@ export 
class QueryBuilder { this.dialectConfig = is(dialect, SQLiteDialect) ? undefined : dialect; } - $with(alias: TAlias) { + $with: WithBuilder = (alias: string, selection?: ColumnsSelection) => { const queryBuilder = this; + const as = ( + qb: + | TypedQueryBuilder + | SQL + | ((qb: QueryBuilder) => TypedQueryBuilder | SQL), + ) => { + if (typeof qb === 'function') { + qb = qb(queryBuilder); + } - return { - as( - qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), - ): WithSubqueryWithSelection { - if (typeof qb === 'function') { - qb = qb(queryBuilder); - } - - return new Proxy( - new WithSubquery(qb.getSQL(), qb.getSelectedFields() as SelectedFields, alias, true), - new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), - ) as WithSubqueryWithSelection; - }, + return new Proxy( + new WithSubquery( + qb.getSQL(), + selection ?? ('getSelectedFields' in qb ? qb.getSelectedFields() ?? {} : {}) as SelectedFields, + alias, + true, + ), + new SelectionProxyHandler({ alias, sqlAliasedBehavior: 'alias', sqlBehavior: 'error' }), + ) as any; }; - } + return { as }; + }; with(...queries: WithSubquery[]) { const self = this; diff --git a/drizzle-orm/src/sqlite-core/subquery.ts b/drizzle-orm/src/sqlite-core/subquery.ts index a4db42176..f8c0625df 100644 --- a/drizzle-orm/src/sqlite-core/subquery.ts +++ b/drizzle-orm/src/sqlite-core/subquery.ts @@ -1,6 +1,8 @@ +import type { TypedQueryBuilder } from '~/query-builders/query-builder.ts'; import type { AddAliasToSelection } from '~/query-builders/select.types.ts'; -import type { ColumnsSelection } from '~/sql/sql.ts'; -import type { Subquery, WithSubquery } from '~/subquery.ts'; +import type { ColumnsSelection, SQL } from '~/sql/sql.ts'; +import type { Subquery, WithSubquery, WithSubqueryWithoutSelection } from '~/subquery.ts'; +import type { QueryBuilder } from './query-builders/query-builder.ts'; export type SubqueryWithSelection = & Subquery> @@ -9,3 +11,19 @@ export type 
SubqueryWithSelection = & WithSubquery> & AddAliasToSelection; + +export interface WithBuilder { + (alias: TAlias): { + as: { + ( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithSelection; + ( + qb: TypedQueryBuilder | ((qb: QueryBuilder) => TypedQueryBuilder), + ): WithSubqueryWithoutSelection; + }; + }; + (alias: TAlias, selection: TSelection): { + as: (qb: SQL | ((qb: QueryBuilder) => SQL)) => WithSubqueryWithSelection; + }; +} diff --git a/drizzle-orm/src/subquery.ts b/drizzle-orm/src/subquery.ts index 37fe48d86..c2303cc71 100644 --- a/drizzle-orm/src/subquery.ts +++ b/drizzle-orm/src/subquery.ts @@ -44,3 +44,5 @@ export class WithSubquery< > extends Subquery { static override readonly [entityKind]: string = 'WithSubquery'; } + +export type WithSubqueryWithoutSelection = WithSubquery; diff --git a/drizzle-orm/src/vercel-postgres/session.ts b/drizzle-orm/src/vercel-postgres/session.ts index a901f24c8..02a9d5381 100644 --- a/drizzle-orm/src/vercel-postgres/session.ts +++ b/drizzle-orm/src/vercel-postgres/session.ts @@ -15,7 +15,7 @@ import type { SelectedFieldsOrdered } from '~/pg-core/query-builders/select.type import type { PgQueryResultHKT, PgTransactionConfig, PreparedQueryConfig } from '~/pg-core/session.ts'; import { PgPreparedQuery, PgSession } from '~/pg-core/session.ts'; import type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts'; -import { fillPlaceholders, type Query, sql } from '~/sql/sql.ts'; +import { fillPlaceholders, type Query, type SQL, sql } from '~/sql/sql.ts'; import { type Assume, mapResultRow } from '~/utils.ts'; export type VercelPgClient = VercelPool | VercelClient | VercelPoolClient; @@ -181,6 +181,12 @@ export class VercelPgSession< return this.client.query(query, params); } + override async count(sql: SQL): Promise { + const result = await this.execute(sql); + + return Number((result as any)['rows'][0]['count']); + } + override async transaction( transaction: (tx: 
VercelPgTransaction) => Promise, config?: PgTransactionConfig | undefined, diff --git a/drizzle-orm/type-tests/mysql/with.ts b/drizzle-orm/type-tests/mysql/with.ts index e6f240489..715254c3d 100644 --- a/drizzle-orm/type-tests/mysql/with.ts +++ b/drizzle-orm/type-tests/mysql/with.ts @@ -81,3 +81,29 @@ const orders = mysqlTable('orders', { const regionalSalesWith = db.$with('regional_sales_with').as(db.select().from(regionalSales)); db.with(regionalSalesWith).select().from(regionalSalesWith).where(like(regionalSalesWith.totalSales, 'abc')); } + +{ + const providers = mysqlTable('providers', { + id: serial().primaryKey(), + providerName: text().notNull(), + }); + + const sq1 = db.$with('providers_sq', { + name: providers.providerName, + }).as(sql`select provider_name as name from providers`); + const q1 = await db.with(sq1).select().from(sq1); + Expect>; + + const sq2 = db.$with('providers_sq', { + nested: { + id: providers.id, + }, + }).as(() => sql`select id from providers`); + const q2 = await db.with(sq2).select().from(sq2); + Expect>; + + // @ts-expect-error + db.$with('providers_sq', { name: providers.providerName }).as(db.select().from(providers)); + // @ts-expect-error + db.$with('providers_sq', { name: providers.providerName }).as((qb) => qb.select().from(providers)); +} diff --git a/drizzle-orm/type-tests/pg/with.ts b/drizzle-orm/type-tests/pg/with.ts index 288e3b6d0..0a1ab699e 100644 --- a/drizzle-orm/type-tests/pg/with.ts +++ b/drizzle-orm/type-tests/pg/with.ts @@ -3,18 +3,19 @@ import { Expect } from 'type-tests/utils.ts'; import { gt, inArray, like } from '~/expressions.ts'; import { integer, pgTable, serial, text } from '~/pg-core/index.ts'; import { sql } from '~/sql/sql.ts'; +import { DrizzleTypeError } from '~/utils.ts'; import { db } from './db.ts'; -const orders = pgTable('orders', { - id: serial('id').primaryKey(), - region: text('region').notNull(), - product: text('product').notNull(), - amount: integer('amount').notNull(), - quantity: 
integer('quantity').notNull(), - generated: text('generatedText').generatedAlwaysAs(sql``), -}); - { + const orders = pgTable('orders', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull(), + amount: integer('amount').notNull(), + quantity: integer('quantity').notNull(), + generated: text('generatedText').generatedAlwaysAs(sql``), + }); + const regionalSales = db .$with('regional_sales') .as((qb) => @@ -81,3 +82,249 @@ const orders = pgTable('orders', { const regionalSalesWith = db.$with('regional_sales_with').as(db.select().from(regionalSales)); db.with(regionalSalesWith).select().from(regionalSalesWith).where(like(regionalSalesWith.totalSales, 'abc')); } + +{ + const providers = pgTable('providers', { + id: serial().primaryKey(), + providerName: text().notNull(), + }); + const products = pgTable('products', { + id: serial().primaryKey(), + productName: text().notNull(), + }); + + const sq1 = db.$with('inserted_products').as( + db.insert(products).values({ productName: sql`` }), + ); + const sq2 = db.$with('inserted_products').as( + db.insert(products).values({ productName: sql`` }).returning(), + ); + const sq3 = db.$with('inserted_products').as( + db.insert(products).values({ productName: sql`` }).returning({ productName: products.productName }), + ); + + // @ts-expect-error + db.with(sq1).select().from(sq1); + // @ts-expect-error + db.with(sq1).select().from(providers).leftJoin(sq1, sql``); + + const q3 = await db.with(sq2).select().from(sq2); + Expect< + Equal + >; + const q4 = await db.with(sq3).select().from(providers).leftJoin(sq2, sql``); + Expect< + Equal + >; + + const q5 = await db.with(sq3).select().from(sq3); + Expect>; + const q6 = await db.with(sq3).select().from(providers).leftJoin(sq3, sql``); + Expect< + Equal< + typeof q6, + { providers: { id: number; providerName: string }; inserted_products: { productName: string } | null }[] + > + >; +} + +{ + const providers = pgTable('providers', { + 
id: serial().primaryKey(), + providerName: text().notNull(), + }); + const products = pgTable('products', { + id: serial().primaryKey(), + productName: text().notNull(), + }); + const otherProducts = pgTable('other_products', { + id: serial().primaryKey(), + productName: text().notNull(), + }); + + const sq1 = db.$with('updated_products').as( + db.update(products).set({ productName: sql`` }), + ); + const sq2 = db.$with('updated_products').as( + db.update(products).set({ productName: sql`` }).returning(), + ); + const sq3 = db.$with('updated_products').as( + db.update(products).set({ productName: sql`` }).returning({ productName: products.productName }), + ); + const sq4 = db.$with('updated_products').as( + db.update(products).set({ productName: sql`` }).from(otherProducts).returning(), + ); + + // @ts-expect-error + db.with(sq1).select().from(sq1); + // @ts-expect-error + db.with(sq1).select().from(providers).leftJoin(sq1, sql``); + + const q3 = await db.with(sq2).select().from(sq2); + Expect< + Equal + >; + const q4 = await db.with(sq3).select().from(providers).leftJoin(sq2, sql``); + Expect< + Equal + >; + + const q5 = await db.with(sq3).select().from(sq3); + Expect< + Equal + >; + const q6 = await db.with(sq3).select().from(providers).leftJoin(sq3, sql``); + Expect< + Equal + >; + + const q7 = await db.with(sq4).select().from(sq4); + Expect< + Equal + >; + const q8 = await db.with(sq4).select().from(providers).leftJoin(sq4, sql``); + Expect< + Equal + >; +} + +{ + const providers = pgTable('providers', { + id: serial().primaryKey(), + providerName: text().notNull(), + }); + const products = pgTable('products', { + id: serial().primaryKey(), + productName: text().notNull(), + }); + + const sq1 = db.$with('inserted_products').as( + db.delete(products), + ); + const sq2 = db.$with('inserted_products').as( + db.delete(products).returning(), + ); + const sq3 = db.$with('inserted_products').as( + db.delete(products).returning({ productName: products.productName }), + 
); + + // @ts-expect-error + db.with(sq1).select().from(sq1); + // @ts-expect-error + db.with(sq1).select().from(providers).leftJoin(sq1, sql``); + + const q3 = await db.with(sq2).select().from(sq2); + Expect< + Equal + >; + const q4 = await db.with(sq3).select().from(providers).leftJoin(sq2, sql``); + Expect< + Equal + >; + + const q5 = await db.with(sq3).select().from(sq3); + Expect>; + const q6 = await db.with(sq3).select().from(providers).leftJoin(sq3, sql``); + Expect< + Equal< + typeof q6, + { providers: { id: number; providerName: string }; inserted_products: { productName: string } | null }[] + > + >; +} + +{ + const providers = pgTable('providers', { + id: serial().primaryKey(), + providerName: text().notNull(), + }); + + const sq1 = db.$with('providers_sq', { + name: providers.providerName, + }).as(sql`select provider_name as name from providers`); + const q1 = await db.with(sq1).select().from(sq1); + Expect>; + + const sq2 = db.$with('providers_sq', { + nested: { + id: providers.id, + }, + }).as(() => sql`select id from providers`); + const q2 = await db.with(sq2).select().from(sq2); + Expect>; + + // @ts-expect-error + db.$with('providers_sq', { name: providers.providerName }).as(db.select().from(providers)); + // @ts-expect-error + db.$with('providers_sq', { name: providers.providerName }).as((qb) => qb.select().from(providers)); +} diff --git a/drizzle-orm/type-tests/singlestore/with.ts b/drizzle-orm/type-tests/singlestore/with.ts index 4233fbbf1..ca48bb635 100644 --- a/drizzle-orm/type-tests/singlestore/with.ts +++ b/drizzle-orm/type-tests/singlestore/with.ts @@ -78,3 +78,29 @@ const orders = singlestoreTable('orders', { }[], typeof allFromWith> >; } + +{ + const providers = singlestoreTable('providers', { + id: serial().primaryKey(), + providerName: text().notNull(), + }); + + const sq1 = db.$with('providers_sq', { + name: providers.providerName, + }).as(sql`select provider_name as name from providers`); + const q1 = await 
db.with(sq1).select().from(sq1); + Expect>; + + const sq2 = db.$with('providers_sq', { + nested: { + id: providers.id, + }, + }).as(() => sql`select id from providers`); + const q2 = await db.with(sq2).select().from(sq2); + Expect>; + + // @ts-expect-error + db.$with('providers_sq', { name: providers.providerName }).as(db.select().from(providers)); + // @ts-expect-error + db.$with('providers_sq', { name: providers.providerName }).as((qb) => qb.select().from(providers)); +} diff --git a/drizzle-orm/type-tests/sqlite/with.ts b/drizzle-orm/type-tests/sqlite/with.ts index b26e4e7d7..5c0e3b23b 100644 --- a/drizzle-orm/type-tests/sqlite/with.ts +++ b/drizzle-orm/type-tests/sqlite/with.ts @@ -82,3 +82,29 @@ const orders = sqliteTable('orders', { const regionalSalesWith = db.$with('regional_sales_with').as(db.select().from(regionalSales)); db.with(regionalSalesWith).select().from(regionalSalesWith).where(like(regionalSalesWith.totalSales, 'abc')); } + +{ + const providers = sqliteTable('providers', { + id: integer().primaryKey(), + providerName: text().notNull(), + }); + + const sq1 = db.$with('providers_sq', { + name: providers.providerName, + }).as(sql`select provider_name as name from providers`); + const q1 = await db.with(sq1).select().from(sq1); + Expect>; + + const sq2 = db.$with('providers_sq', { + nested: { + id: providers.id, + }, + }).as(() => sql`select id from providers`); + const q2 = await db.with(sq2).select().from(sq2); + Expect>; + + // @ts-expect-error + db.$with('providers_sq', { name: providers.providerName }).as(db.select().from(providers)); + // @ts-expect-error + db.$with('providers_sq', { name: providers.providerName }).as((qb) => qb.select().from(providers)); +} diff --git a/drizzle-typebox/package.json b/drizzle-typebox/package.json index c03d64105..0c986c0a1 100644 --- a/drizzle-typebox/package.json +++ b/drizzle-typebox/package.json @@ -1,6 +1,6 @@ { "name": "drizzle-typebox", - "version": "0.2.1", + "version": "0.3.0", "description": "Generate 
Typebox schemas from Drizzle ORM schemas", "type": "module", "scripts": { diff --git a/drizzle-typebox/src/column.ts b/drizzle-typebox/src/column.ts index 9bef765bf..8f1244b71 100644 --- a/drizzle-typebox/src/column.ts +++ b/drizzle-typebox/src/column.ts @@ -60,9 +60,7 @@ import { isColumnType, isWithEnum } from './utils.ts'; import type { BufferSchema, JsonSchema } from './utils.ts'; export const literalSchema = t.Union([t.String(), t.Number(), t.Boolean(), t.Null()]); -export const jsonSchema: JsonSchema = t.Recursive((self) => - t.Union([literalSchema, t.Array(self), t.Record(t.String(), self)]) -) as any; +export const jsonSchema: JsonSchema = t.Union([literalSchema, t.Array(t.Any()), t.Record(t.String(), t.Any())]) as any; TypeRegistry.Set('Buffer', (_, value) => value instanceof Buffer); // eslint-disable-line no-instanceof/no-instanceof export const bufferSchema: BufferSchema = { [Kind]: 'Buffer', type: 'buffer' } as any; diff --git a/drizzle-typebox/src/index.ts b/drizzle-typebox/src/index.ts index 0a6499e5b..26230f99a 100644 --- a/drizzle-typebox/src/index.ts +++ b/drizzle-typebox/src/index.ts @@ -1,2 +1,3 @@ export * from './schema.ts'; +export type { BuildSchema } from './schema.types.internal.ts'; export * from './schema.types.ts'; diff --git a/drizzle-typebox/src/utils.ts b/drizzle-typebox/src/utils.ts index 686bf01b8..398979aac 100644 --- a/drizzle-typebox/src/utils.ts +++ b/drizzle-typebox/src/utils.ts @@ -14,7 +14,7 @@ export function isWithEnum(column: Column): column is typeof column & { enumValu export const isPgEnum: (entity: any) => entity is PgEnum<[string, ...string[]]> = isWithEnum as any; type Literal = Static; -export type Json = Literal | { [key: string]: Json } | Json[]; +export type Json = Literal | { [key: string]: any } | any[]; export interface JsonSchema extends TSchema { [Kind]: 'Union'; static: Json; diff --git a/drizzle-valibot/package.json b/drizzle-valibot/package.json index 621d36782..7818dae31 100644 --- 
a/drizzle-valibot/package.json +++ b/drizzle-valibot/package.json @@ -1,6 +1,6 @@ { "name": "drizzle-valibot", - "version": "0.3.1", + "version": "0.4.0", "description": "Generate valibot schemas from Drizzle ORM schemas", "type": "module", "scripts": { diff --git a/drizzle-valibot/src/column.ts b/drizzle-valibot/src/column.ts index 040dbac21..aa03bef40 100644 --- a/drizzle-valibot/src/column.ts +++ b/drizzle-valibot/src/column.ts @@ -61,8 +61,8 @@ import type { Json } from './utils.ts'; export const literalSchema = v.union([v.string(), v.number(), v.boolean(), v.null()]); export const jsonSchema: v.GenericSchema = v.union([ literalSchema, - v.array(v.lazy(() => jsonSchema)), - v.record(v.string(), v.lazy(() => jsonSchema)), + v.array(v.any()), + v.record(v.string(), v.any()), ]); export const bufferSchema: v.GenericSchema = v.custom((v) => v instanceof Buffer); // eslint-disable-line no-instanceof/no-instanceof diff --git a/drizzle-valibot/src/index.ts b/drizzle-valibot/src/index.ts index 0a6499e5b..26230f99a 100644 --- a/drizzle-valibot/src/index.ts +++ b/drizzle-valibot/src/index.ts @@ -1,2 +1,3 @@ export * from './schema.ts'; +export type { BuildSchema } from './schema.types.internal.ts'; export * from './schema.types.ts'; diff --git a/drizzle-valibot/src/utils.ts b/drizzle-valibot/src/utils.ts index eb5034d6f..877c2b8f0 100644 --- a/drizzle-valibot/src/utils.ts +++ b/drizzle-valibot/src/utils.ts @@ -14,7 +14,7 @@ export function isWithEnum(column: Column): column is typeof column & { enumValu export const isPgEnum: (entity: any) => entity is PgEnum<[string, ...string[]]> = isWithEnum as any; type Literal = v.InferOutput; -export type Json = Literal | { [key: string]: Json } | Json[]; +export type Json = Literal | { [key: string]: any } | any[]; export type IsNever = [T] extends [never] ? 
true : false; diff --git a/drizzle-zod/package.json b/drizzle-zod/package.json index cb1e472fa..ce6ece3d4 100644 --- a/drizzle-zod/package.json +++ b/drizzle-zod/package.json @@ -1,6 +1,6 @@ { "name": "drizzle-zod", - "version": "0.6.1", + "version": "0.7.0", "description": "Generate Zod schemas from Drizzle ORM schemas", "type": "module", "scripts": { diff --git a/drizzle-zod/src/column.ts b/drizzle-zod/src/column.ts index 23bc3c142..c6241cd4f 100644 --- a/drizzle-zod/src/column.ts +++ b/drizzle-zod/src/column.ts @@ -54,18 +54,19 @@ import type { } from 'drizzle-orm/singlestore-core'; import type { SQLiteInteger, SQLiteReal, SQLiteText } from 'drizzle-orm/sqlite-core'; import { z } from 'zod'; -import type { z as zod } from 'zod'; +import { z as zod } from 'zod'; import { CONSTANTS } from './constants.ts'; +import type { CreateSchemaFactoryOptions } from './schema.types.ts'; import { isColumnType, isWithEnum } from './utils.ts'; import type { Json } from './utils.ts'; export const literalSchema = z.union([z.string(), z.number(), z.boolean(), z.null()]); -export const jsonSchema: z.ZodType = z.lazy(() => - z.union([literalSchema, z.array(jsonSchema), z.record(jsonSchema)]) -); +export const jsonSchema: z.ZodType = z.union([literalSchema, z.record(z.any()), z.array(z.any())]); export const bufferSchema: z.ZodType = z.custom((v) => v instanceof Buffer); // eslint-disable-line no-instanceof/no-instanceof -export function columnToSchema(column: Column, z: typeof zod): z.ZodTypeAny { +export function columnToSchema(column: Column, factory: CreateSchemaFactoryOptions | undefined): z.ZodTypeAny { + const z = factory?.zodInstance ?? zod; + const coerce = factory?.coerce ?? 
{}; let schema!: z.ZodTypeAny; if (isWithEnum(column)) { @@ -98,15 +99,15 @@ export function columnToSchema(column: Column, z: typeof zod): z.ZodTypeAny { } else if (column.dataType === 'array') { schema = z.array(z.any()); } else if (column.dataType === 'number') { - schema = numberColumnToSchema(column, z); + schema = numberColumnToSchema(column, z, coerce); } else if (column.dataType === 'bigint') { - schema = bigintColumnToSchema(column, z); + schema = bigintColumnToSchema(column, z, coerce); } else if (column.dataType === 'boolean') { - schema = z.boolean(); + schema = coerce === true || coerce.boolean ? z.coerce.boolean() : z.boolean(); } else if (column.dataType === 'date') { - schema = z.date(); + schema = coerce === true || coerce.date ? z.coerce.date() : z.date(); } else if (column.dataType === 'string') { - schema = stringColumnToSchema(column, z); + schema = stringColumnToSchema(column, z, coerce); } else if (column.dataType === 'json') { schema = jsonSchema; } else if (column.dataType === 'custom') { @@ -123,7 +124,11 @@ export function columnToSchema(column: Column, z: typeof zod): z.ZodTypeAny { return schema; } -function numberColumnToSchema(column: Column, z: typeof zod): z.ZodTypeAny { +function numberColumnToSchema( + column: Column, + z: typeof zod, + coerce: CreateSchemaFactoryOptions['coerce'], +): z.ZodTypeAny { let unsigned = column.getSQLType().includes('unsigned'); let min!: number; let max!: number; @@ -223,19 +228,29 @@ function numberColumnToSchema(column: Column, z: typeof zod): z.ZodTypeAny { max = Number.MAX_SAFE_INTEGER; } - const schema = z.number().min(min).max(max); + let schema = coerce === true || coerce?.number ? z.coerce.number() : z.number(); + schema = schema.min(min).max(max); return integer ? 
schema.int() : schema; } -function bigintColumnToSchema(column: Column, z: typeof zod): z.ZodTypeAny { +function bigintColumnToSchema( + column: Column, + z: typeof zod, + coerce: CreateSchemaFactoryOptions['coerce'], +): z.ZodTypeAny { const unsigned = column.getSQLType().includes('unsigned'); const min = unsigned ? 0n : CONSTANTS.INT64_MIN; const max = unsigned ? CONSTANTS.INT64_UNSIGNED_MAX : CONSTANTS.INT64_MAX; - return z.bigint().min(min).max(max); + const schema = coerce === true || coerce?.bigint ? z.coerce.bigint() : z.bigint(); + return schema.min(min).max(max); } -function stringColumnToSchema(column: Column, z: typeof zod): z.ZodTypeAny { +function stringColumnToSchema( + column: Column, + z: typeof zod, + coerce: CreateSchemaFactoryOptions['coerce'], +): z.ZodTypeAny { if (isColumnType>>(column, ['PgUUID'])) { return z.string().uuid(); } @@ -278,7 +293,7 @@ function stringColumnToSchema(column: Column, z: typeof zod): z.ZodTypeAny { max = column.dimensions; } - let schema = z.string(); + let schema = coerce === true || coerce?.string ? z.coerce.string() : z.string(); schema = regex ? schema.regex(regex) : schema; return max && fixed ? schema.length(max) : max ? schema.max(max) : schema; } diff --git a/drizzle-zod/src/index.ts b/drizzle-zod/src/index.ts index 0a6499e5b..26230f99a 100644 --- a/drizzle-zod/src/index.ts +++ b/drizzle-zod/src/index.ts @@ -1,2 +1,3 @@ export * from './schema.ts'; +export type { BuildSchema } from './schema.types.internal.ts'; export * from './schema.types.ts'; diff --git a/drizzle-zod/src/schema.ts b/drizzle-zod/src/schema.ts index 67a9cb733..40c7e891c 100644 --- a/drizzle-zod/src/schema.ts +++ b/drizzle-zod/src/schema.ts @@ -38,7 +38,7 @@ function handleColumns( } const column = is(selected, Column) ? selected : undefined; - const schema = column ? columnToSchema(column, factory?.zodInstance ?? z) : z.any(); + const schema = column ? 
columnToSchema(column, factory) : z.any(); const refined = typeof refinement === 'function' ? refinement(schema) : schema; if (conditions.never(column)) { diff --git a/drizzle-zod/src/schema.types.ts b/drizzle-zod/src/schema.types.ts index 5873cd2a3..9ec093593 100644 --- a/drizzle-zod/src/schema.types.ts +++ b/drizzle-zod/src/schema.types.ts @@ -49,4 +49,5 @@ export interface CreateUpdateSchema { export interface CreateSchemaFactoryOptions { zodInstance?: any; + coerce?: Partial> | true; } diff --git a/drizzle-zod/src/utils.ts b/drizzle-zod/src/utils.ts index 506b80565..7ce85792a 100644 --- a/drizzle-zod/src/utils.ts +++ b/drizzle-zod/src/utils.ts @@ -14,7 +14,7 @@ export function isWithEnum(column: Column): column is typeof column & { enumValu export const isPgEnum: (entity: any) => entity is PgEnum<[string, ...string[]]> = isWithEnum as any; type Literal = z.infer; -export type Json = Literal | { [key: string]: Json } | Json[]; +export type Json = Literal | { [key: string]: any } | any[]; export type IsNever = [T] extends [never] ? 
true : false; diff --git a/drizzle-zod/tests/mysql.test.ts b/drizzle-zod/tests/mysql.test.ts index 73ba48dae..314631b6e 100644 --- a/drizzle-zod/tests/mysql.test.ts +++ b/drizzle-zod/tests/mysql.test.ts @@ -4,7 +4,7 @@ import { test } from 'vitest'; import { z } from 'zod'; import { jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; -import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; +import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = z.number().min(CONSTANTS.INT32_MIN).max(CONSTANTS.INT32_MAX).int(); @@ -454,6 +454,59 @@ test('all data types', (t) => { Expect>(); }); +test('type coercion - all', (t) => { + const table = mysqlTable('test', ({ + bigint, + boolean, + timestamp, + int, + text, + }) => ({ + bigint: bigint({ mode: 'bigint' }).notNull(), + boolean: boolean().notNull(), + timestamp: timestamp().notNull(), + int: int().notNull(), + text: text().notNull(), + })); + + const { createSelectSchema } = createSchemaFactory({ + coerce: true, + }); + const result = createSelectSchema(table); + const expected = z.object({ + bigint: z.coerce.bigint().min(CONSTANTS.INT64_MIN).max(CONSTANTS.INT64_MAX), + boolean: z.coerce.boolean(), + timestamp: z.coerce.date(), + int: z.coerce.number().min(CONSTANTS.INT32_MIN).max(CONSTANTS.INT32_MAX).int(), + text: z.coerce.string().max(CONSTANTS.INT16_UNSIGNED_MAX), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('type coercion - mixed', (t) => { + const table = mysqlTable('test', ({ + timestamp, + int, + }) => ({ + timestamp: timestamp().notNull(), + int: int().notNull(), + })); + + const { createSelectSchema } = createSchemaFactory({ + coerce: { + date: true, + }, + }); + const result = createSelectSchema(table); + const expected = z.object({ + timestamp: z.coerce.date(), + int: 
z.number().min(CONSTANTS.INT32_MIN).max(CONSTANTS.INT32_MAX).int(), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + /* Disallow unknown keys in table refinement - select */ { const table = mysqlTable('test', { id: int() }); // @ts-expect-error diff --git a/drizzle-zod/tests/pg.test.ts b/drizzle-zod/tests/pg.test.ts index 7964f65d6..4f82afc2d 100644 --- a/drizzle-zod/tests/pg.test.ts +++ b/drizzle-zod/tests/pg.test.ts @@ -14,7 +14,7 @@ import { test } from 'vitest'; import { z } from 'zod'; import { jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; -import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; +import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectEnumValues, expectSchemaShape } from './utils.ts'; const integerSchema = z.number().min(CONSTANTS.INT32_MIN).max(CONSTANTS.INT32_MAX).int(); @@ -500,6 +500,59 @@ test('all data types', (t) => { Expect>(); }); +test('type coercion - all', (t) => { + const table = pgTable('test', ({ + bigint, + boolean, + timestamp, + integer, + text, + }) => ({ + bigint: bigint({ mode: 'bigint' }).notNull(), + boolean: boolean().notNull(), + timestamp: timestamp().notNull(), + integer: integer().notNull(), + text: text().notNull(), + })); + + const { createSelectSchema } = createSchemaFactory({ + coerce: true, + }); + const result = createSelectSchema(table); + const expected = z.object({ + bigint: z.coerce.bigint().min(CONSTANTS.INT64_MIN).max(CONSTANTS.INT64_MAX), + boolean: z.coerce.boolean(), + timestamp: z.coerce.date(), + integer: z.coerce.number().min(CONSTANTS.INT32_MIN).max(CONSTANTS.INT32_MAX).int(), + text: z.coerce.string(), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('type coercion - mixed', (t) => { + const table = pgTable('test', ({ + timestamp, + integer, + }) => ({ + timestamp: timestamp().notNull(), + integer: 
integer().notNull(), + })); + + const { createSelectSchema } = createSchemaFactory({ + coerce: { + date: true, + }, + }); + const result = createSelectSchema(table); + const expected = z.object({ + timestamp: z.coerce.date(), + integer: z.number().min(CONSTANTS.INT32_MIN).max(CONSTANTS.INT32_MAX).int(), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + /* Disallow unknown keys in table refinement - select */ { const table = pgTable('test', { id: integer() }); // @ts-expect-error diff --git a/drizzle-zod/tests/singlestore.test.ts b/drizzle-zod/tests/singlestore.test.ts index b91c74be8..c736efa4c 100644 --- a/drizzle-zod/tests/singlestore.test.ts +++ b/drizzle-zod/tests/singlestore.test.ts @@ -1,10 +1,10 @@ -import { type Equal } from 'drizzle-orm'; +import type { Equal } from 'drizzle-orm'; import { customType, int, serial, singlestoreSchema, singlestoreTable, text } from 'drizzle-orm/singlestore-core'; import { test } from 'vitest'; import { z } from 'zod'; import { jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; -import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; +import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = z.number().min(CONSTANTS.INT32_MIN).max(CONSTANTS.INT32_MAX).int(); @@ -456,6 +456,59 @@ test('all data types', (t) => { Expect>(); }); +test('type coercion - all', (t) => { + const table = singlestoreTable('test', ({ + bigint, + boolean, + timestamp, + int, + text, + }) => ({ + bigint: bigint({ mode: 'bigint' }).notNull(), + boolean: boolean().notNull(), + timestamp: timestamp().notNull(), + int: int().notNull(), + text: text().notNull(), + })); + + const { createSelectSchema } = createSchemaFactory({ + coerce: true, + }); + const result = createSelectSchema(table); + const expected = z.object({ + bigint: 
z.coerce.bigint().min(CONSTANTS.INT64_MIN).max(CONSTANTS.INT64_MAX), + boolean: z.coerce.boolean(), + timestamp: z.coerce.date(), + int: z.coerce.number().min(CONSTANTS.INT32_MIN).max(CONSTANTS.INT32_MAX).int(), + text: z.coerce.string().max(CONSTANTS.INT16_UNSIGNED_MAX), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('type coercion - mixed', (t) => { + const table = singlestoreTable('test', ({ + timestamp, + int, + }) => ({ + timestamp: timestamp().notNull(), + int: int().notNull(), + })); + + const { createSelectSchema } = createSchemaFactory({ + coerce: { + date: true, + }, + }); + const result = createSelectSchema(table); + const expected = z.object({ + timestamp: z.coerce.date(), + int: z.number().min(CONSTANTS.INT32_MIN).max(CONSTANTS.INT32_MAX).int(), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + /* Disallow unknown keys in table refinement - select */ { const table = singlestoreTable('test', { id: int() }); // @ts-expect-error diff --git a/drizzle-zod/tests/sqlite.test.ts b/drizzle-zod/tests/sqlite.test.ts index bb0f254b5..5950f6efe 100644 --- a/drizzle-zod/tests/sqlite.test.ts +++ b/drizzle-zod/tests/sqlite.test.ts @@ -4,7 +4,7 @@ import { test } from 'vitest'; import { z } from 'zod'; import { bufferSchema, jsonSchema } from '~/column.ts'; import { CONSTANTS } from '~/constants.ts'; -import { createInsertSchema, createSelectSchema, createUpdateSchema } from '../src'; +import { createInsertSchema, createSchemaFactory, createSelectSchema, createUpdateSchema } from '../src'; import { Expect, expectSchemaShape } from './utils.ts'; const intSchema = z.number().min(Number.MIN_SAFE_INTEGER).max(Number.MAX_SAFE_INTEGER).int(); @@ -350,6 +350,56 @@ test('all data types', (t) => { Expect>(); }); +test('type coercion - all', (t) => { + const table = sqliteTable('test', ({ + blob, + integer, + text, + }) => ({ + blob: blob({ mode: 'bigint' }).notNull(), + integer1: integer({ mode: 'boolean' }).notNull(), + 
integer2: integer({ mode: 'timestamp' }).notNull(), + integer3: integer().notNull(), + text: text().notNull(), + })); + + const { createSelectSchema } = createSchemaFactory({ + coerce: true, + }); + const result = createSelectSchema(table); + const expected = z.object({ + blob: z.coerce.bigint().min(CONSTANTS.INT64_MIN).max(CONSTANTS.INT64_MAX), + integer1: z.coerce.boolean(), + integer2: z.coerce.date(), + integer3: z.coerce.number().min(Number.MIN_SAFE_INTEGER).max(Number.MAX_SAFE_INTEGER).int(), + text: z.coerce.string(), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + +test('type coercion - mixed', (t) => { + const table = sqliteTable('test', ({ + integer, + }) => ({ + integer1: integer({ mode: 'timestamp' }).notNull(), + integer2: integer().notNull(), + })); + + const { createSelectSchema } = createSchemaFactory({ + coerce: { + date: true, + }, + }); + const result = createSelectSchema(table); + const expected = z.object({ + integer1: z.coerce.date(), + integer2: z.number().min(Number.MIN_SAFE_INTEGER).max(Number.MAX_SAFE_INTEGER).int(), + }); + expectSchemaShape(t, expected).from(result); + Expect>(); +}); + /* Disallow unknown keys in table refinement - select */ { const table = sqliteTable('test', { id: int() }); // @ts-expect-error diff --git a/drizzle-zod/tests/utils.ts b/drizzle-zod/tests/utils.ts index 6a36f66c5..da473b116 100644 --- a/drizzle-zod/tests/utils.ts +++ b/drizzle-zod/tests/utils.ts @@ -9,6 +9,7 @@ export function expectSchemaShape>(t: TaskC for (const key of Object.keys(actual.shape)) { expect(actual.shape[key]!._def.typeName).toStrictEqual(expected.shape[key]?._def.typeName); expect(actual.shape[key]!._def?.checks).toEqual(expected.shape[key]?._def?.checks); + expect(actual.shape[key]!._def?.coerce).toEqual(expected.shape[key]?._def?.coerce); if (actual.shape[key]?._def.typeName === 'ZodOptional') { expect(actual.shape[key]!._def.innerType._def.typeName).toStrictEqual( 
actual.shape[key]!._def.innerType._def.typeName, diff --git a/integration-tests/tests/bun/bun-sql.test.ts b/integration-tests/tests/bun/bun-sql.test.ts new file mode 100644 index 000000000..d4ce5e45b --- /dev/null +++ b/integration-tests/tests/bun/bun-sql.test.ts @@ -0,0 +1,5178 @@ +import retry from 'async-retry'; +import { SQL as BunSQL } from 'bun'; +import { afterAll, afterEach, beforeAll, beforeEach, expect, test } from 'bun:test'; +import type Docker from 'dockerode'; +// eslint-disable-next-line @typescript-eslint/consistent-type-imports +import { + and, + arrayContained, + arrayContains, + arrayOverlaps, + asc, + avg, + avgDistinct, + count, + countDistinct, + eq, + Equal, + exists, + getTableColumns, + gt, + gte, + ilike, + inArray, + is, + lt, + max, + min, + notInArray, + or, + SQL, + sql, + SQLWrapper, + sum, + sumDistinct, + TransactionRollbackError, +} from 'drizzle-orm'; +import type { BunSQLDatabase } from 'drizzle-orm/bun-sql'; +import { drizzle } from 'drizzle-orm/bun-sql'; +import { authenticatedRole, crudPolicy } from 'drizzle-orm/neon'; +import { usersSync } from 'drizzle-orm/neon/neon-identity'; +import type { PgColumn, PgDatabase, PgQueryResultHKT } from 'drizzle-orm/pg-core'; +import { + alias, + bigserial, + boolean, + char, + cidr, + date, + except, + exceptAll, + foreignKey, + getMaterializedViewConfig, + getTableConfig, + getViewConfig, + index, + inet, + integer, + intersect, + intersectAll, + interval, + json, + jsonb, + macaddr, + macaddr8, + numeric, + PgDialect, + pgEnum, + pgMaterializedView, + PgPolicy, + pgPolicy, + pgSchema, + pgTable, + pgTableCreator, + pgView, + primaryKey, + serial, + text, + time, + timestamp, + union, + unionAll, + unique, + uniqueKeyName, + varchar, +} from 'drizzle-orm/pg-core'; +import { Expect } from '~/utils'; + +export const usersTable = pgTable('users', { + id: serial('id' as string).primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: 
jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), +}); + +const usersOnUpdate = pgTable('users_on_update', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + updateCounter: integer('update_counter').default(sql`1`).$onUpdateFn(() => sql`update_counter + 1`), + updatedAt: timestamp('updated_at', { mode: 'date', precision: 3 }).$onUpdate(() => new Date()), + alwaysNull: text('always_null').$type().$onUpdate(() => null), +}); + +const citiesTable = pgTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), +}); + +const cities2Table = pgTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const users2Table = pgTable('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => citiesTable.id), +}); + +const coursesTable = pgTable('courses', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + categoryId: integer('category_id').references(() => courseCategoriesTable.id), +}); + +const courseCategoriesTable = pgTable('course_categories', { + id: serial('id').primaryKey(), + name: text('name').notNull(), +}); + +const orders = pgTable('orders', { + id: serial('id').primaryKey(), + region: text('region').notNull(), + product: text('product').notNull().$default(() => 'random_string'), + amount: integer('amount').notNull(), + quantity: integer('quantity').notNull(), +}); + +const network = pgTable('network_table', { + inet: inet('inet').notNull(), + cidr: cidr('cidr').notNull(), + macaddr: macaddr('macaddr').notNull(), + macaddr8: macaddr8('macaddr8').notNull(), +}); + +const salEmp = pgTable('sal_emp', { + name: text('name'), + payByQuarter: integer('pay_by_quarter').array(), + schedule: text('schedule').array().array(), +}); + +const _tictactoe = pgTable('tictactoe', { + squares: integer('squares').array(3).array(3), 
+}); + +export const usersMigratorTable = pgTable('users12', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + email: text('email').notNull(), +}); + +// To test aggregate functions +const aggregateTable = pgTable('aggregate_table', { + id: serial('id').notNull(), + name: text('name').notNull(), + a: integer('a'), + b: integer('b'), + c: integer('c'), + nullOnly: integer('null_only'), +}); + +// To test another schema and multischema +export const mySchema = pgSchema('mySchema'); + +export const usersMySchemaTable = mySchema.table('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + verified: boolean('verified').notNull().default(false), + jsonb: jsonb('jsonb').$type(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), +}); + +const citiesMySchemaTable = mySchema.table('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), +}); + +const users2MySchemaTable = mySchema.table('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').references(() => citiesTable.id), +}); + +const jsonTestTable = pgTable('jsontest', { + id: serial('id').primaryKey(), + json: json('json').$type<{ string: string; number: number }>(), + jsonb: jsonb('jsonb').$type<{ string: string; number: number }>(), +}); + +let pgContainer: Docker.Container; + +afterAll(async () => { + await pgContainer?.stop().catch(console.error); +}); + +let db: BunSQLDatabase; +let client: BunSQL; + +beforeAll(async () => { + console.log('here'); + const connectionString = process.env['PG_CONNECTION_STRING']; + client = await retry(async () => { + // @ts-expect-error + const connClient = new BunSQL(connectionString, { max: 1 }); + await connClient.unsafe(`select 1`); + return connClient; + }, { + retries: 20, + factor: 1, + minTimeout: 250, + maxTimeout: 250, + randomize: false, + onRetry() { + client?.end(); + }, + }); + db = 
drizzle(client, { logger: false }); +}); + +afterAll(async () => { + await client?.end(); +}); + +beforeEach(async () => { + await db.execute(sql`drop schema if exists public cascade`); + await db.execute(sql`drop schema if exists ${mySchema} cascade`); + await db.execute(sql`create schema public`); + await db.execute(sql`create schema if not exists custom_migrations`); + await db.execute(sql`create schema ${mySchema}`); + // public users + await db.execute( + sql` + create table users ( + id serial primary key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); + // public cities + await db.execute( + sql` + create table cities ( + id serial primary key, + name text not null, + state char(2) + ) + `, + ); + // public users2 + await db.execute( + sql` + create table users2 ( + id serial primary key, + name text not null, + city_id integer references cities(id) + ) + `, + ); + await db.execute( + sql` + create table course_categories ( + id serial primary key, + name text not null + ) + `, + ); + await db.execute( + sql` + create table courses ( + id serial primary key, + name text not null, + category_id integer references course_categories(id) + ) + `, + ); + await db.execute( + sql` + create table orders ( + id serial primary key, + region text not null, + product text not null, + amount integer not null, + quantity integer not null + ) + `, + ); + await db.execute( + sql` + create table network_table ( + inet inet not null, + cidr cidr not null, + macaddr macaddr not null, + macaddr8 macaddr8 not null + ) + `, + ); + await db.execute( + sql` + create table sal_emp ( + name text not null, + pay_by_quarter integer[] not null, + schedule text[][] not null + ) + `, + ); + await db.execute( + sql` + create table tictactoe ( + squares integer[3][3] not null + ) + `, + ); + // // mySchema users + await db.execute( + sql` + create table ${usersMySchemaTable} ( + id serial primary 
key, + name text not null, + verified boolean not null default false, + jsonb jsonb, + created_at timestamptz not null default now() + ) + `, + ); + // mySchema cities + await db.execute( + sql` + create table ${citiesMySchemaTable} ( + id serial primary key, + name text not null, + state char(2) + ) + `, + ); + // mySchema users2 + await db.execute( + sql` + create table ${users2MySchemaTable} ( + id serial primary key, + name text not null, + city_id integer references "mySchema".cities(id) + ) + `, + ); + + await db.execute( + sql` + create table jsontest ( + id serial primary key, + json json, + jsonb jsonb + ) + `, + ); +}); + +afterEach(async () => { + await db.execute(sql`drop schema if exists custom_migrations cascade`); +}); + +async function setupSetOperationTest(db: PgDatabase) { + await db.execute(sql`drop table if exists users2`); + await db.execute(sql`drop table if exists cities`); + await db.execute( + sql` + create table cities ( + id serial primary key, + name text not null + ) + `, + ); + await db.execute( + sql` + create table users2 ( + id serial primary key, + name text not null, + city_id integer references cities(id) + ) + `, + ); + + await db.insert(cities2Table).values([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await db.insert(users2Table).values([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 2 }, + { id: 3, name: 'Jack', cityId: 3 }, + { id: 4, name: 'Peter', cityId: 3 }, + { id: 5, name: 'Ben', cityId: 2 }, + { id: 6, name: 'Jill', cityId: 1 }, + { id: 7, name: 'Mary', cityId: 2 }, + { id: 8, name: 'Sally', cityId: 1 }, + ]); +} + +async function setupAggregateFunctionsTest(db: PgDatabase) { + await db.execute(sql`drop table if exists "aggregate_table"`); + await db.execute( + sql` + create table "aggregate_table" ( + "id" serial not null, + "name" text not null, + "a" integer, + "b" integer, + "c" integer, + "null_only" integer + ); + `, + ); + await 
db.insert(aggregateTable).values([ + { name: 'value 1', a: 5, b: 10, c: 20 }, + { name: 'value 1', a: 5, b: 20, c: 30 }, + { name: 'value 2', a: 10, b: 50, c: 60 }, + { name: 'value 3', a: 20, b: 20, c: null }, + { name: 'value 4', a: null, b: 90, c: 120 }, + { name: 'value 5', a: 80, b: 10, c: null }, + { name: 'value 6', a: null, b: null, c: 150 }, + ]); +} + +test('table configs: unique third param', async () => { + const cities1Table = pgTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: char('state', { length: 2 }), + }, (t) => ({ + f: unique('custom_name').on(t.name, t.state).nullsNotDistinct(), + f1: unique('custom_name1').on(t.name, t.state), + })); + + const tableConfig = getTableConfig(cities1Table); + + expect(tableConfig.uniqueConstraints).toHaveLength(2); + + expect(tableConfig.uniqueConstraints[0]?.name).toBe('custom_name'); + expect(tableConfig.uniqueConstraints[0]?.nullsNotDistinct).toBe(true); + expect(tableConfig.uniqueConstraints[0]?.columns.map((t) => t.name)).toEqual(['name', 'state']); + + expect(tableConfig.uniqueConstraints[1]?.name).toBe('custom_name1'); + expect(tableConfig.uniqueConstraints[1]?.nullsNotDistinct).toBe(false); + expect(tableConfig.uniqueConstraints[1]?.columns.map((t) => t.name)).toEqual(['name', 'state']); +}); + +test('table configs: unique in column', async () => { + const cities1Table = pgTable('cities1', { + id: serial('id').primaryKey(), + name: text('name').notNull().unique(), + state: char('state', { length: 2 }).unique('custom'), + field: char('field', { length: 2 }).unique('custom_field', { nulls: 'not distinct' }), + }); + + const tableConfig = getTableConfig(cities1Table); + + const columnName = tableConfig.columns.find((it) => it.name === 'name'); + + expect(columnName?.uniqueName).toBe(uniqueKeyName(cities1Table, [columnName!.name])); + expect(columnName?.isUnique).toBe(true); + + const columnState = tableConfig.columns.find((it) => it.name === 'state'); + 
expect(columnState?.uniqueName).toBe('custom'); + expect(columnState?.isUnique).toBe(true); + + const columnField = tableConfig.columns.find((it) => it.name === 'field'); + expect(columnField?.uniqueName).toBe('custom_field'); + expect(columnField?.isUnique).toBe(true); + expect(columnField?.uniqueType).toBe('not distinct'); +}); + +test('table config: foreign keys name', async () => { + const table = pgTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: foreignKey({ foreignColumns: [t.id], columns: [t.id], name: 'custom_fk' }), + })); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.foreignKeys).toHaveLength(1); + expect(tableConfig.foreignKeys[0]!.getName()).toBe('custom_fk'); +}); + +test('table config: primary keys name', async () => { + const table = pgTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + state: text('state'), + }, (t) => ({ + f: primaryKey({ columns: [t.id, t.name], name: 'custom_pk' }), + })); + + const tableConfig = getTableConfig(table); + + expect(tableConfig.primaryKeys).toHaveLength(1); + expect(tableConfig.primaryKeys[0]!.getName()).toBe('custom_pk'); +}); + +test('select all fields', async () => { + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(100); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('select sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('select typed sql', async () => { + await db.insert(usersTable).values({ name: 
'John' }); + + const users = await db.select({ + name: sql`upper(${usersTable.name})`, + }).from(usersTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('select with empty array in inArray', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + const result = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable) + .where(inArray(usersTable.id, [])); + + expect(result).toEqual([]); +}); + +test('select with empty array in notInArray', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + const result = await db + .select({ + name: sql`upper(${usersTable.name})`, + }) + .from(usersTable) + .where(notInArray(usersTable.id, [])); + + expect(result).toEqual([{ name: 'JOHN' }, { name: 'JANE' }, { name: 'JANE' }]); +}); + +test('$default function', async () => { + const insertedOrder = await db.insert(orders).values({ id: 1, region: 'Ukraine', amount: 1, quantity: 1 }) + .returning(); + const selectedOrder = await db.select().from(orders); + + expect(insertedOrder).toEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); + + expect(selectedOrder).toEqual([{ + id: 1, + amount: 1, + quantity: 1, + region: 'Ukraine', + product: 'random_string', + }]); +}); + +test('select distinct', async () => { + const usersDistinctTable = pgTable('users_distinct', { + id: integer('id').notNull(), + name: text('name').notNull(), + age: integer('age').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id integer, name text, age integer)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John', age: 24 }, + { id: 1, name: 'John', age: 24 }, + { id: 2, name: 'John', age: 25 }, + { id: 1, name: 'Jane', age: 24 }, + { id: 1, name: 'Jane', age: 26 }, + ]); + const users1 = await 
db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( + usersDistinctTable.id, + ); + const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( + usersDistinctTable, + ).orderBy(usersDistinctTable.name); + const users4 = await db.selectDistinctOn([usersDistinctTable.id, usersDistinctTable.age]).from( + usersDistinctTable, + ).orderBy(usersDistinctTable.id, usersDistinctTable.age); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users1).toEqual([ + { id: 1, name: 'Jane', age: 24 }, + { id: 1, name: 'Jane', age: 26 }, + { id: 1, name: 'John', age: 24 }, + { id: 2, name: 'John', age: 25 }, + ]); + + expect(users2).toHaveLength(2); + expect(users2[0]?.id).toBe(1); + expect(users2[1]?.id).toBe(2); + + expect(users3).toHaveLength(2); + expect(users3[0]?.name).toBe('Jane'); + expect(users3[1]?.name).toBe('John'); + + expect(users4).toEqual([ + { id: 1, name: 'John', age: 24 }, + { id: 1, name: 'Jane', age: 26 }, + { id: 2, name: 'John', age: 25 }, + ]); +}); + +test('insert returning sql', async () => { + const users = await db + .insert(usersTable) + .values({ name: 'John' }) + .returning({ + name: sql`upper(${usersTable.name})`, + }); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('delete returning sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .delete(usersTable) + .where(eq(usersTable.name, 'John')) + .returning({ + name: sql`upper(${usersTable.name})`, + }); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('update returning sql', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .update(usersTable) + .set({ name: 'Jane' }) + .where(eq(usersTable.name, 'John')) + .returning({ + name: 
sql`upper(${usersTable.name})`, + }); + + expect(users).toEqual([{ name: 'JANE' }]); +}); + +test('update with returning all fields', async () => { + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .update(usersTable) + .set({ name: 'Jane' }) + .where(eq(usersTable.name, 'John')) + .returning(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); + expect(users).toEqual([ + { id: 1, name: 'Jane', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, + ]); +}); + +test('update with returning partial', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .update(usersTable) + .set({ name: 'Jane' }) + .where(eq(usersTable.name, 'John')) + .returning({ + id: usersTable.id, + name: usersTable.name, + }); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); +}); + +test('delete with returning all fields', async () => { + const now = Date.now(); + + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); + expect(users).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }, + ]); +}); + +test('delete with returning partial', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db.delete(usersTable).where(eq(usersTable.name, 'John')).returning({ + id: usersTable.id, + name: usersTable.name, + }); + + expect(users).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert + select', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const result = await db.select().from(usersTable); + expect(result).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, 
createdAt: result[0]!.createdAt }, + ]); + + await db.insert(usersTable).values({ name: 'Jane' }); + const result2 = await db.select().from(usersTable); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); +}); + +test('json insert', async () => { + await db.insert(usersTable).values({ name: 'John', jsonb: ['foo', 'bar'] }); + const result = await db + .select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + }) + .from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'John', jsonb: ['foo', 'bar'] }]); +}); + +test('char insert', async () => { + await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) + .from(citiesTable); + + expect(result).toEqual([{ id: 1, name: 'Austin', state: 'TX' }]); +}); + +test('char update', async () => { + await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); + await db.update(citiesTable).set({ name: 'Atlanta', state: 'GA' }).where(eq(citiesTable.id, 1)); + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) + .from(citiesTable); + + expect(result).toEqual([{ id: 1, name: 'Atlanta', state: 'GA' }]); +}); + +test('char delete', async () => { + await db.insert(citiesTable).values({ name: 'Austin', state: 'TX' }); + await db.delete(citiesTable).where(eq(citiesTable.state, 'TX')); + const result = await db + .select({ id: citiesTable.id, name: citiesTable.name, state: citiesTable.state }) + .from(citiesTable); + + expect(result).toEqual([]); +}); + +test('insert with overridden default values', async () => { + await db.insert(usersTable).values({ name: 'John', verified: true }); + const result = await db.select().from(usersTable); + + 
expect(result).toEqual([ + { id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }, + ]); +}); + +test('insert many', async () => { + await db + .insert(usersTable) + .values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db + .select({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }) + .from(usersTable); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); +}); + +test('insert many with returning', async () => { + const result = await db + .insert(usersTable) + .values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]) + .returning({ + id: usersTable.id, + name: usersTable.name, + jsonb: usersTable.jsonb, + verified: usersTable.verified, + }); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, verified: true }, + ]); +}); + +test('select with group by as field', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) + .groupBy(usersTable.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); +}); + +test('select with exists', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const user = alias(usersTable, 'user'); + const result = await db.select({ name: 
usersTable.name }).from(usersTable).where( + exists( + db.select({ one: sql`1` }).from(user).where(and(eq(usersTable.name, 'John'), eq(user.id, usersTable.id))), + ), + ); + + expect(result).toEqual([{ name: 'John' }]); +}); + +test('select with group by as sql', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) + .groupBy(sql`${usersTable.name}`); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); +}); + +test('select with group by as sql + column', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) + .groupBy(sql`${usersTable.name}`, usersTable.id); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); +}); + +test('select with group by as column + sql', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); +}); + +test('select with group by complex query', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db + .select({ name: usersTable.name }) + .from(usersTable) + .groupBy(usersTable.id, sql`${usersTable.name}`) + .orderBy(asc(usersTable.name)) + .limit(1); + + expect(result).toEqual([{ name: 'Jane' }]); +}); + +test('build query', async () => { + const query = db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .groupBy(usersTable.id, usersTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: 'select "id", "name" from "users" group by "users"."id", 
"users"."name"', + params: [], + }); +}); + +test.only('insert sql', async () => { + await db.insert(usersTable).values({ name: sql`${'John'}` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('partial join with alias', async () => { + const customerAlias = alias(usersTable, 'customer'); + + await db.insert(usersTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersTable.id, + name: usersTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }) + .from(usersTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersTable.id, 10)); + + expect(result).toEqual([ + { + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }, + ]); +}); + +test('full join with alias', async () => { + const pgTable = pgTableCreator((name) => `prefixed_${name}`); + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(users) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(users.id, 10)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('select from alias', async () => { + const pgTable = pgTableCreator((name) => `prefixed_${name}`); + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await 
db.execute(sql`create table ${users} (id serial primary key, name text not null)`); + + const user = alias(users, 'user'); + const customers = alias(users, 'customer'); + + await db.insert(users).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select() + .from(user) + .leftJoin(customers, eq(customers.id, 11)) + .where(eq(user.id, 10)); + + expect(result).toEqual([{ + user: { + id: 10, + name: 'Ivan', + }, + customer: { + id: 11, + name: 'Hans', + }, + }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('insert with spaces', async () => { + await db.insert(usersTable).values({ name: sql`'Jo h n'` }); + const result = await db.select({ id: usersTable.id, name: usersTable.name }).from(usersTable); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); +}); + +test('prepared statement', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const statement = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .prepare('statement1'); + const result = await statement.execute(); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert: placeholders on columns with encoder', async () => { + const statement = db.insert(usersTable).values({ + name: 'John', + jsonb: sql.placeholder('jsonb'), + }).prepare('encoder_statement'); + + await statement.execute({ jsonb: ['foo', 'bar'] }); + + const result = await db + .select({ + id: usersTable.id, + jsonb: usersTable.jsonb, + }) + .from(usersTable); + + expect(result).toEqual([ + { id: 1, jsonb: ['foo', 'bar'] }, + ]); +}); + +test('prepared statement reuse', async () => { + const stmt = db + .insert(usersTable) + .values({ + verified: true, + name: sql.placeholder('name'), + }) + .prepare('stmt2'); + + for (let i = 0; i < 10; i++) { + await stmt.execute({ name: `John ${i}` }); + } + + const result = await db + .select({ + id: usersTable.id, + name: usersTable.name, + verified: usersTable.verified, + }) 
+ .from(usersTable); + + expect(result).toEqual([ + { id: 1, name: 'John 0', verified: true }, + { id: 2, name: 'John 1', verified: true }, + { id: 3, name: 'John 2', verified: true }, + { id: 4, name: 'John 3', verified: true }, + { id: 5, name: 'John 4', verified: true }, + { id: 6, name: 'John 5', verified: true }, + { id: 7, name: 'John 6', verified: true }, + { id: 8, name: 'John 7', verified: true }, + { id: 9, name: 'John 8', verified: true }, + { id: 10, name: 'John 9', verified: true }, + ]); +}); + +test('prepared statement with placeholder in .where', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const stmt = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .prepare('stmt3'); + const result = await stmt.execute({ id: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); +}); + +test('prepared statement with placeholder in .limit', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const stmt = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .where(eq(usersTable.id, sql.placeholder('id'))) + .limit(sql.placeholder('limit')) + .prepare('stmt_limit'); + + const result = await stmt.execute({ id: 1, limit: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + expect(result).toHaveLength(1); +}); + +test('prepared statement with placeholder in .offset', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); + const stmt = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .offset(sql.placeholder('offset')) + .prepare('stmt_offset'); + + const result = await stmt.execute({ offset: 1 }); + + expect(result).toEqual([{ id: 2, name: 'John1' }]); +}); + +test('prepared statement built using $dynamic', async () => { + function withLimitOffset(qb: any) { + return 
qb.limit(sql.placeholder('limit')).offset(sql.placeholder('offset')); + } + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'John1' }]); + const stmt = db + .select({ + id: usersTable.id, + name: usersTable.name, + }) + .from(usersTable) + .$dynamic(); + withLimitOffset(stmt).prepare('stmt_limit'); + + const result = await stmt.execute({ limit: 1, offset: 1 }); + + expect(result).toEqual([{ id: 2, name: 'John1' }]); + expect(result).toHaveLength(1); +}); + +// TODO change tests to new structure +test('Query check: Insert all defaults in 1 row', async () => { + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + const query = db + .insert(users) + .values({}) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into "users" ("id", "name", "state") values (default, default, default)', + params: [], + }); +}); + +test('Query check: Insert all defaults in multiple rows', async () => { + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state').default('UA'), + }); + + const query = db + .insert(users) + .values([{}, {}]) + .toSQL(); + + expect(query).toEqual({ + sql: 'insert into "users" ("id", "name", "state") values (default, default, default), (default, default, default)', + params: [], + }); +}); + +test('Insert all defaults in 1 row', async () => { + const users = pgTable('empty_insert_single', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, + ); + + await db.insert(users).values({}); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }]); +}); + +test('Insert all defaults in multiple rows', async () => { + const users = 
pgTable('empty_insert_multiple', { + id: serial('id').primaryKey(), + name: text('name').default('Dan'), + state: text('state'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial primary key, name text default 'Dan', state text)`, + ); + + await db.insert(users).values([{}, {}]); + + const res = await db.select().from(users); + + expect(res).toEqual([{ id: 1, name: 'Dan', state: null }, { id: 2, name: 'Dan', state: null }]); +}); + +test('build query insert with onConflict do update', async () => { + const query = db + .insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do update set "name" = $3', + params: ['John', '["foo","bar"]', 'John1'], + }); +}); + +test('build query insert with onConflict do update / multiple columns', async () => { + const query = db + .insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoUpdate({ target: [usersTable.id, usersTable.name], set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', + params: ['John', '["foo","bar"]', 'John1'], + }); +}); + +test('build query insert with onConflict do nothing', async () => { + const query = db + .insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoNothing() + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict do nothing', + params: ['John', '["foo","bar"]'], + }); +}); + +test('build 
query insert with onConflict do nothing + target', async () => { + const query = db + .insert(usersTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoNothing({ target: usersTable.id }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', + params: ['John', '["foo","bar"]'], + }); +}); + +test('insert with onConflict do update', async () => { + await db.insert(usersTable).values({ name: 'John' }); + + await db + .insert(usersTable) + .values({ id: 1, name: 'John' }) + .onConflictDoUpdate({ target: usersTable.id, set: { name: 'John1' } }); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)); + + expect(res).toEqual([{ id: 1, name: 'John1' }]); +}); + +test('insert with onConflict do nothing', async () => { + await db.insert(usersTable).values({ name: 'John' }); + + await db.insert(usersTable).values({ id: 1, name: 'John' }).onConflictDoNothing(); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)); + + expect(res).toEqual([{ id: 1, name: 'John' }]); +}); + +test('insert with onConflict do nothing + target', async () => { + await db.insert(usersTable).values({ name: 'John' }); + + await db + .insert(usersTable) + .values({ id: 1, name: 'John' }) + .onConflictDoNothing({ target: usersTable.id }); + + const res = await db + .select({ id: usersTable.id, name: usersTable.name }) + .from(usersTable) + .where(eq(usersTable.id, 1)); + + expect(res).toEqual([{ id: 1, name: 'John' }]); +}); + +test('left join (flat object fields)', async () => { + const { id: cityId } = await db + .insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]) + .returning({ id: citiesTable.id }) + .then((rows) => rows[0]!); + + await db.insert(users2Table).values([{ 
name: 'John', cityId }, { name: 'Jane' }]); + + const res = await db + .select({ + userId: users2Table.id, + userName: users2Table.name, + cityId: citiesTable.id, + cityName: citiesTable.name, + }) + .from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { userId: 1, userName: 'John', cityId, cityName: 'Paris' }, + { userId: 2, userName: 'Jane', cityId: null, cityName: null }, + ]); +}); + +test('left join (grouped fields)', async () => { + const { id: cityId } = await db + .insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]) + .returning({ id: citiesTable.id }) + .then((rows) => rows[0]!); + + await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); + + const res = await db + .select({ + id: users2Table.id, + user: { + name: users2Table.name, + nameUpper: sql`upper(${users2Table.name})`, + }, + city: { + id: citiesTable.id, + name: citiesTable.name, + nameUpper: sql`upper(${citiesTable.name})`, + }, + }) + .from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { + id: 1, + user: { name: 'John', nameUpper: 'JOHN' }, + city: { id: cityId, name: 'Paris', nameUpper: 'PARIS' }, + }, + { + id: 2, + user: { name: 'Jane', nameUpper: 'JANE' }, + city: null, + }, + ]); +}); + +test('left join (all fields)', async () => { + const { id: cityId } = await db + .insert(citiesTable) + .values([{ name: 'Paris' }, { name: 'London' }]) + .returning({ id: citiesTable.id }) + .then((rows) => rows[0]!); + + await db.insert(users2Table).values([{ name: 'John', cityId }, { name: 'Jane' }]); + + const res = await db + .select() + .from(users2Table) + .leftJoin(citiesTable, eq(users2Table.cityId, citiesTable.id)); + + expect(res).toEqual([ + { + users2: { + id: 1, + name: 'John', + cityId, + }, + cities: { + id: cityId, + name: 'Paris', + state: null, + }, + }, + { + users2: { + id: 2, + name: 'Jane', + cityId: null, + }, + cities: 
null, + }, + ]); +}); + +test('join subquery', async () => { + await db + .insert(courseCategoriesTable) + .values([ + { name: 'Category 1' }, + { name: 'Category 2' }, + { name: 'Category 3' }, + { name: 'Category 4' }, + ]); + + await db + .insert(coursesTable) + .values([ + { name: 'Development', categoryId: 2 }, + { name: 'IT & Software', categoryId: 3 }, + { name: 'Marketing', categoryId: 4 }, + { name: 'Design', categoryId: 1 }, + ]); + + const sq2 = db + .select({ + categoryId: courseCategoriesTable.id, + category: courseCategoriesTable.name, + total: sql`count(${courseCategoriesTable.id})`, + }) + .from(courseCategoriesTable) + .groupBy(courseCategoriesTable.id, courseCategoriesTable.name) + .as('sq2'); + + const res = await db + .select({ + courseName: coursesTable.name, + categoryId: sq2.categoryId, + }) + .from(coursesTable) + .leftJoin(sq2, eq(coursesTable.categoryId, sq2.categoryId)) + .orderBy(coursesTable.name); + + expect(res).toEqual([ + { courseName: 'Design', categoryId: 1 }, + { courseName: 'Development', categoryId: 2 }, + { courseName: 'IT & Software', categoryId: 3 }, + { courseName: 'Marketing', categoryId: 4 }, + ]); +}); + +test('with ... 
select', async () => { + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const regionalSales = db + .$with('regional_sales') + .as( + db + .select({ + region: orders.region, + totalSales: sql`sum(${orders.amount})`.as('total_sales'), + }) + .from(orders) + .groupBy(orders.region), + ); + + const topRegions = db + .$with('top_regions') + .as( + db + .select({ + region: regionalSales.region, + }) + .from(regionalSales) + .where( + gt( + regionalSales.totalSales, + db.select({ sales: sql`sum(${regionalSales.totalSales})/10` }).from(regionalSales), + ), + ), + ); + + const result1 = await db + .with(regionalSales, topRegions) + .select({ + region: orders.region, + product: orders.product, + productUnits: sql`sum(${orders.quantity})::int`, + productSales: sql`sum(${orders.amount})::int`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + const result2 = await db + .with(regionalSales, topRegions) + .selectDistinct({ + region: orders.region, + product: orders.product, + productUnits: sql`sum(${orders.quantity})::int`, + productSales: sql`sum(${orders.amount})::int`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region, orders.product) + .orderBy(orders.region, orders.product); + const result3 = await db + .with(regionalSales, topRegions) + 
.selectDistinctOn([orders.region], { + region: orders.region, + productUnits: sql`sum(${orders.quantity})::int`, + productSales: sql`sum(${orders.amount})::int`, + }) + .from(orders) + .where(inArray(orders.region, db.select({ region: topRegions.region }).from(topRegions))) + .groupBy(orders.region) + .orderBy(orders.region); + + expect(result1).toEqual([ + { + region: 'Europe', + product: 'A', + productUnits: 3, + productSales: 30, + }, + { + region: 'Europe', + product: 'B', + productUnits: 5, + productSales: 50, + }, + { + region: 'US', + product: 'A', + productUnits: 7, + productSales: 70, + }, + { + region: 'US', + product: 'B', + productUnits: 9, + productSales: 90, + }, + ]); + expect(result2).toEqual(result1); + expect(result3).toEqual([ + { + region: 'Europe', + productUnits: 8, + productSales: 80, + }, + { + region: 'US', + productUnits: 16, + productSales: 160, + }, + ]); +}); + +test('with ... update', async () => { + const products = pgTable('products', { + id: serial('id').primaryKey(), + price: numeric('price').notNull(), + cheap: boolean('cheap').notNull().default(false), + }); + + await db.execute(sql`drop table if exists ${products}`); + await db.execute(sql` + create table ${products} ( + id serial primary key, + price numeric not null, + cheap boolean not null default false + ) + `); + + await db.insert(products).values([ + { price: '10.99' }, + { price: '25.85' }, + { price: '32.99' }, + { price: '2.50' }, + { price: '4.59' }, + ]); + + const averagePrice = db + .$with('average_price') + .as( + db + .select({ + value: sql`avg(${products.price})`.as('value'), + }) + .from(products), + ); + + const result = await db + .with(averagePrice) + .update(products) + .set({ + cheap: true, + }) + .where(lt(products.price, sql`(select * from ${averagePrice})`)) + .returning({ + id: products.id, + }); + + expect(result).toEqual([ + { id: 1 }, + { id: 4 }, + { id: 5 }, + ]); +}); + +test('with ... 
insert', async () => { + const users = pgTable('users', { + username: text('username').notNull(), + admin: boolean('admin').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (username text not null, admin boolean not null default false)`); + + const userCount = db + .$with('user_count') + .as( + db + .select({ + value: sql`count(*)`.as('value'), + }) + .from(users), + ); + + const result = await db + .with(userCount) + .insert(users) + .values([ + { username: 'user1', admin: sql`((select * from ${userCount}) = 0)` }, + ]) + .returning({ + admin: users.admin, + }); + + expect(result).toEqual([{ admin: true }]); +}); + +test('with ... delete', async () => { + await db.insert(orders).values([ + { region: 'Europe', product: 'A', amount: 10, quantity: 1 }, + { region: 'Europe', product: 'A', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 20, quantity: 2 }, + { region: 'Europe', product: 'B', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 30, quantity: 3 }, + { region: 'US', product: 'A', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 40, quantity: 4 }, + { region: 'US', product: 'B', amount: 50, quantity: 5 }, + ]); + + const averageAmount = db + .$with('average_amount') + .as( + db + .select({ + value: sql`avg(${orders.amount})`.as('value'), + }) + .from(orders), + ); + + const result = await db + .with(averageAmount) + .delete(orders) + .where(gt(orders.amount, sql`(select * from ${averageAmount})`)) + .returning({ + id: orders.id, + }); + + expect(result).toEqual([ + { id: 6 }, + { id: 7 }, + { id: 8 }, + ]); +}); + +test('select from subquery sql', async () => { + await db.insert(users2Table).values([{ name: 'John' }, { name: 'Jane' }]); + + const sq = db + .select({ name: sql`${users2Table.name} || ' modified'`.as('name') }) + .from(users2Table) + .as('sq'); + + const res = await db.select({ name: sq.name }).from(sq); + + 
expect(res).toEqual([{ name: 'John modified' }, { name: 'Jane modified' }]); +}); + +test('select a field without joining its table', () => { + expect(() => db.select({ name: users2Table.name }).from(usersTable).prepare('query')).toThrowError(); +}); + +test('select all fields from subquery without alias', () => { + const sq = db.$with('sq').as(db.select({ name: sql`upper(${users2Table.name})` }).from(users2Table)); + + expect(() => db.select().from(sq).prepare('query')).toThrowError(); +}); + +test('select count()', async () => { + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: sql`count(*)` }).from(usersTable); + + expect(res).toEqual([{ count: '2' }]); +}); + +test('select count w/ custom mapper', async () => { + function count(value: PgColumn | SQLWrapper): SQL; + function count(value: PgColumn | SQLWrapper, alias: string): SQL.Aliased; + function count(value: PgColumn | SQLWrapper, alias?: string): SQL | SQL.Aliased { + const result = sql`count(${value})`.mapWith(Number); + if (!alias) { + return result; + } + return result.as(alias); + } + + await db.insert(usersTable).values([{ name: 'John' }, { name: 'Jane' }]); + + const res = await db.select({ count: count(sql`*`) }).from(usersTable); + + expect(res).toEqual([{ count: 2 }]); +}); + +test('network types', async () => { + const value: typeof network.$inferSelect = { + inet: '127.0.0.1', + cidr: '192.168.100.128/25', + macaddr: '08:00:2b:01:02:03', + macaddr8: '08:00:2b:01:02:03:04:05', + }; + + await db.insert(network).values(value); + + const res = await db.select().from(network); + + expect(res).toEqual([value]); +}); + +test.skip('array types', async () => { + const values: typeof salEmp.$inferSelect[] = [ + { + name: 'John', + payByQuarter: [10000, 10000, 10000, 10000], + schedule: [['meeting', 'lunch'], ['training', 'presentation']], + }, + { + name: 'Carol', + payByQuarter: [20000, 25000, 25000, 25000], + schedule: [['breakfast', 
'consulting'], ['meeting', 'lunch']], + }, + ]; + + await db.insert(salEmp).values(values); + + const res = await db.select().from(salEmp); + + expect(res).toEqual(values); +}); + +test('select for ...', () => { + { + const query = db + .select() + .from(users2Table) + .for('update') + .toSQL(); + + expect(query.sql).toMatch(/ for update$/); + } + + { + const query = db + .select() + .from(users2Table) + .for('update', { of: [users2Table, coursesTable] }) + .toSQL(); + + expect(query.sql).toMatch(/ for update of "users2", "courses"$/); + } + + { + const query = db + .select() + .from(users2Table) + .for('no key update', { of: users2Table }) + .toSQL(); + + expect(query.sql).toMatch(/for no key update of "users2"$/); + } + + { + const query = db + .select() + .from(users2Table) + .for('no key update', { of: users2Table, skipLocked: true }) + .toSQL(); + + expect(query.sql).toMatch(/ for no key update of "users2" skip locked$/); + } + + { + const query = db + .select() + .from(users2Table) + .for('share', { of: users2Table, noWait: true }) + .toSQL(); + + expect(query.sql).toMatch(/for share of "users2" no wait$/); + } +}); + +test('having', async () => { + await db.insert(citiesTable).values([{ name: 'London' }, { name: 'Paris' }, { name: 'New York' }]); + + await db.insert(users2Table).values([{ name: 'John', cityId: 1 }, { name: 'Jane', cityId: 1 }, { + name: 'Jack', + cityId: 2, + }]); + + const result = await db + .select({ + id: citiesTable.id, + name: sql`upper(${citiesTable.name})`.as('upper_name'), + usersCount: sql`count(${users2Table.id})::int`.as('users_count'), + }) + .from(citiesTable) + .leftJoin(users2Table, eq(users2Table.cityId, citiesTable.id)) + .where(({ name }) => sql`length(${name}) >= 3`) + .groupBy(citiesTable.id) + .having(({ usersCount }) => sql`${usersCount} > 0`) + .orderBy(({ name }) => name); + + expect(result).toEqual([ + { + id: 1, + name: 'LONDON', + usersCount: 2, + }, + { + id: 2, + name: 'PARIS', + usersCount: 1, + }, + ]); +}); + 
+test('view', async () => { + const newYorkers1 = pgView('new_yorkers') + .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = pgView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).as(sql`select * from ${users2Table} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = pgView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); +}); + +// NEXT +test('materialized view', async () => { + const newYorkers1 = pgMaterializedView('new_yorkers') + .as((qb) => qb.select().from(users2Table).where(eq(users2Table.cityId, 1))); + + const newYorkers2 = pgMaterializedView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).as(sql`select * from 
${users2Table} where ${eq(users2Table.cityId, 1)}`); + + const newYorkers3 = pgMaterializedView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).existing(); + + await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); + + await db.insert(citiesTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([]); + } + + await db.refreshMaterializedView(newYorkers1); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop materialized view ${newYorkers1}`); +}); + +test('select from existing view', async () => { + const schema = pgSchema('test_schema'); + + const newYorkers = schema.view('new_yorkers', { + id: integer('id').notNull(), + }).existing(); + + await db.execute(sql`drop schema if exists ${schema} cascade`); + await db.execute(sql`create schema ${schema}`); + await db.execute(sql`create view ${newYorkers} as select id from ${usersTable}`); + + await db.insert(usersTable).values({ id: 100, name: 'John' }); + + const result = await db.select({ + id: usersTable.id, + 
}).from(usersTable).innerJoin(newYorkers, eq(newYorkers.id, usersTable.id)); + + expect(result).toEqual([{ id: 100 }]); +}); + +// TODO: copy to SQLite and MySQL, add to docs +test('select from raw sql', async () => { + const result = await db.select({ + id: sql`id`, + name: sql`name`, + }).from(sql`(select 1 as id, 'John' as name) as users`); + + Expect>; + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); +}); + +test('select from raw sql with joins', async () => { + const result = await db + .select({ + id: sql`users.id`, + name: sql`users.name`, + userCity: sql`users.city`, + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, sql`cities.id = users.id`); + + Expect>; + + expect(result).toEqual([ + { id: 1, name: 'John', userCity: 'New York', cityName: 'Paris' }, + ]); +}); + +test('join on aliased sql from select', async () => { + const result = await db + .select({ + userId: sql`users.id`.as('userId'), + name: sql`users.name`, + userCity: sql`users.city`, + cityId: sql`cities.id`.as('cityId'), + cityName: sql`cities.name`, + }) + .from(sql`(select 1 as id, 'John' as name, 'New York' as city) as users`) + .leftJoin(sql`(select 1 as id, 'Paris' as name) as cities`, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); +}); + +test('join on aliased sql from with clause', async () => { + const users = db.$with('users').as( + db.select({ + id: sql`id`.as('userId'), + name: sql`name`.as('userName'), + city: sql`city`.as('city'), + }).from( + sql`(select 1 as id, 'John' as name, 'New York' as city) as users`, + ), + ); + + const cities = db.$with('cities').as( + db.select({ + id: sql`id`.as('cityId'), + name: 
sql`name`.as('cityName'), + }).from( + sql`(select 1 as id, 'Paris' as name) as cities`, + ), + ); + + const result = await db + .with(users, cities) + .select({ + userId: users.id, + name: users.name, + userCity: users.city, + cityId: cities.id, + cityName: cities.name, + }) + .from(users) + .leftJoin(cities, (cols) => eq(cols.cityId, cols.userId)); + + Expect< + Equal<{ userId: number; name: string; userCity: string; cityId: number; cityName: string }[], typeof result> + >; + + expect(result).toEqual([ + { userId: 1, name: 'John', userCity: 'New York', cityId: 1, cityName: 'Paris' }, + ]); +}); + +test('prefixed table', async () => { + const pgTable = pgTableCreator((name) => `myprefix_${name}`); + + const users = pgTable('test_prefixed_table_with_unique_name', { + id: integer('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table myprefix_test_prefixed_table_with_unique_name (id integer not null primary key, name text not null)`, + ); + + await db.insert(users).values({ id: 1, name: 'John' }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('select from enum', async () => { + const muscleEnum = pgEnum('muscle', [ + 'abdominals', + 'hamstrings', + 'adductors', + 'quadriceps', + 'biceps', + 'shoulders', + 'chest', + 'middle_back', + 'calves', + 'glutes', + 'lower_back', + 'lats', + 'triceps', + 'traps', + 'forearms', + 'neck', + 'abductors', + ]); + + const forceEnum = pgEnum('force', ['isometric', 'isotonic', 'isokinetic']); + + const levelEnum = pgEnum('level', ['beginner', 'intermediate', 'advanced']); + + const mechanicEnum = pgEnum('mechanic', ['compound', 'isolation']); + + const equipmentEnum = pgEnum('equipment', [ + 'barbell', + 'dumbbell', + 'bodyweight', + 'machine', + 'cable', + 'kettlebell', + ]); + + const categoryEnum = 
pgEnum('category', ['upper_body', 'lower_body', 'full_body']); + + const exercises = pgTable('exercises', { + id: serial('id').primaryKey(), + name: varchar('name').notNull(), + force: forceEnum('force'), + level: levelEnum('level'), + mechanic: mechanicEnum('mechanic'), + equipment: equipmentEnum('equipment'), + instructions: text('instructions'), + category: categoryEnum('category'), + primaryMuscles: muscleEnum('primary_muscles').array(), + secondaryMuscles: muscleEnum('secondary_muscles').array(), + createdAt: timestamp('created_at').notNull().default(sql`now()`), + updatedAt: timestamp('updated_at').notNull().default(sql`now()`), + }); + + await db.execute(sql`drop table if exists ${exercises}`); + await db.execute(sql`drop type if exists ${sql.identifier(muscleEnum.enumName)}`); + await db.execute(sql`drop type if exists ${sql.identifier(forceEnum.enumName)}`); + await db.execute(sql`drop type if exists ${sql.identifier(levelEnum.enumName)}`); + await db.execute(sql`drop type if exists ${sql.identifier(mechanicEnum.enumName)}`); + await db.execute(sql`drop type if exists ${sql.identifier(equipmentEnum.enumName)}`); + await db.execute(sql`drop type if exists ${sql.identifier(categoryEnum.enumName)}`); + + await db.execute( + sql`create type ${ + sql.identifier(muscleEnum.enumName) + } as enum ('abdominals', 'hamstrings', 'adductors', 'quadriceps', 'biceps', 'shoulders', 'chest', 'middle_back', 'calves', 'glutes', 'lower_back', 'lats', 'triceps', 'traps', 'forearms', 'neck', 'abductors')`, + ); + await db.execute( + sql`create type ${sql.identifier(forceEnum.enumName)} as enum ('isometric', 'isotonic', 'isokinetic')`, + ); + await db.execute( + sql`create type ${sql.identifier(levelEnum.enumName)} as enum ('beginner', 'intermediate', 'advanced')`, + ); + await db.execute(sql`create type ${sql.identifier(mechanicEnum.enumName)} as enum ('compound', 'isolation')`); + await db.execute( + sql`create type ${ + sql.identifier(equipmentEnum.enumName) + } as enum 
('barbell', 'dumbbell', 'bodyweight', 'machine', 'cable', 'kettlebell')`, + ); + await db.execute( + sql`create type ${sql.identifier(categoryEnum.enumName)} as enum ('upper_body', 'lower_body', 'full_body')`, + ); + await db.execute(sql` + create table ${exercises} ( + id serial primary key, + name varchar not null, + force force, + level level, + mechanic mechanic, + equipment equipment, + instructions text, + category category, + primary_muscles muscle[], + secondary_muscles muscle[], + created_at timestamp not null default now(), + updated_at timestamp not null default now() + ) + `); + + await db.insert(exercises).values({ + name: 'Bench Press', + force: 'isotonic', + level: 'beginner', + mechanic: 'compound', + equipment: 'barbell', + instructions: + 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. Press the barbell back to the starting position.', + category: 'upper_body', + primaryMuscles: ['chest', 'triceps'], + secondaryMuscles: ['shoulders', 'traps'], + }); + + const result = await db.select().from(exercises); + + expect(result).toEqual([ + { + id: 1, + name: 'Bench Press', + force: 'isotonic', + level: 'beginner', + mechanic: 'compound', + equipment: 'barbell', + instructions: + 'Lie on your back on a flat bench. Grasp the barbell with an overhand grip, slightly wider than shoulder width. Unrack the barbell and hold it over you with your arms locked. Lower the barbell to your chest. 
Press the barbell back to the starting position.', + category: 'upper_body', + primaryMuscles: ['chest', 'triceps'], + secondaryMuscles: ['shoulders', 'traps'], + createdAt: result[0]!.createdAt, + updatedAt: result[0]!.updatedAt, + }, + ]); + + await db.execute(sql`drop table ${exercises}`); + await db.execute(sql`drop type ${sql.identifier(muscleEnum.enumName)}`); + await db.execute(sql`drop type ${sql.identifier(forceEnum.enumName)}`); + await db.execute(sql`drop type ${sql.identifier(levelEnum.enumName)}`); + await db.execute(sql`drop type ${sql.identifier(mechanicEnum.enumName)}`); + await db.execute(sql`drop type ${sql.identifier(equipmentEnum.enumName)}`); + await db.execute(sql`drop type ${sql.identifier(categoryEnum.enumName)}`); +}); + +test.skip('all date and time columns', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + dateString: date('date_string', { mode: 'string' }).notNull(), + time: time('time', { precision: 3 }).notNull(), + datetime: timestamp('datetime').notNull(), + datetimeWTZ: timestamp('datetime_wtz', { withTimezone: true }).notNull(), + datetimeString: timestamp('datetime_string', { mode: 'string' }).notNull(), + datetimeFullPrecision: timestamp('datetime_full_precision', { precision: 6, mode: 'string' }).notNull(), + datetimeWTZString: timestamp('datetime_wtz_string', { withTimezone: true, mode: 'string' }).notNull(), + interval: interval('interval').notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + date_string date not null, + time time(3) not null, + datetime timestamp not null, + datetime_wtz timestamp with time zone not null, + datetime_string timestamp not null, + datetime_full_precision timestamp(6) not null, + datetime_wtz_string timestamp with time zone not null, + interval interval not null + ) + `); + + const someDatetime = new Date('2022-01-01T00:00:00.123Z'); + const fullPrecision = 
'2022-01-01T00:00:00.123456Z'; + const someTime = '23:23:12.432'; + + await db.insert(table).values({ + dateString: '2022-01-01', + time: someTime, + datetime: someDatetime, + datetimeWTZ: someDatetime, + datetimeString: '2022-01-01T00:00:00.123Z', + datetimeFullPrecision: fullPrecision, + datetimeWTZString: '2022-01-01T00:00:00.123Z', + interval: '1 day', + }); + + const result = await db.select().from(table); + + Expect< + Equal<{ + id: number; + dateString: string; + time: string; + datetime: Date; + datetimeWTZ: Date; + datetimeString: string; + datetimeFullPrecision: string; + datetimeWTZString: string; + interval: string; + }[], typeof result> + >; + + Expect< + Equal<{ + dateString: string; + time: string; + datetime: Date; + datetimeWTZ: Date; + datetimeString: string; + datetimeFullPrecision: string; + datetimeWTZString: string; + interval: string; + id?: number | undefined; + }, typeof table.$inferInsert> + >; + + expect(result).toEqual([ + { + id: 1, + dateString: '2022-01-01', + time: someTime, + datetime: someDatetime, + datetimeWTZ: someDatetime, + datetimeString: '2022-01-01 00:00:00.123', + datetimeFullPrecision: fullPrecision.replace('T', ' ').replace('Z', ''), + datetimeWTZString: '2022-01-01 00:00:00.123+00', + interval: '1 day', + }, + ]); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns with timezone second case mode date', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) with time zone not null + ) + `); + + const insertedDate = new Date(); + + // 1. 
Insert date as new date + await db.insert(table).values([ + { timestamp: insertedDate }, + ]); + + // 2, Select as date and check that timezones are the same + // There is no way to check timezone in Date object, as it is always represented internally in UTC + const result = await db.select().from(table); + + expect(result).toEqual([{ id: 1, timestamp: insertedDate }]); + + // 3. Compare both dates + expect(insertedDate.getTime()).toBe(result[0]!.timestamp.getTime()); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('all date and time columns with timezone third case mode date', async () => { + const table = pgTable('all_columns', { + id: serial('id').primaryKey(), + timestamp: timestamp('timestamp_string', { mode: 'date', withTimezone: true, precision: 3 }).notNull(), + }); + + await db.execute(sql`drop table if exists ${table}`); + + await db.execute(sql` + create table ${table} ( + id serial primary key, + timestamp_string timestamp(3) with time zone not null + ) + `); + + const insertedDate = new Date('2022-01-01 20:00:00.123-04'); // used different time zones, internally is still UTC + const insertedDate2 = new Date('2022-01-02 04:00:00.123+04'); // They are both the same date in different time zones + + // 1. 
Insert date as new dates with different time zones + await db.insert(table).values([ + { timestamp: insertedDate }, + { timestamp: insertedDate2 }, + ]); + + // 2, Select and compare both dates + const result = await db.select().from(table); + + expect(result[0]?.timestamp.getTime()).toBe(result[1]!.timestamp.getTime()); + + await db.execute(sql`drop table if exists ${table}`); +}); + +test('orderBy with aliased column', () => { + const query = db.select({ + test: sql`something`.as('test'), + }).from(users2Table).orderBy((fields) => fields.test).toSQL(); + + expect(query.sql).toBe('select something as "test" from "users2" order by "test"'); +}); + +test('timestamp timezone', async () => { + const usersTableWithAndWithoutTimezone = pgTable('users_test_with_and_without_timezone', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + createdAt: timestamp('created_at', { withTimezone: true }).notNull().defaultNow(), + updatedAt: timestamp('updated_at', { withTimezone: false }).notNull().defaultNow(), + }); + + await db.execute(sql`drop table if exists ${usersTableWithAndWithoutTimezone}`); + + await db.execute( + sql` + create table users_test_with_and_without_timezone ( + id serial not null primary key, + name text not null, + created_at timestamptz not null default now(), + updated_at timestamp not null default now() + ) + `, + ); + + const date = new Date(Date.parse('2020-01-01T00:00:00+04:00')); + + await db.insert(usersTableWithAndWithoutTimezone).values({ name: 'With default times' }); + await db.insert(usersTableWithAndWithoutTimezone).values({ + name: 'Without default times', + createdAt: date, + updatedAt: date, + }); + const users = await db.select().from(usersTableWithAndWithoutTimezone); + + // check that the timestamps are set correctly for default times + expect(Math.abs(users[0]!.updatedAt.getTime() - Date.now())).toBeLessThan(2000); + expect(Math.abs(users[0]!.createdAt.getTime() - Date.now())).toBeLessThan(2000); + + // check that the 
timestamps are set correctly for non default times + expect(Math.abs(users[1]!.updatedAt.getTime() - date.getTime())).toBeLessThan(2000); + expect(Math.abs(users[1]!.createdAt.getTime() - date.getTime())).toBeLessThan(2000); +}); + +test('transaction', async () => { + const users = pgTable('users_transactions', { + id: serial('id').primaryKey(), + balance: integer('balance').notNull(), + }); + const products = pgTable('products_transactions', { + id: serial('id').primaryKey(), + price: integer('price').notNull(), + stock: integer('stock').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop table if exists ${products}`); + + await db.execute(sql`create table users_transactions (id serial not null primary key, balance integer not null)`); + await db.execute( + sql`create table products_transactions (id serial not null primary key, price integer not null, stock integer not null)`, + ); + + const user = await db.insert(users).values({ balance: 100 }).returning().then((rows) => rows[0]!); + const product = await db.insert(products).values({ price: 10, stock: 10 }).returning().then((rows) => rows[0]!); + + await db.transaction(async (tx) => { + await tx.update(users).set({ balance: user.balance - product.price }).where(eq(users.id, user.id)); + await tx.update(products).set({ stock: product.stock - 1 }).where(eq(products.id, product.id)); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 90 }]); + + await db.execute(sql`drop table ${users}`); + await db.execute(sql`drop table ${products}`); +}); + +test('transaction rollback', async () => { + const users = pgTable('users_transactions_rollback', { + id: serial('id').primaryKey(), + balance: integer('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_transactions_rollback (id serial not null primary key, balance integer not null)`, + ); + + 
await expect((async () => { + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + + const result = await db.select().from(users); + + expect(result).toEqual([]); + + await db.execute(sql`drop table ${users}`); +}); + +test('nested transaction', async () => { + const users = pgTable('users_nested_transactions', { + id: serial('id').primaryKey(), + balance: integer('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_nested_transactions (id serial not null primary key, balance integer not null)`, + ); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }); + }); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 200 }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('nested transaction rollback', async () => { + const users = pgTable('users_nested_transactions_rollback', { + id: serial('id').primaryKey(), + balance: integer('balance').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table users_nested_transactions_rollback (id serial not null primary key, balance integer not null)`, + ); + + await db.transaction(async (tx) => { + await tx.insert(users).values({ balance: 100 }); + + await expect((async () => { + await tx.transaction(async (tx) => { + await tx.update(users).set({ balance: 200 }); + tx.rollback(); + }); + })()).rejects.toThrowError(TransactionRollbackError); + }); + + const result = await db.select().from(users); + + expect(result).toEqual([{ id: 1, balance: 100 }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('join subquery with join', async () => { + const internalStaff = 
pgTable('internal_staff', { + userId: integer('user_id').notNull(), + }); + + const customUser = pgTable('custom_user', { + id: integer('id').notNull(), + }); + + const ticket = pgTable('ticket', { + staffId: integer('staff_id').notNull(), + }); + + await db.execute(sql`drop table if exists ${internalStaff}`); + await db.execute(sql`drop table if exists ${customUser}`); + await db.execute(sql`drop table if exists ${ticket}`); + + await db.execute(sql`create table internal_staff (user_id integer not null)`); + await db.execute(sql`create table custom_user (id integer not null)`); + await db.execute(sql`create table ticket (staff_id integer not null)`); + + await db.insert(internalStaff).values({ userId: 1 }); + await db.insert(customUser).values({ id: 1 }); + await db.insert(ticket).values({ staffId: 1 }); + + const subq = db + .select() + .from(internalStaff) + .leftJoin(customUser, eq(internalStaff.userId, customUser.id)) + .as('internal_staff'); + + const mainQuery = await db + .select() + .from(ticket) + .leftJoin(subq, eq(subq.internal_staff.userId, ticket.staffId)); + + expect(mainQuery).toEqual([{ + ticket: { staffId: 1 }, + internal_staff: { + internal_staff: { userId: 1 }, + custom_user: { id: 1 }, + }, + }]); + + await db.execute(sql`drop table ${internalStaff}`); + await db.execute(sql`drop table ${customUser}`); + await db.execute(sql`drop table ${ticket}`); +}); + +test('subquery with view', async () => { + const users = pgTable('users_subquery_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }); + + const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view 
${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]); + + const sq = db.$with('sq').as(db.select().from(newYorkers)); + const result = await db.with(sq).select().from(sq); + + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 3, name: 'Jack', cityId: 1 }, + ]); + + await db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); +}); + +test('join view as subquery', async () => { + const users = pgTable('users_join_view', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }); + + const newYorkers = pgView('new_yorkers').as((qb) => qb.select().from(users).where(eq(users.cityId, 1))); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`drop view if exists ${newYorkers}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + await db.execute(sql`create view ${newYorkers} as select * from ${users} where city_id = 1`); + + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 1 }, + { name: 'Jill', cityId: 2 }, + ]); + + const sq = db.select().from(newYorkers).as('new_yorkers_sq'); + + const result = await db.select().from(users).leftJoin(sq, eq(users.id, sq.id)); + + expect(result).toEqual([ + { + users_join_view: { id: 1, name: 'John', cityId: 1 }, + new_yorkers_sq: { id: 1, name: 'John', cityId: 1 }, + }, + { + users_join_view: { id: 2, name: 'Jane', cityId: 2 }, + new_yorkers_sq: null, + }, + { + users_join_view: { id: 3, name: 'Jack', cityId: 1 }, + new_yorkers_sq: { id: 3, name: 'Jack', cityId: 1 }, + }, + { + users_join_view: { id: 4, name: 'Jill', cityId: 2 }, + new_yorkers_sq: null, + }, + ]); + + await 
db.execute(sql`drop view ${newYorkers}`); + await db.execute(sql`drop table ${users}`); +}); + +test('table selection with single table', async () => { + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, city_id integer not null)`, + ); + + await db.insert(users).values({ name: 'John', cityId: 1 }); + + const result = await db.select({ users }).from(users); + + expect(result).toEqual([{ users: { id: 1, name: 'John', cityId: 1 } }]); + + await db.execute(sql`drop table ${users}`); +}); + +test('set null to jsonb field', async () => { + const users = pgTable('users', { + id: serial('id').primaryKey(), + jsonb: jsonb('jsonb'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, jsonb jsonb)`, + ); + + const result = await db.insert(users).values({ jsonb: null }).returning(); + + expect(result).toEqual([{ id: 1, jsonb: null }]); + + await db.execute(sql`drop table ${users}`); +}); + +test.skip('insert undefined', async () => { + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, name text)`, + ); + + await expect((async () => { + await db.insert(users).values({ name: undefined }); + })()).resolves.not.toThrowError(); + + await db.execute(sql`drop table ${users}`); +}); + +test('update undefined', async () => { + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name'), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute( + sql`create table ${users} (id serial not null primary key, 
name text)`, + ); + + await expect((async () => { + await db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + await expect((async () => { + db.update(users).set({ name: undefined }); + })()).rejects.toThrowError(); + + await db.execute(sql`drop table ${users}`); +}); + +test('array operators', async () => { + const posts = pgTable('posts', { + id: serial('id').primaryKey(), + tags: text('tags').array(), + }); + + await db.execute(sql`drop table if exists ${posts}`); + + await db.execute( + sql`create table ${posts} (id serial primary key, tags text[])`, + ); + + await db.insert(posts).values([{ + tags: ['ORM'], + }, { + tags: ['Typescript'], + }, { + tags: ['Typescript', 'ORM'], + }, { + tags: ['Typescript', 'Frontend', 'React'], + }, { + tags: ['Typescript', 'ORM', 'Database', 'Postgres'], + }, { + tags: ['Java', 'Spring', 'OOP'], + }]); + + const contains = await db.select({ id: posts.id }).from(posts) + .where(arrayContains(posts.tags, ['Typescript', 'ORM'])); + const contained = await db.select({ id: posts.id }).from(posts) + .where(arrayContained(posts.tags, ['Typescript', 'ORM'])); + const overlaps = await db.select({ id: posts.id }).from(posts) + .where(arrayOverlaps(posts.tags, ['Typescript', 'ORM'])); + const withSubQuery = await db.select({ id: posts.id }).from(posts) + .where(arrayContains( + posts.tags, + db.select({ tags: posts.tags }).from(posts).where(eq(posts.id, 1)), + )); + + expect(contains).toEqual([{ id: 3 }, { id: 5 }]); + expect(contained).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }]); + expect(overlaps).toEqual([{ id: 1 }, { id: 2 }, { id: 3 }, { id: 4 }, { id: 5 }]); + expect(withSubQuery).toEqual([{ id: 1 }, { id: 3 }, { id: 5 }]); +}); + +test('set operations (union) from query builder with subquery', async () => { + await setupSetOperationTest(db); + + const sq = db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).as('sq'); + + const result = await db + .select({ id: cities2Table.id, 
name: citiesTable.name }) + .from(cities2Table).union( + db.select().from(sq), + ).orderBy(asc(sql`name`)).limit(2).offset(1); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 3, name: 'Jack' }, + { id: 2, name: 'Jane' }, + ]); + + await expect((async () => { + db + .select({ id: cities2Table.id, name: citiesTable.name, name2: users2Table.name }) + .from(cities2Table).union( + // @ts-expect-error + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table), + ).orderBy(asc(sql`name`)); + })()).rejects.toThrowError(); +}); + +test('set operations (union) as function', async () => { + await setupSetOperationTest(db); + + const result = await union( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`name`)).limit(1).offset(1); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + union( + db + .select({ name: citiesTable.name, id: cities2Table.id }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`name`)); + })()).rejects.toThrowError(); +}); + +test('set operations (union all) from query builder', async () => { + await setupSetOperationTest(db); + + const result = await db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).limit(2).unionAll( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).limit(2), + 
).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(4); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + { id: 2, name: 'London' }, + ]); + + await expect((async () => { + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).limit(2).unionAll( + db + .select({ name: citiesTable.name, id: cities2Table.id }) + .from(cities2Table).limit(2), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); +}); + +test('set operations (union all) as function', async () => { + await setupSetOperationTest(db); + + const result = await unionAll( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(3); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + ]); + + await expect((async () => { + unionAll( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + })()).rejects.toThrowError(); +}); + +test('set operations (intersect) from query builder', async () => { + await setupSetOperationTest(db); + + const result = await db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).intersect( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`name`)); + + expect(result).toHaveLength(2); + + 
expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).intersect( + // @ts-expect-error + db + .select({ id: cities2Table.id, name: citiesTable.name, id2: cities2Table.id }) + .from(cities2Table).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`name`)); + })()).rejects.toThrowError(); +}); + +test('set operations (intersect) as function', async () => { + await setupSetOperationTest(db); + + const result = await intersect( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(0); + + expect(result).toEqual([]); + + await expect((async () => { + intersect( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + })()).rejects.toThrowError(); +}); + +test('set operations (intersect all) from query builder', async () => { + await setupSetOperationTest(db); + + const result = await db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).limit(2).intersectAll( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).limit(2), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + { id: 2, name: 'London' }, + ]); + + await expect((async () => { + db + .select({ id: cities2Table.id, name: 
citiesTable.name }) + .from(cities2Table).limit(2).intersectAll( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(cities2Table).limit(2), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); +}); + +test('set operations (intersect all) as function', async () => { + await setupSetOperationTest(db); + + const result = await intersectAll( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'John' }, + ]); + + await expect((async () => { + intersectAll( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ); + })()).rejects.toThrowError(); +}); + +test('set operations (except) from query builder', async () => { + await setupSetOperationTest(db); + + const result = await db + .select() + .from(cities2Table).except( + db + .select() + .from(cities2Table).where(gt(citiesTable.id, 1)), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + db + .select() + .from(cities2Table).except( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(cities2Table).where(gt(citiesTable.id, 1)), + ); + })()).rejects.toThrowError(); +}); + +test('set operations (except) as function', async () => { + await setupSetOperationTest(db); + + const result = await except( + db + .select({ id: cities2Table.id, 
name: citiesTable.name }) + .from(cities2Table), + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + except( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); +}); + +test('set operations (except all) from query builder', async () => { + await setupSetOperationTest(db); + + const result = await db + .select() + .from(cities2Table).exceptAll( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(2); + + expect(result).toEqual([ + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + ]); + + await expect((async () => { + db + .select({ name: cities2Table.name, id: cities2Table.id }) + .from(cities2Table).exceptAll( + db + .select({ id: cities2Table.id, name: citiesTable.name }) + .from(cities2Table).where(eq(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); +}); + +test('set operations (except all) as function', async () => { + await setupSetOperationTest(db); + + const result = await exceptAll( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gt(users2Table.id, 7)), + db + .select({ id: users2Table.id, name: 
users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`id`)).limit(5).offset(2); + + expect(result).toHaveLength(4); + + expect(result).toEqual([ + { id: 4, name: 'Peter' }, + { id: 5, name: 'Ben' }, + { id: 6, name: 'Jill' }, + { id: 7, name: 'Mary' }, + ]); + + await expect((async () => { + exceptAll( + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gt(users2Table.id, 7)), + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); +}); + +test('set operations (mixed) from query builder with subquery', async () => { + await setupSetOperationTest(db); + const sq = db + .select() + .from(cities2Table).where(gt(citiesTable.id, 1)).as('sq'); + + const result = await db + .select() + .from(cities2Table).except( + ({ unionAll }) => + unionAll( + db.select().from(sq), + db.select().from(cities2Table).where(eq(citiesTable.id, 2)), + ), + ); + + expect(result).toHaveLength(1); + + expect(result).toEqual([ + { id: 1, name: 'New York' }, + ]); + + await expect((async () => { + db + .select() + .from(cities2Table).except( + ({ unionAll }) => + unionAll( + db + .select({ name: cities2Table.name, id: cities2Table.id }) + .from(cities2Table).where(gt(citiesTable.id, 1)), + db.select().from(cities2Table).where(eq(citiesTable.id, 2)), + ), + ); + })()).rejects.toThrowError(); +}); + +test('set operations (mixed all) as function', async () => { + await setupSetOperationTest(db); + + const result = await union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ id: users2Table.id, name: users2Table.name }) + 
.from(users2Table).where(eq(users2Table.id, 7)), + ), + db + .select().from(cities2Table).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)); + + expect(result).toHaveLength(6); + + expect(result).toEqual([ + { id: 1, name: 'John' }, + { id: 2, name: 'London' }, + { id: 3, name: 'Tampa' }, + { id: 5, name: 'Ben' }, + { id: 6, name: 'Jill' }, + { id: 8, name: 'Sally' }, + ]); + + await expect((async () => { + union( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(eq(users2Table.id, 1)), + except( + db + .select({ id: users2Table.id, name: users2Table.name }) + .from(users2Table).where(gte(users2Table.id, 5)), + db + .select({ name: users2Table.name, id: users2Table.id }) + .from(users2Table).where(eq(users2Table.id, 7)), + ), + db + .select().from(cities2Table).where(gt(citiesTable.id, 1)), + ).orderBy(asc(sql`id`)); + })()).rejects.toThrowError(); +}); + +test('aggregate function: count', async () => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: count() }).from(table); + const result2 = await db.select({ value: count(table.a) }).from(table); + const result3 = await db.select({ value: countDistinct(table.name) }).from(table); + + expect(result1[0]?.value).toBe(7); + expect(result2[0]?.value).toBe(5); + expect(result3[0]?.value).toBe(6); +}); + +test('aggregate function: avg', async () => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: avg(table.b) }).from(table); + const result2 = await db.select({ value: avg(table.nullOnly) }).from(table); + const result3 = await db.select({ value: avgDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('33.3333333333333333'); + expect(result2[0]?.value).toBeNull(); + expect(result3[0]?.value).toBe('42.5000000000000000'); +}); + +test('aggregate function: sum', async () => { + const table = aggregateTable; + await 
setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: sum(table.b) }).from(table); + const result2 = await db.select({ value: sum(table.nullOnly) }).from(table); + const result3 = await db.select({ value: sumDistinct(table.b) }).from(table); + + expect(result1[0]?.value).toBe('200'); + expect(result2[0]?.value).toBeNull(); + expect(result3[0]?.value).toBe('170'); +}); + +test('aggregate function: max', async () => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: max(table.b) }).from(table); + const result2 = await db.select({ value: max(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(90); + expect(result2[0]?.value).toBeNull(); +}); + +test('aggregate function: min', async () => { + const table = aggregateTable; + await setupAggregateFunctionsTest(db); + + const result1 = await db.select({ value: min(table.b) }).from(table); + const result2 = await db.select({ value: min(table.nullOnly) }).from(table); + + expect(result1[0]?.value).toBe(10); + expect(result2[0]?.value).toBeNull(); +}); + +test.skip('array mapping and parsing', async () => { + const arrays = pgTable('arrays_tests', { + id: serial('id').primaryKey(), + tags: text('tags').array(), + nested: text('nested').array().array(), + numbers: integer('numbers').notNull().array(), + }); + + await db.execute(sql`drop table if exists ${arrays}`); + await db.execute(sql` + create table ${arrays} ( + id serial primary key, + tags text[], + nested text[][], + numbers integer[] + ) + `); + + await db.insert(arrays).values({ + tags: ['', 'b', 'c'], + nested: [['1', ''], ['3', '\\a']], + numbers: [1, 2, 3], + }); + + const result = await db.select().from(arrays); + + expect(result).toEqual([{ + id: 1, + tags: ['', 'b', 'c'], + nested: [['1', ''], ['3', '\\a']], + numbers: [1, 2, 3], + }]); + + await db.execute(sql`drop table ${arrays}`); +}); + +test('test $onUpdateFn and $onUpdate works as $default', async 
() => { + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id serial primary key, + name text not null, + update_counter integer default 1 not null, + updated_at timestamp(3), + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { name: 'John' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + + await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + const response = await db.select({ ...rest }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { name: 'John', id: 1, updateCounter: 1, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: 1, alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); +}); + +test('test $onUpdateFn and $onUpdate works updating', async () => { + await db.execute(sql`drop table if exists ${usersOnUpdate}`); + + await db.execute( + sql` + create table ${usersOnUpdate} ( + id serial primary key, + name text not null, + update_counter integer default 1, + updated_at timestamp(3), + always_null text + ) + `, + ); + + await db.insert(usersOnUpdate).values([ + { name: 'John', alwaysNull: 'this will be null after updating' }, + { name: 'Jane' }, + { name: 'Jack' }, + { name: 'Jill' }, + ]); + + const { updatedAt, ...rest } = getTableColumns(usersOnUpdate); + await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + await db.update(usersOnUpdate).set({ name: 'Angel' }).where(eq(usersOnUpdate.id, 1)); + await db.update(usersOnUpdate).set({ updateCounter: null }).where(eq(usersOnUpdate.id, 2)); + + const justDates = await db.select({ updatedAt }).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + const response = await db.select({ ...rest 
}).from(usersOnUpdate).orderBy(asc(usersOnUpdate.id)); + + expect(response).toEqual([ + { name: 'Angel', id: 1, updateCounter: 2, alwaysNull: null }, + { name: 'Jane', id: 2, updateCounter: null, alwaysNull: null }, + { name: 'Jack', id: 3, updateCounter: 1, alwaysNull: null }, + { name: 'Jill', id: 4, updateCounter: 1, alwaysNull: null }, + ]); + const msDelay = 15000; + + // expect(initial[0]?.updatedAt?.valueOf()).not.toBe(justDates[0]?.updatedAt?.valueOf()); + + for (const eachUser of justDates) { + expect(eachUser.updatedAt!.valueOf()).toBeGreaterThan(Date.now() - msDelay); + } +}); + +test('test if method with sql operators', async () => { + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + age: integer('age').notNull(), + city: text('city').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + + await db.execute(sql` + create table ${users} ( + id serial primary key, + name text not null, + age integer not null, + city text not null + ) + `); + + await db.insert(users).values([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition1 = true; + + const [result1] = await db.select().from(users).where(eq(users.id, 1).if(condition1)); + + expect(result1).toEqual({ id: 1, name: 'John', age: 20, city: 'New York' }); + + const condition2 = 1; + + const [result2] = await db.select().from(users).where(sql`${users.id} = 1`.if(condition2)); + + expect(result2).toEqual({ id: 1, name: 'John', age: 20, city: 'New York' }); + + const condition3 = 'non-empty string'; + + const result3 = await db.select().from(users).where( + or(eq(users.id, 1).if(condition3), eq(users.id, 2).if(condition3)), + ); + + expect(result3).toEqual([{ id: 1, name: 'John', age: 20, city: 'New York' }, { + id: 2, + name: 'Alice', + age: 21, + city: 
'New York', + }]); + + const condtition4 = false; + + const result4 = await db.select().from(users).where(eq(users.id, 1).if(condtition4)); + + expect(result4).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition5 = undefined; + + const result5 = await db.select().from(users).where(sql`${users.id} = 1`.if(condition5)); + + expect(result5).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition6 = null; + + const result6 = await db.select().from(users).where( + or(eq(users.id, 1).if(condition6), eq(users.id, 2).if(condition6)), + ); + + expect(result6).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition7 = { + term1: 0, + term2: 1, + }; + + const result7 = await db.select().from(users).where( + and(gt(users.age, 20).if(condition7.term1), eq(users.city, 'New York').if(condition7.term2)), + ); + + expect(result7).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + ]); + + const condition8 = { + term1: '', + term2: 'non-empty string', + }; + + const result8 = await db.select().from(users).where( + or(lt(users.age, 21).if(condition8.term1), eq(users.city, 'London').if(condition8.term2)), + ); + + expect(result8).toEqual([ + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition9 = { + term1: 1, + term2: true, + }; + + const result9 = await 
db.select().from(users).where( + and( + inArray(users.city, ['New York', 'London']).if(condition9.term1), + ilike(users.name, 'a%').if(condition9.term2), + ), + ); + + expect(result9).toEqual([ + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + ]); + + const condition10 = { + term1: 4, + term2: 19, + }; + + const result10 = await db.select().from(users).where( + and( + sql`length(${users.name}) <= ${condition10.term1}`.if(condition10.term1), + gt(users.age, condition10.term2).if(condition10.term2 > 20), + ), + ); + + expect(result10).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition11 = true; + + const result11 = await db.select().from(users).where( + or(eq(users.city, 'New York'), gte(users.age, 22))!.if(condition11), + ); + + expect(result11).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition12 = false; + + const result12 = await db.select().from(users).where( + and(eq(users.city, 'London'), gte(users.age, 23))!.if(condition12), + ); + + expect(result12).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition13 = true; + + const result13 = await db.select().from(users).where(sql`(city = 'New York' or age >= 22)`.if(condition13)); + + expect(result13).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + const condition14 = false; + + const result14 = await 
db.select().from(users).where(sql`(city = 'London' and age >= 23)`.if(condition14)); + + expect(result14).toEqual([ + { id: 1, name: 'John', age: 20, city: 'New York' }, + { id: 2, name: 'Alice', age: 21, city: 'New York' }, + { id: 3, name: 'Nick', age: 22, city: 'London' }, + { id: 4, name: 'Lina', age: 23, city: 'London' }, + ]); + + await db.execute(sql`drop table ${users}`); +}); + +// MySchema tests +test('mySchema :: select all fields', async () => { + const now = Date.now(); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + + expect(result[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(result[0]!.createdAt.getTime() - now)).toBeLessThan(100); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('mySchema :: select sql', async () => { + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('mySchema :: select typed sql', async () => { + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.select({ + name: sql`upper(${usersMySchemaTable.name})`, + }).from(usersMySchemaTable); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('mySchema :: select distinct', async () => { + const usersDistinctTable = pgTable('users_distinct', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${usersDistinctTable}`); + await db.execute(sql`create table ${usersDistinctTable} (id integer, name text)`); + + await db.insert(usersDistinctTable).values([ + { id: 1, name: 'John' }, + { id: 1, name: 'John' }, + { id: 2, name: 'John' }, + { id: 1, name: 'Jane' }, + ]); + const users1 = await 
db.selectDistinct().from(usersDistinctTable).orderBy( + usersDistinctTable.id, + usersDistinctTable.name, + ); + const users2 = await db.selectDistinctOn([usersDistinctTable.id]).from(usersDistinctTable).orderBy( + usersDistinctTable.id, + ); + const users3 = await db.selectDistinctOn([usersDistinctTable.name], { name: usersDistinctTable.name }).from( + usersDistinctTable, + ).orderBy(usersDistinctTable.name); + + await db.execute(sql`drop table ${usersDistinctTable}`); + + expect(users1).toEqual([{ id: 1, name: 'Jane' }, { id: 1, name: 'John' }, { id: 2, name: 'John' }]); + + expect(users2).toHaveLength(2); + expect(users2[0]?.id).toBe(1); + expect(users2[1]?.id).toBe(2); + + expect(users3).toHaveLength(2); + expect(users3[0]?.name).toBe('Jane'); + expect(users3[1]?.name).toBe('John'); +}); + +test('mySchema :: insert returning sql', async () => { + const users = await db.insert(usersMySchemaTable).values({ name: 'John' }).returning({ + name: sql`upper(${usersMySchemaTable.name})`, + }); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('mySchema :: delete returning sql', async () => { + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning({ + name: sql`upper(${usersMySchemaTable.name})`, + }); + + expect(users).toEqual([{ name: 'JOHN' }]); +}); + +test('mySchema :: update with returning partial', async () => { + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await db.update(usersMySchemaTable).set({ name: 'Jane' }).where(eq(usersMySchemaTable.name, 'John')) + .returning({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + }); + + expect(users).toEqual([{ id: 1, name: 'Jane' }]); +}); + +test('mySchema :: delete with returning all fields', async () => { + const now = Date.now(); + + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const users = await 
db.delete(usersMySchemaTable).where(eq(usersMySchemaTable.name, 'John')).returning(); + + expect(users[0]!.createdAt).toBeInstanceOf(Date); + expect(Math.abs(users[0]!.createdAt.getTime() - now)).toBeLessThan(100); + expect(users).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: users[0]!.createdAt }]); +}); + +test('mySchema :: insert + select', async () => { + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const result = await db.select().from(usersMySchemaTable); + expect(result).toEqual([{ id: 1, name: 'John', verified: false, jsonb: null, createdAt: result[0]!.createdAt }]); + + await db.insert(usersMySchemaTable).values({ name: 'Jane' }); + const result2 = await db.select().from(usersMySchemaTable); + expect(result2).toEqual([ + { id: 1, name: 'John', verified: false, jsonb: null, createdAt: result2[0]!.createdAt }, + { id: 2, name: 'Jane', verified: false, jsonb: null, createdAt: result2[1]!.createdAt }, + ]); +}); + +test('mySchema :: insert with overridden default values', async () => { + await db.insert(usersMySchemaTable).values({ name: 'John', verified: true }); + const result = await db.select().from(usersMySchemaTable); + + expect(result).toEqual([{ id: 1, name: 'John', verified: true, jsonb: null, createdAt: result[0]!.createdAt }]); +}); + +test('mySchema :: insert many', async () => { + await db.insert(usersMySchemaTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]); + const result = await db.select({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + jsonb: usersMySchemaTable.jsonb, + verified: usersMySchemaTable.verified, + }).from(usersMySchemaTable); + + expect(result).toEqual([ + { id: 1, name: 'John', jsonb: null, verified: false }, + { id: 2, name: 'Bruce', jsonb: ['foo', 'bar'], verified: false }, + { id: 3, name: 'Jane', jsonb: null, verified: false }, + { id: 4, name: 'Austin', jsonb: null, 
verified: true }, + ]); +}); + +test('mySchema :: select with group by as field', async () => { + await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.name); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'John' }]); +}); + +test('mySchema :: select with group by as column + sql', async () => { + await db.insert(usersMySchemaTable).values([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]); + + const result = await db.select({ name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, sql`${usersMySchemaTable.name}`); + + expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }, { name: 'John' }]); +}); + +test('mySchema :: build query', async () => { + const query = db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from(usersMySchemaTable) + .groupBy(usersMySchemaTable.id, usersMySchemaTable.name) + .toSQL(); + + expect(query).toEqual({ + sql: 'select "id", "name" from "mySchema"."users" group by "mySchema"."users"."id", "mySchema"."users"."name"', + params: [], + }); +}); + +test('mySchema :: partial join with alias', async () => { + const customerAlias = alias(usersMySchemaTable, 'customer'); + + await db.insert(usersMySchemaTable).values([{ id: 10, name: 'Ivan' }, { id: 11, name: 'Hans' }]); + const result = await db + .select({ + user: { + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + }, + customer: { + id: customerAlias.id, + name: customerAlias.name, + }, + }).from(usersMySchemaTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(usersMySchemaTable.id, 10)); + + expect(result).toEqual([{ + user: { id: 10, name: 'Ivan' }, + customer: { id: 11, name: 'Hans' }, + }]); +}); + +test('mySchema :: insert with spaces', async () => { + await db.insert(usersMySchemaTable).values({ name: sql`'Jo h 
n'` }); + const result = await db.select({ id: usersMySchemaTable.id, name: usersMySchemaTable.name }).from( + usersMySchemaTable, + ); + + expect(result).toEqual([{ id: 1, name: 'Jo h n' }]); +}); + +test('mySchema :: prepared statement with placeholder in .limit', async () => { + await db.insert(usersMySchemaTable).values({ name: 'John' }); + const stmt = db + .select({ + id: usersMySchemaTable.id, + name: usersMySchemaTable.name, + }) + .from(usersMySchemaTable) + .where(eq(usersMySchemaTable.id, sql.placeholder('id'))) + .limit(sql.placeholder('limit')) + .prepare('mySchema_stmt_limit'); + + const result = await stmt.execute({ id: 1, limit: 1 }); + + expect(result).toEqual([{ id: 1, name: 'John' }]); + expect(result).toHaveLength(1); +}); + +test('mySchema :: build query insert with onConflict do update / multiple columns', async () => { + const query = db.insert(usersMySchemaTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoUpdate({ target: [usersMySchemaTable.id, usersMySchemaTable.name], set: { name: 'John1' } }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "mySchema"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id","name") do update set "name" = $3', + params: ['John', '["foo","bar"]', 'John1'], + }); +}); + +test('mySchema :: build query insert with onConflict do nothing + target', async () => { + const query = db.insert(usersMySchemaTable) + .values({ name: 'John', jsonb: ['foo', 'bar'] }) + .onConflictDoNothing({ target: usersMySchemaTable.id }) + .toSQL(); + + expect(query).toEqual({ + sql: + 'insert into "mySchema"."users" ("id", "name", "verified", "jsonb", "created_at") values (default, $1, default, $2, default) on conflict ("id") do nothing', + params: ['John', '["foo","bar"]'], + }); +}); + +test('mySchema :: select from tables with same name from different schema using alias', async () => { + await db.insert(usersMySchemaTable).values({ 
id: 10, name: 'Ivan' }); + await db.insert(usersTable).values({ id: 11, name: 'Hans' }); + + const customerAlias = alias(usersTable, 'customer'); + + const result = await db + .select().from(usersMySchemaTable) + .leftJoin(customerAlias, eq(customerAlias.id, 11)) + .where(eq(customerAlias.id, 11)); + + expect(result).toEqual([{ + users: { + id: 10, + name: 'Ivan', + verified: false, + jsonb: null, + createdAt: result[0]!.users.createdAt, + }, + customer: { + id: 11, + name: 'Hans', + verified: false, + jsonb: null, + createdAt: result[0]!.customer!.createdAt, + }, + }]); +}); + +test('mySchema :: view', async () => { + const newYorkers1 = mySchema.view('new_yorkers') + .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); + + const newYorkers2 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); + + const newYorkers3 = mySchema.view('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).existing(); + + await db.execute(sql`create view ${newYorkers1} as ${getViewConfig(newYorkers1).query}`); + + await db.insert(citiesMySchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2MySchemaTable).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', 
cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop view ${newYorkers1}`); +}); + +test('mySchema :: materialized view', async () => { + const newYorkers1 = mySchema.materializedView('new_yorkers') + .as((qb) => qb.select().from(users2MySchemaTable).where(eq(users2MySchemaTable.cityId, 1))); + + const newYorkers2 = mySchema.materializedView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).as(sql`select * from ${users2MySchemaTable} where ${eq(users2MySchemaTable.cityId, 1)}`); + + const newYorkers3 = mySchema.materializedView('new_yorkers', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull(), + }).existing(); + + await db.execute(sql`create materialized view ${newYorkers1} as ${getMaterializedViewConfig(newYorkers1).query}`); + + await db.insert(citiesMySchemaTable).values([{ name: 'New York' }, { name: 'Paris' }]); + + await db.insert(users2MySchemaTable).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 1 }, + { name: 'Jack', cityId: 2 }, + ]); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([]); + } + + await db.refreshMaterializedView(newYorkers1); + + { + const result = await db.select().from(newYorkers1); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers2); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = await db.select().from(newYorkers3); + expect(result).toEqual([ + { id: 1, name: 'John', cityId: 1 }, + { id: 2, name: 'Jane', cityId: 1 }, + ]); + } + + { + const result = 
await db.select({ name: newYorkers1.name }).from(newYorkers1); + expect(result).toEqual([ + { name: 'John' }, + { name: 'Jane' }, + ]); + } + + await db.execute(sql`drop materialized view ${newYorkers1}`); +}); + +test('limit 0', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .select() + .from(usersTable) + .limit(0); + + expect(users).toEqual([]); +}); + +test('limit -1', async () => { + await db.insert(usersTable).values({ name: 'John' }); + const users = await db + .select() + .from(usersTable) + .limit(-1); + + expect(users.length).toBeGreaterThan(0); +}); + +test('Object keys as column names', async () => { + // Tests the following: + // Column with required config + // Column with optional config without providing a value + // Column with optional config providing a value + // Column without config + const users = pgTable('users', { + id: bigserial({ mode: 'number' }).primaryKey(), + firstName: varchar(), + lastName: varchar({ length: 50 }), + admin: boolean(), + }); + + await db.execute(sql`drop table if exists users`); + await db.execute( + sql` + create table users ( + "id" bigserial primary key, + "firstName" varchar, + "lastName" varchar(50), + "admin" boolean + ) + `, + ); + + await db.insert(users).values([ + { firstName: 'John', lastName: 'Doe', admin: true }, + { firstName: 'Jane', lastName: 'Smith', admin: false }, + ]); + const result = await db + .select({ id: users.id, firstName: users.firstName, lastName: users.lastName }) + .from(users) + .where(eq(users.admin, true)); + + expect(result).toEqual([ + { id: 1, firstName: 'John', lastName: 'Doe' }, + ]); + + await db.execute(sql`drop table users`); +}); + +test.skip('proper json and jsonb handling', async () => { + const jsonTable = pgTable('json_table', { + json: json('json').$type<{ name: string; age: number }>(), + jsonb: jsonb('jsonb').$type<{ name: string; age: number }>(), + }); + + await db.execute(sql`drop table if exists ${jsonTable}`); + 
+ await db.execute(sql`create table ${jsonTable} (json json, jsonb jsonb)`); + + await db.insert(jsonTable).values({ json: { name: 'Tom', age: 75 }, jsonb: { name: 'Pete', age: 23 } }); + + const result = await db.select().from(jsonTable); + + const justNames = await db.select({ + name1: sql`${jsonTable.json}->>'name'`.as('name1'), + name2: sql`${jsonTable.jsonb}->>'name'`.as('name2'), + }).from(jsonTable); + + expect(result).toStrictEqual([ + { + json: { name: 'Tom', age: 75 }, + jsonb: { name: 'Pete', age: 23 }, + }, + ]); + + expect(justNames).toStrictEqual([ + { + name1: 'Tom', + name2: 'Pete', + }, + ]); +}); + +test.todo('set json/jsonb fields with objects and retrieve with the ->> operator', async () => { + const obj = { string: 'test', number: 123 }; + const { string: testString, number: testNumber } = obj; + + await db.insert(jsonTestTable).values({ + json: obj, + jsonb: obj, + }); + + const result = await db.select({ + jsonStringField: sql`${jsonTestTable.json}->>'string'`, + jsonNumberField: sql`${jsonTestTable.json}->>'number'`, + jsonbStringField: sql`${jsonTestTable.jsonb}->>'string'`, + jsonbNumberField: sql`${jsonTestTable.jsonb}->>'number'`, + }).from(jsonTestTable); + + expect(result).toStrictEqual([{ + jsonStringField: testString, + jsonNumberField: String(testNumber), + jsonbStringField: testString, + jsonbNumberField: String(testNumber), + }]); +}); + +test.todo('set json/jsonb fields with strings and retrieve with the ->> operator', async () => { + const obj = { string: 'test', number: 123 }; + const { string: testString, number: testNumber } = obj; + + await db.insert(jsonTestTable).values({ + json: sql`${JSON.stringify(obj)}`, + jsonb: sql`${JSON.stringify(obj)}`, + }); + + const result = await db.select({ + jsonStringField: sql`${jsonTestTable.json}->>'string'`, + jsonNumberField: sql`${jsonTestTable.json}->>'number'`, + jsonbStringField: sql`${jsonTestTable.jsonb}->>'string'`, + jsonbNumberField: sql`${jsonTestTable.jsonb}->>'number'`, + 
}).from(jsonTestTable); + + expect(result).toStrictEqual([{ + jsonStringField: testString, + jsonNumberField: String(testNumber), + jsonbStringField: testString, + jsonbNumberField: String(testNumber), + }]); +}); + +test.todo('set json/jsonb fields with objects and retrieve with the -> operator', async () => { + const obj = { string: 'test', number: 123 }; + const { string: testString, number: testNumber } = obj; + + await db.insert(jsonTestTable).values({ + json: obj, + jsonb: obj, + }); + + const result = await db.select({ + jsonStringField: sql`${jsonTestTable.json}->'string'`, + jsonNumberField: sql`${jsonTestTable.json}->'number'`, + jsonbStringField: sql`${jsonTestTable.jsonb}->'string'`, + jsonbNumberField: sql`${jsonTestTable.jsonb}->'number'`, + }).from(jsonTestTable); + + expect(result).toStrictEqual([{ + jsonStringField: testString, + jsonNumberField: testNumber, + jsonbStringField: testString, + jsonbNumberField: testNumber, + }]); +}); + +test.todo('set json/jsonb fields with strings and retrieve with the -> operator', async () => { + const obj = { string: 'test', number: 123 }; + const { string: testString, number: testNumber } = obj; + + await db.insert(jsonTestTable).values({ + json: sql`${JSON.stringify(obj)}`, + jsonb: sql`${JSON.stringify(obj)}`, + }); + + const result = await db.select({ + jsonStringField: sql`${jsonTestTable.json}->'string'`, + jsonNumberField: sql`${jsonTestTable.json}->'number'`, + jsonbStringField: sql`${jsonTestTable.jsonb}->'string'`, + jsonbNumberField: sql`${jsonTestTable.jsonb}->'number'`, + }).from(jsonTestTable); + + expect(result).toStrictEqual([{ + jsonStringField: testString, + jsonNumberField: testNumber, + jsonbStringField: testString, + jsonbNumberField: testNumber, + }]); +}); + +test('update ... 
from', async () => { + await db.insert(cities2Table).values([ + { name: 'New York City' }, + { name: 'Seattle' }, + ]); + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + ]); + + const result = await db + .update(users2Table) + .set({ + cityId: cities2Table.id, + }) + .from(cities2Table) + .where(and(eq(cities2Table.name, 'Seattle'), eq(users2Table.name, 'John'))) + .returning(); + + expect(result).toStrictEqual([{ + id: 1, + name: 'John', + cityId: 2, + cities: { + id: 2, + name: 'Seattle', + }, + }]); +}); + +test('update ... from with alias', async () => { + await db.insert(cities2Table).values([ + { name: 'New York City' }, + { name: 'Seattle' }, + ]); + await db.insert(users2Table).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + ]); + + const users = alias(users2Table, 'u'); + const cities = alias(cities2Table, 'c'); + const result = await db + .update(users) + .set({ + cityId: cities.id, + }) + .from(cities) + .where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John'))) + .returning(); + + expect(result).toStrictEqual([{ + id: 1, + name: 'John', + cityId: 2, + c: { + id: 2, + name: 'Seattle', + }, + }]); +}); + +test('update ... 
from with join', async () => { + const states = pgTable('states', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const cities = pgTable('cities', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + stateId: integer('state_id').references(() => states.id), + }); + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + cityId: integer('city_id').notNull().references(() => cities.id), + }); + + await db.execute(sql`drop table if exists "states" cascade`); + await db.execute(sql`drop table if exists "cities" cascade`); + await db.execute(sql`drop table if exists "users" cascade`); + await db.execute(sql` + create table "states" ( + "id" serial primary key, + "name" text not null + ) + `); + await db.execute(sql` + create table "cities" ( + "id" serial primary key, + "name" text not null, + "state_id" integer references "states"("id") + ) + `); + await db.execute(sql` + create table "users" ( + "id" serial primary key, + "name" text not null, + "city_id" integer not null references "cities"("id") + ) + `); + + await db.insert(states).values([ + { name: 'New York' }, + { name: 'Washington' }, + ]); + await db.insert(cities).values([ + { name: 'New York City', stateId: 1 }, + { name: 'Seattle', stateId: 2 }, + { name: 'London' }, + ]); + await db.insert(users).values([ + { name: 'John', cityId: 1 }, + { name: 'Jane', cityId: 2 }, + { name: 'Jack', cityId: 3 }, + ]); + + const result1 = await db + .update(users) + .set({ + cityId: cities.id, + }) + .from(cities) + .leftJoin(states, eq(cities.stateId, states.id)) + .where(and(eq(cities.name, 'Seattle'), eq(users.name, 'John'))) + .returning(); + const result2 = await db + .update(users) + .set({ + cityId: cities.id, + }) + .from(cities) + .leftJoin(states, eq(cities.stateId, states.id)) + .where(and(eq(cities.name, 'London'), eq(users.name, 'Jack'))) + .returning(); + + expect(result1).toStrictEqual([{ + id: 1, + name: 'John', + 
cityId: 2, + cities: { + id: 2, + name: 'Seattle', + stateId: 2, + }, + states: { + id: 2, + name: 'Washington', + }, + }]); + expect(result2).toStrictEqual([{ + id: 3, + name: 'Jack', + cityId: 3, + cities: { + id: 3, + name: 'London', + stateId: null, + }, + states: null, + }]); +}); + +test('insert into ... select', async () => { + const notifications = pgTable('notifications', { + id: serial('id').primaryKey(), + sentAt: timestamp('sent_at').notNull().defaultNow(), + message: text('message').notNull(), + }); + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const userNotications = pgTable('user_notifications', { + userId: integer('user_id').notNull().references(() => users.id, { onDelete: 'cascade' }), + notificationId: integer('notification_id').notNull().references(() => notifications.id, { + onDelete: 'cascade', + }), + }, (t) => ({ + pk: primaryKey({ columns: [t.userId, t.notificationId] }), + })); + + await db.execute(sql`drop table if exists notifications`); + await db.execute(sql`drop table if exists users`); + await db.execute(sql`drop table if exists user_notifications`); + await db.execute(sql` + create table notifications ( + id serial primary key, + sent_at timestamp not null default now(), + message text not null + ) + `); + await db.execute(sql` + create table users ( + id serial primary key, + name text not null + ) + `); + await db.execute(sql` + create table user_notifications ( + user_id int references users(id) on delete cascade, + notification_id int references notifications(id) on delete cascade, + primary key (user_id, notification_id) + ) + `); + + const newNotification = await db + .insert(notifications) + .values({ message: 'You are one of the 3 lucky winners!' 
}) + .returning({ id: notifications.id }) + .then((result) => result[0]); + await db.insert(users).values([ + { name: 'Alice' }, + { name: 'Bob' }, + { name: 'Charlie' }, + { name: 'David' }, + { name: 'Eve' }, + ]); + + const sentNotifications = await db + .insert(userNotications) + .select( + db + .select({ + userId: users.id, + notificationId: sql`${newNotification!.id}`.as('notification_id'), + }) + .from(users) + .where(inArray(users.name, ['Alice', 'Charlie', 'Eve'])) + .orderBy(asc(users.id)), + ) + .returning(); + + expect(sentNotifications).toStrictEqual([ + { userId: 1, notificationId: newNotification!.id }, + { userId: 3, notificationId: newNotification!.id }, + { userId: 5, notificationId: newNotification!.id }, + ]); +}); + +test('insert into ... select with keys in different order', async () => { + const users1 = pgTable('users1', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + const users2 = pgTable('users2', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists users1`); + await db.execute(sql`drop table if exists users2`); + await db.execute(sql` + create table users1 ( + id serial primary key, + name text not null + ) + `); + await db.execute(sql` + create table users2 ( + id serial primary key, + name text not null + ) + `); + + expect( + () => + db + .insert(users1) + .select( + db + .select({ + name: users2.name, + id: users2.id, + }) + .from(users2), + ), + ).toThrowError(); +}); + +test('policy', () => { + { + const policy = pgPolicy('test policy'); + + expect(is(policy, PgPolicy)).toBe(true); + expect(policy.name).toBe('test policy'); + } + + { + const policy = pgPolicy('test policy', { + as: 'permissive', + for: 'all', + to: 'public', + using: sql`1=1`, + withCheck: sql`1=1`, + }); + + expect(is(policy, PgPolicy)).toBe(true); + expect(policy.name).toBe('test policy'); + expect(policy.as).toBe('permissive'); + expect(policy.for).toBe('all'); + 
expect(policy.to).toBe('public'); + const dialect = new PgDialect(); + expect(is(policy.using, SQL)).toBe(true); + expect(dialect.sqlToQuery(policy.using!).sql).toBe('1=1'); + expect(is(policy.withCheck, SQL)).toBe(true); + expect(dialect.sqlToQuery(policy.withCheck!).sql).toBe('1=1'); + } + + { + const policy = pgPolicy('test policy', { + to: 'custom value', + }); + + expect(policy.to).toBe('custom value'); + } + + { + const p1 = pgPolicy('test policy'); + const p2 = pgPolicy('test policy 2', { + as: 'permissive', + for: 'all', + to: 'public', + using: sql`1=1`, + withCheck: sql`1=1`, + }); + const table = pgTable('table_with_policy', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }, () => ({ + p1, + p2, + })); + const config = getTableConfig(table); + expect(config.policies).toHaveLength(2); + expect(config.policies[0]).toBe(p1); + expect(config.policies[1]).toBe(p2); + } +}); + +test('neon: policy', () => { + { + const policy = crudPolicy({ + read: true, + modify: true, + role: authenticatedRole, + }); + + for (const it of Object.values(policy)) { + expect(is(it, PgPolicy)).toBe(true); + expect(it?.to).toStrictEqual(authenticatedRole); + it?.using ? expect(it.using).toStrictEqual(sql`true`) : ''; + it?.withCheck ? 
expect(it.withCheck).toStrictEqual(sql`true`) : ''; + } + } + + { + const table = pgTable('name', { + id: integer('id'), + }, (t) => [ + index('name').on(t.id), + crudPolicy({ + read: true, + modify: true, + role: authenticatedRole, + }), + primaryKey({ columns: [t.id], name: 'custom' }), + ]); + + const { policies, indexes, primaryKeys } = getTableConfig(table); + + expect(policies.length).toBe(4); + expect(indexes.length).toBe(1); + expect(primaryKeys.length).toBe(1); + + expect(policies[0]?.name === 'crud-custom-policy-modify'); + expect(policies[1]?.name === 'crud-custom-policy-read'); + } +}); + +test('neon: neon_identity', () => { + const usersSyncTable = usersSync; + + const { columns, schema, name } = getTableConfig(usersSyncTable); + + expect(name).toBe('users_sync'); + expect(schema).toBe('neon_identity'); + expect(columns).toHaveLength(6); +}); + +test('Enable RLS function', () => { + const usersWithRLS = pgTable('users', { + id: integer(), + }).enableRLS(); + + const config1 = getTableConfig(usersWithRLS); + + const usersNoRLS = pgTable('users', { + id: integer(), + }); + + const config2 = getTableConfig(usersNoRLS); + + expect(config1.enableRLS).toBeTruthy(); + expect(config2.enableRLS).toBeFalsy(); +}); + +test('$count separate', async () => { + const countTestTable = pgTable('count_test', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${countTestTable}`); + await db.execute(sql`create table ${countTestTable} (id int, name text)`); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = await db.$count(countTestTable); + + await db.execute(sql`drop table ${countTestTable}`); + + expect(count).toStrictEqual(4); +}); + +test('$count embedded', async () => { + const countTestTable = pgTable('count_test', { + id: integer('id').notNull(), + name: 
text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${countTestTable}`); + await db.execute(sql`create table ${countTestTable} (id int, name text)`); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = await db.select({ + count: db.$count(countTestTable), + }).from(countTestTable); + + await db.execute(sql`drop table ${countTestTable}`); + + expect(count).toStrictEqual([ + { count: 4 }, + { count: 4 }, + { count: 4 }, + { count: 4 }, + ]); +}); + +test('$count separate reuse', async () => { + const countTestTable = pgTable('count_test', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${countTestTable}`); + await db.execute(sql`create table ${countTestTable} (id int, name text)`); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = db.$count(countTestTable); + + const count1 = await count; + + await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); + + const count2 = await count; + + await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); + + const count3 = await count; + + await db.execute(sql`drop table ${countTestTable}`); + + expect(count1).toStrictEqual(4); + expect(count2).toStrictEqual(5); + expect(count3).toStrictEqual(6); +}); + +test('$count embedded reuse', async () => { + const countTestTable = pgTable('count_test', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${countTestTable}`); + await db.execute(sql`create table ${countTestTable} (id int, name text)`); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + 
]); + + const count = db.select({ + count: db.$count(countTestTable), + }).from(countTestTable); + + const count1 = await count; + + await db.insert(countTestTable).values({ id: 5, name: 'fifth' }); + + const count2 = await count; + + await db.insert(countTestTable).values({ id: 6, name: 'sixth' }); + + const count3 = await count; + + await db.execute(sql`drop table ${countTestTable}`); + + expect(count1).toStrictEqual([ + { count: 4 }, + { count: 4 }, + { count: 4 }, + { count: 4 }, + ]); + expect(count2).toStrictEqual([ + { count: 5 }, + { count: 5 }, + { count: 5 }, + { count: 5 }, + { count: 5 }, + ]); + expect(count3).toStrictEqual([ + { count: 6 }, + { count: 6 }, + { count: 6 }, + { count: 6 }, + { count: 6 }, + { count: 6 }, + ]); +}); + +test('$count separate with filters', async () => { + const countTestTable = pgTable('count_test', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${countTestTable}`); + await db.execute(sql`create table ${countTestTable} (id int, name text)`); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = await db.$count(countTestTable, gt(countTestTable.id, 1)); + + await db.execute(sql`drop table ${countTestTable}`); + + expect(count).toStrictEqual(3); +}); + +test('$count embedded with filters', async () => { + const countTestTable = pgTable('count_test', { + id: integer('id').notNull(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${countTestTable}`); + await db.execute(sql`create table ${countTestTable} (id int, name text)`); + + await db.insert(countTestTable).values([ + { id: 1, name: 'First' }, + { id: 2, name: 'Second' }, + { id: 3, name: 'Third' }, + { id: 4, name: 'Fourth' }, + ]); + + const count = await db.select({ + count: db.$count(countTestTable, gt(countTestTable.id, 1)), + 
}).from(countTestTable); + + await db.execute(sql`drop table ${countTestTable}`); + + expect(count).toStrictEqual([ + { count: 3 }, + { count: 3 }, + { count: 3 }, + { count: 3 }, + ]); +}); + +test('insert multiple rows into table with generated identity column', async () => { + const identityColumnsTable = pgTable('identity_columns_table', { + id: integer('id').generatedAlwaysAsIdentity(), + id1: integer('id1').generatedByDefaultAsIdentity(), + name: text('name').notNull(), + }); + + // not passing identity columns + await db.execute(sql`drop table if exists ${identityColumnsTable}`); + await db.execute( + sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, + ); + + let result = await db.insert(identityColumnsTable).values([ + { name: 'John' }, + { name: 'Jane' }, + { name: 'Bob' }, + ]).returning(); + + expect(result).toEqual([ + { id: 1, id1: 1, name: 'John' }, + { id: 2, id1: 2, name: 'Jane' }, + { id: 3, id1: 3, name: 'Bob' }, + ]); + + // passing generated by default as identity column + await db.execute(sql`drop table if exists ${identityColumnsTable}`); + await db.execute( + sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, + ); + + result = await db.insert(identityColumnsTable).values([ + { name: 'John', id1: 3 }, + { name: 'Jane', id1: 5 }, + { name: 'Bob', id1: 5 }, + ]).returning(); + + expect(result).toEqual([ + { id: 1, id1: 3, name: 'John' }, + { id: 2, id1: 5, name: 'Jane' }, + { id: 3, id1: 5, name: 'Bob' }, + ]); + + // passing all identity columns + await db.execute(sql`drop table if exists ${identityColumnsTable}`); + await db.execute( + sql`create table ${identityColumnsTable} ("id" integer generated always as identity, "id1" integer generated by default as identity, "name" text)`, + ); + + result = await 
db.insert(identityColumnsTable).overridingSystemValue().values([ + { name: 'John', id: 2, id1: 3 }, + { name: 'Jane', id: 4, id1: 5 }, + { name: 'Bob', id: 4, id1: 5 }, + ]).returning(); + + expect(result).toEqual([ + { id: 2, id1: 3, name: 'John' }, + { id: 4, id1: 5, name: 'Jane' }, + { id: 4, id1: 5, name: 'Bob' }, + ]); +}); + +test('insert as cte', async () => { + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); + + const sq1 = db.$with('sq').as( + db.insert(users).values({ name: 'John' }).returning(), + ); + const result1 = await db.with(sq1).select().from(sq1); + const result2 = await db.with(sq1).select({ id: sq1.id }).from(sq1); + + const sq2 = db.$with('sq').as( + db.insert(users).values({ name: 'Jane' }).returning({ id: users.id, name: users.name }), + ); + const result3 = await db.with(sq2).select().from(sq2); + const result4 = await db.with(sq2).select({ name: sq2.name }).from(sq2); + + expect(result1).toEqual([{ id: 1, name: 'John' }]); + expect(result2).toEqual([{ id: 2 }]); + expect(result3).toEqual([{ id: 3, name: 'Jane' }]); + expect(result4).toEqual([{ name: 'Jane' }]); +}); + +test('update as cte', async () => { + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + age: integer('age').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, age integer not null)`, + ); + + await db.insert(users).values([ + { name: 'John', age: 30 }, + { name: 'Jane', age: 30 }, + ]); + + const sq1 = db.$with('sq').as( + db.update(users).set({ age: 25 }).where(eq(users.name, 'John')).returning(), + ); + const result1 = await db.with(sq1).select().from(sq1); + await db.update(users).set({ 
age: 30 }); + const result2 = await db.with(sq1).select({ age: sq1.age }).from(sq1); + + const sq2 = db.$with('sq').as( + db.update(users).set({ age: 20 }).where(eq(users.name, 'Jane')).returning({ name: users.name, age: users.age }), + ); + const result3 = await db.with(sq2).select().from(sq2); + await db.update(users).set({ age: 30 }); + const result4 = await db.with(sq2).select({ age: sq2.age }).from(sq2); + + expect(result1).toEqual([{ id: 1, name: 'John', age: 25 }]); + expect(result2).toEqual([{ age: 25 }]); + expect(result3).toEqual([{ name: 'Jane', age: 20 }]); + expect(result4).toEqual([{ age: 20 }]); +}); + +test('delete as cte', async () => { + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); + + await db.insert(users).values([ + { name: 'John' }, + { name: 'Jane' }, + ]); + + const sq1 = db.$with('sq').as( + db.delete(users).where(eq(users.name, 'John')).returning(), + ); + const result1 = await db.with(sq1).select().from(sq1); + await db.insert(users).values({ name: 'John' }); + const result2 = await db.with(sq1).select({ name: sq1.name }).from(sq1); + + const sq2 = db.$with('sq').as( + db.delete(users).where(eq(users.name, 'Jane')).returning({ id: users.id, name: users.name }), + ); + const result3 = await db.with(sq2).select().from(sq2); + await db.insert(users).values({ name: 'Jane' }); + const result4 = await db.with(sq2).select({ name: sq2.name }).from(sq2); + + expect(result1).toEqual([{ id: 1, name: 'John' }]); + expect(result2).toEqual([{ name: 'John' }]); + expect(result3).toEqual([{ id: 2, name: 'Jane' }]); + expect(result4).toEqual([{ name: 'Jane' }]); +}); + +test('sql operator as cte', async () => { + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await 
db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); + await db.insert(users).values([ + { name: 'John' }, + { name: 'Jane' }, + ]); + + const sq1 = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, + }).as(sql`select * from ${users} where ${users.name} = 'John'`); + const result1 = await db.with(sq1).select().from(sq1); + + const sq2 = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, + }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); + const result2 = await db.with(sq2).select().from(sq1); + + expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); + expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); +}); diff --git a/integration-tests/tests/imports/index.test.ts b/integration-tests/tests/imports/index.test.ts index 7a44942fa..f4c0200cf 100644 --- a/integration-tests/tests/imports/index.test.ts +++ b/integration-tests/tests/imports/index.test.ts @@ -21,6 +21,7 @@ it('dynamic imports check for CommonJS', async () => { if ( o1.startsWith('drizzle-orm/bun-sqlite') || o1.startsWith('drizzle-orm/pglite') || o1.startsWith('drizzle-orm/expo-sqlite') || o1.startsWith('drizzle-orm/libsql/wasm') + || o1.startsWith('drizzle-orm/bun-sql') ) { continue; } @@ -46,7 +47,10 @@ it('dynamic imports check for ESM', async () => { const promises: ProcessPromise[] = []; for (const [i, key] of Object.keys(pj['exports']).entries()) { const o1 = path.join('drizzle-orm', key); - if (o1.startsWith('drizzle-orm/bun-sqlite') || o1.startsWith('drizzle-orm/expo-sqlite')) { + if ( + o1.startsWith('drizzle-orm/bun-sqlite') || o1.startsWith('drizzle-orm/expo-sqlite') + || o1.startsWith('drizzle-orm/bun-sql') + ) { continue; } fs.writeFileSync(`${IMPORTS_FOLDER}/imports_${i}.mjs`, 'imp'); diff --git a/integration-tests/tests/mysql/mysql-common.ts b/integration-tests/tests/mysql/mysql-common.ts index 
98e425f9c..0ec7b8b8d 100644 --- a/integration-tests/tests/mysql/mysql-common.ts +++ b/integration-tests/tests/mysql/mysql-common.ts @@ -4780,4 +4780,39 @@ export function tests(driver?: string) { expect(query.sql).not.include('USE INDEX'); }); + + test('sql operator as cte', async (ctx) => { + const { db } = ctx.mysql; + + const users = mysqlTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); + await db.insert(users).values([ + { name: 'John' }, + { name: 'Jane' }, + ]); + + const sq1 = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, + }).as(sql`select * from ${users} where ${users.name} = 'John'`); + const result1 = await db.with(sq1).select().from(sq1); + + const sq2 = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, + }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); + const result2 = await db.with(sq2).select().from(sq1); + + expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); + expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); + }); } diff --git a/integration-tests/tests/pg/pg-common.ts b/integration-tests/tests/pg/pg-common.ts index 5e5f4ec72..d4f9d98c2 100644 --- a/integration-tests/tests/pg/pg-common.ts +++ b/integration-tests/tests/pg/pg-common.ts @@ -31,7 +31,7 @@ import { sumDistinct, TransactionRollbackError, } from 'drizzle-orm'; -import { authenticatedRole, crudPolicy } from 'drizzle-orm/neon'; +import { authenticatedRole, crudPolicy, usersSync } from 'drizzle-orm/neon'; import type { NeonHttpDatabase } from 'drizzle-orm/neon-http'; import type { PgColumn, PgDatabase, PgQueryResultHKT } from 'drizzle-orm/pg-core'; import { @@ -4227,6 +4227,15 @@ export function tests() { test('mySchema :: insert many', async (ctx) => { const { db } = ctx.pg; + 
console.log('before'); + console.log( + db.insert(usersMySchemaTable).values([ + { name: 'John' }, + { name: 'Bruce', jsonb: ['foo', 'bar'] }, + { name: 'Jane' }, + { name: 'Austin', verified: true }, + ]).toSQL(), + ); await db.insert(usersMySchemaTable).values([ { name: 'John' }, { name: 'Bruce', jsonb: ['foo', 'bar'] }, @@ -5130,6 +5139,16 @@ export function tests() { } }); + test('neon: neon_identity', () => { + const usersSyncTable = usersSync; + + const { columns, schema, name } = getTableConfig(usersSyncTable); + + expect(name).toBe('users_sync'); + expect(schema).toBe('neon_identity'); + expect(columns).toHaveLength(6); + }); + test('Enable RLS function', () => { const usersWithRLS = pgTable('users', { id: integer(), @@ -5418,5 +5437,144 @@ export function tests() { { id: 4, id1: 5, name: 'Bob' }, ]); }); + + test('insert as cte', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); + + const sq1 = db.$with('sq').as( + db.insert(users).values({ name: 'John' }).returning(), + ); + const result1 = await db.with(sq1).select().from(sq1); + const result2 = await db.with(sq1).select({ id: sq1.id }).from(sq1); + + const sq2 = db.$with('sq').as( + db.insert(users).values({ name: 'Jane' }).returning({ id: users.id, name: users.name }), + ); + const result3 = await db.with(sq2).select().from(sq2); + const result4 = await db.with(sq2).select({ name: sq2.name }).from(sq2); + + expect(result1).toEqual([{ id: 1, name: 'John' }]); + expect(result2).toEqual([{ id: 2 }]); + expect(result3).toEqual([{ id: 3, name: 'Jane' }]); + expect(result4).toEqual([{ name: 'Jane' }]); + }); + + test('update as cte', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: 
text('name').notNull(), + age: integer('age').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute( + sql`create table ${users} (id serial not null primary key, name text not null, age integer not null)`, + ); + + await db.insert(users).values([ + { name: 'John', age: 30 }, + { name: 'Jane', age: 30 }, + ]); + + const sq1 = db.$with('sq').as( + db.update(users).set({ age: 25 }).where(eq(users.name, 'John')).returning(), + ); + const result1 = await db.with(sq1).select().from(sq1); + await db.update(users).set({ age: 30 }); + const result2 = await db.with(sq1).select({ age: sq1.age }).from(sq1); + + const sq2 = db.$with('sq').as( + db.update(users).set({ age: 20 }).where(eq(users.name, 'Jane')).returning({ name: users.name, age: users.age }), + ); + const result3 = await db.with(sq2).select().from(sq2); + await db.update(users).set({ age: 30 }); + const result4 = await db.with(sq2).select({ age: sq2.age }).from(sq2); + + expect(result1).toEqual([{ id: 1, name: 'John', age: 25 }]); + expect(result2).toEqual([{ age: 25 }]); + expect(result3).toEqual([{ name: 'Jane', age: 20 }]); + expect(result4).toEqual([{ age: 20 }]); + }); + + test('delete as cte', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); + + await db.insert(users).values([ + { name: 'John' }, + { name: 'Jane' }, + ]); + + const sq1 = db.$with('sq').as( + db.delete(users).where(eq(users.name, 'John')).returning(), + ); + const result1 = await db.with(sq1).select().from(sq1); + await db.insert(users).values({ name: 'John' }); + const result2 = await db.with(sq1).select({ name: sq1.name }).from(sq1); + + const sq2 = db.$with('sq').as( + db.delete(users).where(eq(users.name, 'Jane')).returning({ id: users.id, name: 
users.name }), + ); + const result3 = await db.with(sq2).select().from(sq2); + await db.insert(users).values({ name: 'Jane' }); + const result4 = await db.with(sq2).select({ name: sq2.name }).from(sq2); + + expect(result1).toEqual([{ id: 1, name: 'John' }]); + expect(result2).toEqual([{ name: 'John' }]); + expect(result3).toEqual([{ id: 2, name: 'Jane' }]); + expect(result4).toEqual([{ name: 'Jane' }]); + }); + + test('sql operator as cte', async (ctx) => { + const { db } = ctx.pg; + + const users = pgTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); + await db.insert(users).values([ + { name: 'John' }, + { name: 'Jane' }, + ]); + + const sq1 = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, + }).as(sql`select * from ${users} where ${users.name} = 'John'`); + const result1 = await db.with(sq1).select().from(sq1); + + const sq2 = db.$with('sq', { + userId: users.id, + data: { + name: users.name, + }, + }).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`); + const result2 = await db.with(sq2).select().from(sq1); + + expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]); + expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]); + }); }); } diff --git a/integration-tests/tests/replicas/mysql.test.ts b/integration-tests/tests/replicas/mysql.test.ts index 673a8da65..f0202a781 100644 --- a/integration-tests/tests/replicas/mysql.test.ts +++ b/integration-tests/tests/replicas/mysql.test.ts @@ -803,3 +803,111 @@ describe('[findMany] read replicas mysql', () => { expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`'); }); }); + +describe('[$count] read replicas postgres', () => { + it('primary $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + 
const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + const spyRead2 = vi.spyOn(read2, '$count'); + + db.$primary.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); + + it('random replica $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + const spyRead2 = vi.spyOn(read2, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + db.$count(users); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(1); + }); + + it('single read replica $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.$count(users); + expect(spyRead1).toHaveBeenCalledTimes(2); + }); + + it('single read replica $count + primary $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + 
expect(spyRead1).toHaveBeenCalledTimes(1); + + db.$primary.$count(users); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + }); + + it('always first read $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + const spyRead2 = vi.spyOn(read2, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + db.$count(users); + + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); diff --git a/integration-tests/tests/replicas/postgres.test.ts b/integration-tests/tests/replicas/postgres.test.ts index 0860aac6a..ab8dda4f5 100644 --- a/integration-tests/tests/replicas/postgres.test.ts +++ b/integration-tests/tests/replicas/postgres.test.ts @@ -825,3 +825,111 @@ describe('[findMany] read replicas postgres', () => { ); }); }); + +describe('[$count] read replicas postgres', () => { + it('primary $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2]); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + const spyRead2 = vi.spyOn(read2, '$count'); + + db.$primary.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(0); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); + + it('random replica $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2); + + 
const db = withReplicas(primaryDb, [read1, read2], () => { + return randomMockReplica(); + }); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + const spyRead2 = vi.spyOn(read2, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + db.select().from(users); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); + + it('single read replica $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.$count(users); + expect(spyRead1).toHaveBeenCalledTimes(2); + }); + + it('single read replica $count + primary $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.$primary.$count(users); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + }); + + it('always first read $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + const spyRead2 = vi.spyOn(read2, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + 
expect(spyRead1).toHaveBeenCalledTimes(1);
+		expect(spyRead2).toHaveBeenCalledTimes(0);
+
+		db.$count(users);
+
+		expect(spyRead1).toHaveBeenCalledTimes(2);
+		expect(spyRead2).toHaveBeenCalledTimes(0);
+	});
+});
diff --git a/integration-tests/tests/replicas/singlestore.test.ts b/integration-tests/tests/replicas/singlestore.test.ts
index 8ddad5b04..f0f0ed14f 100644
--- a/integration-tests/tests/replicas/singlestore.test.ts
+++ b/integration-tests/tests/replicas/singlestore.test.ts
@@ -812,3 +812,111 @@ describe('[transaction] replicas singlestore', () => {
 // // 		expect(query2.toSQL().sql).toEqual('select `id`, `name`, `verified` from `users` `usersTable`');
 // // 	});
 // });
+
+describe('[$count] read replicas singlestore', () => {
+	it('primary $count', () => {
+		const primaryDb = drizzle.mock();
+		const read1 = drizzle.mock();
+		const read2 = drizzle.mock();
+
+		const db = withReplicas(primaryDb, [read1, read2]);
+
+		const spyPrimary = vi.spyOn(primaryDb, '$count');
+		const spyRead1 = vi.spyOn(read1, '$count');
+		const spyRead2 = vi.spyOn(read2, '$count');
+
+		db.$primary.$count(users);
+
+		expect(spyPrimary).toHaveBeenCalledTimes(1);
+		expect(spyRead1).toHaveBeenCalledTimes(0);
+		expect(spyRead2).toHaveBeenCalledTimes(0);
+	});
+
+	it('random replica $count', () => {
+		const primaryDb = drizzle.mock();
+		const read1 = drizzle.mock();
+		const read2 = drizzle.mock();
+
+		const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2);
+
+		const db = withReplicas(primaryDb, [read1, read2], () => {
+			return randomMockReplica();
+		});
+
+		const spyPrimary = vi.spyOn(primaryDb, '$count');
+		const spyRead1 = vi.spyOn(read1, '$count');
+		const spyRead2 = vi.spyOn(read2, '$count');
+
+		db.$count(users);
+
+		expect(spyPrimary).toHaveBeenCalledTimes(0);
+		expect(spyRead1).toHaveBeenCalledTimes(1);
+		expect(spyRead2).toHaveBeenCalledTimes(0);
+
+		db.select().from(users);
+		expect(spyRead1).toHaveBeenCalledTimes(1);
+		
expect(spyRead2).toHaveBeenCalledTimes(0); + }); + + it('single read replica $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.$count(users); + expect(spyRead1).toHaveBeenCalledTimes(2); + }); + + it('single read replica $count + primary $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.$primary.$count(users); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + }); + + it('always first read $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + const spyRead2 = vi.spyOn(read2, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + db.$count(users); + + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); diff --git a/integration-tests/tests/replicas/sqlite.test.ts b/integration-tests/tests/replicas/sqlite.test.ts index aab55bbfd..af7ef951c 100644 --- a/integration-tests/tests/replicas/sqlite.test.ts +++ b/integration-tests/tests/replicas/sqlite.test.ts @@ -799,3 +799,111 @@ describe('[findMany] read 
replicas sqlite', () => {
 		expect(query2.toSQL().sql).toEqual('select "id", "name", "verified" from "users" "usersTable"');
 	});
 });
+
+describe('[$count] read replicas sqlite', () => {
+	it('primary $count', () => {
+		const primaryDb = drizzle.mock();
+		const read1 = drizzle.mock();
+		const read2 = drizzle.mock();
+
+		const db = withReplicas(primaryDb, [read1, read2]);
+
+		const spyPrimary = vi.spyOn(primaryDb, '$count');
+		const spyRead1 = vi.spyOn(read1, '$count');
+		const spyRead2 = vi.spyOn(read2, '$count');
+
+		db.$primary.$count(users);
+
+		expect(spyPrimary).toHaveBeenCalledTimes(1);
+		expect(spyRead1).toHaveBeenCalledTimes(0);
+		expect(spyRead2).toHaveBeenCalledTimes(0);
+	});
+
+	it('random replica $count', () => {
+		const primaryDb = drizzle.mock();
+		const read1 = drizzle.mock();
+		const read2 = drizzle.mock();
+
+		const randomMockReplica = vi.fn().mockReturnValueOnce(read1).mockReturnValueOnce(read2);
+
+		const db = withReplicas(primaryDb, [read1, read2], () => {
+			return randomMockReplica();
+		});
+
+		const spyPrimary = vi.spyOn(primaryDb, '$count');
+		const spyRead1 = vi.spyOn(read1, '$count');
+		const spyRead2 = vi.spyOn(read2, '$count');
+
+		db.$count(users);
+
+		expect(spyPrimary).toHaveBeenCalledTimes(0);
+		expect(spyRead1).toHaveBeenCalledTimes(1);
+		expect(spyRead2).toHaveBeenCalledTimes(0);
+
+		db.select().from(users);
+		expect(spyRead1).toHaveBeenCalledTimes(1);
+		expect(spyRead2).toHaveBeenCalledTimes(0);
+	});
+
+	it('single read replica $count', () => {
+		const primaryDb = drizzle.mock();
+		const read1 = drizzle.mock();
+
+		const db = withReplicas(primaryDb, [read1]);
+
+		const spyPrimary = vi.spyOn(primaryDb, '$count');
+		const spyRead1 = vi.spyOn(read1, '$count');
+
+		db.$count(users);
+
+		expect(spyPrimary).toHaveBeenCalledTimes(0);
+		expect(spyRead1).toHaveBeenCalledTimes(1);
+
+		db.$count(users);
+		expect(spyRead1).toHaveBeenCalledTimes(2);
+	});
+
+	it('single read replica $count + primary $count', () => {
+		const primaryDb = 
drizzle.mock(); + const read1 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1]); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + + db.$primary.$count(users); + expect(spyPrimary).toHaveBeenCalledTimes(1); + expect(spyRead1).toHaveBeenCalledTimes(1); + }); + + it('always first read $count', () => { + const primaryDb = drizzle.mock(); + const read1 = drizzle.mock(); + const read2 = drizzle.mock(); + + const db = withReplicas(primaryDb, [read1, read2], (replicas) => { + return replicas[0]!; + }); + + const spyPrimary = vi.spyOn(primaryDb, '$count'); + const spyRead1 = vi.spyOn(read1, '$count'); + const spyRead2 = vi.spyOn(read2, '$count'); + + db.$count(users); + + expect(spyPrimary).toHaveBeenCalledTimes(0); + expect(spyRead1).toHaveBeenCalledTimes(1); + expect(spyRead2).toHaveBeenCalledTimes(0); + + db.$count(users); + + expect(spyRead1).toHaveBeenCalledTimes(2); + expect(spyRead2).toHaveBeenCalledTimes(0); + }); +}); diff --git a/integration-tests/tests/singlestore/singlestore-common.ts b/integration-tests/tests/singlestore/singlestore-common.ts index b8fe39608..395260e84 100644 --- a/integration-tests/tests/singlestore/singlestore-common.ts +++ b/integration-tests/tests/singlestore/singlestore-common.ts @@ -3520,5 +3520,40 @@ export function tests(driver?: string) { expect(users.length).toBeGreaterThan(0); }); + + test('sql operator as cte', async (ctx) => { + const { db } = ctx.singlestore; + + const users = singlestoreTable('users', { + id: serial('id').primaryKey(), + name: text('name').notNull(), + }); + + await db.execute(sql`drop table if exists ${users}`); + await db.execute(sql`create table ${users} (id serial not null primary key, name text not null)`); + await db.insert(users).values([ + { name: 'John' }, + { name: 'Jane' }, + ]); + + const sq1 = db.$with('sq', { + 
userId: users.id,
+			data: {
+				name: users.name,
+			},
+		}).as(sql`select * from ${users} where ${users.name} = 'John'`);
+		const result1 = await db.with(sq1).select().from(sq1);
+
+		const sq2 = db.$with('sq', {
+			userId: users.id,
+			data: {
+				name: users.name,
+			},
+		}).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`);
+		const result2 = await db.with(sq2).select().from(sq2);
+
+		expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]);
+		expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]);
+	});
 	});
 }
diff --git a/integration-tests/tests/sqlite/sqlite-common.ts b/integration-tests/tests/sqlite/sqlite-common.ts
index c6d67cee3..2419b1cc2 100644
--- a/integration-tests/tests/sqlite/sqlite-common.ts
+++ b/integration-tests/tests/sqlite/sqlite-common.ts
@@ -3433,4 +3433,39 @@ export function tests() {
 
 		await db.run(sql`drop table users`);
 	});
+
+	test('sql operator as cte', async (ctx) => {
+		const { db } = ctx.sqlite;
+
+		const users = sqliteTable('users', {
+			id: integer('id').primaryKey({ autoIncrement: true }),
+			name: text('name').notNull(),
+		});
+
+		await db.run(sql`drop table if exists ${users}`);
+		await db.run(sql`create table ${users} (id integer not null primary key autoincrement, name text not null)`);
+		await db.insert(users).values([
+			{ name: 'John' },
+			{ name: 'Jane' },
+		]);
+
+		const sq1 = db.$with('sq', {
+			userId: users.id,
+			data: {
+				name: users.name,
+			},
+		}).as(sql`select * from ${users} where ${users.name} = 'John'`);
+		const result1 = await db.with(sq1).select().from(sq1);
+
+		const sq2 = db.$with('sq', {
+			userId: users.id,
+			data: {
+				name: users.name,
+			},
+		}).as(() => sql`select * from ${users} where ${users.name} = 'Jane'`);
+		const result2 = await db.with(sq2).select().from(sq2);
+
+		expect(result1).toEqual([{ userId: 1, data: { name: 'John' } }]);
+		expect(result2).toEqual([{ userId: 2, data: { name: 'Jane' } }]);
+	});
 }
diff --git a/integration-tests/vitest.config.ts 
b/integration-tests/vitest.config.ts index e8a8be220..878c78510 100644 --- a/integration-tests/vitest.config.ts +++ b/integration-tests/vitest.config.ts @@ -61,9 +61,6 @@ export default defineConfig({ 'tests/mysql/tidb-serverless.test.ts', // waiting for json_array from singlestore team 'tests/relational/singlestore.test.ts', - // get back when planetscale will open free tier for our CI/CD - 'tests/mysql/mysql-planetscale.test.ts', - 'tests/relational/mysql.planetscale.test.ts', 'js-tests/driver-init/module/planetscale.test.mjs', 'js-tests/driver-init/module/planetscale.test.cjs', 'js-tests/driver-init/commonjs/planetscale.test.cjs', diff --git a/package.json b/package.json index 6b4715b0e..2818e66f9 100755 --- a/package.json +++ b/package.json @@ -18,7 +18,7 @@ "@typescript-eslint/eslint-plugin": "^6.7.3", "@typescript-eslint/experimental-utils": "^5.62.0", "@typescript-eslint/parser": "^6.7.3", - "bun-types": "^1.0.3", + "bun-types": "^1.2.0", "concurrently": "^8.2.1", "dprint": "^0.46.2", "drizzle-kit": "^0.19.13", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index a267d4379..430c60b26 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -24,8 +24,8 @@ importers: specifier: ^6.7.3 version: 6.7.3(eslint@8.50.0)(typescript@5.6.3) bun-types: - specifier: ^1.0.3 - version: 1.0.3 + specifier: ^1.2.0 + version: 1.2.0 concurrently: specifier: ^8.2.1 version: 8.2.1 @@ -40,7 +40,7 @@ importers: version: link:drizzle-orm/dist drizzle-orm-old: specifier: npm:drizzle-orm@^0.27.2 - version: 
drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20241112.0)(@libsql/client@0.10.0)(@neondatabase/serverless@0.10.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.12)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@11.5.0)(bun-types@1.0.3)(knex@2.5.1(better-sqlite3@11.5.0)(mysql2@3.11.0)(pg@8.13.1)(sqlite3@5.1.7))(kysely@0.25.0)(mysql2@3.11.0)(pg@8.13.1)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7) + version: drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20241112.0)(@libsql/client@0.10.0)(@neondatabase/serverless@0.10.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.12)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@11.5.0)(bun-types@1.2.0)(knex@2.5.1(better-sqlite3@11.5.0)(mysql2@3.11.0)(pg@8.13.1)(sqlite3@5.1.7))(kysely@0.25.0)(mysql2@3.11.0)(pg@8.13.1)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7) eslint: specifier: ^8.50.0 version: 8.50.0 @@ -364,8 +364,8 @@ importers: specifier: ^8.4.0 version: 8.7.0 bun-types: - specifier: ^0.6.6 - version: 0.6.14 + specifier: ^1.2.0 + version: 1.2.0 cpy: specifier: ^10.1.0 version: 10.1.0 @@ -4992,8 +4992,8 @@ packages: bun-types@0.6.14: resolution: {integrity: sha512-sRdvu+t59+H/TVOe7FSGFWYITbqkhiCx9NxVUHt2+JOXM9gUOe5uMPvVvcr/hGngnh+/yb5a7uPE4JaS6uxujg==} - bun-types@1.0.3: - resolution: {integrity: sha512-XlyKVdYCHa7K5PHYGcwOVOrGE/bMnLS51y7zFA3ZAAXyiQ6dTaNXNCWTTufgII/6ruN770uhAXphQmzvU/r2fQ==} + bun-types@1.2.0: + resolution: {integrity: sha512-KEaJxyZfbV/c4eyG0vyehDpYmBGreNiQbZIqvVHJwZ4BmeuWlNZ7EAzMN2Zcd7ailmS/tGVW0BgYbGf+lGEpWw==} bundle-require@4.0.2: resolution: {integrity: sha512-jwzPOChofl67PSTW2SGubV9HBQAhhR2i6nskiOThauo9dzwDUgOWQScFVaJkjEfYX+UXiD+LEx8EblQMc2wIag==} @@ -9428,12 +9428,15 @@ packages: sudo-prompt@8.2.5: resolution: {integrity: 
sha512-rlBo3HU/1zAJUrkY6jNxDOC9eVYliG6nS4JA8u8KAshITd07tafMc/Br7xQwCSseXwJ2iCcHCE8SNWX3q8Z+kw==} + deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info. sudo-prompt@9.1.1: resolution: {integrity: sha512-es33J1g2HjMpyAhz8lOR+ICmXXAqTuKbuXuUWLhOLew20oN9oUCgCJx615U/v7aioZg7IX5lIh9x34vwneu4pA==} + deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info. sudo-prompt@9.2.1: resolution: {integrity: sha512-Mu7R0g4ig9TUuGSxJavny5Rv0egCEtpZRNMrZaYS1vxkiIxGiGUwoezU3LazIQ+KE04hTrTfNPgxU5gzi7F5Pw==} + deprecated: Package no longer supported. Contact Support at https://www.npmjs.com/support for more info. superjson@2.2.1: resolution: {integrity: sha512-8iGv75BYOa0xRJHK5vRLEjE2H/i4lulTjzpUXic3Eg8akftYjkmQDa8JARQ42rlczXyFR3IeRoeFCc7RxHsYZA==} @@ -10674,7 +10677,7 @@ snapshots: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 '@aws-sdk/client-sso-oidc': 3.583.0 - '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/client-sts': 3.583.0 '@aws-sdk/core': 3.582.0 '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0) '@aws-sdk/middleware-host-header': 3.577.0 @@ -10764,7 +10767,7 @@ snapshots: dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/client-sts': 3.583.0 '@aws-sdk/core': 3.582.0 '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0) '@aws-sdk/middleware-host-header': 3.577.0 @@ -10805,6 +10808,52 @@ snapshots: transitivePeerDependencies: - aws-crt + '@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)': + dependencies: + '@aws-crypto/sha256-browser': 3.0.0 + '@aws-crypto/sha256-js': 3.0.0 + '@aws-sdk/client-sts': 3.583.0 + '@aws-sdk/core': 3.582.0 + '@aws-sdk/credential-provider-node': 
3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/middleware-host-header': 3.577.0 + '@aws-sdk/middleware-logger': 3.577.0 + '@aws-sdk/middleware-recursion-detection': 3.577.0 + '@aws-sdk/middleware-user-agent': 3.583.0 + '@aws-sdk/region-config-resolver': 3.577.0 + '@aws-sdk/types': 3.577.0 + '@aws-sdk/util-endpoints': 3.583.0 + '@aws-sdk/util-user-agent-browser': 3.577.0 + '@aws-sdk/util-user-agent-node': 3.577.0 + '@smithy/config-resolver': 3.0.0 + '@smithy/core': 2.0.1 + '@smithy/fetch-http-handler': 3.0.1 + '@smithy/hash-node': 3.0.0 + '@smithy/invalid-dependency': 3.0.0 + '@smithy/middleware-content-length': 3.0.0 + '@smithy/middleware-endpoint': 3.0.0 + '@smithy/middleware-retry': 3.0.1 + '@smithy/middleware-serde': 3.0.0 + '@smithy/middleware-stack': 3.0.0 + '@smithy/node-config-provider': 3.0.0 + '@smithy/node-http-handler': 3.0.0 + '@smithy/protocol-http': 4.0.0 + '@smithy/smithy-client': 3.0.1 + '@smithy/types': 3.0.0 + '@smithy/url-parser': 3.0.0 + '@smithy/util-base64': 3.0.0 + '@smithy/util-body-length-browser': 3.0.0 + '@smithy/util-body-length-node': 3.0.0 + '@smithy/util-defaults-mode-browser': 3.0.1 + '@smithy/util-defaults-mode-node': 3.0.1 + '@smithy/util-endpoints': 2.0.0 + '@smithy/util-middleware': 3.0.0 + '@smithy/util-retry': 3.0.0 + '@smithy/util-utf8': 3.0.0 + tslib: 2.8.1 + transitivePeerDependencies: + - '@aws-sdk/client-sts' + - aws-crt + '@aws-sdk/client-sso@3.478.0': dependencies: '@aws-crypto/sha256-browser': 3.0.0 @@ -11069,13 +11118,13 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt - '@aws-sdk/client-sts@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)': + '@aws-sdk/client-sts@3.583.0': dependencies: '@aws-crypto/sha256-browser': 3.0.0 '@aws-crypto/sha256-js': 3.0.0 - '@aws-sdk/client-sso-oidc': 3.583.0 + '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) '@aws-sdk/core': 3.582.0 - '@aws-sdk/credential-provider-node': 
3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-node': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) '@aws-sdk/middleware-host-header': 3.577.0 '@aws-sdk/middleware-logger': 3.577.0 '@aws-sdk/middleware-recursion-detection': 3.577.0 @@ -11112,7 +11161,6 @@ snapshots: '@smithy/util-utf8': 3.0.0 tslib: 2.8.1 transitivePeerDependencies: - - '@aws-sdk/client-sso-oidc' - aws-crt '@aws-sdk/core@3.477.0': @@ -11265,9 +11313,26 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt + '@aws-sdk/credential-provider-ini@3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0)': + dependencies: + '@aws-sdk/client-sts': 3.583.0 + '@aws-sdk/credential-provider-env': 3.577.0 + '@aws-sdk/credential-provider-process': 3.577.0 + '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) + '@aws-sdk/credential-provider-web-identity': 3.577.0(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/types': 3.577.0 + '@smithy/credential-provider-imds': 3.0.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.8.1 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + '@aws-sdk/credential-provider-ini@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0)': dependencies: - '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/client-sts': 3.583.0 '@aws-sdk/credential-provider-env': 3.577.0 '@aws-sdk/credential-provider-process': 3.577.0 '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) @@ -11355,6 +11420,25 @@ snapshots: - '@aws-sdk/client-sts' - aws-crt + '@aws-sdk/credential-provider-node@3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0)': + dependencies: + '@aws-sdk/credential-provider-env': 3.577.0 + 
'@aws-sdk/credential-provider-http': 3.582.0 + '@aws-sdk/credential-provider-ini': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/credential-provider-process': 3.577.0 + '@aws-sdk/credential-provider-sso': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) + '@aws-sdk/credential-provider-web-identity': 3.577.0(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/types': 3.577.0 + '@smithy/credential-provider-imds': 3.0.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.8.1 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - '@aws-sdk/client-sts' + - aws-crt + '@aws-sdk/credential-provider-node@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)(@aws-sdk/client-sts@3.583.0)': dependencies: '@aws-sdk/credential-provider-env': 3.577.0 @@ -11436,6 +11520,19 @@ snapshots: - '@aws-sdk/client-sso-oidc' - aws-crt + '@aws-sdk/credential-provider-sso@3.583.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))': + dependencies: + '@aws-sdk/client-sso': 3.583.0 + '@aws-sdk/token-providers': 3.577.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0)) + '@aws-sdk/types': 3.577.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.8.1 + transitivePeerDependencies: + - '@aws-sdk/client-sso-oidc' + - aws-crt + '@aws-sdk/credential-provider-sso@3.583.0(@aws-sdk/client-sso-oidc@3.583.0)': dependencies: '@aws-sdk/client-sso': 3.583.0 @@ -11474,7 +11571,7 @@ snapshots: '@aws-sdk/credential-provider-web-identity@3.577.0(@aws-sdk/client-sts@3.583.0)': dependencies: - '@aws-sdk/client-sts': 3.583.0(@aws-sdk/client-sso-oidc@3.583.0) + '@aws-sdk/client-sts': 3.583.0 '@aws-sdk/types': 3.577.0 '@smithy/property-provider': 3.0.0 '@smithy/types': 3.0.0 @@ -11682,6 +11779,15 @@ snapshots: '@smithy/types': 2.12.0 tslib: 2.8.1 + 
'@aws-sdk/token-providers@3.577.0(@aws-sdk/client-sso-oidc@3.583.0(@aws-sdk/client-sts@3.583.0))': + dependencies: + '@aws-sdk/client-sso-oidc': 3.583.0(@aws-sdk/client-sts@3.583.0) + '@aws-sdk/types': 3.577.0 + '@smithy/property-provider': 3.0.0 + '@smithy/shared-ini-file-loader': 3.0.0 + '@smithy/types': 3.0.0 + tslib: 2.8.1 + '@aws-sdk/token-providers@3.577.0(@aws-sdk/client-sso-oidc@3.583.0)': dependencies: '@aws-sdk/client-sso-oidc': 3.583.0 @@ -16176,7 +16282,10 @@ snapshots: bun-types@0.6.14: {} - bun-types@1.0.3: {} + bun-types@1.2.0: + dependencies: + '@types/node': 20.12.12 + '@types/ws': 8.5.11 bundle-require@4.0.2(esbuild@0.18.20): dependencies: @@ -16928,7 +17037,7 @@ snapshots: transitivePeerDependencies: - supports-color - drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20241112.0)(@libsql/client@0.10.0)(@neondatabase/serverless@0.10.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.12)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@11.5.0)(bun-types@1.0.3)(knex@2.5.1(better-sqlite3@11.5.0)(mysql2@3.11.0)(pg@8.13.1)(sqlite3@5.1.7))(kysely@0.25.0)(mysql2@3.11.0)(pg@8.13.1)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7): + drizzle-orm@0.27.2(@aws-sdk/client-rds-data@3.583.0)(@cloudflare/workers-types@4.20241112.0)(@libsql/client@0.10.0)(@neondatabase/serverless@0.10.3)(@opentelemetry/api@1.8.0)(@planetscale/database@1.18.0)(@types/better-sqlite3@7.6.12)(@types/pg@8.11.6)(@types/sql.js@1.4.9)(@vercel/postgres@0.8.0)(better-sqlite3@11.5.0)(bun-types@1.2.0)(knex@2.5.1(better-sqlite3@11.5.0)(mysql2@3.11.0)(pg@8.13.1)(sqlite3@5.1.7))(kysely@0.25.0)(mysql2@3.11.0)(pg@8.13.1)(postgres@3.4.4)(sql.js@1.10.3)(sqlite3@5.1.7): optionalDependencies: '@aws-sdk/client-rds-data': 3.583.0 '@cloudflare/workers-types': 4.20241112.0 @@ -16941,7 +17050,7 @@ snapshots: '@types/sql.js': 1.4.9 '@vercel/postgres': 0.8.0 better-sqlite3: 11.5.0 - bun-types: 1.0.3 + bun-types: 1.2.0 
knex: 2.5.1(better-sqlite3@11.5.0)(mysql2@3.11.0)(pg@8.13.1)(sqlite3@5.1.7) kysely: 0.25.0 mysql2: 3.11.0