Migration cleanup & move to Typescript

Chris Anderson 2023-03-26 10:19:44 -05:00
parent e683d8563f
commit d80db07e4e
12 changed files with 39 additions and 69 deletions

View file

@@ -0,0 +1,9 @@
+import type { Knex } from 'knex'
+
+exports.up = async function(knex: Knex) {
+
+}
+
+exports.down = async function(knex: Knex) {
+
+}
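
For reference, a migration authored against this new TypeScript stub might look like the sketch below; the `example_items` table and its columns are made up for illustration and are not part of this commit.

```ts
import type { Knex } from 'knex'

exports.up = async function(knex: Knex) {
    await knex.schema.createTable('example_items', function(table) {
        table.increments('id')
        table.string('name')
        table.timestamps(true, true)
    })
}

exports.down = async function(knex: Knex) {
    await knex.schema.dropTable('example_items')
}
```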

View file

@@ -1,7 +1,3 @@
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.up = async function(knex) {
     await knex.schema.renameTable('refresh_tokens', 'access_tokens')
     await knex.schema.alterTable('access_tokens', function(table) {
@@ -10,10 +6,6 @@ exports.up = async function(knex) {
     })
 }
 
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.down = function(knex) {
     return knex.schema.dropTable('revoked_access_tokens')
 }

View file

@@ -1,7 +1,3 @@
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.up = function(knex) {
     return knex.schema
         .createTable('tags', function(table) {
@@ -33,10 +29,6 @@ exports.up = function(knex) {
         })
 }
 
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.down = function(knex) {
     return knex.schema
         .dropTable('entity_tags')

View file

@@ -1,7 +1,3 @@
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.up = async function(knex) {
     await knex.schema.alterTable('journey_steps', function(table) {
         table.uuid('uuid').defaultTo()
@@ -36,10 +32,6 @@ exports.up = async function(knex) {
     })
 }
 
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.down = function(knex) {
     return knex.schema
         .alterTable('journey_steps', function(table) {

View file

@@ -1,7 +1,3 @@
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.up = function(knex) {
     return knex.schema
         .alterTable('journey_steps', function(table) {
@@ -10,10 +6,6 @@ exports.up = function(knex) {
         })
 }
 
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.down = function(knex) {
     return knex.schema
         .alterTable('journey_steps', function(table) {

View file

@@ -1,7 +1,3 @@
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.up = function(knex) {
     return knex.schema.alterTable('journey_steps', function(table) {
         table.renameColumn('uuid', 'external_id')
@@ -10,10 +6,6 @@ exports.up = function(knex) {
     }))
 }
 
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.down = function(knex) {
     return knex.schema.alterTable('journey_steps', function(table) {
         table.renameColumn('external_id', 'uuid')

View file

@@ -1,7 +1,3 @@
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.up = function(knex) {
     return knex.schema
         .alterTable('journey_step_child', function(table) {
@@ -11,10 +7,6 @@ exports.up = function(knex) {
         })
 }
 
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.down = function(knex) {
     return knex.schema
         .alterTable('journey_step_child', function(table) {

View file

@@ -1,7 +1,3 @@
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.up = function(knex) {
     return knex.schema
         .alterTable('project_admins', function(table) {
@@ -12,10 +8,6 @@ exports.up = function(knex) {
         })
 }
 
-/**
- * @param { import("knex").Knex } knex
- * @returns { Promise<void> }
- */
 exports.down = function(knex) {
     return knex.schema
         .alterTable('project_admins', function(table) {

View file

@@ -1,7 +1,7 @@
 import knex from 'knex'
 
 const connection = knex({
-    client: 'mysql2',
+    client: process.env.DB_CLIENT ?? 'mysql2',
     connection: {
         host: process.env.DB_HOST,
         user: process.env.DB_USERNAME,
@@ -14,6 +14,8 @@ const connection = knex({
 const migrationConfig = {
     directory: './db/migrations',
     tableName: 'migrations',
+    stub: './db/migration.stub',
+    extension: 'ts',
 }
 
 const name = process.argv[2]
@@ -21,6 +23,7 @@
     console.log('migration: please include a name for migration')
     process.exit(9)
 }
+
 connection.migrate.make(name, migrationConfig)
     .then(() => {
         console.log('migration create finished')
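
The new `stub` and `extension` options tell Knex to generate future migration files from `./db/migration.stub` with a `.ts` extension instead of its default JavaScript template. A self-contained sketch of the same `migrate.make` call (remaining credentials elided; the migration name and logging are illustrative, not part of this commit):

```ts
import knex from 'knex'

const db = knex({
    client: process.env.DB_CLIENT ?? 'mysql2',
    connection: {
        host: process.env.DB_HOST,
        user: process.env.DB_USERNAME,
        // remaining connection settings omitted; see the full script above
    },
})

// Creates e.g. ./db/migrations/<timestamp>_add_tags.ts from the stub
db.migrate.make('add_tags', {
    directory: './db/migrations',
    tableName: 'migrations',
    stub: './db/migration.stub',
    extension: 'ts',
})
    .then(() => console.log('migration create finished'))
    .finally(() => db.destroy())
```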

View file

@@ -1,4 +1,6 @@
 import knex, { Knex as Database } from 'knex'
+import { removeKey } from '../utilities'
+import { logger } from './logger'
 
 export { Database }
 
@@ -16,9 +18,9 @@ export interface DatabaseConfig {
 }
 
 const connect = (config: DatabaseConfig, withDB = true) => {
-    const connection = config.connection
+    let connection = config.connection
     if (!withDB) {
-        delete connection.database
+        connection = removeKey('database', connection)
     }
     return knex({
         client: config.client,
@@ -35,11 +37,13 @@ const connect = (config: DatabaseConfig, withDB = true) => {
     })
 }
 
-const migrate = async (db: Database, fresh = false) => {
-    if (fresh) await db.raw('CREATE DATABASE parcelvoy')
+const migrate = async (config: DatabaseConfig, db: Database, fresh = false) => {
+    console.warn(fresh, config)
+    if (fresh) await db.raw(`CREATE DATABASE ${config.connection.database}`)
     return db.migrate.latest({
         directory: './db/migrations',
         tableName: 'migrations',
+        loadExtensions: ['.js', '.ts'],
     })
 }
 
@@ -48,15 +52,19 @@ export default async (config: DatabaseConfig) => {
 
     // Attempt to connect & migrate
     try {
         const db = connect(config)
-        await migrate(db)
+        await migrate(config, db)
         return db
-    } catch (error) {
-        console.error(error)
-        // On error, try to create the database and try again
-        const db = connect(config, false)
-        await migrate(db, true)
-        return connect(config)
+    } catch (error: any) {
+        logger.error(error)
+        if (error?.errno === 1049) {
+            // On error, try to create the database and try again
+            const db = connect(config, false)
+            await migrate(config, db, true)
+            return connect(config)
+        } else {
+            throw error
+        }
     }
 }
 
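
For context, MySQL errno 1049 is ER_BAD_DB_ERROR ("Unknown database"), so the new catch branch only attempts the bootstrap path (connect without a database, create it from `config.connection.database`, migrate, reconnect) when the configured database does not exist yet; any other connection or migration error is now logged and re-thrown instead of being silently retried.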

View file

@@ -100,7 +100,11 @@ class SchedulerLock {
         // Randomly run this job to reduce chance of deadlocks
         if (randomInt() < 10) {
             await sleep(randomInt(5, 20))
-            await JobLock.delete(qb => qb.where('expiration', '<=', new Date()).orderBy('id'))
+            const locks = await JobLock.all(
+                qb => qb.where('expiration', '<=', new Date())
+                    .orderBy('id'),
+            )
+            await JobLock.delete(qb => qb.whereIn('id', locks.map(item => item.id)))
         }
 
         return acquired
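
Presumably the intent here: the previous single range delete on `expiration` can contend with concurrent schedulers via index/gap locks, while reading the expired ids first and then deleting by primary key keeps the delete narrowly scoped. Another worker may touch rows between the read and the delete, which is acceptable since only already-expired locks are targeted.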

View file

@@ -74,6 +74,8 @@ export const combineURLs = (parts: string[], sep = '/'): string => {
 
 export const sleep = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms))
 
+export const removeKey = <T, O extends keyof T>(propKey: O, { [propKey]: propValue, ...rest }: T): Omit<T, O> => rest
+
 export function extractQueryParams<T extends Record<string, any>>(search: URLSearchParams | Record<string, undefined | string | string[]>, schema: JSONSchemaType<T>) {
     return validate(schema, Object.entries<JSONSchemaType<any>>(schema.properties).reduce((a, [name, def]) => {
         let values: string[]
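
A quick illustration of the new `removeKey` helper (the object values are made up): it returns a copy of the object with the given key dropped and leaves the original untouched, which is why `connect` above now reassigns `connection` instead of calling `delete connection.database` on the shared config object.

```ts
const connection = { host: 'localhost', user: 'root', database: 'parcelvoy' }

const withoutDb = removeKey('database', connection)
// withoutDb -> { host: 'localhost', user: 'root' }
// connection keeps its database property
```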