DEV: Add ability to generate enums for IntermediateDB
This also adds the ability to generate YARD documentation for IntermediateDB models.
parent 480e05e67a
commit 5b3dbb2c6f
11 changed files with 433 additions and 65 deletions
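For orientation, a file generated by the new enum writer looks roughly like the sketch below; the namespace, enum name, and values are made up for illustration, and the real header comment comes from the configured file header (see Schema::EnumWriter further down):

# frozen_string_literal: true

# (configured file header comment goes here)

module Migrations::Database::IntermediateDB
  module NotificationLevel
    extend ::Migrations::Enum

    MUTED = 0
    REGULAR = 1
    TRACKING = 2
  end
end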
@@ -4,6 +4,8 @@ output:
  schema_file: "db/intermediate_db_schema/100-base-schema.sql"
  models_directory: "lib/database/intermediate_db"
  models_namespace: Migrations::Database::IntermediateDB
  enums_directory: nil
  enums_namespace: nil

schema:
  tables:

@@ -36,12 +36,20 @@
},
"models_namespace": {
"type": "string"
},
"enums_directory": {
"type": "string"
},
"enums_namespace": {
"type": "string"
}
},
"required": [
"schema_file",
"models_directory",
"models_namespace"
"models_namespace",
"enums_directory",
"enums_namespace"
]
},
"schema": {

@@ -84,6 +92,9 @@
"datatype": {
"$ref": "#/$defs/datatypes"
},
"enum": {
"type": "string"
},
"nullable": {
"type": "boolean"
}

@@ -98,12 +109,23 @@
"datatype"
]
},
{
"required": [
"enum"
]
},
{
"required": [
"nullable"
]
}
]
],
"not": {
"required": [
"datatype",
"enum"
]
}
}
},
"add": {

@@ -117,6 +139,9 @@
"datatype": {
"$ref": "#/$defs/datatypes"
},
"enum": {
"type": "string"
},
"nullable": {
"type": "boolean"
},

@@ -126,8 +151,19 @@
},
"additionalProperties": false,
"required": [
"name",
"datatype"
"name"
],
"oneOf": [
{
"required": [
"datatype"
]
},
{
"required": [
"enum"
]
}
]
}
}

@@ -287,9 +323,43 @@
}
},
"additionalProperties": false
}
},
"additionalProperties": false
},
"enums": {
"type": "object",
"additionalProperties": {
"type": "object",
"properties": {
"values": {
"oneOf": [
{ "type": "array", "items": { "type": "string" } },
{ "type": "object", "additionalProperties": { "type": "integer" } }
]
},
"strings": {
"oneOf": [
{ "type": "array", "items": { "type": "string" } },
{ "type": "object", "additionalProperties": { "type": "string" } }
]
},
"source": { "type": "string" }
},
"not": {
"anyOf": [
{ "required": ["values", "strings"] },
{ "required": ["values", "source"] },
{ "required": ["strings", "source"] }
]
},
"additionalProperties": false,
"oneOf": [
{ "required": ["values"] },
{ "required": ["strings"] },
{ "required": ["source"] }
]
}
},
"additionalProperties": false
}
},
"plugins": {
"type": "array",

@@ -21,6 +21,7 @@ module Migrations::CLI

      write_db_schema(config, header, schema)
      write_db_models(config, header, schema)
      write_enums(config, header, schema)

      validate_schema(db)

@@ -46,20 +47,34 @@ module Migrations::CLI
        writer = Schema::TableWriter.new(schema_file)
        writer.output_file_header(header)

        schema.each { |table| writer.output_table(table) }
        schema.tables.each { |table| writer.output_table(table) }
      end
    end

    def write_db_models(config, header, schema)
      writer = Schema::ModelWriter.new(config.dig(:output, :models_namespace), header)
      model_namespace = config.dig(:output, :models_namespace)
      enum_namespace = config.dig(:output, :enums_namespace)
      writer = Schema::ModelWriter.new(model_namespace, enum_namespace, header)
      models_path = File.expand_path(config.dig(:output, :models_directory), ::Migrations.root_path)

      schema.each do |table|
      schema.tables.each do |table|
        model_file_path = File.join(models_path, Schema::ModelWriter.filename_for(table))
        File.open(model_file_path, "w") { |model_file| writer.output_table(table, model_file) }
      end

      Schema::ModelWriter.format_files(models_path)
      Schema.format_ruby_files(models_path)
    end

    def write_enums(config, header, schema)
      writer = Schema::EnumWriter.new(config.dig(:output, :enums_namespace), header)
      enums_path = File.expand_path(config.dig(:output, :enums_directory), ::Migrations.root_path)

      schema.enums.each do |enum|
        enum_file_path = File.join(enums_path, Schema::EnumWriter.filename_for(enum))
        File.open(enum_file_path, "w") { |enum_file| writer.output_enum(enum, enum_file) }
      end

      Schema.format_ruby_files(enums_path)
    end

    def relative_config_path(db)

migrations/lib/common/enum.rb (new file)
@@ -0,0 +1,43 @@
# frozen_string_literal: true

module Migrations
  # Module that adds enumeration functionality to modules that extend it.
  # When extended, adds methods for checking and retrieving enum values.
  #
  # @example
  #   module MyEnum
  #     extend ::Migrations::Enum
  #
  #     FIRST = 0
  #     SECOND = 1
  #   end
  #
  # @!method valid?(value)
  #   Checks if the provided value is a valid enum value
  #   @param value [Integer, String] The value to check
  #   @return [Boolean] true if the value is included in the enum values
  #
  # @!method values
  #   Returns all values defined in the enum
  #   @return [Array<Integer, String>] Array of all enum values
  module Enum
    def self.extended(base)
      TracePoint
        .new(:end) do |tp|
          if tp.self == base
            enum_values =
              base.constants.map { |c| base.const_get(c) }.select { |v| !v.is_a?(Module) }.freeze

            values = base.const_set(:ALL_ENUM_VALUES__, enum_values)
            base.private_constant :ALL_ENUM_VALUES__

            base.define_singleton_method(:valid?) { |value| values.include?(value) }
            base.define_singleton_method(:values) { values }

            tp.disable
          end
        end
        .enable
    end
  end
end

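The TracePoint above fires when the extending module's `module ... end` definition is closed, so the generated methods are available immediately after the module body. A minimal usage sketch (module name and constants are hypothetical):

module Severity
  extend ::Migrations::Enum

  LOW = 0
  HIGH = 1
end

Severity.values      # => [0, 1]
Severity.valid?(1)   # => true
Severity.valid?(42)  # => false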
@@ -2,15 +2,17 @@

module Migrations::Database
  module Schema
    Table =
    Definition = Data.define(:tables, :enums)
    TableDefinition =
      Data.define(:name, :columns, :indexes, :primary_key_column_names, :constraints) do
        def sorted_columns
          columns.sort_by { |c| [c.is_primary_key ? 0 : 1, c.name] }
        end
      end
    Column = Data.define(:name, :datatype, :nullable, :max_length, :is_primary_key)
    Index = Data.define(:name, :column_names, :unique, :condition)
    Constraint = Data.define(:name, :type, :condition)
    ColumnDefinition = Data.define(:name, :datatype, :nullable, :max_length, :is_primary_key, :enum)
    IndexDefinition = Data.define(:name, :column_names, :unique, :condition)
    ConstraintDefinition = Data.define(:name, :type, :condition)
    EnumDefinition = Data.define(:name, :values, :datatype)

    class ConfigError < StandardError
    end

@@ -172,5 +174,30 @@ module Migrations::Database
        identifier
      end
    end

    def self.to_singular_classname(snake_case_string)
      snake_case_string.downcase.singularize.camelize
    end

    def self.to_const_name(name)
      name.parameterize.underscore.upcase
    end

    def self.format_ruby_files(path)
      glob_pattern = File.join(path, "*.rb")

      system(
        "bundle",
        "exec",
        "stree",
        "write",
        glob_pattern,
        exception: true,
        out: File::NULL,
        err: File::NULL,
      )
    rescue StandardError
      raise "Failed to run `bundle exec stree write '#{glob_pattern}'`"
    end
  end
end

migrations/lib/database/schema/enum_resolver.rb (new file)
@@ -0,0 +1,72 @@
# frozen_string_literal: true

module Migrations::Database::Schema
  class EnumResolver
    class EnumSourceError < StandardError
    end

    def initialize(config)
      @config = config || {}
    end

    def resolve
      @config.map do |name, entry|
        name = name.to_s
        values_hash = resolve_values(entry)
        datatype = values_hash.values.first.is_a?(String) ? :text : :integer
        EnumDefinition.new(name:, values: values_hash, datatype:)
      end
    end

    private

    def resolve_values(entry)
      if entry.key?(:values)
        normalize_values(entry[:values])
      elsif entry.key?(:strings)
        normalize_strings(entry[:strings])
      elsif entry.key?(:source)
        fetch_source(entry[:source])
      else
        raise EnumSourceError, "Enum must define :values, :strings, or :source"
      end
    end

    def normalize_values(values)
      case values
      when Array
        values.each_with_index.to_h { |k, i| [k.to_s, i] }
      when Hash
        values.transform_keys(&:to_s)
      else
        raise EnumSourceError, "Invalid :values format: #{values.inspect}"
      end
    end

    def normalize_strings(values)
      case values
      when Array
        values.to_h { |k| [k.to_s, k.to_s] }
      when Hash
        values.transform_keys(&:to_s).transform_values(&:to_s)
      else
        raise EnumSourceError, "Invalid :strings format: #{values.inspect}"
      end
    end

    def fetch_source(source_code)
      values = eval(source_code, TOPLEVEL_BINDING) # rubocop:disable Security/Eval

      case values
      when Hash
        values.transform_keys(&:to_s)
      when Array
        values.each_with_index.to_h { |k, i| [k.to_s, i] }
      else
        raise EnumSourceError, "Eval #{source_code} must return Hash or Array"
      end
    rescue StandardError => e
      raise EnumSourceError, "Failed to evaluate source #{source_code}: #{e.message}"
    end
  end
end

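A rough sketch of the three configuration shapes the resolver accepts; the enum names, values, and the `Post.types` expression below are illustrative, not taken from the real schema config:

resolver =
  Migrations::Database::Schema::EnumResolver.new(
    {
      post_action: { values: %w[like flag] },       # array of names => 0-based integer values
      priorities: { values: { low: 1, high: 2 } },  # hash           => explicit integer values
      locales: { strings: %w[en de] },              # strings        => text values ("en" => "en")
      post_types: { source: "Post.types" },         # source         => Ruby expression returning a Hash or Array
    },
  )

resolver.resolve
# => array of EnumDefinition records; :datatype is :text when the values are strings
#    (the :strings form) and :integer otherwise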
migrations/lib/database/schema/enum_writer.rb (new file)
@@ -0,0 +1,45 @@
# frozen_string_literal: true

require "rake"
require "syntax_tree/rake_tasks"

module Migrations::Database::Schema
  class EnumWriter
    def initialize(namespace, header)
      @namespace = namespace
      @header = header.gsub(/^/, "# ")
    end

    def self.filename_for(enum)
      "#{enum.name.downcase.underscore}.rb"
    end

    def output_enum(enum, output_stream)
      module_name = ::Migrations::Database::Schema.to_singular_classname(enum.name)

      output_stream.puts "# frozen_string_literal: true"
      output_stream.puts
      output_stream.puts @header
      output_stream.puts
      output_stream.puts "module #{@namespace}"
      output_stream.puts " module #{module_name}"
      output_stream.puts " extend ::Migrations::Enum"
      output_stream.puts
      output_stream.puts enum_values(enum.values)
      output_stream.puts " end"
      output_stream.puts "end"
    end

    private

    def enum_values(values)
      values
        .sort_by { |_k, v| v }
        .map do |name, value|
          value = %Q|"#{value}"| if value.is_a?(String)
          " #{::Migrations::Database::Schema.to_const_name(name)} = #{value}"
        end
        .join("\n")
    end
  end
end

@@ -8,9 +8,23 @@ module Migrations::Database::Schema
    end

    def load_schema
      enums = load_enums
      tables = load_tables
      Definition.new(tables:, enums:)
    end

    private

    def load_enums
      enums = EnumResolver.new(@schema_config[:enums]).resolve
      @enums_by_name = enums.map { |enum| [enum.name, enum] }.to_h
      enums
    end

    def load_tables
      @db = ActiveRecord::Base.lease_connection

      schema = []
      tables = []
      existing_table_names = @db.tables.to_set

      @schema_config[:tables].sort.each do |table_name, config|

@@ -24,34 +38,36 @@ module Migrations::Database::Schema
        end

        if existing_table_names.include?(table_name)
          schema << table(table_name, config, table_alias)
          tables << table(table_name, config, table_alias)
        end
      end

      @db = nil
      ActiveRecord::Base.release_connection

      schema
      tables
    end

    private

    def table(table_name, config, table_alias = nil)
      primary_key_column_names =
        config[:primary_key_column_names].presence || @db.primary_keys(table_name)

      columns =
        filtered_columns_of(table_name, config).map do |column|
          Column.new(
          modified_column = modified_column_for(column, config)
          enum = @enums_by_name[modified_column[:enum]] if modified_column&.key?(:enum)

          ColumnDefinition.new(
            name: name_for(column),
            datatype: datatype_for(column),
            nullable: nullable_for(column, config),
            datatype: datatype_for(column, modified_column, enum),
            nullable: nullable_for(column, modified_column),
            max_length: column.type == :text ? column.limit : nil,
            is_primary_key: primary_key_column_names.include?(column.name),
            enum:,
          )
        end + added_columns(config, primary_key_column_names)

      Table.new(
      TableDefinition.new(
        table_alias || table_name,
        columns,
        indexes(config),

@@ -65,6 +81,8 @@ module Migrations::Database::Schema
      columns_by_name.except!(*@global.excluded_column_names)

      if (included_columns = config.dig(:columns, :include))
        modified_columns = config.dig(:columns, :modify)&.map { |c| c[:name] }
        included_columns = included_columns + modified_columns if modified_columns
        columns_by_name.slice!(*included_columns)
      elsif (excluded_columns = config.dig(:columns, :exclude))
        columns_by_name.except!(*excluded_columns)

@@ -76,13 +94,16 @@ module Migrations::Database::Schema
    def added_columns(config, primary_key_column_names)
      columns = config.dig(:columns, :add) || []
      columns.map do |column|
        datatype = column[:datatype].to_sym
        Column.new(
        enum = @enums_by_name[column[:enum]] if column[:enum]
        datatype = enum ? enum.datatype : column[:datatype].to_sym

        ColumnDefinition.new(
          name: column[:name],
          datatype:,
          nullable: column.fetch(:nullable, true),
          max_length: datatype == :text ? column[:max_length] : nil,
          is_primary_key: primary_key_column_names.include?(column[:name]),
          enum:,
        )
      end
    end

@@ -91,8 +112,10 @@ module Migrations::Database::Schema
      @global.modified_name(column.name) || column.name
    end

    def datatype_for(column)
      datatype = @global.modified_datatype(column.name) || column.type
    def datatype_for(column, modified_column, enum)
      datatype = enum.datatype if enum
      datatype ||= modified_column[:datatype]&.to_sym if modified_column
      datatype ||= @global.modified_datatype(column.name) || column.type

      case datatype
      when :binary

@@ -108,8 +131,11 @@ module Migrations::Database::Schema
      end
    end

    def nullable_for(column, config)
      modified_column = config.dig(:columns, :modify)&.find { |col| col[:name] == column.name }
    def modified_column_for(column, config)
      config.dig(:columns, :modify)&.find { |col| col[:name] == column.name }
    end

    def nullable_for(column, modified_column)
      return modified_column[:nullable] if modified_column&.key?(:nullable)

      global_nullable = @global.modified_nullable(column.name)

@@ -120,7 +146,7 @@ module Migrations::Database::Schema

    def indexes(config)
      config[:indexes]&.map do |index|
        Index.new(
        IndexDefinition.new(
          name: index[:name],
          column_names: Array.wrap(index[:columns]),
          unique: index.fetch(:unique, false),

@@ -131,7 +157,7 @@ module Migrations::Database::Schema

    def constraints(config)
      config[:constraints]&.map do |constraint|
        Constraint.new(
        ConstraintDefinition.new(
          name: constraint[:name],
          type: constraint.fetch(:type, :check).to_sym,
          condition: constraint[:condition],

@@ -5,8 +5,9 @@ require "syntax_tree/rake_tasks"

module Migrations::Database::Schema
  class ModelWriter
    def initialize(namespace, header)
      @namespace = namespace
    def initialize(model_namespace, enum_namespace, header)
      @model_namespace = model_namespace
      @enum_namespace = enum_namespace
      @header = header.gsub(/^/, "# ")
    end

@@ -14,32 +15,16 @@ module Migrations::Database::Schema
      "#{table.name.singularize}.rb"
    end

    def self.format_files(path)
      glob_pattern = File.join(path, "**/*.rb")

      system(
        "bundle",
        "exec",
        "stree",
        "write",
        glob_pattern,
        exception: true,
        out: File::NULL,
        err: File::NULL,
      )
    rescue StandardError
      raise "Failed to run `bundle exec stree write '#{glob_pattern}'`"
    end

    def output_table(table, output_stream)
      module_name = ::Migrations::Database::Schema.to_singular_classname(table.name)
      columns = table.sorted_columns

      output_stream.puts "# frozen_string_literal: true"
      output_stream.puts
      output_stream.puts @header
      output_stream.puts
      output_stream.puts "module #{@namespace}"
      output_stream.puts " module #{to_singular_classname(table.name)}"
      output_stream.puts "module #{@model_namespace}"
      output_stream.puts " module #{module_name}"
      output_stream.puts " SQL = <<~SQL"
      output_stream.puts " INSERT INTO #{escape_identifier(table.name)} ("
      output_stream.puts column_names(columns)

@@ -50,6 +35,7 @@ module Migrations::Database::Schema
      output_stream.puts " SQL"
      output_stream.puts " private_constant :SQL"
      output_stream.puts
      output_stream.puts method_documentation(table.name, columns)
      output_stream.puts " def self.create("
      output_stream.puts method_parameters(columns)
      output_stream.puts " )"

@@ -64,10 +50,6 @@ module Migrations::Database::Schema

    private

    def to_singular_classname(snake_case_string)
      snake_case_string.downcase.singularize.camelize
    end

    def column_names(columns)
      columns.map { |c| " #{escape_identifier(c.name)}" }.join(",\n")
    end

@@ -97,6 +79,81 @@ module Migrations::Database::Schema
      placeholders
    end

    def method_documentation(table_name, columns)
      max_column_name_length = columns.map { |c| c.name.length }.max

      documentation = +" # Creates a new `#{table_name}` record in the IntermediateDB.\n"
      documentation << " #\n"

      param_documentation =
        columns.map do |c|
          param_name = c.name.ljust(max_column_name_length)
          datatypes = datatypes_for_documentation(c)
          " # @param #{param_name} [#{datatypes}]"
        end

      max_line_length = param_documentation.map(&:length).max
      see_documenation = []

      columns.each_with_index do |column, index|
        if (enum = column.enum)
          enum_module_name = ::Migrations::Database::Schema.to_singular_classname(enum.name)
          enum_value_names = enum.values.sort_by { |_k, v| v }.map(&:first)
          first_const_name = ::Migrations::Database::Schema.to_const_name(enum_value_names.first)

          enum_documentation =
            " # Any constant from #{enum_module_name} (e.g. #{enum_module_name}::#{first_const_name})"

          line = param_documentation[index].ljust(max_line_length)
          param_documentation[index] = "#{line}\n#{enum_documentation}"

          see_documenation << "#{@enum_namespace}::#{enum_module_name}"
        end
      end

      documentation << param_documentation.join("\n")
      documentation << "\n #\n"
      documentation << " # @return [void]"

      if see_documenation.any?
        documentation << "\n #\n"
        documentation << see_documenation.map { |see| " # @see #{see}" }.join("\n")
      end

      documentation
    end

    def datatypes_for_documentation(column)
      datatypes =
        Array(
          case column.datatype
          when :datetime, :date
            "Time"
          when :boolean
            "Boolean"
          when :inet
            "IPAddr"
          when :blob
            "String"
          when :json
            "Object"
          when :float
            "Float"
          when :integer
            "Integer"
          when :numeric
            %w[Integer String]
          when :text
            "String"
          else
            raise "Unknown datatype: #{column.datatype}"
          end,
        )

      datatypes << "nil" if column.nullable
      datatypes.join(", ")
    end

    def method_parameters(columns)
      columns
        .map do |c|

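The resulting YARD block for an enum-backed column might look roughly like this (table, column, and namespace names are invented for illustration); it is emitted directly above the generated `def self.create(...)` in each model file:

# Creates a new `user_options` record in the IntermediateDB.
#
# @param user_id            [Integer]
# @param notification_level [Integer, nil]
# Any constant from NotificationLevel (e.g. NotificationLevel::MUTED)
#
# @return [void]
#
# @see Migrations::Database::IntermediateDB::NotificationLevel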
@@ -126,7 +183,7 @@ module Migrations::Database::Schema
        when :float, :integer, :numeric, :text
          c.name
        else
          raise "Unknown dataype: #{type}"
          raise "Unknown datatype: #{c.datatype}"
        end
        " #{argument},"
      end

@@ -51,12 +51,12 @@ module Migrations::Database::Schema
      has_composite_primary_key = table.primary_key_column_names.size > 1

      max_column_name_length = columns.map { |c| escape_identifier(c.name).length }.max
      max_datatype_length = columns.map { |c| convert_datatype(c.datatype).length }.max
      max_datatype_length = columns.map { |c| convert_datatype(c.datatype, c.enum).length }.max

      columns.map do |c|
        definition = [
          escape_identifier(c.name).ljust(max_column_name_length),
          convert_datatype(c.datatype).ljust(max_datatype_length),
          convert_datatype(c.datatype, c.enum).ljust(max_datatype_length),
        ]

        if c.is_primary_key && !has_composite_primary_key

@@ -73,10 +73,11 @@ module Migrations::Database::Schema
      end
    end

    def convert_datatype(type)
    def convert_datatype(type, enum)
      case type
      when :blob, :boolean, :date, :datetime, :float, :integer, :numeric, :text
        type.to_s.upcase
        datatype = type.to_s.upcase
        enum ? "ENUM_#{datatype}" : datatype
      when :inet
        "INET_TEXT"
      when :json

@@ -13,7 +13,7 @@ module Migrations::Database::Schema::Validation
      schema = load_json_schema
      schemer = ::JSONSchemer.schema(schema)
      response = schemer.validate(@config)
      response.each { |r| @errors << transform_json_schema_errors(r.fetch("error")) }
      response.each { |r| @errors << transform_json_schema_errors(r) }
    end

    private

@@ -23,9 +23,19 @@ module Migrations::Database::Schema::Validation
      JSON.load_file(schema_path)
    end

    def transform_json_schema_errors(error_message)
    def transform_json_schema_errors(response)
      error_message = response.fetch("error")

      error_message.gsub!(/value at (`.+?`) matches `not` schema/) do
        I18n.t("schema.validator.include_exclude_not_allowed", path: $1)
        path = $1
        schema_pointer = response.fetch("schema_pointer")

        case schema_pointer
        in %r{/tables/}
          I18n.t("schema.validator.include_exclude_not_allowed", path:)
        else
          $1
        end
      end
      error_message
    end
