require 'stringio'
require 'active_support/core_ext/big_decimal'

module ActiveRecord
  # This class is used to dump the database schema for some connection to some
  # output format (i.e., ActiveRecord::Schema).
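  #
  # A minimal usage sketch, assuming the default connection and an
  # illustrative output path:
  #
  #   File.open("db/schema.rb", "w") do |file|
  #     ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)
  #   end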
  class SchemaDumper #:nodoc:
    private_class_method :new

    ##
    # :singleton-method:
    # A list of tables which should not be dumped to the schema.
    # Acceptable values are strings as well as regexps.
    # This setting is only used if ActiveRecord::Base.schema_format == :ruby
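    #
    # For example (the table names here are illustrative), skip one table by
    # name and a family of tables by prefix:
    #
    #   ActiveRecord::SchemaDumper.ignore_tables = ['legacy_stats', /^temp_/]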
    cattr_accessor :ignore_tables
    @@ignore_tables = []

    def self.dump(connection=ActiveRecord::Base.connection, stream=STDOUT)
      new(connection).dump(stream)
      stream
    end

    def dump(stream)
      header(stream)
      tables(stream)
      trailer(stream)
      stream
    end

    private

      def initialize(connection)
        @connection = connection
        @types = @connection.native_database_types
        @version = Migrator::current_version rescue nil
      end

      def header(stream)
        define_params = @version ? ":version => #{@version}" : ""

        stream.puts <<HEADER
# This file is auto-generated from the current state of the database. Instead of editing this file,
# please use the migrations feature of Active Record to incrementally modify your database, and
# then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your database schema. If you need
# to create the application database on another system, you should be using db:schema:load, not running
# all the migrations from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you amass, the slower it'll run and the greater the likelihood of issues).
#
# It's strongly recommended to check this file into your version control system.

ActiveRecord::Schema.define(#{define_params}) do

HEADER
      end

      def trailer(stream)
        stream.puts "end"
      end

      def tables(stream)
        @connection.tables.sort.each do |tbl|
          next if ['schema_migrations', ignore_tables].flatten.any? do |ignored|
            case ignored
            when String; tbl == ignored
            when Regexp; tbl =~ ignored
            else
              raise StandardError, 'ActiveRecord::SchemaDumper.ignore_tables accepts an array of String and / or Regexp values.'
            end
          end
          table(tbl, stream)
        end
      end

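      # Dumps a single table as a create_table block. A hypothetical fragment of
      # the generated output (table and column names are illustrative):
      #
      #   create_table "posts", :force => true do |t|
      #     t.string   "title",      :limit => 100
      #     t.text     "body"
      #     t.datetime "created_at"
      #   end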
      def table(table, stream)
        columns = @connection.columns(table)
        begin
          tbl = StringIO.new

          # first dump primary key column
          if @connection.respond_to?(:pk_and_sequence_for)
            pk, pk_seq = @connection.pk_and_sequence_for(table)
          elsif @connection.respond_to?(:primary_key)
            pk = @connection.primary_key(table)
          end
          pk ||= 'id'

          tbl.print "  create_table #{table.inspect}"
          if columns.detect { |c| c.name == pk }
            if pk != 'id'
              tbl.print %Q(, :primary_key => "#{pk}")
            end
          else
            tbl.print ", :id => false"
          end
          tbl.print ", :force => true"
          tbl.puts " do |t|"

          # then dump all non-primary key columns
          column_specs = columns.map do |column|
            raise StandardError, "Unknown type '#{column.sql_type}' for column '#{column.name}'" if @types[column.type].nil?
            next if column.name == pk
            spec = {}
            spec[:name] = column.name.inspect

            # AR has an optimisation which handles zero-scale decimals as integers. This
            # code ensures that the dumper still dumps the column as a decimal.
            spec[:type] = if column.type == :integer && [/^numeric/, /^decimal/].any? { |e| e.match(column.sql_type) }
                            'decimal'
                          else
                            column.type.to_s
                          end
            spec[:limit] = column.limit.inspect if column.limit != @types[column.type][:limit] && spec[:type] != 'decimal'
            spec[:precision] = column.precision.inspect if !column.precision.nil?
            spec[:scale] = column.scale.inspect if !column.scale.nil?
            spec[:null] = 'false' if !column.null
            spec[:default] = default_string(column.default) if column.has_default?
            (spec.keys - [:name, :type]).each { |k| spec[k].insert(0, "#{k.inspect} => ") }
            spec
          end.compact

          # find all migration keys used in this table
          keys = [:name, :limit, :precision, :scale, :default, :null] & column_specs.map(&:keys).flatten

          # figure out the lengths for each column based on above keys
          lengths = keys.map { |key| column_specs.map { |spec| spec[key] ? spec[key].length + 2 : 0 }.max }

          # the string we're going to sprintf our values against, with standardized column widths
          format_string = lengths.map { |len| "%-#{len}s" }

          # find the max length for the 'type' column, which is special
          type_length = column_specs.map { |column| column[:type].length }.max

          # add column type definition to our format string
          format_string.unshift "    t.%-#{type_length}s "

          # join the per-column fragments into a single sprintf template
          format_string *= ''

          column_specs.each do |colspec|
            values = keys.zip(lengths).map { |key, len| colspec.key?(key) ? colspec[key] + ", " : " " * len }
            values.unshift colspec[:type]
            tbl.print((format_string % values).gsub(/,\s*$/, ''))
            tbl.puts
          end

          tbl.puts "  end"
          tbl.puts

          indexes(table, tbl)

          tbl.rewind
          stream.print tbl.read
        rescue => e
          stream.puts "# Could not dump table #{table.inspect} because of the following #{e.class}"
          stream.puts "#   #{e.message}"
          stream.puts
        end

        stream
      end

      def default_string(value)
        case value
        when BigDecimal
          value.to_s
        when Date, DateTime, Time
          "'" + value.to_s(:db) + "'"
        else
          value.inspect
        end
      end

      def indexes(table, stream)
        if (indexes = @connection.indexes(table)).any?
          add_index_statements = indexes.map do |index|
            statement_parts = [ ('add_index ' + index.table.inspect) ]
            statement_parts << index.columns.inspect
            statement_parts << (':name => ' + index.name.inspect)
            statement_parts << ':unique => true' if index.unique
            '  ' + statement_parts.join(', ')
          end

          stream.puts add_index_statements.sort.join("\n")
          stream.puts
        end
      end
  end
end