Diffstat (limited to 'activerecord/lib/active_record/connection_adapters/abstract')
 activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb      | 375
 activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb      |   2
 activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb  |  18
 activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb          |  28
 activerecord/lib/active_record/connection_adapters/abstract/quoting.rb              |  84
 activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb      |   8
 activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb   |  91
 activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb        |  68
 activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb    | 131
 activerecord/lib/active_record/connection_adapters/abstract/transaction.rb          |   6
 10 files changed, 404 insertions(+), 407 deletions(-)
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb b/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
index dc5b305843..d0c5bbe17d 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
@@ -1,6 +1,6 @@
-require 'thread'
-require 'concurrent/map'
-require 'monitor'
+require "thread"
+require "concurrent/map"
+require "monitor"
module ActiveRecord
# Raised when a connection could not be obtained within the connection
@@ -150,61 +150,61 @@ module ActiveRecord
private
- def internal_poll(timeout)
- no_wait_poll || (timeout && wait_poll(timeout))
- end
+ def internal_poll(timeout)
+ no_wait_poll || (timeout && wait_poll(timeout))
+ end
- def synchronize(&block)
- @lock.synchronize(&block)
- end
+ def synchronize(&block)
+ @lock.synchronize(&block)
+ end
# Test if the queue currently contains any elements.
- def any?
- !@queue.empty?
- end
+ def any?
+ !@queue.empty?
+ end
# A thread can remove an element from the queue without
# waiting if and only if the number of currently available
# connections is strictly greater than the number of waiting
# threads.
- def can_remove_no_wait?
- @queue.size > @num_waiting
- end
+ def can_remove_no_wait?
+ @queue.size > @num_waiting
+ end
# Removes and returns the head of the queue if possible, or nil.
- def remove
- @queue.shift
- end
+ def remove
+ @queue.shift
+ end
# Remove and return the head the queue if the number of
# available elements is strictly greater than the number of
# threads currently waiting. Otherwise, return nil.
- def no_wait_poll
- remove if can_remove_no_wait?
- end
+ def no_wait_poll
+ remove if can_remove_no_wait?
+ end
# Waits on the queue up to +timeout+ seconds, then removes and
# returns the head of the queue.
- def wait_poll(timeout)
- @num_waiting += 1
+ def wait_poll(timeout)
+ @num_waiting += 1
- t0 = Time.now
- elapsed = 0
- loop do
- @cond.wait(timeout - elapsed)
+ t0 = Time.now
+ elapsed = 0
+ loop do
+ @cond.wait(timeout - elapsed)
- return remove if any?
+ return remove if any?
- elapsed = Time.now - t0
- if elapsed >= timeout
- msg = 'could not obtain a connection from the pool within %0.3f seconds (waited %0.3f seconds); all pooled connections were in use' %
- [timeout, elapsed]
- raise ConnectionTimeoutError, msg
+ elapsed = Time.now - t0
+ if elapsed >= timeout
+ msg = "could not obtain a connection from the pool within %0.3f seconds (waited %0.3f seconds); all pooled connections were in use" %
+ [timeout, elapsed]
+ raise ConnectionTimeoutError, msg
+ end
end
+ ensure
+ @num_waiting -= 1
end
- ensure
- @num_waiting -= 1
- end
end
# Adds the ability to turn a basic fair FIFO queue into one
@@ -274,11 +274,11 @@ module ActiveRecord
include BiasableQueue
private
- def internal_poll(timeout)
- conn = super
- conn.lease if conn
- conn
- end
+ def internal_poll(timeout)
+ conn = super
+ conn.lease if conn
+ conn
+ end
end
# Every +frequency+ seconds, the reaper will call +reap+ on +pool+.
@@ -339,7 +339,7 @@ module ActiveRecord
# that case +conn.owner+ attr should be consulted.
# Access and modification of +@thread_cached_conns+ does not require
# synchronization.
- @thread_cached_conns = Concurrent::Map.new(:initial_capacity => @size)
+ @thread_cached_conns = Concurrent::Map.new(initial_capacity: @size)
@connections = []
@automatic_reconnect = true
@@ -584,111 +584,111 @@ module ActiveRecord
private
#--
# this is unfortunately not concurrent
- def bulk_make_new_connections(num_new_conns_needed)
- num_new_conns_needed.times do
- # try_to_checkout_new_connection will not exceed pool's @size limit
- if new_conn = try_to_checkout_new_connection
- # make the new_conn available to the starving threads stuck @available Queue
- checkin(new_conn)
+ def bulk_make_new_connections(num_new_conns_needed)
+ num_new_conns_needed.times do
+ # try_to_checkout_new_connection will not exceed pool's @size limit
+ if new_conn = try_to_checkout_new_connection
+ # make the new_conn available to the starving threads stuck @available Queue
+ checkin(new_conn)
+ end
end
end
- end
#--
# From the discussion on GitHub:
# https://github.com/rails/rails/pull/14938#commitcomment-6601951
# This hook-in method allows for easier monkey-patching fixes needed by
# JRuby users that use Fibers.
- def connection_cache_key(thread)
- thread
- end
+ def connection_cache_key(thread)
+ thread
+ end
# Take control of all existing connections so a "group" action such as
# reload/disconnect can be performed safely. It is no longer enough to
# wrap it in +synchronize+ because some pool's actions are allowed
# to be performed outside of the main +synchronize+ block.
- def with_exclusively_acquired_all_connections(raise_on_acquisition_timeout = true)
- with_new_connections_blocked do
- attempt_to_checkout_all_existing_connections(raise_on_acquisition_timeout)
- yield
+ def with_exclusively_acquired_all_connections(raise_on_acquisition_timeout = true)
+ with_new_connections_blocked do
+ attempt_to_checkout_all_existing_connections(raise_on_acquisition_timeout)
+ yield
+ end
end
- end
- def attempt_to_checkout_all_existing_connections(raise_on_acquisition_timeout = true)
- collected_conns = synchronize do
- # account for our own connections
- @connections.select {|conn| conn.owner == Thread.current}
- end
+ def attempt_to_checkout_all_existing_connections(raise_on_acquisition_timeout = true)
+ collected_conns = synchronize do
+ # account for our own connections
+ @connections.select { |conn| conn.owner == Thread.current }
+ end
- newly_checked_out = []
- timeout_time = Time.now + (@checkout_timeout * 2)
+ newly_checked_out = []
+ timeout_time = Time.now + (@checkout_timeout * 2)
- @available.with_a_bias_for(Thread.current) do
- loop do
- synchronize do
- return if collected_conns.size == @connections.size && @now_connecting == 0
- remaining_timeout = timeout_time - Time.now
- remaining_timeout = 0 if remaining_timeout < 0
- conn = checkout_for_exclusive_access(remaining_timeout)
- collected_conns << conn
- newly_checked_out << conn
+ @available.with_a_bias_for(Thread.current) do
+ loop do
+ synchronize do
+ return if collected_conns.size == @connections.size && @now_connecting == 0
+ remaining_timeout = timeout_time - Time.now
+ remaining_timeout = 0 if remaining_timeout < 0
+ conn = checkout_for_exclusive_access(remaining_timeout)
+ collected_conns << conn
+ newly_checked_out << conn
+ end
end
end
- end
- rescue ExclusiveConnectionTimeoutError
- # <tt>raise_on_acquisition_timeout == false</tt> means we are directed to ignore any
- # timeouts and are expected to just give up: we've obtained as many connections
- # as possible, note that in a case like that we don't return any of the
- # +newly_checked_out+ connections.
-
- if raise_on_acquisition_timeout
+ rescue ExclusiveConnectionTimeoutError
+ # <tt>raise_on_acquisition_timeout == false</tt> means we are directed to ignore any
+ # timeouts and are expected to just give up: we've obtained as many connections
+ # as possible, note that in a case like that we don't return any of the
+ # +newly_checked_out+ connections.
+
+ if raise_on_acquisition_timeout
+ release_newly_checked_out = true
+ raise
+ end
+ rescue Exception # if something else went wrong
+ # this can't be a "naked" rescue, because we have should return conns
+ # even for non-StandardErrors
release_newly_checked_out = true
raise
+ ensure
+ if release_newly_checked_out && newly_checked_out
+ # releasing only those conns that were checked out in this method, conns
+ # checked outside this method (before it was called) are not for us to release
+ newly_checked_out.each { |conn| checkin(conn) }
+ end
end
- rescue Exception # if something else went wrong
- # this can't be a "naked" rescue, because we have should return conns
- # even for non-StandardErrors
- release_newly_checked_out = true
- raise
- ensure
- if release_newly_checked_out && newly_checked_out
- # releasing only those conns that were checked out in this method, conns
- # checked outside this method (before it was called) are not for us to release
- newly_checked_out.each {|conn| checkin(conn)}
- end
- end
#--
# Must be called in a synchronize block.
- def checkout_for_exclusive_access(checkout_timeout)
- checkout(checkout_timeout)
- rescue ConnectionTimeoutError
- # this block can't be easily moved into attempt_to_checkout_all_existing_connections's
- # rescue block, because doing so would put it outside of synchronize section, without
- # being in a critical section thread_report might become inaccurate
- msg = "could not obtain ownership of all database connections in #{checkout_timeout} seconds"
-
- thread_report = []
- @connections.each do |conn|
- unless conn.owner == Thread.current
- thread_report << "#{conn} is owned by #{conn.owner}"
+ def checkout_for_exclusive_access(checkout_timeout)
+ checkout(checkout_timeout)
+ rescue ConnectionTimeoutError
+ # this block can't be easily moved into attempt_to_checkout_all_existing_connections's
+ # rescue block, because doing so would put it outside of synchronize section, without
+ # being in a critical section thread_report might become inaccurate
+ msg = "could not obtain ownership of all database connections in #{checkout_timeout} seconds"
+
+ thread_report = []
+ @connections.each do |conn|
+ unless conn.owner == Thread.current
+ thread_report << "#{conn} is owned by #{conn.owner}"
+ end
end
- end
- msg << " (#{thread_report.join(', ')})" if thread_report.any?
+ msg << " (#{thread_report.join(', ')})" if thread_report.any?
- raise ExclusiveConnectionTimeoutError, msg
- end
+ raise ExclusiveConnectionTimeoutError, msg
+ end
- def with_new_connections_blocked
- previous_value = nil
- synchronize do
- previous_value, @new_cons_enabled = @new_cons_enabled, false
+ def with_new_connections_blocked
+ previous_value = nil
+ synchronize do
+ previous_value, @new_cons_enabled = @new_cons_enabled, false
+ end
+ yield
+ ensure
+ synchronize { @new_cons_enabled = previous_value }
end
- yield
- ensure
- synchronize { @new_cons_enabled = previous_value }
- end
# Acquire a connection by one of 1) immediately removing one
# from the queue of available connections, 2) creating a new
@@ -701,86 +701,86 @@ module ActiveRecord
#--
# Implementation detail: the connection returned by +acquire_connection+
# will already be "+connection.lease+ -ed" to the current thread.
- def acquire_connection(checkout_timeout)
- # NOTE: we rely on +@available.poll+ and +try_to_checkout_new_connection+ to
- # +conn.lease+ the returned connection (and to do this in a +synchronized+
- # section). This is not the cleanest implementation, as ideally we would
- # <tt>synchronize { conn.lease }</tt> in this method, but by leaving it to +@available.poll+
- # and +try_to_checkout_new_connection+ we can piggyback on +synchronize+ sections
- # of the said methods and avoid an additional +synchronize+ overhead.
- if conn = @available.poll || try_to_checkout_new_connection
- conn
- else
- reap
- @available.poll(checkout_timeout)
+ def acquire_connection(checkout_timeout)
+ # NOTE: we rely on +@available.poll+ and +try_to_checkout_new_connection+ to
+ # +conn.lease+ the returned connection (and to do this in a +synchronized+
+ # section). This is not the cleanest implementation, as ideally we would
+ # <tt>synchronize { conn.lease }</tt> in this method, but by leaving it to +@available.poll+
+ # and +try_to_checkout_new_connection+ we can piggyback on +synchronize+ sections
+ # of the said methods and avoid an additional +synchronize+ overhead.
+ if conn = @available.poll || try_to_checkout_new_connection
+ conn
+ else
+ reap
+ @available.poll(checkout_timeout)
+ end
end
- end
#--
# if owner_thread param is omitted, this must be called in synchronize block
- def remove_connection_from_thread_cache(conn, owner_thread = conn.owner)
- @thread_cached_conns.delete_pair(connection_cache_key(owner_thread), conn)
- end
- alias_method :release, :remove_connection_from_thread_cache
+ def remove_connection_from_thread_cache(conn, owner_thread = conn.owner)
+ @thread_cached_conns.delete_pair(connection_cache_key(owner_thread), conn)
+ end
+ alias_method :release, :remove_connection_from_thread_cache
- def new_connection
- Base.send(spec.adapter_method, spec.config).tap do |conn|
- conn.schema_cache = schema_cache.dup if schema_cache
+ def new_connection
+ Base.send(spec.adapter_method, spec.config).tap do |conn|
+ conn.schema_cache = schema_cache.dup if schema_cache
+ end
end
- end
# If the pool is not at a +@size+ limit, establish new connection. Connecting
# to the DB is done outside main synchronized section.
#--
# Implementation constraint: a newly established connection returned by this
# method must be in the +.leased+ state.
- def try_to_checkout_new_connection
- # first in synchronized section check if establishing new conns is allowed
- # and increment @now_connecting, to prevent overstepping this pool's @size
- # constraint
- do_checkout = synchronize do
- if @new_cons_enabled && (@connections.size + @now_connecting) < @size
- @now_connecting += 1
- end
- end
- if do_checkout
- begin
- # if successfully incremented @now_connecting establish new connection
- # outside of synchronized section
- conn = checkout_new_connection
- ensure
- synchronize do
- if conn
- adopt_connection(conn)
- # returned conn needs to be already leased
- conn.lease
+ def try_to_checkout_new_connection
+ # first in synchronized section check if establishing new conns is allowed
+ # and increment @now_connecting, to prevent overstepping this pool's @size
+ # constraint
+ do_checkout = synchronize do
+ if @new_cons_enabled && (@connections.size + @now_connecting) < @size
+ @now_connecting += 1
+ end
+ end
+ if do_checkout
+ begin
+ # if successfully incremented @now_connecting establish new connection
+ # outside of synchronized section
+ conn = checkout_new_connection
+ ensure
+ synchronize do
+ if conn
+ adopt_connection(conn)
+ # returned conn needs to be already leased
+ conn.lease
+ end
+ @now_connecting -= 1
end
- @now_connecting -= 1
end
end
end
- end
- def adopt_connection(conn)
- conn.pool = self
- @connections << conn
- end
+ def adopt_connection(conn)
+ conn.pool = self
+ @connections << conn
+ end
- def checkout_new_connection
- raise ConnectionNotEstablished unless @automatic_reconnect
- new_connection
- end
+ def checkout_new_connection
+ raise ConnectionNotEstablished unless @automatic_reconnect
+ new_connection
+ end
- def checkout_and_verify(c)
- c._run_checkout_callbacks do
- c.verify!
+ def checkout_and_verify(c)
+ c._run_checkout_callbacks do
+ c.verify!
+ end
+ c
+ rescue
+ remove c
+ c.disconnect!
+ raise
end
- c
- rescue
- remove c
- c.disconnect!
- raise
- end
end
# ConnectionHandler is a collection of ConnectionPool objects. It is used
@@ -833,8 +833,8 @@ module ActiveRecord
class ConnectionHandler
def initialize
# These caches are keyed by spec.name (ConnectionSpecification#name).
- @owner_to_pool = Concurrent::Map.new(:initial_capacity => 2) do |h,k|
- h[k] = Concurrent::Map.new(:initial_capacity => 2)
+ @owner_to_pool = Concurrent::Map.new(initial_capacity: 2) do |h,k|
+ h[k] = Concurrent::Map.new(initial_capacity: 2)
end
end
@@ -848,7 +848,6 @@ module ActiveRecord
spec = resolver.spec(config)
remove_connection(spec.name)
- owner_to_pool[spec.name] = ConnectionAdapters::ConnectionPool.new(spec)
message_bus = ActiveSupport::Notifications.instrumenter
payload = {
@@ -859,7 +858,7 @@ module ActiveRecord
payload[:config] = spec.config
end
- message_bus.instrument('!connection.active_record', payload) do
+ message_bus.instrument("!connection.active_record", payload) do
owner_to_pool[spec.name] = ConnectionAdapters::ConnectionPool.new(spec)
end
@@ -943,14 +942,14 @@ module ActiveRecord
private
- def owner_to_pool
- @owner_to_pool[Process.pid]
- end
+ def owner_to_pool
+ @owner_to_pool[Process.pid]
+ end
- def pool_from_any_process_for(spec_name)
- owner_to_pool = @owner_to_pool.values.find { |v| v[spec_name] }
- owner_to_pool && owner_to_pool[spec_name]
- end
+ def pool_from_any_process_for(spec_name)
+ owner_to_pool = @owner_to_pool.values.find { |v| v[spec_name] }
+ owner_to_pool && owner_to_pool[spec_name]
+ end
end
end
end
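
The hunks above mostly re-indent the pool's private helpers and switch string literals to double quotes; the checkout/checkin flow they implement is unchanged. As a minimal usage sketch (not part of the patch, assuming an already-configured ActiveRecord::Base connection), the public API those helpers sit behind looks like this:

    pool = ActiveRecord::Base.connection_pool
    conn = pool.checkout(5)      # wait up to 5 seconds, else ConnectionTimeoutError is raised
    begin
      conn.execute("SELECT 1")   # use the leased connection directly
    ensure
      pool.checkin(conn)         # hand it back so waiting threads can poll it from @available
    end
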
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb b/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb
index 6711049588..95c72f1e20 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb
@@ -1,7 +1,6 @@
module ActiveRecord
module ConnectionAdapters # :nodoc:
module DatabaseLimits
-
# Returns the maximum length of a table alias.
def table_alias_length
255
@@ -61,7 +60,6 @@ module ActiveRecord
def joins_per_query
256
end
-
end
end
end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb b/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
index e667e16964..126047584e 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
@@ -82,21 +82,22 @@ module ActiveRecord
# Executes +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
- def exec_query(sql, name = 'SQL', binds = [], prepare: false)
+ def exec_query(sql, name = "SQL", binds = [], prepare: false)
raise NotImplementedError
end
# Executes insert +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
- def exec_insert(sql, name, binds, pk = nil, sequence_name = nil)
+ def exec_insert(sql, name = nil, binds = [], pk = nil, sequence_name = nil)
+ sql, binds = sql_for_insert(sql, pk, nil, sequence_name, binds)
exec_query(sql, name, binds)
end
# Executes delete +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
- def exec_delete(sql, name, binds)
+ def exec_delete(sql, name = nil, binds = [])
exec_query(sql, name, binds)
end
@@ -108,7 +109,7 @@ module ActiveRecord
# Executes update +sql+ statement in the context of this connection using
# +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
- def exec_update(sql, name, binds)
+ def exec_update(sql, name = nil, binds = [])
exec_query(sql, name, binds)
end
@@ -121,8 +122,7 @@ module ActiveRecord
# If the next id was calculated in advance (as in Oracle), it should be
# passed in as +id_value+.
def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])
- sql, binds, pk, sequence_name = sql_for_insert(to_sql(arel, binds), pk, id_value, sequence_name, binds)
- value = exec_insert(sql, name, binds, pk, sequence_name)
+ value = exec_insert(to_sql(arel, binds), name, binds, pk, sequence_name)
id_value || last_inserted_id(value)
end
alias create insert
@@ -324,7 +324,7 @@ module ActiveRecord
end
end
- execute "INSERT INTO #{quote_table_name(table_name)} (#{key_list.join(', ')}) VALUES (#{value_list.join(', ')})", 'Fixture Insert'
+ execute "INSERT INTO #{quote_table_name(table_name)} (#{key_list.join(', ')}) VALUES (#{value_list.join(', ')})", "Fixture Insert"
end
def empty_insert_statement_value
@@ -343,8 +343,8 @@ module ActiveRecord
def sanitize_limit(limit)
if limit.is_a?(Integer) || limit.is_a?(Arel::Nodes::SqlLiteral)
limit
- elsif limit.to_s.include?(',')
- Arel.sql limit.to_s.split(',').map{ |i| Integer(i) }.join(',')
+ elsif limit.to_s.include?(",")
+ Arel.sql limit.to_s.split(",").map { |i| Integer(i) }.join(",")
else
Integer(limit)
end
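
The substantive change in this file is that exec_insert now defaults +name+ and +binds+ and calls sql_for_insert itself, so insert passes the raw SQL straight through. A short sketch of the exec_* family with the new defaults (assumes an established connection; the users table and values are invented):

    conn = ActiveRecord::Base.connection
    conn.exec_insert("INSERT INTO users (name) VALUES ('foo')")    # name and binds may now be omitted
    conn.exec_update("UPDATE users SET name = 'bar' WHERE id = 1")
    conn.exec_delete("DELETE FROM users WHERE id = 1")
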
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb b/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb
index 0bdfd4f900..10c60080d5 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb
@@ -73,23 +73,23 @@ module ActiveRecord
private
- def cache_sql(sql, binds)
- result =
- if @query_cache[sql].key?(binds)
- ActiveSupport::Notifications.instrument("sql.active_record",
- :sql => sql, :binds => binds, :name => "CACHE", :connection_id => object_id)
- @query_cache[sql][binds]
- else
- @query_cache[sql][binds] = yield
- end
- result.dup
- end
+ def cache_sql(sql, binds)
+ result =
+ if @query_cache[sql].key?(binds)
+ ActiveSupport::Notifications.instrument("sql.active_record",
+ sql: sql, binds: binds, name: "CACHE", connection_id: object_id)
+ @query_cache[sql][binds]
+ else
+ @query_cache[sql][binds] = yield
+ end
+ result.dup
+ end
# If arel is locked this is a SELECT ... FOR UPDATE or somesuch. Such
# queries should not be cached.
- def locked?(arel)
- arel.respond_to?(:locked) && arel.locked
- end
+ def locked?(arel)
+ arel.respond_to?(:locked) && arel.locked
+ end
end
end
end
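
cache_sql above answers repeated statements from @query_cache and instruments them with the name "CACHE". A brief, hedged illustration through the public cache block (User stands in for any model class):

    ActiveRecord::Base.connection.cache do
      User.find(1)   # first call hits the database and stores the result
      User.find(1)   # second call is served from @query_cache and logged as CACHE
    end
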
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb b/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb
index eda6af08e3..bbd52b8a91 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb
@@ -1,4 +1,4 @@
-require 'active_support/core_ext/big_decimal/conversions'
+require "active_support/core_ext/big_decimal/conversions"
module ActiveRecord
module ConnectionAdapters # :nodoc:
@@ -116,7 +116,7 @@ module ActiveRecord
end
def unquoted_true
- 't'.freeze
+ "t".freeze
end
def quoted_false
@@ -124,7 +124,7 @@ module ActiveRecord
end
def unquoted_false
- 'f'.freeze
+ "f".freeze
end
# Quote date/time values for use in SQL input. Includes microseconds
@@ -147,52 +147,52 @@ module ActiveRecord
end
def quoted_time(value) # :nodoc:
- quoted_date(value).sub(/\A2000-01-01 /, '')
+ quoted_date(value).sub(/\A2000-01-01 /, "")
end
private
- def type_casted_binds(binds)
- binds.map { |attr| type_cast(attr.value_for_database) }
- end
-
- def types_which_need_no_typecasting
- [nil, Numeric, String]
- end
-
- def _quote(value)
- case value
- when String, ActiveSupport::Multibyte::Chars, Type::Binary::Data
- "'#{quote_string(value.to_s)}'"
- when true then quoted_true
- when false then quoted_false
- when nil then "NULL"
- # BigDecimals need to be put in a non-normalized form and quoted.
- when BigDecimal then value.to_s('F')
- when Numeric, ActiveSupport::Duration then value.to_s
- when Type::Time::Value then "'#{quoted_time(value)}'"
- when Date, Time then "'#{quoted_date(value)}'"
- when Symbol then "'#{quote_string(value.to_s)}'"
- when Class then "'#{value}'"
- else raise TypeError, "can't quote #{value.class.name}"
+ def type_casted_binds(binds)
+ binds.map { |attr| type_cast(attr.value_for_database) }
end
- end
- def _type_cast(value)
- case value
- when Symbol, ActiveSupport::Multibyte::Chars, Type::Binary::Data
- value.to_s
- when true then unquoted_true
- when false then unquoted_false
- # BigDecimals need to be put in a non-normalized form and quoted.
- when BigDecimal then value.to_s('F')
- when Type::Time::Value then quoted_time(value)
- when Date, Time then quoted_date(value)
- when *types_which_need_no_typecasting
- value
- else raise TypeError
+ def types_which_need_no_typecasting
+ [nil, Numeric, String]
+ end
+
+ def _quote(value)
+ case value
+ when String, ActiveSupport::Multibyte::Chars, Type::Binary::Data
+ "'#{quote_string(value.to_s)}'"
+ when true then quoted_true
+ when false then quoted_false
+ when nil then "NULL"
+ # BigDecimals need to be put in a non-normalized form and quoted.
+ when BigDecimal then value.to_s("F")
+ when Numeric, ActiveSupport::Duration then value.to_s
+ when Type::Time::Value then "'#{quoted_time(value)}'"
+ when Date, Time then "'#{quoted_date(value)}'"
+ when Symbol then "'#{quote_string(value.to_s)}'"
+ when Class then "'#{value}'"
+ else raise TypeError, "can't quote #{value.class.name}"
+ end
+ end
+
+ def _type_cast(value)
+ case value
+ when Symbol, ActiveSupport::Multibyte::Chars, Type::Binary::Data
+ value.to_s
+ when true then unquoted_true
+ when false then unquoted_false
+ # BigDecimals need to be put in a non-normalized form and quoted.
+ when BigDecimal then value.to_s("F")
+ when Type::Time::Value then quoted_time(value)
+ when Date, Time then quoted_date(value)
+ when *types_which_need_no_typecasting
+ value
+ else raise TypeError
+ end
end
- end
end
end
end
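
Apart from the re-indentation, _quote and _type_cast keep the same case analysis; only the string literals changed. A few illustrative results of the public quote method on the abstract adapter (exact output may differ per adapter):

    conn = ActiveRecord::Base.connection
    conn.quote("O'Reilly")            # => "'O''Reilly'"   (embedded quotes doubled by quote_string)
    conn.quote(nil)                   # => "NULL"
    conn.quote(BigDecimal("1.5"))     # => "1.5"           (non-normalized form via to_s("F"))
    conn.quote(Date.new(2016, 1, 1))  # => "'2016-01-01'"
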
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb
index 6add697eeb..322684672f 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb
@@ -1,4 +1,4 @@
-require 'active_support/core_ext/string/strip'
+require "active_support/core_ext/string/strip"
module ActiveRecord
module ConnectionAdapters
@@ -23,9 +23,9 @@ module ActiveRecord
def visit_AlterTable(o)
sql = "ALTER TABLE #{quote_table_name(o.name)} "
- sql << o.adds.map { |col| accept col }.join(' ')
- sql << o.foreign_key_adds.map { |fk| visit_AddForeignKey fk }.join(' ')
- sql << o.foreign_key_drops.map { |fk| visit_DropForeignKey fk }.join(' ')
+ sql << o.adds.map { |col| accept col }.join(" ")
+ sql << o.foreign_key_adds.map { |fk| visit_AddForeignKey fk }.join(" ")
+ sql << o.foreign_key_drops.map { |fk| visit_DropForeignKey fk }.join(" ")
end
def visit_ColumnDefinition(o)
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
index 8dbafc5a4b..d9a799676f 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
@@ -11,7 +11,6 @@ module ActiveRecord
# +columns+ attribute of said TableDefinition object, in order to be used
# for generating a number of table creation or table changing SQL statements.
class ColumnDefinition < Struct.new(:name, :type, :limit, :precision, :scale, :default, :null, :first, :after, :auto_increment, :primary_key, :collation, :sql_type, :comment) #:nodoc:
-
def primary_key?
primary_key || type.to_sym == :primary_key
end
@@ -61,9 +60,9 @@ module ActiveRecord
end
private
- def default_primary_key
- "id"
- end
+ def default_primary_key
+ "id"
+ end
end
class ReferenceDefinition # :nodoc:
@@ -103,51 +102,51 @@ module ActiveRecord
protected
- attr_reader :name, :polymorphic, :index, :foreign_key, :type, :options
+ attr_reader :name, :polymorphic, :index, :foreign_key, :type, :options
private
- def as_options(value, default = {})
- if value.is_a?(Hash)
- value
- else
- default
+ def as_options(value, default = {})
+ if value.is_a?(Hash)
+ value
+ else
+ default
+ end
end
- end
- def polymorphic_options
- as_options(polymorphic, options)
- end
+ def polymorphic_options
+ as_options(polymorphic, options)
+ end
- def index_options
- as_options(index)
- end
+ def index_options
+ as_options(index)
+ end
- def foreign_key_options
- as_options(foreign_key).merge(column: column_name)
- end
+ def foreign_key_options
+ as_options(foreign_key).merge(column: column_name)
+ end
- def columns
- result = [[column_name, type, options]]
- if polymorphic
- result.unshift(["#{name}_type", :string, polymorphic_options])
+ def columns
+ result = [[column_name, type, options]]
+ if polymorphic
+ result.unshift(["#{name}_type", :string, polymorphic_options])
+ end
+ result
end
- result
- end
- def column_name
- "#{name}_id"
- end
+ def column_name
+ "#{name}_id"
+ end
- def column_names
- columns.map(&:first)
- end
+ def column_names
+ columns.map(&:first)
+ end
- def foreign_table_name
- foreign_key_options.fetch(:to_table) do
- Base.pluralize_table_names ? name.to_s.pluralize : name
+ def foreign_table_name
+ foreign_key_options.fetch(:to_table) do
+ Base.pluralize_table_names ? name.to_s.pluralize : name
+ end
end
- end
end
module ColumnMethods
@@ -212,7 +211,7 @@ module ActiveRecord
def initialize(name, temporary = false, options = nil, as = nil, comment: nil)
@columns_hash = {}
- @indexes = {}
+ @indexes = []
@foreign_keys = []
@primary_keys = nil
@temporary = temporary
@@ -328,7 +327,7 @@ module ActiveRecord
#
# index(:account_id, name: 'index_projects_on_account_id')
def index(column_name, options = {})
- indexes[column_name] = options
+ indexes << [column_name, options]
end
def foreign_key(table_name, options = {}) # :nodoc:
@@ -342,9 +341,7 @@ module ActiveRecord
# <tt>:updated_at</tt> to the table. See {connection.add_timestamps}[rdoc-ref:SchemaStatements#add_timestamps]
#
# t.timestamps null: false
- def timestamps(*args)
- options = args.extract_options!
-
+ def timestamps(**options)
options[:null] = false if options[:null].nil?
column(:created_at, :datetime, options)
@@ -383,13 +380,13 @@ module ActiveRecord
end
private
- def create_column_definition(name, type)
- ColumnDefinition.new name, type
- end
+ def create_column_definition(name, type)
+ ColumnDefinition.new name, type
+ end
- def aliased_types(name, fallback)
- 'timestamp' == name ? :datetime : fallback
- end
+ def aliased_types(name, fallback)
+ "timestamp" == name ? :datetime : fallback
+ end
end
class AlterTable # :nodoc:
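
Two behavioral tweaks sit in this file: TableDefinition#indexes is now an Array of [column_name, options] pairs instead of a Hash, and timestamps accepts keyword options directly. An illustrative create_table sketch, as it might appear inside a migration (table and column names are invented):

    create_table :projects do |t|
      t.string     :name
      t.references :account, polymorphic: true   # adds account_id plus an account_type string column
      t.index      :name, unique: true           # appended to indexes as [:name, { unique: true }]
      t.timestamps null: false                   # created_at / updated_at, NOT NULL
    end
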
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb
index 677a4c6bd0..8bb7362c2e 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb
@@ -40,7 +40,7 @@ module ActiveRecord
default = schema_default(column) if column.has_default?
spec[:default] = default unless default.nil?
- spec[:null] = 'false' unless column.null
+ spec[:null] = "false" unless column.null
if collation = schema_collation(column)
spec[:collation] = collation
@@ -58,48 +58,48 @@ module ActiveRecord
private
- def default_primary_key?(column)
- schema_type(column) == :integer
- end
+ def default_primary_key?(column)
+ schema_type(column) == :integer
+ end
- def schema_type(column)
- if column.bigint?
- :bigint
- else
- column.type
+ def schema_type(column)
+ if column.bigint?
+ :bigint
+ else
+ column.type
+ end
end
- end
- def schema_limit(column)
- limit = column.limit unless column.bigint?
- limit.inspect if limit && limit != native_database_types[column.type][:limit]
- end
+ def schema_limit(column)
+ limit = column.limit unless column.bigint?
+ limit.inspect if limit && limit != native_database_types[column.type][:limit]
+ end
- def schema_precision(column)
- column.precision.inspect if column.precision
- end
+ def schema_precision(column)
+ column.precision.inspect if column.precision
+ end
- def schema_scale(column)
- column.scale.inspect if column.scale
- end
+ def schema_scale(column)
+ column.scale.inspect if column.scale
+ end
- def schema_default(column)
- type = lookup_cast_type_from_column(column)
- default = type.deserialize(column.default)
- if default.nil?
- schema_expression(column)
- else
- type.type_cast_for_schema(default)
+ def schema_default(column)
+ type = lookup_cast_type_from_column(column)
+ default = type.deserialize(column.default)
+ if default.nil?
+ schema_expression(column)
+ else
+ type.type_cast_for_schema(default)
+ end
end
- end
- def schema_expression(column)
- "-> { #{column.default_function.inspect} }" if column.default_function
- end
+ def schema_expression(column)
+ "-> { #{column.default_function.inspect} }" if column.default_function
+ end
- def schema_collation(column)
- column.collation.inspect if column.collation
- end
+ def schema_collation(column)
+ column.collation.inspect if column.collation
+ end
end
end
end
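
The ColumnDumper helpers above (schema_type, schema_limit, schema_default, schema_collation, and so on) produce the per-column specs written to db/schema.rb. Roughly, a dumped table ends up looking like this (column names, limits and defaults are made up):

    create_table "users", force: :cascade do |t|
      t.string   "name", limit: 50, default: "anonymous", null: false
      t.datetime "created_at", null: false
    end
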
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
index 353cae0f3d..afa0860707 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
@@ -1,6 +1,6 @@
-require 'active_record/migration/join_table'
-require 'active_support/core_ext/string/access'
-require 'digest'
+require "active_record/migration/join_table"
+require "active_support/core_ext/string/access"
+require "digest"
module ActiveRecord
module ConnectionAdapters # :nodoc:
@@ -25,7 +25,7 @@ module ActiveRecord
# Truncates a table alias according to the limits of the current adapter.
def table_alias_for(table_name)
- table_name[0...table_alias_length].tr('.', '_')
+ table_name[0...table_alias_length].tr(".", "_")
end
# Returns the relation names useable to back Active Record models.
@@ -278,7 +278,7 @@ module ActiveRecord
result = execute schema_creation.accept td
unless supports_indexes_in_create?
- td.indexes.each_pair do |column_name, index_options|
+ td.indexes.each do |column_name, index_options|
add_index(table_name, column_name, index_options)
end
end
@@ -339,7 +339,7 @@ module ActiveRecord
column_options.reverse_merge!(null: false)
type = column_options.delete(:type) || :integer
- t1_column, t2_column = [table_1, table_2].map{ |t| t.to_s.singularize.foreign_key }
+ t1_column, t2_column = [table_1, table_2].map { |t| t.to_s.singularize.foreign_key }
create_table(join_table_name, options.merge!(id: false)) do |td|
td.send type, t1_column, column_options
@@ -533,6 +533,10 @@ module ActiveRecord
# add_column(:measurements, :huge_integer, :decimal, precision: 30)
# # ALTER TABLE "measurements" ADD "huge_integer" decimal(30)
#
+ # # Defines a column that stores an array of a type.
+ # add_column(:users, :skills, :text, array: true)
+ # # ALTER TABLE "users" ADD "skills" text[]
+ #
# # Defines a column with a database-specific type.
# add_column(:shapes, :triangle, 'polygon')
# # ALTER TABLE "shapes" ADD "triangle" polygon
@@ -762,7 +766,7 @@ module ActiveRecord
raise ArgumentError, "You must specify the index name"
end
else
- index_name(table_name, :column => options)
+ index_name(table_name, column: options)
end
end
@@ -785,7 +789,7 @@ module ActiveRecord
# [<tt>:type</tt>]
# The reference column type. Defaults to +:integer+.
# [<tt>:index</tt>]
- # Add an appropriate index. Defaults to false.
+ # Add an appropriate index. Defaults to true.
# See #add_index for usage of this option.
# [<tt>:foreign_key</tt>]
# Add an appropriate foreign key constraint. Defaults to false.
@@ -962,7 +966,7 @@ module ActiveRecord
def foreign_key_for(from_table, options_or_to_table = {}) # :nodoc:
return unless supports_foreign_keys?
- foreign_keys(from_table).detect {|fk| fk.defined_for? options_or_to_table }
+ foreign_keys(from_table).detect { |fk| fk.defined_for? options_or_to_table }
end
def foreign_key_for!(from_table, options_or_to_table = {}) # :nodoc:
@@ -985,7 +989,7 @@ module ActiveRecord
end
def dump_schema_information #:nodoc:
- versions = ActiveRecord::SchemaMigration.order('version').pluck(:version)
+ versions = ActiveRecord::SchemaMigration.order("version").pluck(:version)
insert_versions_sql(versions)
end
@@ -994,7 +998,7 @@ module ActiveRecord
if supports_multi_insert?
sql = "INSERT INTO #{sm_table} (version) VALUES\n"
- sql << versions.map {|v| "('#{v}')" }.join(",\n")
+ sql << versions.map { |v| "('#{v}')" }.join(",\n")
sql << ";\n\n"
sql
else
@@ -1024,18 +1028,18 @@ module ActiveRecord
sm_table = quote_table_name(ActiveRecord::Migrator.schema_migrations_table_name)
migrated = select_values("SELECT version FROM #{sm_table}").map(&:to_i)
- paths = migrations_paths.map {|p| "#{p}/[0-9]*_*.rb" }
+ paths = migrations_paths.map { |p| "#{p}/[0-9]*_*.rb" }
versions = Dir[*paths].map do |filename|
- filename.split('/').last.split('_').first.to_i
+ filename.split("/").last.split("_").first.to_i
end
unless migrated.include?(version)
execute "INSERT INTO #{sm_table} (version) VALUES ('#{version}')"
end
- inserting = (versions - migrated).select {|v| v < version}
+ inserting = (versions - migrated).select { |v| v < version }
if inserting.any?
- if (duplicate = inserting.detect {|v| inserting.count(v) > 1})
+ if (duplicate = inserting.detect { |v| inserting.count(v) > 1 })
raise "Duplicate migration #{duplicate}. Please renumber your migrations to resolve the conflict."
end
execute insert_versions_sql(inserting)
@@ -1161,31 +1165,34 @@ module ActiveRecord
end
protected
- def add_index_sort_order(option_strings, column_names, options = {})
- if options.is_a?(Hash) && order = options[:order]
+
+ def add_index_sort_order(quoted_columns, **options)
+ if order = options[:order]
case order
when Hash
- column_names.each {|name| option_strings[name] += " #{order[name].upcase}" if order.has_key?(name)}
+ quoted_columns.each { |name, column| column << " #{order[name].upcase}" if order[name].present? }
when String
- column_names.each {|name| option_strings[name] += " #{order.upcase}"}
+ quoted_columns.each { |name, column| column << " #{order.upcase}" if order.present? }
end
end
- return option_strings
+ quoted_columns
end
# Overridden by the MySQL adapter for supporting index lengths
- def quoted_columns_for_index(column_names, options = {})
- return [column_names] if column_names.is_a?(String)
-
- option_strings = Hash[column_names.map {|name| [name, '']}]
-
- # add index sort order if supported
+ def add_options_for_index_columns(quoted_columns, **options)
if supports_index_sort_order?
- option_strings = add_index_sort_order(option_strings, column_names, options)
+ quoted_columns = add_index_sort_order(quoted_columns, options)
end
- column_names.map {|name| quote_column_name(name) + option_strings[name]}
+ quoted_columns
+ end
+
+ def quoted_columns_for_index(column_names, **options)
+ return [column_names] if column_names.is_a?(String)
+
+ quoted_columns = Hash[column_names.map { |name| [name, quote_column_name(name).dup] }]
+ add_options_for_index_columns(quoted_columns, options).values
end
def index_name_for_remove(table_name, options = {})
@@ -1205,10 +1212,10 @@ module ActiveRecord
end
if column_names.any?
- checks << lambda { |i| i.columns.join('_and_') == column_names.join('_and_') }
+ checks << lambda { |i| i.columns.join("_and_") == column_names.join("_and_") }
end
- raise ArgumentError "No name or columns specified" if checks.none?
+ raise ArgumentError, "No name or columns specified" if checks.none?
matching_indexes = indexes(table_name).select { |i| checks.all? { |check| check[i] } }
@@ -1245,49 +1252,49 @@ module ActiveRecord
end
private
- def create_table_definition(*args)
- TableDefinition.new(*args)
- end
-
- def create_alter_table(name)
- AlterTable.new create_table_definition(name)
- end
+ def create_table_definition(*args)
+ TableDefinition.new(*args)
+ end
- def index_name_options(column_names) # :nodoc:
- if column_names.is_a?(String)
- column_names = column_names.scan(/\w+/).join('_')
+ def create_alter_table(name)
+ AlterTable.new create_table_definition(name)
end
- { column: column_names }
- end
+ def index_name_options(column_names) # :nodoc:
+ if column_names.is_a?(String)
+ column_names = column_names.scan(/\w+/).join("_")
+ end
- def foreign_key_name(table_name, options) # :nodoc:
- identifier = "#{table_name}_#{options.fetch(:column)}_fk"
- hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
- options.fetch(:name) do
- "fk_rails_#{hashed_identifier}"
+ { column: column_names }
end
- end
- def validate_index_length!(table_name, new_name, internal = false) # :nodoc:
- max_index_length = internal ? index_name_length : allowed_index_name_length
+ def foreign_key_name(table_name, options) # :nodoc:
+ identifier = "#{table_name}_#{options.fetch(:column)}_fk"
+ hashed_identifier = Digest::SHA256.hexdigest(identifier).first(10)
+ options.fetch(:name) do
+ "fk_rails_#{hashed_identifier}"
+ end
+ end
- if new_name.length > max_index_length
- raise ArgumentError, "Index name '#{new_name}' on table '#{table_name}' is too long; the limit is #{allowed_index_name_length} characters"
+ def validate_index_length!(table_name, new_name, internal = false) # :nodoc:
+ max_index_length = internal ? index_name_length : allowed_index_name_length
+
+ if new_name.length > max_index_length
+ raise ArgumentError, "Index name '#{new_name}' on table '#{table_name}' is too long; the limit is #{allowed_index_name_length} characters"
+ end
end
- end
- def extract_new_default_value(default_or_changes)
- if default_or_changes.is_a?(Hash) && default_or_changes.has_key?(:from) && default_or_changes.has_key?(:to)
- default_or_changes[:to]
- else
- default_or_changes
+ def extract_new_default_value(default_or_changes)
+ if default_or_changes.is_a?(Hash) && default_or_changes.has_key?(:from) && default_or_changes.has_key?(:to)
+ default_or_changes[:to]
+ else
+ default_or_changes
+ end
end
- end
- def can_remove_index_by_name?(options)
- options.is_a?(Hash) && options.key?(:name) && options.except(:name, :algorithm).empty?
- end
+ def can_remove_index_by_name?(options)
+ options.is_a?(Hash) && options.key?(:name) && options.except(:name, :algorithm).empty?
+ end
end
end
end
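
The index helpers were reworked so that quoted_columns_for_index now builds a hash of already-quoted column names and add_index_sort_order appends the per-column order to those quoted strings. A sketch of the option this serves, on an adapter where supports_index_sort_order? is true (table and column names are invented; quoting style varies by adapter):

    add_index :users, [:last_name, :first_name], order: { last_name: :desc }
    # quoted columns become roughly { last_name: '"last_name" DESC', first_name: '"first_name"' },
    # yielding something like:
    #   CREATE INDEX index_users_on_last_name_and_first_name
    #     ON "users" ("last_name" DESC, "first_name")
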
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb b/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb
index ca795cb1ad..6bb072dd73 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb
@@ -41,7 +41,6 @@ module ActiveRecord
end
class Transaction #:nodoc:
-
attr_reader :connection, :state, :records, :savepoint_name
attr_writer :joinable
@@ -101,7 +100,6 @@ module ActiveRecord
end
class SavepointTransaction < Transaction
-
def initialize(connection, savepoint_name, options, *args)
super(connection, options, *args)
if options[:isolation]
@@ -124,7 +122,6 @@ module ActiveRecord
end
class RealTransaction < Transaction
-
def initialize(connection, options, *args)
super
if options[:isolation]
@@ -195,7 +192,7 @@ module ActiveRecord
raise
ensure
unless error
- if Thread.current.status == 'aborting'
+ if Thread.current.status == "aborting"
rollback_transaction if transaction
else
begin
@@ -226,7 +223,6 @@ module ActiveRecord
return unless error.is_a?(ActiveRecord::PreparedStatementCacheExpired)
@connection.clear_cache!
end
-
end
end
end
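
The transaction classes above lose a few blank lines and switch the thread-status check to a double-quoted string; behavior is unchanged. For reference, the options[:isolation] consumed by RealTransaction and rejected by SavepointTransaction is the one passed in through the public API, e.g.:

    ActiveRecord::Base.transaction(isolation: :serializable) do
      # statements here run inside a SERIALIZABLE transaction;
      # requesting :isolation for a nested (savepoint) transaction raises
      # ActiveRecord::TransactionIsolationError
    end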