Diffstat (limited to 'activerecord/lib/active_record/connection_adapters')
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb      |  59
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb  |  26
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb   |  24
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb    |   5
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract_adapter.rb              |   7
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb        |  26
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb            |   3
-rw-r--r--  activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb               | 568
-rw-r--r--  activerecord/lib/active_record/connection_adapters/sqlite_adapter.rb                | 563
9 files changed, 660 insertions(+), 621 deletions(-)
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb b/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
index 561e48d52e..a609867898 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
@@ -2,6 +2,7 @@ require 'thread'
require 'monitor'
require 'set'
require 'active_support/core_ext/module/deprecation'
+require 'timeout'
module ActiveRecord
# Raised when a connection could not be obtained within the connection
@@ -11,9 +12,6 @@ module ActiveRecord
# Raised when a connection pool is full and another connection is requested
class PoolFullError < ConnectionNotEstablished
- def initialize size, timeout
- super("Connection pool of size #{size} and timeout #{timeout}s is full")
- end
end
module ConnectionAdapters
@@ -94,6 +92,21 @@ module ActiveRecord
attr_accessor :automatic_reconnect, :timeout
attr_reader :spec, :connections, :size, :reaper
+ class Latch # :nodoc:
+ def initialize
+ @mutex = Mutex.new
+ @cond = ConditionVariable.new
+ end
+
+ def release
+ @mutex.synchronize { @cond.broadcast }
+ end
+
+ def await
+ @mutex.synchronize { @cond.wait @mutex }
+ end
+ end
+
# Creates a new ConnectionPool object. +spec+ is a ConnectionSpecification
# object which describes database connection information (e.g. adapter,
# host name, username, password, etc), as well as the maximum size for
@@ -115,6 +128,7 @@ module ActiveRecord
# default max pool size to 5
@size = (spec.config[:pool] && spec.config[:pool].to_i) || 5
+ @latch = Latch.new
@connections = []
@automatic_reconnect = true
end
@@ -139,8 +153,10 @@ module ActiveRecord
# #release_connection releases the connection-thread association
# and returns the connection to the pool.
def release_connection(with_id = current_connection_id)
- conn = @reserved_connections.delete(with_id)
- checkin conn if conn
+ synchronize do
+ conn = @reserved_connections.delete(with_id)
+ checkin conn if conn
+ end
end
# If a connection already exists yield it to the block. If no connection
@@ -205,23 +221,23 @@ module ActiveRecord
# Raises:
# - PoolFullError: no connection can be obtained from the pool.
def checkout
- # Checkout an available connection
- synchronize do
- # Try to find a connection that hasn't been leased, and lease it
- conn = connections.find { |c| c.lease }
-
- # If all connections were leased, and we have room to expand,
- # create a new connection and lease it.
- if !conn && connections.size < size
- conn = checkout_new_connection
- conn.lease
- end
+ loop do
+ # Checkout an available connection
+ synchronize do
+ # Try to find a connection that hasn't been leased, and lease it
+ conn = connections.find { |c| c.lease }
+
+ # If all connections were leased, and we have room to expand,
+ # create a new connection and lease it.
+ if !conn && connections.size < size
+ conn = checkout_new_connection
+ conn.lease
+ end
- if conn
- checkout_and_verify conn
- else
- raise PoolFullError.new(size, timeout)
+ return checkout_and_verify(conn) if conn
end
+
+ Timeout.timeout(@timeout, PoolFullError) { @latch.await }
end
end
@@ -238,6 +254,7 @@ module ActiveRecord
release conn
end
+ @latch.release
end
# Remove a connection from the connection pool. The connection will
@@ -250,6 +267,7 @@ module ActiveRecord
# from the reserved hash will be a little easier.
release conn
end
+ @latch.release
end
# Removes dead connections from the pool. A dead connection can occur
@@ -262,6 +280,7 @@ module ActiveRecord
remove conn if conn.in_use? && stale > conn.last_use && !conn.active?
end
end
+ @latch.release
end
private
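
Taken together, checkout now spins in a loop: take the pool monitor, try to lease or create a connection, and if none is available block on the latch until a checkin/remove/reap broadcasts (or the timeout fires as PoolFullError). A minimal standalone sketch of that wait/retry pattern, with hypothetical acquire/FREE names rather than the real pool internals:

require 'thread'
require 'timeout'

# Same shape as the Latch added in this diff.
class Latch
  def initialize
    @mutex = Mutex.new
    @cond  = ConditionVariable.new
  end

  def release
    @mutex.synchronize { @cond.broadcast }   # wake every waiter, as checkin/remove/reap now do
  end

  def await
    @mutex.synchronize { @cond.wait @mutex }
  end
end

LATCH = Latch.new
FREE  = Queue.new
FREE << :conn_1                              # one available "connection"

# Hypothetical stand-in for ConnectionPool#checkout.
def acquire(timeout = 5)
  loop do
    begin
      return FREE.pop(true)                  # non-blocking, like leasing an idle connection
    rescue ThreadError                       # "pool" exhausted
      Timeout.timeout(timeout, Timeout::Error) { LATCH.await }
    end
  end
end

conn = acquire
FREE << conn                                 # "checkin"
LATCH.release                                # unblock any thread stuck in acquire
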
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb b/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
index f08040a1a7..7b2961a04a 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
@@ -57,21 +57,21 @@ module ActiveRecord
end
# Executes insert +sql+ statement in the context of this connection using
- # +binds+ as the bind substitutes. +name+ is the logged along with
+ # +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
def exec_insert(sql, name, binds, pk = nil, sequence_name = nil)
exec_query(sql, name, binds)
end
# Executes delete +sql+ statement in the context of this connection using
- # +binds+ as the bind substitutes. +name+ is the logged along with
+ # +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
def exec_delete(sql, name, binds)
exec_query(sql, name, binds)
end
# Executes update +sql+ statement in the context of this connection using
- # +binds+ as the bind substitutes. +name+ is the logged along with
+ # +binds+ as the bind substitutes. +name+ is logged along with
# the executed +sql+ statement.
def exec_update(sql, name, binds)
exec_query(sql, name, binds)
@@ -312,13 +312,27 @@ module ActiveRecord
# on mysql (even when aliasing the tables), but mysql allows using JOIN directly in
# an UPDATE statement, so in the mysql adapters we redefine this to do that.
def join_to_update(update, select) #:nodoc:
- subselect = select.clone
- subselect.projections = [update.key]
+ key = update.key
+ subselect = subquery_for(key, select)
- update.where update.key.in(subselect)
+ update.where key.in(subselect)
+ end
+
+ def join_to_delete(delete, select, key) #:nodoc:
+ subselect = subquery_for(key, select)
+
+ delete.where key.in(subselect)
end
protected
+
+ # Returns a subquery for the given key using the join information.
+ def subquery_for(key, select)
+ subselect = select.clone
+ subselect.projections = [key]
+ subselect
+ end
+
# Returns an array of record hashes with the column names as keys and
# column values as values.
def select(sql, name = nil, binds = [])
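
With join_to_update and the new join_to_delete both going through subquery_for, a joined UPDATE or DELETE is rewritten to filter on an IN subquery over the key column. A hypothetical joined update (Post/Comment models assumed) and the rough shape of the SQL it produces on adapters that take this generic path; quoting and boolean casts vary by adapter:

Post.joins(:comments).where(:comments => { :spam => true }).update_all(:flagged => true)
# UPDATE "posts" SET "flagged" = 't'
# WHERE "posts"."id" IN (
#   SELECT "posts"."id" FROM "posts"
#   INNER JOIN "comments" ON "comments"."post_id" = "posts"."id"
#   WHERE "comments"."spam" = 't'
# )
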
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
index 7ee8f40631..3546873550 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
@@ -65,11 +65,12 @@ module ActiveRecord
class TableDefinition
# An array of ColumnDefinition objects, representing the column changes
# that have been defined.
- attr_accessor :columns
+ attr_accessor :columns, :indexes
def initialize(base)
@columns = []
@columns_hash = {}
+ @indexes = {}
@base = base
end
@@ -212,19 +213,22 @@ module ActiveRecord
#
# TableDefinition#references will add an appropriately-named _id column, plus a corresponding _type
# column if the <tt>:polymorphic</tt> option is supplied. If <tt>:polymorphic</tt> is a hash of
- # options, these will be used when creating the <tt>_type</tt> column. So what can be written like this:
+ # options, these will be used when creating the <tt>_type</tt> column. The <tt>:index</tt> option
+ # will also create an index, similar to calling <tt>add_index</tt>. So what can be written like this:
#
# create_table :taggings do |t|
# t.integer :tag_id, :tagger_id, :taggable_id
# t.string :tagger_type
# t.string :taggable_type, :default => 'Photo'
# end
+ # add_index :taggings, :tag_id, :name => 'index_taggings_on_tag_id'
+ # add_index :taggings, [:tagger_id, :tagger_type]
#
# Can also be written as follows using references:
#
# create_table :taggings do |t|
- # t.references :tag
- # t.references :tagger, :polymorphic => true
+ # t.references :tag, :index => { :name => 'index_taggings_on_tag_id' }
+ # t.references :tagger, :polymorphic => true, :index => true
# t.references :taggable, :polymorphic => { :default => 'Photo' }
# end
def column(name, type, options = {})
@@ -255,6 +259,14 @@ module ActiveRecord
end # end
EOV
end
+
+ # Adds index options to the indexes hash, keyed by column name.
+ # This is primarily used to track indexes that need to be created after the table is created.
+ # === Examples
+ # index(:account_id, :name => 'index_projects_on_account_id')
+ def index(column_name, options = {})
+ indexes[column_name] = options
+ end
# Appends <tt>:datetime</tt> columns <tt>:created_at</tt> and
# <tt>:updated_at</tt> to the table.
@@ -267,9 +279,11 @@ module ActiveRecord
def references(*args)
options = args.extract_options!
polymorphic = options.delete(:polymorphic)
+ index_options = options.delete(:index)
args.each do |col|
column("#{col}_id", :integer, options)
column("#{col}_type", :string, polymorphic.is_a?(Hash) ? polymorphic : options) unless polymorphic.nil?
+ index(polymorphic ? %w(id type).map { |t| "#{col}_#{t}" } : "#{col}_id", index_options.is_a?(Hash) ? index_options : nil) if index_options
end
end
alias :belongs_to :references
@@ -435,9 +449,11 @@ module ActiveRecord
def references(*args)
options = args.extract_options!
polymorphic = options.delete(:polymorphic)
+ index_options = options.delete(:index)
args.each do |col|
@base.add_column(@table_name, "#{col}_id", :integer, options)
@base.add_column(@table_name, "#{col}_type", :string, polymorphic.is_a?(Hash) ? polymorphic : options) unless polymorphic.nil?
+ @base.add_index(@table_name, polymorphic ? %w(id type).map { |t| "#{col}_#{t}" } : "#{col}_id", index_options.is_a?(Hash) ? index_options : nil) if index_options
end
end
alias :belongs_to :references
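
Both references and the new TableDefinition#index write into the same indexes hash, so indexes can be declared inline while the table is being defined and are created right after it (see the create_table change in schema_statements below). A hypothetical migration snippet:

create_table :taggings do |t|
  t.references :tag,    :index => { :name => 'index_taggings_on_tag_id' }
  t.references :tagger, :polymorphic => true, :index => true
  t.integer    :account_id
  t.index      :account_id, :name => 'index_taggings_on_account_id'   # TableDefinition#index
end
# Once the CREATE TABLE has run, the collected options are replayed,
# e.g. as add_index :taggings, [:tagger_id, :tagger_type]
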
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
index 8b9e830040..30a4f9aa35 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
@@ -16,7 +16,7 @@ module ActiveRecord
# Truncates a table alias according to the limits of the current adapter.
def table_alias_for(table_name)
- table_name[0...table_alias_length].gsub(/\./, '_')
+ table_name[0...table_alias_length].tr('.', '_')
end
# Checks to see if the table +table_name+ exists on the database.
@@ -171,6 +171,7 @@ module ActiveRecord
create_sql << td.to_sql
create_sql << ") #{options[:options]}"
execute create_sql
+ td.indexes.each_pair { |c,o| add_index table_name, c, o }
end
# Creates a new join table with the name created using the lexical order of the first two
@@ -375,7 +376,7 @@ module ActiveRecord
# Note: SQLite doesn't support index length
#
# ====== Creating an index with a sort order (desc or asc, asc is the default)
- # add_index(:accounts, [:branch_id, :party_id, :surname], :order => {:branch_id => :desc, :part_id => :asc})
+ # add_index(:accounts, [:branch_id, :party_id, :surname], :order => {:branch_id => :desc, :party_id => :asc})
# generates
# CREATE INDEX by_branch_desc_party ON accounts(branch_id DESC, party_id ASC, surname)
#
diff --git a/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb b/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb
index 1d713e472b..c6faae77cc 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb
@@ -163,11 +163,6 @@ module ActiveRecord
# QUOTING ==================================================
- # Override to return the quoted table name. Defaults to column quoting.
- def quote_table_name(name)
- quote_column_name(name)
- end
-
# Returns a bind substitution value given a +column+ and list of current
# +binds+
def substitute_at(column, index)
@@ -299,7 +294,7 @@ module ActiveRecord
raise exception
end
- def translate_exception(e, message)
+ def translate_exception(exception, message)
# override in derived class
ActiveRecord::StatementInvalid.new(message)
end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb b/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
index 64f922b7ad..a848838a4e 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
@@ -294,19 +294,10 @@ module ActiveRecord
# In the simple case, MySQL allows us to place JOINs directly into the UPDATE
# query. However, this does not allow for LIMIT, OFFSET and ORDER. To support
- # these, we must use a subquery. However, MySQL is too stupid to create a
- # temporary table for this automatically, so we have to give it some prompting
- # in the form of a subsubquery. Ugh!
+ # these, we must use a subquery.
def join_to_update(update, select) #:nodoc:
if select.limit || select.offset || select.orders.any?
- subsubselect = select.clone
- subsubselect.projections = [update.key]
-
- subselect = Arel::SelectManager.new(select.engine)
- subselect.project Arel.sql(update.key.name)
- subselect.from subsubselect.as('__active_record_temp')
-
- update.where update.key.in(subselect)
+ super
else
update.table select.source
update.wheres = select.constraints
@@ -525,7 +516,7 @@ module ActiveRecord
def pk_and_sequence_for(table)
execute_and_free("SHOW CREATE TABLE #{quote_table_name(table)}", 'SCHEMA') do |result|
create_table = each_hash(result).first[:"Create Table"]
- if create_table.to_s =~ /PRIMARY KEY\s+\((.+)\)/
+ if create_table.to_s =~ /PRIMARY KEY\s+(?:USING\s+\w+\s+)?\((.+)\)/
keys = $1.split(",").map { |key| key.delete('`"') }
keys.length == 1 ? [keys.first, nil] : nil
else
@@ -558,6 +549,17 @@ module ActiveRecord
protected
+ # MySQL is too stupid to create a temporary table for use in a subquery, so we have
+ # to give it some prompting in the form of a subsubquery. Ugh!
+ def subquery_for(key, select)
+ subsubselect = select.clone
+ subsubselect.projections = [key]
+
+ subselect = Arel::SelectManager.new(select.engine)
+ subselect.project Arel.sql(key.name)
+ subselect.from subsubselect.as('__active_record_temp')
+ end
+
def add_index_length(option_strings, column_names, options = {})
if options.is_a?(Hash) && length = options[:length]
case length
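
When the relation also carries a LIMIT, OFFSET or ORDER, MySQL falls back to the shared join_to_update path, and the overridden subquery_for wraps the inner query once more so the server materializes it (the __active_record_temp alias comes from the code above). Roughly, for a hypothetical joined, limited update:

Post.joins(:comments).where(:comments => { :spam => true }).limit(10).update_all(:flagged => true)
# UPDATE `posts` SET `flagged` = 1 WHERE `posts`.`id` IN (
#   SELECT `id` FROM (
#     SELECT `posts`.`id` FROM `posts`
#     INNER JOIN `comments` ON `comments`.`post_id` = `posts`.`id`
#     WHERE `comments`.`spam` = 1 LIMIT 10
#   ) __active_record_temp
# )
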
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb b/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
index 78c00d9341..273c165084 100644
--- a/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
+++ b/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
@@ -88,9 +88,8 @@ module ActiveRecord
def escape_hstore(value)
value.nil? ? 'NULL'
- : value =~ /[=\s,>]/ ? '"%s"' % value.gsub(/(["\\])/, '\\\\\1')
: value == "" ? '""'
- : value.to_s.gsub(/(["\\])/, '\\\\\1')
+ : '"%s"' % value.to_s.gsub(/(["\\])/, '\\\\\1')
end
end
# :startdoc:
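
The simplified escape_hstore now quotes every non-NULL value instead of only those containing =, whitespace, comma or >. Assuming it behaves as the code above reads, a few illustrative inputs and outputs (escape_hstore is internal to the adapter, shown bare here for brevity):

escape_hstore(nil)        # => 'NULL'
escape_hstore('')         # => '""'
escape_hstore('a')        # => '"a"'
escape_hstore('say "hi"') # => '"say \"hi\""'
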
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb b/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb
index ee5d10859c..83f75e3505 100644
--- a/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb
@@ -1,6 +1,8 @@
-require 'active_record/connection_adapters/sqlite_adapter'
+require 'active_record/connection_adapters/abstract_adapter'
+require 'active_record/connection_adapters/statement_pool'
+require 'arel/visitors/bind_visitor'
-gem 'sqlite3', '~> 1.3.5'
+gem 'sqlite3', '~> 1.3.6'
require 'sqlite3'
module ActiveRecord
@@ -35,7 +37,184 @@ module ActiveRecord
end
module ConnectionAdapters #:nodoc:
- class SQLite3Adapter < SQLiteAdapter # :nodoc:
+ class SQLite3Column < Column #:nodoc:
+ class << self
+ def binary_to_string(value)
+ if value.encoding != Encoding::ASCII_8BIT
+ value = value.force_encoding(Encoding::ASCII_8BIT)
+ end
+ value
+ end
+ end
+ end
+
+ # The SQLite3 adapter works with SQLite 3.6.16 or newer, together with the sqlite3-ruby
+ # drivers (available as a gem from https://rubygems.org/gems/sqlite3).
+ #
+ # Options:
+ #
+ # * <tt>:database</tt> - Path to the database file.
+ class SQLite3Adapter < AbstractAdapter
+ class Version
+ include Comparable
+
+ def initialize(version_string)
+ @version = version_string.split('.').map { |v| v.to_i }
+ end
+
+ def <=>(version_string)
+ @version <=> version_string.split('.').map { |v| v.to_i }
+ end
+ end
+
+ class StatementPool < ConnectionAdapters::StatementPool
+ def initialize(connection, max)
+ super
+ @cache = Hash.new { |h,pid| h[pid] = {} }
+ end
+
+ def each(&block); cache.each(&block); end
+ def key?(key); cache.key?(key); end
+ def [](key); cache[key]; end
+ def length; cache.length; end
+
+ def []=(sql, key)
+ while @max <= cache.size
+ dealloc(cache.shift.last[:stmt])
+ end
+ cache[sql] = key
+ end
+
+ def clear
+ cache.values.each do |hash|
+ dealloc hash[:stmt]
+ end
+ cache.clear
+ end
+
+ private
+ def cache
+ @cache[$$]
+ end
+
+ def dealloc(stmt)
+ stmt.close unless stmt.closed?
+ end
+ end
+
+ class BindSubstitution < Arel::Visitors::SQLite # :nodoc:
+ include Arel::Visitors::BindVisitor
+ end
+
+ def initialize(connection, logger, config)
+ super(connection, logger)
+ @statements = StatementPool.new(@connection,
+ config.fetch(:statement_limit) { 1000 })
+ @config = config
+
+ if config.fetch(:prepared_statements) { true }
+ @visitor = Arel::Visitors::SQLite.new self
+ else
+ @visitor = BindSubstitution.new self
+ end
+ end
+
+ def adapter_name #:nodoc:
+ 'SQLite'
+ end
+
+ # Returns true
+ def supports_ddl_transactions?
+ true
+ end
+
+ # Returns true if SQLite version is '3.6.8' or greater, false otherwise.
+ def supports_savepoints?
+ sqlite_version >= '3.6.8'
+ end
+
+ # Returns true, since this connection adapter supports prepared statement
+ # caching.
+ def supports_statement_cache?
+ true
+ end
+
+ # Returns true, since this connection adapter supports migrations.
+ def supports_migrations? #:nodoc:
+ true
+ end
+
+ # Returns true.
+ def supports_primary_key? #:nodoc:
+ true
+ end
+
+ def requires_reloading?
+ true
+ end
+
+ # Returns true
+ def supports_add_column?
+ true
+ end
+
+ # Disconnects from the database if already connected. Otherwise, this
+ # method does nothing.
+ def disconnect!
+ super
+ clear_cache!
+ @connection.close rescue nil
+ end
+
+ # Clears the prepared statements cache.
+ def clear_cache!
+ @statements.clear
+ end
+
+ # Returns true
+ def supports_count_distinct? #:nodoc:
+ true
+ end
+
+ # Returns true
+ def supports_autoincrement? #:nodoc:
+ true
+ end
+
+ def supports_index_sort_order?
+ true
+ end
+
+ def native_database_types #:nodoc:
+ {
+ :primary_key => default_primary_key_type,
+ :string => { :name => "varchar", :limit => 255 },
+ :text => { :name => "text" },
+ :integer => { :name => "integer" },
+ :float => { :name => "float" },
+ :decimal => { :name => "decimal" },
+ :datetime => { :name => "datetime" },
+ :timestamp => { :name => "datetime" },
+ :time => { :name => "time" },
+ :date => { :name => "date" },
+ :binary => { :name => "blob" },
+ :boolean => { :name => "boolean" }
+ }
+ end
+
+ # Returns the current database encoding format as a string, e.g. 'UTF-8'.
+ def encoding
+ @connection.encoding.to_s
+ end
+
+ # Returns true.
+ def supports_explain?
+ true
+ end
+
+
+ # QUOTING ==================================================
+
def quote(value, column = nil)
if value.kind_of?(String) && column && column.type == :binary && column.class.respond_to?(:string_to_binary)
s = column.class.string_to_binary(value).unpack("H*")[0]
@@ -45,10 +224,387 @@ module ActiveRecord
end
end
- # Returns the current database encoding format as a string, eg: 'UTF-8'
- def encoding
- @connection.encoding.to_s
+
+ def quote_string(s) #:nodoc:
+ @connection.class.quote(s)
+ end
+
+ def quote_column_name(name) #:nodoc:
+ %Q("#{name.to_s.gsub('"', '""')}")
+ end
+
+ # Quote date/time values for use in SQL input. Includes microseconds
+ # if the value is a Time responding to usec.
+ def quoted_date(value) #:nodoc:
+ if value.respond_to?(:usec)
+ "#{super}.#{sprintf("%06d", value.usec)}"
+ else
+ super
+ end
+ end
+
+ def type_cast(value, column) # :nodoc:
+ return value.to_f if BigDecimal === value
+ return super unless String === value
+ return super unless column && value
+
+ value = super
+ if column.type == :string && value.encoding == Encoding::ASCII_8BIT
+ logger.error "Binary data inserted for `string` type on column `#{column.name}`" if logger
+ value.encode! 'utf-8'
+ end
+ value
+ end
+
+ # DATABASE STATEMENTS ======================================
+
+ def explain(arel, binds = [])
+ sql = "EXPLAIN QUERY PLAN #{to_sql(arel, binds)}"
+ ExplainPrettyPrinter.new.pp(exec_query(sql, 'EXPLAIN', binds))
+ end
+
+ class ExplainPrettyPrinter
+ # Pretty prints the result of an EXPLAIN QUERY PLAN in a way that resembles
+ # the output of the SQLite shell:
+ #
+ # 0|0|0|SEARCH TABLE users USING INTEGER PRIMARY KEY (rowid=?) (~1 rows)
+ # 0|1|1|SCAN TABLE posts (~100000 rows)
+ #
+ def pp(result) # :nodoc:
+ result.rows.map do |row|
+ row.join('|')
+ end.join("\n") + "\n"
+ end
+ end
+
+ def exec_query(sql, name = nil, binds = [])
+ log(sql, name, binds) do
+
+ # Don't cache statements without bind values
+ if binds.empty?
+ stmt = @connection.prepare(sql)
+ cols = stmt.columns
+ records = stmt.to_a
+ stmt.close
+ stmt = records
+ else
+ cache = @statements[sql] ||= {
+ :stmt => @connection.prepare(sql)
+ }
+ stmt = cache[:stmt]
+ cols = cache[:cols] ||= stmt.columns
+ stmt.reset!
+ stmt.bind_params binds.map { |col, val|
+ type_cast(val, col)
+ }
+ end
+
+ ActiveRecord::Result.new(cols, stmt.to_a)
+ end
+ end
+
+ def exec_delete(sql, name = 'SQL', binds = [])
+ exec_query(sql, name, binds)
+ @connection.changes
end
+ alias :exec_update :exec_delete
+
+ def last_inserted_id(result)
+ @connection.last_insert_row_id
+ end
+
+ def execute(sql, name = nil) #:nodoc:
+ log(sql, name) { @connection.execute(sql) }
+ end
+
+ def update_sql(sql, name = nil) #:nodoc:
+ super
+ @connection.changes
+ end
+
+ def delete_sql(sql, name = nil) #:nodoc:
+ sql += " WHERE 1=1" unless sql =~ /WHERE/i
+ super sql, name
+ end
+
+ def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
+ super
+ id_value || @connection.last_insert_row_id
+ end
+ alias :create :insert_sql
+
+ def select_rows(sql, name = nil)
+ exec_query(sql, name).rows
+ end
+
+ def create_savepoint
+ execute("SAVEPOINT #{current_savepoint_name}")
+ end
+
+ def rollback_to_savepoint
+ execute("ROLLBACK TO SAVEPOINT #{current_savepoint_name}")
+ end
+
+ def release_savepoint
+ execute("RELEASE SAVEPOINT #{current_savepoint_name}")
+ end
+
+ def begin_db_transaction #:nodoc:
+ log('begin transaction',nil) { @connection.transaction }
+ end
+
+ def commit_db_transaction #:nodoc:
+ log('commit transaction',nil) { @connection.commit }
+ end
+
+ def rollback_db_transaction #:nodoc:
+ log('rollback transaction',nil) { @connection.rollback }
+ end
+
+ # SCHEMA STATEMENTS ========================================
+
+ def tables(name = 'SCHEMA', table_name = nil) #:nodoc:
+ sql = <<-SQL
+ SELECT name
+ FROM sqlite_master
+ WHERE type = 'table' AND NOT name = 'sqlite_sequence'
+ SQL
+ sql << " AND name = #{quote_table_name(table_name)}" if table_name
+
+ exec_query(sql, name).map do |row|
+ row['name']
+ end
+ end
+
+ def table_exists?(name)
+ name && tables('SCHEMA', name).any?
+ end
+
+ # Returns an array of +SQLite3Column+ objects for the table specified by +table_name+.
+ def columns(table_name) #:nodoc:
+ table_structure(table_name).map do |field|
+ case field["dflt_value"]
+ when /^null$/i
+ field["dflt_value"] = nil
+ when /^'(.*)'$/
+ field["dflt_value"] = $1.gsub("''", "'")
+ when /^"(.*)"$/
+ field["dflt_value"] = $1.gsub('""', '"')
+ end
+
+ SQLite3Column.new(field['name'], field['dflt_value'], field['type'], field['notnull'].to_i == 0)
+ end
+ end
+
+ # Returns an array of indexes for the given table.
+ def indexes(table_name, name = nil) #:nodoc:
+ exec_query("PRAGMA index_list(#{quote_table_name(table_name)})", name).map do |row|
+ IndexDefinition.new(
+ table_name,
+ row['name'],
+ row['unique'] != 0,
+ exec_query("PRAGMA index_info('#{row['name']}')").map { |col|
+ col['name']
+ })
+ end
+ end
+
+ def primary_key(table_name) #:nodoc:
+ column = table_structure(table_name).find { |field|
+ field['pk'] == 1
+ }
+ column && column['name']
+ end
+
+ def remove_index!(table_name, index_name) #:nodoc:
+ exec_query "DROP INDEX #{quote_column_name(index_name)}"
+ end
+
+ # Renames a table.
+ #
+ # Example:
+ # rename_table('octopuses', 'octopi')
+ def rename_table(name, new_name)
+ exec_query "ALTER TABLE #{quote_table_name(name)} RENAME TO #{quote_table_name(new_name)}"
+ end
+
+ # See: http://www.sqlite.org/lang_altertable.html
+ # SQLite has an additional restriction on the ALTER TABLE statement
+ def valid_alter_table_options( type, options)
+ type.to_sym != :primary_key
+ end
+
+ def add_column(table_name, column_name, type, options = {}) #:nodoc:
+ if supports_add_column? && valid_alter_table_options( type, options )
+ super(table_name, column_name, type, options)
+ else
+ alter_table(table_name) do |definition|
+ definition.column(column_name, type, options)
+ end
+ end
+ end
+
+ def remove_column(table_name, *column_names) #:nodoc:
+ raise ArgumentError.new("You must specify at least one column name. Example: remove_column(:people, :first_name)") if column_names.empty?
+ column_names.flatten.each do |column_name|
+ alter_table(table_name) do |definition|
+ definition.columns.delete(definition[column_name])
+ end
+ end
+ end
+ alias :remove_columns :remove_column
+
+ def change_column_default(table_name, column_name, default) #:nodoc:
+ alter_table(table_name) do |definition|
+ definition[column_name].default = default
+ end
+ end
+
+ def change_column_null(table_name, column_name, null, default = nil)
+ unless null || default.nil?
+ exec_query("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
+ end
+ alter_table(table_name) do |definition|
+ definition[column_name].null = null
+ end
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ alter_table(table_name) do |definition|
+ include_default = options_include_default?(options)
+ definition[column_name].instance_eval do
+ self.type = type
+ self.limit = options[:limit] if options.include?(:limit)
+ self.default = options[:default] if include_default
+ self.null = options[:null] if options.include?(:null)
+ self.precision = options[:precision] if options.include?(:precision)
+ self.scale = options[:scale] if options.include?(:scale)
+ end
+ end
+ end
+
+ def rename_column(table_name, column_name, new_column_name) #:nodoc:
+ unless columns(table_name).detect{|c| c.name == column_name.to_s }
+ raise ActiveRecord::ActiveRecordError, "Missing column #{table_name}.#{column_name}"
+ end
+ alter_table(table_name, :rename => {column_name.to_s => new_column_name.to_s})
+ end
+
+ def empty_insert_statement_value
+ "VALUES(NULL)"
+ end
+
+ protected
+ def select(sql, name = nil, binds = []) #:nodoc:
+ exec_query(sql, name, binds)
+ end
+
+ def table_structure(table_name)
+ structure = exec_query("PRAGMA table_info(#{quote_table_name(table_name)})", 'SCHEMA').to_hash
+ raise(ActiveRecord::StatementInvalid, "Could not find table '#{table_name}'") if structure.empty?
+ structure
+ end
+
+ def alter_table(table_name, options = {}) #:nodoc:
+ altered_table_name = "altered_#{table_name}"
+ caller = lambda {|definition| yield definition if block_given?}
+
+ transaction do
+ move_table(table_name, altered_table_name,
+ options.merge(:temporary => true))
+ move_table(altered_table_name, table_name, &caller)
+ end
+ end
+
+ def move_table(from, to, options = {}, &block) #:nodoc:
+ copy_table(from, to, options, &block)
+ drop_table(from)
+ end
+
+ def copy_table(from, to, options = {}) #:nodoc:
+ options = options.merge(:id => (!columns(from).detect{|c| c.name == 'id'}.nil? && 'id' == primary_key(from).to_s))
+ create_table(to, options) do |definition|
+ @definition = definition
+ columns(from).each do |column|
+ column_name = options[:rename] ?
+ (options[:rename][column.name] ||
+ options[:rename][column.name.to_sym] ||
+ column.name) : column.name
+
+ @definition.column(column_name, column.type,
+ :limit => column.limit, :default => column.default,
+ :precision => column.precision, :scale => column.scale,
+ :null => column.null)
+ end
+ @definition.primary_key(primary_key(from)) if primary_key(from)
+ yield @definition if block_given?
+ end
+
+ copy_table_indexes(from, to, options[:rename] || {})
+ copy_table_contents(from, to,
+ @definition.columns.map {|column| column.name},
+ options[:rename] || {})
+ end
+
+ def copy_table_indexes(from, to, rename = {}) #:nodoc:
+ indexes(from).each do |index|
+ name = index.name
+ if to == "altered_#{from}"
+ name = "temp_#{name}"
+ elsif from == "altered_#{to}"
+ name = name[5..-1]
+ end
+
+ to_column_names = columns(to).map { |c| c.name }
+ columns = index.columns.map {|c| rename[c] || c }.select do |column|
+ to_column_names.include?(column)
+ end
+
+ unless columns.empty?
+ # index name can't be the same
+ opts = { :name => name.gsub(/_(#{from})_/, "_#{to}_") }
+ opts[:unique] = true if index.unique
+ add_index(to, columns, opts)
+ end
+ end
+ end
+
+ def copy_table_contents(from, to, columns, rename = {}) #:nodoc:
+ column_mappings = Hash[columns.map {|name| [name, name]}]
+ rename.each { |a| column_mappings[a.last] = a.first }
+ from_columns = columns(from).collect {|col| col.name}
+ columns = columns.find_all{|col| from_columns.include?(column_mappings[col])}
+ quoted_columns = columns.map { |col| quote_column_name(col) } * ','
+
+ quoted_to = quote_table_name(to)
+ exec_query("SELECT * FROM #{quote_table_name(from)}").each do |row|
+ sql = "INSERT INTO #{quoted_to} (#{quoted_columns}) VALUES ("
+ sql << columns.map {|col| quote row[column_mappings[col]]} * ', '
+ sql << ')'
+ exec_query sql
+ end
+ end
+
+ def sqlite_version
+ @sqlite_version ||= SQLite3Adapter::Version.new(select_value('select sqlite_version(*)'))
+ end
+
+ def default_primary_key_type
+ if supports_autoincrement?
+ 'INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL'
+ else
+ 'INTEGER PRIMARY KEY NOT NULL'
+ end
+ end
+
+ def translate_exception(exception, message)
+ case exception.message
+ when /column(s)? .* (is|are) not unique/
+ RecordNotUnique.new(message, exception)
+ else
+ super
+ end
+ end
end
end
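
With SQLiteAdapter gone (its file is deleted below), sqlite3 connections go straight through this class; the configuration keys read in initialize stay the same. A hypothetical connection hash exercising them:

ActiveRecord::Base.establish_connection(
  :adapter             => 'sqlite3',
  :database            => 'db/development.sqlite3',
  :statement_limit     => 500,    # size of the per-process StatementPool (defaults to 1000)
  :prepared_statements => false   # switch to the BindSubstitution visitor
)
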
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite_adapter.rb b/activerecord/lib/active_record/connection_adapters/sqlite_adapter.rb
deleted file mode 100644
index 3d8dfab05c..0000000000
--- a/activerecord/lib/active_record/connection_adapters/sqlite_adapter.rb
+++ /dev/null
@@ -1,563 +0,0 @@
-require 'active_record/connection_adapters/abstract_adapter'
-require 'active_record/connection_adapters/statement_pool'
-require 'arel/visitors/bind_visitor'
-
-module ActiveRecord
- module ConnectionAdapters #:nodoc:
- class SQLiteColumn < Column #:nodoc:
- class << self
- def binary_to_string(value)
- if value.encoding != Encoding::ASCII_8BIT
- value = value.force_encoding(Encoding::ASCII_8BIT)
- end
- value
- end
- end
- end
-
- # The SQLite adapter works with both the 2.x and 3.x series of SQLite with the sqlite-ruby
- # drivers (available both as gems and from http://rubyforge.org/projects/sqlite-ruby/).
- #
- # Options:
- #
- # * <tt>:database</tt> - Path to the database file.
- class SQLiteAdapter < AbstractAdapter
- class Version
- include Comparable
-
- def initialize(version_string)
- @version = version_string.split('.').map { |v| v.to_i }
- end
-
- def <=>(version_string)
- @version <=> version_string.split('.').map { |v| v.to_i }
- end
- end
-
- class StatementPool < ConnectionAdapters::StatementPool
- def initialize(connection, max)
- super
- @cache = Hash.new { |h,pid| h[pid] = {} }
- end
-
- def each(&block); cache.each(&block); end
- def key?(key); cache.key?(key); end
- def [](key); cache[key]; end
- def length; cache.length; end
-
- def []=(sql, key)
- while @max <= cache.size
- dealloc(cache.shift.last[:stmt])
- end
- cache[sql] = key
- end
-
- def clear
- cache.values.each do |hash|
- dealloc hash[:stmt]
- end
- cache.clear
- end
-
- private
- def cache
- @cache[$$]
- end
-
- def dealloc(stmt)
- stmt.close unless stmt.closed?
- end
- end
-
- class BindSubstitution < Arel::Visitors::SQLite # :nodoc:
- include Arel::Visitors::BindVisitor
- end
-
- def initialize(connection, logger, config)
- super(connection, logger)
- @statements = StatementPool.new(@connection,
- config.fetch(:statement_limit) { 1000 })
- @config = config
-
- if config.fetch(:prepared_statements) { true }
- @visitor = Arel::Visitors::SQLite.new self
- else
- @visitor = BindSubstitution.new self
- end
- end
-
- def adapter_name #:nodoc:
- 'SQLite'
- end
-
- # Returns true if SQLite version is '2.0.0' or greater, false otherwise.
- def supports_ddl_transactions?
- sqlite_version >= '2.0.0'
- end
-
- # Returns true if SQLite version is '3.6.8' or greater, false otherwise.
- def supports_savepoints?
- sqlite_version >= '3.6.8'
- end
-
- # Returns true, since this connection adapter supports prepared statement
- # caching.
- def supports_statement_cache?
- true
- end
-
- # Returns true, since this connection adapter supports migrations.
- def supports_migrations? #:nodoc:
- true
- end
-
- # Returns true.
- def supports_primary_key? #:nodoc:
- true
- end
-
- # Returns true.
- def supports_explain?
- true
- end
-
- def requires_reloading?
- true
- end
-
- # Returns true if SQLite version is '3.1.6' or greater, false otherwise.
- def supports_add_column?
- sqlite_version >= '3.1.6'
- end
-
- # Disconnects from the database if already connected. Otherwise, this
- # method does nothing.
- def disconnect!
- super
- clear_cache!
- @connection.close rescue nil
- end
-
- # Clears the prepared statements cache.
- def clear_cache!
- @statements.clear
- end
-
- # Returns true if SQLite version is '3.2.6' or greater, false otherwise.
- def supports_count_distinct? #:nodoc:
- sqlite_version >= '3.2.6'
- end
-
- # Returns true if SQLite version is '3.1.0' or greater, false otherwise.
- def supports_autoincrement? #:nodoc:
- sqlite_version >= '3.1.0'
- end
-
- def supports_index_sort_order?
- sqlite_version >= '3.3.0'
- end
-
- def native_database_types #:nodoc:
- {
- :primary_key => default_primary_key_type,
- :string => { :name => "varchar", :limit => 255 },
- :text => { :name => "text" },
- :integer => { :name => "integer" },
- :float => { :name => "float" },
- :decimal => { :name => "decimal" },
- :datetime => { :name => "datetime" },
- :timestamp => { :name => "datetime" },
- :time => { :name => "time" },
- :date => { :name => "date" },
- :binary => { :name => "blob" },
- :boolean => { :name => "boolean" }
- }
- end
-
-
- # QUOTING ==================================================
-
- def quote_string(s) #:nodoc:
- @connection.class.quote(s)
- end
-
- def quote_column_name(name) #:nodoc:
- %Q("#{name.to_s.gsub('"', '""')}")
- end
-
- # Quote date/time values for use in SQL input. Includes microseconds
- # if the value is a Time responding to usec.
- def quoted_date(value) #:nodoc:
- if value.respond_to?(:usec)
- "#{super}.#{sprintf("%06d", value.usec)}"
- else
- super
- end
- end
-
- def type_cast(value, column) # :nodoc:
- return value.to_f if BigDecimal === value
- return super unless String === value
- return super unless column && value
-
- value = super
- if column.type == :string && value.encoding == Encoding::ASCII_8BIT
- logger.error "Binary data inserted for `string` type on column `#{column.name}`" if logger
- value.encode! 'utf-8'
- end
- value
- end
-
- # DATABASE STATEMENTS ======================================
-
- def explain(arel, binds = [])
- sql = "EXPLAIN QUERY PLAN #{to_sql(arel, binds)}"
- ExplainPrettyPrinter.new.pp(exec_query(sql, 'EXPLAIN', binds))
- end
-
- class ExplainPrettyPrinter
- # Pretty prints the result of a EXPLAIN QUERY PLAN in a way that resembles
- # the output of the SQLite shell:
- #
- # 0|0|0|SEARCH TABLE users USING INTEGER PRIMARY KEY (rowid=?) (~1 rows)
- # 0|1|1|SCAN TABLE posts (~100000 rows)
- #
- def pp(result) # :nodoc:
- result.rows.map do |row|
- row.join('|')
- end.join("\n") + "\n"
- end
- end
-
- def exec_query(sql, name = nil, binds = [])
- log(sql, name, binds) do
-
- # Don't cache statements without bind values
- if binds.empty?
- stmt = @connection.prepare(sql)
- cols = stmt.columns
- records = stmt.to_a
- stmt.close
- stmt = records
- else
- cache = @statements[sql] ||= {
- :stmt => @connection.prepare(sql)
- }
- stmt = cache[:stmt]
- cols = cache[:cols] ||= stmt.columns
- stmt.reset!
- stmt.bind_params binds.map { |col, val|
- type_cast(val, col)
- }
- end
-
- ActiveRecord::Result.new(cols, stmt.to_a)
- end
- end
-
- def exec_delete(sql, name = 'SQL', binds = [])
- exec_query(sql, name, binds)
- @connection.changes
- end
- alias :exec_update :exec_delete
-
- def last_inserted_id(result)
- @connection.last_insert_row_id
- end
-
- def execute(sql, name = nil) #:nodoc:
- log(sql, name) { @connection.execute(sql) }
- end
-
- def update_sql(sql, name = nil) #:nodoc:
- super
- @connection.changes
- end
-
- def delete_sql(sql, name = nil) #:nodoc:
- sql += " WHERE 1=1" unless sql =~ /WHERE/i
- super sql, name
- end
-
- def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
- super
- id_value || @connection.last_insert_row_id
- end
- alias :create :insert_sql
-
- def select_rows(sql, name = nil)
- exec_query(sql, name).rows
- end
-
- def create_savepoint
- execute("SAVEPOINT #{current_savepoint_name}")
- end
-
- def rollback_to_savepoint
- execute("ROLLBACK TO SAVEPOINT #{current_savepoint_name}")
- end
-
- def release_savepoint
- execute("RELEASE SAVEPOINT #{current_savepoint_name}")
- end
-
- def begin_db_transaction #:nodoc:
- log('begin transaction',nil) { @connection.transaction }
- end
-
- def commit_db_transaction #:nodoc:
- log('commit transaction',nil) { @connection.commit }
- end
-
- def rollback_db_transaction #:nodoc:
- log('rollback transaction',nil) { @connection.rollback }
- end
-
- # SCHEMA STATEMENTS ========================================
-
- def tables(name = 'SCHEMA', table_name = nil) #:nodoc:
- sql = <<-SQL
- SELECT name
- FROM sqlite_master
- WHERE type = 'table' AND NOT name = 'sqlite_sequence'
- SQL
- sql << " AND name = #{quote_table_name(table_name)}" if table_name
-
- exec_query(sql, name).map do |row|
- row['name']
- end
- end
-
- def table_exists?(name)
- name && tables('SCHEMA', name).any?
- end
-
- # Returns an array of +SQLiteColumn+ objects for the table specified by +table_name+.
- def columns(table_name) #:nodoc:
- table_structure(table_name).map do |field|
- case field["dflt_value"]
- when /^null$/i
- field["dflt_value"] = nil
- when /^'(.*)'$/
- field["dflt_value"] = $1.gsub(/''/, "'")
- when /^"(.*)"$/
- field["dflt_value"] = $1.gsub(/""/, '"')
- end
-
- SQLiteColumn.new(field['name'], field['dflt_value'], field['type'], field['notnull'].to_i == 0)
- end
- end
-
- # Returns an array of indexes for the given table.
- def indexes(table_name, name = nil) #:nodoc:
- exec_query("PRAGMA index_list(#{quote_table_name(table_name)})", name).map do |row|
- IndexDefinition.new(
- table_name,
- row['name'],
- row['unique'] != 0,
- exec_query("PRAGMA index_info('#{row['name']}')").map { |col|
- col['name']
- })
- end
- end
-
- def primary_key(table_name) #:nodoc:
- column = table_structure(table_name).find { |field|
- field['pk'] == 1
- }
- column && column['name']
- end
-
- def remove_index!(table_name, index_name) #:nodoc:
- exec_query "DROP INDEX #{quote_column_name(index_name)}"
- end
-
- # Renames a table.
- #
- # Example:
- # rename_table('octopuses', 'octopi')
- def rename_table(name, new_name)
- exec_query "ALTER TABLE #{quote_table_name(name)} RENAME TO #{quote_table_name(new_name)}"
- end
-
- # See: http://www.sqlite.org/lang_altertable.html
- # SQLite has an additional restriction on the ALTER TABLE statement
- def valid_alter_table_options( type, options)
- type.to_sym != :primary_key
- end
-
- def add_column(table_name, column_name, type, options = {}) #:nodoc:
- if supports_add_column? && valid_alter_table_options( type, options )
- super(table_name, column_name, type, options)
- else
- alter_table(table_name) do |definition|
- definition.column(column_name, type, options)
- end
- end
- end
-
- def remove_column(table_name, *column_names) #:nodoc:
- raise ArgumentError.new("You must specify at least one column name. Example: remove_column(:people, :first_name)") if column_names.empty?
- column_names.flatten.each do |column_name|
- alter_table(table_name) do |definition|
- definition.columns.delete(definition[column_name])
- end
- end
- end
- alias :remove_columns :remove_column
-
- def change_column_default(table_name, column_name, default) #:nodoc:
- alter_table(table_name) do |definition|
- definition[column_name].default = default
- end
- end
-
- def change_column_null(table_name, column_name, null, default = nil)
- unless null || default.nil?
- exec_query("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
- end
- alter_table(table_name) do |definition|
- definition[column_name].null = null
- end
- end
-
- def change_column(table_name, column_name, type, options = {}) #:nodoc:
- alter_table(table_name) do |definition|
- include_default = options_include_default?(options)
- definition[column_name].instance_eval do
- self.type = type
- self.limit = options[:limit] if options.include?(:limit)
- self.default = options[:default] if include_default
- self.null = options[:null] if options.include?(:null)
- self.precision = options[:precision] if options.include?(:precision)
- self.scale = options[:scale] if options.include?(:scale)
- end
- end
- end
-
- def rename_column(table_name, column_name, new_column_name) #:nodoc:
- unless columns(table_name).detect{|c| c.name == column_name.to_s }
- raise ActiveRecord::ActiveRecordError, "Missing column #{table_name}.#{column_name}"
- end
- alter_table(table_name, :rename => {column_name.to_s => new_column_name.to_s})
- end
-
- def empty_insert_statement_value
- "VALUES(NULL)"
- end
-
- protected
- def select(sql, name = nil, binds = []) #:nodoc:
- exec_query(sql, name, binds)
- end
-
- def table_structure(table_name)
- structure = exec_query("PRAGMA table_info(#{quote_table_name(table_name)})", 'SCHEMA').to_hash
- raise(ActiveRecord::StatementInvalid, "Could not find table '#{table_name}'") if structure.empty?
- structure
- end
-
- def alter_table(table_name, options = {}) #:nodoc:
- altered_table_name = "altered_#{table_name}"
- caller = lambda {|definition| yield definition if block_given?}
-
- transaction do
- move_table(table_name, altered_table_name,
- options.merge(:temporary => true))
- move_table(altered_table_name, table_name, &caller)
- end
- end
-
- def move_table(from, to, options = {}, &block) #:nodoc:
- copy_table(from, to, options, &block)
- drop_table(from)
- end
-
- def copy_table(from, to, options = {}) #:nodoc:
- options = options.merge(:id => (!columns(from).detect{|c| c.name == 'id'}.nil? && 'id' == primary_key(from).to_s))
- create_table(to, options) do |definition|
- @definition = definition
- columns(from).each do |column|
- column_name = options[:rename] ?
- (options[:rename][column.name] ||
- options[:rename][column.name.to_sym] ||
- column.name) : column.name
-
- @definition.column(column_name, column.type,
- :limit => column.limit, :default => column.default,
- :precision => column.precision, :scale => column.scale,
- :null => column.null)
- end
- @definition.primary_key(primary_key(from)) if primary_key(from)
- yield @definition if block_given?
- end
-
- copy_table_indexes(from, to, options[:rename] || {})
- copy_table_contents(from, to,
- @definition.columns.map {|column| column.name},
- options[:rename] || {})
- end
-
- def copy_table_indexes(from, to, rename = {}) #:nodoc:
- indexes(from).each do |index|
- name = index.name
- if to == "altered_#{from}"
- name = "temp_#{name}"
- elsif from == "altered_#{to}"
- name = name[5..-1]
- end
-
- to_column_names = columns(to).map { |c| c.name }
- columns = index.columns.map {|c| rename[c] || c }.select do |column|
- to_column_names.include?(column)
- end
-
- unless columns.empty?
- # index name can't be the same
- opts = { :name => name.gsub(/_(#{from})_/, "_#{to}_") }
- opts[:unique] = true if index.unique
- add_index(to, columns, opts)
- end
- end
- end
-
- def copy_table_contents(from, to, columns, rename = {}) #:nodoc:
- column_mappings = Hash[columns.map {|name| [name, name]}]
- rename.each { |a| column_mappings[a.last] = a.first }
- from_columns = columns(from).collect {|col| col.name}
- columns = columns.find_all{|col| from_columns.include?(column_mappings[col])}
- quoted_columns = columns.map { |col| quote_column_name(col) } * ','
-
- quoted_to = quote_table_name(to)
- exec_query("SELECT * FROM #{quote_table_name(from)}").each do |row|
- sql = "INSERT INTO #{quoted_to} (#{quoted_columns}) VALUES ("
- sql << columns.map {|col| quote row[column_mappings[col]]} * ', '
- sql << ')'
- exec_query sql
- end
- end
-
- def sqlite_version
- @sqlite_version ||= SQLiteAdapter::Version.new(select_value('select sqlite_version(*)'))
- end
-
- def default_primary_key_type
- if supports_autoincrement?
- 'INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL'
- else
- 'INTEGER PRIMARY KEY NOT NULL'
- end
- end
-
- def translate_exception(exception, message)
- case exception.message
- when /column(s)? .* (is|are) not unique/
- RecordNotUnique.new(message, exception)
- else
- super
- end
- end
-
- end
- end
-end