Diffstat (limited to 'activerecord/lib')
-rw-r--r--  activerecord/lib/active_record.rb | 173
-rw-r--r--  activerecord/lib/active_record/aggregations.rb | 266
-rw-r--r--  activerecord/lib/active_record/association_relation.rb | 22
-rw-r--r--  activerecord/lib/active_record/associations.rb | 1630
-rw-r--r--  activerecord/lib/active_record/associations/alias_tracker.rb | 96
-rw-r--r--  activerecord/lib/active_record/associations/association.rb | 253
-rw-r--r--  activerecord/lib/active_record/associations/association_scope.rb | 182
-rw-r--r--  activerecord/lib/active_record/associations/belongs_to_association.rb | 111
-rw-r--r--  activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb | 40
-rw-r--r--  activerecord/lib/active_record/associations/builder/association.rb | 149
-rw-r--r--  activerecord/lib/active_record/associations/builder/belongs_to.rb | 116
-rw-r--r--  activerecord/lib/active_record/associations/builder/collection_association.rb | 91
-rw-r--r--  activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb | 124
-rw-r--r--  activerecord/lib/active_record/associations/builder/has_many.rb | 15
-rw-r--r--  activerecord/lib/active_record/associations/builder/has_one.rb | 23
-rw-r--r--  activerecord/lib/active_record/associations/builder/singular_association.rb | 38
-rw-r--r--  activerecord/lib/active_record/associations/collection_association.rb | 606
-rw-r--r--  activerecord/lib/active_record/associations/collection_proxy.rb | 1030
-rw-r--r--  activerecord/lib/active_record/associations/has_many_association.rb | 184
-rw-r--r--  activerecord/lib/active_record/associations/has_many_through_association.rb | 235
-rw-r--r--  activerecord/lib/active_record/associations/has_one_association.rb | 105
-rw-r--r--  activerecord/lib/active_record/associations/has_one_through_association.rb | 36
-rw-r--r--  activerecord/lib/active_record/associations/join_dependency.rb | 273
-rw-r--r--  activerecord/lib/active_record/associations/join_dependency/join_association.rb | 122
-rw-r--r--  activerecord/lib/active_record/associations/join_dependency/join_base.rb | 22
-rw-r--r--  activerecord/lib/active_record/associations/join_dependency/join_part.rb | 72
-rw-r--r--  activerecord/lib/active_record/associations/preloader.rb | 193
-rw-r--r--  activerecord/lib/active_record/associations/preloader/association.rb | 167
-rw-r--r--  activerecord/lib/active_record/associations/preloader/belongs_to.rb | 17
-rw-r--r--  activerecord/lib/active_record/associations/preloader/collection_association.rb | 24
-rw-r--r--  activerecord/lib/active_record/associations/preloader/has_many.rb | 17
-rw-r--r--  activerecord/lib/active_record/associations/preloader/has_many_through.rb | 19
-rw-r--r--  activerecord/lib/active_record/associations/preloader/has_one.rb | 23
-rw-r--r--  activerecord/lib/active_record/associations/preloader/has_one_through.rb | 9
-rw-r--r--  activerecord/lib/active_record/associations/preloader/singular_association.rb | 21
-rw-r--r--  activerecord/lib/active_record/associations/preloader/through_association.rb | 95
-rw-r--r--  activerecord/lib/active_record/associations/singular_association.rb | 80
-rw-r--r--  activerecord/lib/active_record/associations/through_association.rb | 92
-rw-r--r--  activerecord/lib/active_record/attribute.rb | 120
-rw-r--r--  activerecord/lib/active_record/attribute_assignment.rb | 212
-rw-r--r--  activerecord/lib/active_record/attribute_decorators.rb | 66
-rw-r--r--  activerecord/lib/active_record/attribute_methods.rb | 433
-rw-r--r--  activerecord/lib/active_record/attribute_methods/before_type_cast.rb | 71
-rw-r--r--  activerecord/lib/active_record/attribute_methods/dirty.rb | 167
-rw-r--r--  activerecord/lib/active_record/attribute_methods/primary_key.rb | 127
-rw-r--r--  activerecord/lib/active_record/attribute_methods/query.rb | 40
-rw-r--r--  activerecord/lib/active_record/attribute_methods/read.rb | 97
-rw-r--r--  activerecord/lib/active_record/attribute_methods/serialization.rb | 70
-rw-r--r--  activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb | 63
-rw-r--r--  activerecord/lib/active_record/attribute_methods/write.rb | 83
-rw-r--r--  activerecord/lib/active_record/attribute_set.rb | 77
-rw-r--r--  activerecord/lib/active_record/attribute_set/builder.rb | 32
-rw-r--r--  activerecord/lib/active_record/attributes.rb | 122
-rw-r--r--  activerecord/lib/active_record/autosave_association.rb | 437
-rw-r--r--  activerecord/lib/active_record/base.rb | 317
-rw-r--r--  activerecord/lib/active_record/callbacks.rb | 313
-rw-r--r--  activerecord/lib/active_record/coders/json.rb | 13
-rw-r--r--  activerecord/lib/active_record/coders/yaml_column.rb | 38
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb | 657
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb | 67
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb | 371
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb | 95
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/quoting.rb | 133
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb | 21
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb | 125
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb | 564
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb | 46
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb | 979
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract/transaction.rb | 197
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract_adapter.rb | 479
-rw-r--r--  activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb | 841
-rw-r--r--  activerecord/lib/active_record/connection_adapters/column.rb | 62
-rw-r--r--  activerecord/lib/active_record/connection_adapters/connection_specification.rb | 270
-rw-r--r--  activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb | 281
-rw-r--r--  activerecord/lib/active_record/connection_adapters/mysql_adapter.rb | 487
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/array_parser.rb | 93
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/column.rb | 20
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb | 236
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid.rb | 36
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb | 96
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb | 52
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb | 13
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb | 14
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb | 46
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/date.rb | 11
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb | 27
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb | 13
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb | 17
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/float.rb | 21
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb | 59
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb | 13
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/infinity.rb | 13
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/integer.rb | 11
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/json.rb | 35
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb | 23
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb | 43
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb | 43
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb | 76
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb | 15
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/time.rb | 11
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb | 85
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb | 26
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb | 26
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb | 28
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb | 118
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb | 30
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb | 154
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb | 560
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql/utils.rb | 66
-rw-r--r--  activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb | 743
-rw-r--r--  activerecord/lib/active_record/connection_adapters/schema_cache.rb | 94
-rw-r--r--  activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb | 633
-rw-r--r--  activerecord/lib/active_record/connection_adapters/statement_pool.rb | 40
-rw-r--r--  activerecord/lib/active_record/connection_handling.rb | 132
-rw-r--r--  activerecord/lib/active_record/core.rb | 552
-rw-r--r--  activerecord/lib/active_record/counter_cache.rb | 175
-rw-r--r--  activerecord/lib/active_record/dynamic_matchers.rb | 140
-rw-r--r--  activerecord/lib/active_record/enum.rb | 198
-rw-r--r--  activerecord/lib/active_record/errors.rb | 231
-rw-r--r--  activerecord/lib/active_record/explain.rb | 38
-rw-r--r--  activerecord/lib/active_record/explain_registry.rb | 30
-rw-r--r--  activerecord/lib/active_record/explain_subscriber.rb | 29
-rw-r--r--  activerecord/lib/active_record/fixture_set/file.rb | 56
-rw-r--r--  activerecord/lib/active_record/fixtures.rb | 1030
-rw-r--r--  activerecord/lib/active_record/gem_version.rb | 15
-rw-r--r--  activerecord/lib/active_record/inheritance.rb | 247
-rw-r--r--  activerecord/lib/active_record/integration.rb | 113
-rw-r--r--  activerecord/lib/active_record/locale/en.yml | 47
-rw-r--r--  activerecord/lib/active_record/locking/optimistic.rb | 204
-rw-r--r--  activerecord/lib/active_record/locking/pessimistic.rb | 77
-rw-r--r--  activerecord/lib/active_record/log_subscriber.rb | 75
-rw-r--r--  activerecord/lib/active_record/migration.rb | 1045
-rw-r--r--  activerecord/lib/active_record/migration/command_recorder.rb | 197
-rw-r--r--  activerecord/lib/active_record/migration/join_table.rb | 15
-rw-r--r--  activerecord/lib/active_record/model_schema.rb | 339
-rw-r--r--  activerecord/lib/active_record/nested_attributes.rb | 548
-rw-r--r--  activerecord/lib/active_record/no_touching.rb | 52
-rw-r--r--  activerecord/lib/active_record/null_relation.rb | 81
-rw-r--r--  activerecord/lib/active_record/persistence.rb | 532
-rw-r--r--  activerecord/lib/active_record/query_cache.rb | 56
-rw-r--r--  activerecord/lib/active_record/querying.rb | 58
-rw-r--r--  activerecord/lib/active_record/railtie.rb | 164
-rw-r--r--  activerecord/lib/active_record/railties/console_sandbox.rb | 5
-rw-r--r--  activerecord/lib/active_record/railties/controller_runtime.rb | 50
-rw-r--r--  activerecord/lib/active_record/railties/databases.rake | 390
-rw-r--r--  activerecord/lib/active_record/railties/jdbcmysql_error.rb | 16
-rw-r--r--  activerecord/lib/active_record/readonly_attributes.rb | 23
-rw-r--r--  activerecord/lib/active_record/reflection.rb | 867
-rw-r--r--  activerecord/lib/active_record/relation.rb | 674
-rw-r--r--  activerecord/lib/active_record/relation/batches.rb | 138
-rw-r--r--  activerecord/lib/active_record/relation/calculations.rb | 402
-rw-r--r--  activerecord/lib/active_record/relation/delegation.rb | 140
-rw-r--r--  activerecord/lib/active_record/relation/finder_methods.rb | 515
-rw-r--r--  activerecord/lib/active_record/relation/merger.rb | 182
-rw-r--r--  activerecord/lib/active_record/relation/predicate_builder.rb | 126
-rw-r--r--  activerecord/lib/active_record/relation/predicate_builder/array_handler.rb | 34
-rw-r--r--  activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb | 13
-rw-r--r--  activerecord/lib/active_record/relation/query_methods.rb | 1141
-rw-r--r--  activerecord/lib/active_record/relation/spawn_methods.rb | 75
-rw-r--r--  activerecord/lib/active_record/result.rb | 127
-rw-r--r--  activerecord/lib/active_record/runtime_registry.rb | 22
-rw-r--r--  activerecord/lib/active_record/sanitization.rb | 188
-rw-r--r--  activerecord/lib/active_record/schema.rb | 64
-rw-r--r--  activerecord/lib/active_record/schema_dumper.rb | 262
-rw-r--r--  activerecord/lib/active_record/schema_migration.rb | 56
-rw-r--r--  activerecord/lib/active_record/scoping.rb | 87
-rw-r--r--  activerecord/lib/active_record/scoping/default.rb | 134
-rw-r--r--  activerecord/lib/active_record/scoping/named.rb | 160
-rw-r--r--  activerecord/lib/active_record/serialization.rb | 22
-rw-r--r--  activerecord/lib/active_record/serializers/xml_serializer.rb | 193
-rw-r--r--  activerecord/lib/active_record/statement_cache.rb | 100
-rw-r--r--  activerecord/lib/active_record/store.rb | 205
-rw-r--r--  activerecord/lib/active_record/tasks/database_tasks.rb | 276
-rw-r--r--  activerecord/lib/active_record/tasks/mysql_database_tasks.rb | 144
-rw-r--r--  activerecord/lib/active_record/tasks/postgresql_database_tasks.rb | 90
-rw-r--r--  activerecord/lib/active_record/tasks/sqlite_database_tasks.rb | 55
-rw-r--r--  activerecord/lib/active_record/timestamp.rb | 120
-rw-r--r--  activerecord/lib/active_record/transactions.rb | 397
-rw-r--r--  activerecord/lib/active_record/translation.rb | 22
-rw-r--r--  activerecord/lib/active_record/type.rb | 20
-rw-r--r--  activerecord/lib/active_record/type/binary.rb | 40
-rw-r--r--  activerecord/lib/active_record/type/boolean.rb | 19
-rw-r--r--  activerecord/lib/active_record/type/date.rb | 46
-rw-r--r--  activerecord/lib/active_record/type/date_time.rb | 43
-rw-r--r--  activerecord/lib/active_record/type/decimal.rb | 40
-rw-r--r--  activerecord/lib/active_record/type/decimal_without_scale.rb | 11
-rw-r--r--  activerecord/lib/active_record/type/float.rb | 19
-rw-r--r--  activerecord/lib/active_record/type/hash_lookup_type_map.rb | 19
-rw-r--r--  activerecord/lib/active_record/type/integer.rb | 23
-rw-r--r--  activerecord/lib/active_record/type/mutable.rb | 16
-rw-r--r--  activerecord/lib/active_record/type/numeric.rb | 36
-rw-r--r--  activerecord/lib/active_record/type/serialized.rb | 51
-rw-r--r--  activerecord/lib/active_record/type/string.rb | 36
-rw-r--r--  activerecord/lib/active_record/type/text.rb | 11
-rw-r--r--  activerecord/lib/active_record/type/time.rb | 26
-rw-r--r--  activerecord/lib/active_record/type/time_value.rb | 38
-rw-r--r--  activerecord/lib/active_record/type/type_map.rb | 48
-rw-r--r--  activerecord/lib/active_record/type/value.rb | 94
-rw-r--r--  activerecord/lib/active_record/validations.rb | 89
-rw-r--r--  activerecord/lib/active_record/validations/associated.rb | 49
-rw-r--r--  activerecord/lib/active_record/validations/presence.rb | 65
-rw-r--r--  activerecord/lib/active_record/validations/uniqueness.rb | 229
-rw-r--r--  activerecord/lib/active_record/version.rb | 8
-rw-r--r--  activerecord/lib/rails/generators/active_record.rb | 17
-rw-r--r--  activerecord/lib/rails/generators/active_record/migration.rb | 18
-rw-r--r--  activerecord/lib/rails/generators/active_record/migration/migration_generator.rb | 70
-rw-r--r--  activerecord/lib/rails/generators/active_record/migration/templates/create_table_migration.rb | 19
-rw-r--r--  activerecord/lib/rails/generators/active_record/migration/templates/migration.rb | 39
-rw-r--r--  activerecord/lib/rails/generators/active_record/model/model_generator.rb | 52
-rw-r--r--  activerecord/lib/rails/generators/active_record/model/templates/model.rb | 10
-rw-r--r--  activerecord/lib/rails/generators/active_record/model/templates/module.rb | 7
211 files changed, 34283 insertions, 0 deletions
diff --git a/activerecord/lib/active_record.rb b/activerecord/lib/active_record.rb
new file mode 100644
index 0000000000..9028970a3d
--- /dev/null
+++ b/activerecord/lib/active_record.rb
@@ -0,0 +1,173 @@
+#--
+# Copyright (c) 2004-2014 David Heinemeier Hansson
+#
+# Permission is hereby granted, free of charge, to any person obtaining
+# a copy of this software and associated documentation files (the
+# "Software"), to deal in the Software without restriction, including
+# without limitation the rights to use, copy, modify, merge, publish,
+# distribute, sublicense, and/or sell copies of the Software, and to
+# permit persons to whom the Software is furnished to do so, subject to
+# the following conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+#++
+
+require 'active_support'
+require 'active_support/rails'
+require 'active_model'
+require 'arel'
+
+require 'active_record/version'
+require 'active_record/attribute_set'
+
+module ActiveRecord
+ extend ActiveSupport::Autoload
+
+ autoload :Attribute
+ autoload :Base
+ autoload :Callbacks
+ autoload :Core
+ autoload :ConnectionHandling
+ autoload :CounterCache
+ autoload :DynamicMatchers
+ autoload :Enum
+ autoload :Explain
+ autoload :Inheritance
+ autoload :Integration
+ autoload :Migration
+ autoload :Migrator, 'active_record/migration'
+ autoload :ModelSchema
+ autoload :NestedAttributes
+ autoload :NoTouching
+ autoload :Persistence
+ autoload :QueryCache
+ autoload :Querying
+ autoload :ReadonlyAttributes
+ autoload :Reflection
+ autoload :RuntimeRegistry
+ autoload :Sanitization
+ autoload :Schema
+ autoload :SchemaDumper
+ autoload :SchemaMigration
+ autoload :Scoping
+ autoload :Serialization
+ autoload :StatementCache
+ autoload :Store
+ autoload :Timestamp
+ autoload :Transactions
+ autoload :Translation
+ autoload :Validations
+
+ eager_autoload do
+ autoload :ActiveRecordError, 'active_record/errors'
+ autoload :ConnectionNotEstablished, 'active_record/errors'
+ autoload :ConnectionAdapters, 'active_record/connection_adapters/abstract_adapter'
+
+ autoload :Aggregations
+ autoload :Associations
+ autoload :AttributeAssignment
+ autoload :AttributeMethods
+ autoload :AutosaveAssociation
+
+ autoload :Relation
+ autoload :AssociationRelation
+ autoload :NullRelation
+
+ autoload_under 'relation' do
+ autoload :QueryMethods
+ autoload :FinderMethods
+ autoload :Calculations
+ autoload :PredicateBuilder
+ autoload :SpawnMethods
+ autoload :Batches
+ autoload :Delegation
+ end
+
+ autoload :Result
+ end
+
+ module Coders
+ autoload :YAMLColumn, 'active_record/coders/yaml_column'
+ autoload :JSON, 'active_record/coders/json'
+ end
+
+ module AttributeMethods
+ extend ActiveSupport::Autoload
+
+ eager_autoload do
+ autoload :BeforeTypeCast
+ autoload :Dirty
+ autoload :PrimaryKey
+ autoload :Query
+ autoload :Read
+ autoload :TimeZoneConversion
+ autoload :Write
+ autoload :Serialization
+ end
+ end
+
+ module Locking
+ extend ActiveSupport::Autoload
+
+ eager_autoload do
+ autoload :Optimistic
+ autoload :Pessimistic
+ end
+ end
+
+ module ConnectionAdapters
+ extend ActiveSupport::Autoload
+
+ eager_autoload do
+ autoload :AbstractAdapter
+ autoload :ConnectionManagement, "active_record/connection_adapters/abstract/connection_pool"
+ end
+ end
+
+ module Scoping
+ extend ActiveSupport::Autoload
+
+ eager_autoload do
+ autoload :Named
+ autoload :Default
+ end
+ end
+
+ module Tasks
+ extend ActiveSupport::Autoload
+
+ autoload :DatabaseTasks
+ autoload :SQLiteDatabaseTasks, 'active_record/tasks/sqlite_database_tasks'
+ autoload :MySQLDatabaseTasks, 'active_record/tasks/mysql_database_tasks'
+ autoload :PostgreSQLDatabaseTasks,
+ 'active_record/tasks/postgresql_database_tasks'
+ end
+
+ autoload :TestFixtures, 'active_record/fixtures'
+
+ def self.eager_load!
+ super
+ ActiveRecord::Locking.eager_load!
+ ActiveRecord::Scoping.eager_load!
+ ActiveRecord::Associations.eager_load!
+ ActiveRecord::AttributeMethods.eager_load!
+ ActiveRecord::ConnectionAdapters.eager_load!
+ end
+end
+
+ActiveSupport.on_load(:active_record) do
+ Arel::Table.engine = self
+end
+
+ActiveSupport.on_load(:i18n) do
+ I18n.load_path << File.dirname(__FILE__) + '/active_record/locale/en.yml'
+end
diff --git a/activerecord/lib/active_record/aggregations.rb b/activerecord/lib/active_record/aggregations.rb
new file mode 100644
index 0000000000..e576ec4d40
--- /dev/null
+++ b/activerecord/lib/active_record/aggregations.rb
@@ -0,0 +1,266 @@
+module ActiveRecord
+ # = Active Record Aggregations
+ module Aggregations # :nodoc:
+ extend ActiveSupport::Concern
+
+ def clear_aggregation_cache #:nodoc:
+ @aggregation_cache.clear if persisted?
+ end
+
+ # Active Record implements aggregation through a macro-like class method called +composed_of+
+ # for representing attributes as value objects. It expresses relationships like "Account [is]
+ # composed of Money [among other things]" or "Person [is] composed of [an] address". Each call
+ # to the macro adds a description of how the value objects are created from the attributes of
+ # the entity object (when the entity is initialized either as a new object or from finding an
+ # existing object) and how it can be turned back into attributes (when the entity is saved to
+ # the database).
+ #
+ # class Customer < ActiveRecord::Base
+ # composed_of :balance, class_name: "Money", mapping: %w(balance amount)
+ # composed_of :address, mapping: [ %w(address_street street), %w(address_city city) ]
+ # end
+ #
+ # The customer class now has the following methods to manipulate the value objects:
+ # * <tt>Customer#balance, Customer#balance=(money)</tt>
+ # * <tt>Customer#address, Customer#address=(address)</tt>
+ #
+ # These methods will operate with value objects like the ones described below:
+ #
+ # class Money
+ # include Comparable
+ # attr_reader :amount, :currency
+ # EXCHANGE_RATES = { "USD_TO_DKK" => 6 }
+ #
+ # def initialize(amount, currency = "USD")
+ # @amount, @currency = amount, currency
+ # end
+ #
+ # def exchange_to(other_currency)
+ # exchanged_amount = (amount * EXCHANGE_RATES["#{currency}_TO_#{other_currency}"]).floor
+ # Money.new(exchanged_amount, other_currency)
+ # end
+ #
+ # def ==(other_money)
+ # amount == other_money.amount && currency == other_money.currency
+ # end
+ #
+ # def <=>(other_money)
+ # if currency == other_money.currency
+ # amount <=> other_money.amount
+ # else
+ # amount <=> other_money.exchange_to(currency).amount
+ # end
+ # end
+ # end
+ #
+ # class Address
+ # attr_reader :street, :city
+ # def initialize(street, city)
+ # @street, @city = street, city
+ # end
+ #
+ # def close_to?(other_address)
+ # city == other_address.city
+ # end
+ #
+ # def ==(other_address)
+ # city == other_address.city && street == other_address.street
+ # end
+ # end
+ #
+ # Now it's possible to access attributes from the database through the value objects instead. If
+ # you choose to name the composition the same as the attribute's name, it will be the only way to
+ # access that attribute. That's the case with our +balance+ attribute. You interact with the value
+ # objects just like you would with any other attribute:
+ #
+ # customer.balance = Money.new(20) # sets the Money value object and the attribute
+ # customer.balance # => Money value object
+ # customer.balance.exchange_to("DKK") # => Money.new(120, "DKK")
+ # customer.balance > Money.new(10) # => true
+ # customer.balance == Money.new(20) # => true
+ # customer.balance < Money.new(5) # => false
+ #
+ # Value objects can also be composed of multiple attributes, such as the case of Address. The order
+ # of the mappings will determine the order of the parameters.
+ #
+ # customer.address_street = "Hyancintvej"
+ # customer.address_city = "Copenhagen"
+ # customer.address # => Address.new("Hyancintvej", "Copenhagen")
+ #
+ # customer.address_street = "Vesterbrogade"
+ # customer.address # => Address.new("Hyancintvej", "Copenhagen")
+ # customer.clear_aggregation_cache
+ # customer.address # => Address.new("Vesterbrogade", "Copenhagen")
+ #
+ # customer.address = Address.new("May Street", "Chicago")
+ # customer.address_street # => "May Street"
+ # customer.address_city # => "Chicago"
+ #
+ # == Writing value objects
+ #
+ # Value objects are immutable and interchangeable objects that represent a given value, such as
+ # a Money object representing $5. Two Money objects both representing $5 should be equal (through
+ # methods such as <tt>==</tt> and <tt><=></tt> from Comparable if ranking makes sense). This is
+ # unlike entity objects where equality is determined by identity. An entity class such as Customer can
+ # easily have two different objects that both have an address on Hyancintvej. Entity identity is
+ # determined by object or relational unique identifiers (such as primary keys). Normal
+ # ActiveRecord::Base classes are entity objects.
+ #
+ # It's also important to treat the value objects as immutable. Don't allow the Money object to have
+ # its amount changed after creation. Create a new Money object with the new value instead. The
+ # Money#exchange_to method is an example of this. It returns a new value object instead of changing
+ # its own values. Active Record won't persist value objects that have been changed through means
+ # other than the writer method.
+ #
+ # The immutable requirement is enforced by Active Record by freezing any object assigned as a value
+ # object. Attempting to change it afterwards will result in a RuntimeError.
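+ #
+ # For example, with the Money class above (an illustrative sketch; since Money defines no
+ # writer methods, the mutation attempt here uses instance_variable_set):
+ #
+ #   customer.balance = Money.new(20)
+ #   customer.balance.frozen?                              # => true
+ #   customer.balance.instance_variable_set(:@amount, 50)  # => RuntimeError: can't modify frozen Money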
+ #
+ # Read more about value objects on http://c2.com/cgi/wiki?ValueObject and on the dangers of not
+ # keeping value objects immutable on http://c2.com/cgi/wiki?ValueObjectsShouldBeImmutable
+ #
+ # == Custom constructors and converters
+ #
+ # By default value objects are initialized by calling the <tt>new</tt> constructor of the value
+ # class passing each of the mapped attributes, in the order specified by the <tt>:mapping</tt>
+ # option, as arguments. If the value class doesn't support this convention then +composed_of+ allows
+ # a custom constructor to be specified.
+ #
+ # When a new value is assigned to the value object, the default assumption is that the new value
+ # is an instance of the value class. Specifying a custom converter allows the new value to be automatically
+ # converted to an instance of the value class if necessary.
+ #
+ # For example, the NetworkResource model has +network_address+ and +cidr_range+ attributes that should be
+ # aggregated using the NetAddr::CIDR value class (http://www.ruby-doc.org/gems/docs/n/netaddr-1.5.0/NetAddr/CIDR.html).
+ # The constructor for the value class is called +create+ and it expects a CIDR address string as a parameter.
+ # New values can be assigned to the value object using either another NetAddr::CIDR object, a string
+ # or an array. The <tt>:constructor</tt> and <tt>:converter</tt> options can be used to meet
+ # these requirements:
+ #
+ # class NetworkResource < ActiveRecord::Base
+ # composed_of :cidr,
+ # class_name: 'NetAddr::CIDR',
+ # mapping: [ %w(network_address network), %w(cidr_range bits) ],
+ # allow_nil: true,
+ # constructor: Proc.new { |network_address, cidr_range| NetAddr::CIDR.create("#{network_address}/#{cidr_range}") },
+ # converter: Proc.new { |value| NetAddr::CIDR.create(value.is_a?(Array) ? value.join('/') : value) }
+ # end
+ #
+ # # This calls the :constructor
+ # network_resource = NetworkResource.new(network_address: '192.168.0.1', cidr_range: 24)
+ #
+ # # These assignments will both use the :converter
+ # network_resource.cidr = [ '192.168.2.1', 8 ]
+ # network_resource.cidr = '192.168.0.1/24'
+ #
+ # # This assignment won't use the :converter as the value is already an instance of the value class
+ # network_resource.cidr = NetAddr::CIDR.create('192.168.2.1/8')
+ #
+ # # Saving and then reloading will use the :constructor on reload
+ # network_resource.save
+ # network_resource.reload
+ #
+ # == Finding records by a value object
+ #
+ # Once a +composed_of+ relationship is specified for a model, records can be loaded from the database
+ # by specifying an instance of the value object in the conditions hash. The following example
+ # finds all customers with +balance_amount+ equal to 20 and +balance_currency+ equal to "USD":
+ #
+ # Customer.where(balance: Money.new(20, "USD"))
+ #
+ module ClassMethods
+ # Adds reader and writer methods for manipulating a value object:
+ # <tt>composed_of :address</tt> adds <tt>address</tt> and <tt>address=(new_address)</tt> methods.
+ #
+ # Options are:
+ # * <tt>:class_name</tt> - Specifies the class name of the association. Use it only if that name
+ # can't be inferred from the part id. So <tt>composed_of :address</tt> will by default be linked
+ # to the Address class, but if the real class name is CompanyAddress, you'll have to specify it
+ # with this option.
+ # * <tt>:mapping</tt> - Specifies the mapping of entity attributes to attributes of the value
+ # object. Each mapping is represented as an array where the first item is the name of the
+ # entity attribute and the second item is the name of the attribute in the value object. The
+ # order in which mappings are defined determines the order in which attributes are sent to the
+ # value class constructor.
+ # * <tt>:allow_nil</tt> - Specifies that the value object will not be instantiated when all mapped
+ # attributes are +nil+. Setting the value object to +nil+ has the effect of writing +nil+ to all
+ # mapped attributes.
+ # This defaults to +false+.
+ # * <tt>:constructor</tt> - A symbol specifying the name of the constructor method or a Proc that
+ # is called to initialize the value object. The constructor is passed all of the mapped attributes,
+ # in the order that they are defined in the <tt>:mapping</tt> option, as arguments and uses them
+ # to instantiate a <tt>:class_name</tt> object.
+ # The default is <tt>:new</tt>.
+ # * <tt>:converter</tt> - A symbol specifying the name of a class method of <tt>:class_name</tt>
+ # or a Proc that is called when a new value is assigned to the value object. The converter is
+ # passed the single value that is used in the assignment and is only called if the new value is
+ # not an instance of <tt>:class_name</tt>. If <tt>:allow_nil</tt> is set to true, the converter
+ # can return nil to skip the assignment.
+ #
+ # Option examples:
+ # composed_of :temperature, mapping: %w(reading celsius)
+ # composed_of :balance, class_name: "Money", mapping: %w(balance amount),
+ # converter: Proc.new { |balance| balance.to_money }
+ # composed_of :address, mapping: [ %w(address_street street), %w(address_city city) ]
+ # composed_of :gps_location
+ # composed_of :gps_location, allow_nil: true
+ # composed_of :ip_address,
+ # class_name: 'IPAddr',
+ # mapping: %w(ip to_i),
+ # constructor: Proc.new { |ip| IPAddr.new(ip, Socket::AF_INET) },
+ # converter: Proc.new { |ip| ip.is_a?(Integer) ? IPAddr.new(ip, Socket::AF_INET) : IPAddr.new(ip.to_s) }
+ #
+ def composed_of(part_id, options = {})
+ options.assert_valid_keys(:class_name, :mapping, :allow_nil, :constructor, :converter)
+
+ name = part_id.id2name
+ class_name = options[:class_name] || name.camelize
+ mapping = options[:mapping] || [ name, name ]
+ mapping = [ mapping ] unless mapping.first.is_a?(Array)
+ allow_nil = options[:allow_nil] || false
+ constructor = options[:constructor] || :new
+ converter = options[:converter]
+
+ reader_method(name, class_name, mapping, allow_nil, constructor)
+ writer_method(name, class_name, mapping, allow_nil, converter)
+
+ reflection = ActiveRecord::Reflection.create(:composed_of, part_id, nil, options, self)
+ Reflection.add_aggregate_reflection self, part_id, reflection
+ end
+
+ private
+ def reader_method(name, class_name, mapping, allow_nil, constructor)
+ define_method(name) do
+ if @aggregation_cache[name].nil? && (!allow_nil || mapping.any? {|key, _| !read_attribute(key).nil? })
+ attrs = mapping.collect {|key, _| read_attribute(key)}
+ object = constructor.respond_to?(:call) ?
+ constructor.call(*attrs) :
+ class_name.constantize.send(constructor, *attrs)
+ @aggregation_cache[name] = object
+ end
+ @aggregation_cache[name]
+ end
+ end
+
+ def writer_method(name, class_name, mapping, allow_nil, converter)
+ define_method("#{name}=") do |part|
+ klass = class_name.constantize
+ if part.is_a?(Hash)
+ part = klass.new(*part.values)
+ end
+
+ unless part.is_a?(klass) || converter.nil? || part.nil?
+ part = converter.respond_to?(:call) ? converter.call(part) : klass.send(converter, part)
+ end
+
+ if part.nil? && allow_nil
+ mapping.each { |key, _| self[key] = nil }
+ @aggregation_cache[name] = nil
+ else
+ mapping.each { |key, value| self[key] = part.send(value) }
+ @aggregation_cache[name] = part.freeze
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/association_relation.rb b/activerecord/lib/active_record/association_relation.rb
new file mode 100644
index 0000000000..5a84792f45
--- /dev/null
+++ b/activerecord/lib/active_record/association_relation.rb
@@ -0,0 +1,22 @@
+module ActiveRecord
+ class AssociationRelation < Relation
+ def initialize(klass, table, association)
+ super(klass, table)
+ @association = association
+ end
+
+ def proxy_association
+ @association
+ end
+
+ def ==(other)
+ other == to_a
+ end
+
+ private
+
+ def exec_queries
+ super.each { |r| @association.set_inverse_instance r }
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations.rb b/activerecord/lib/active_record/associations.rb
new file mode 100644
index 0000000000..1c5a737696
--- /dev/null
+++ b/activerecord/lib/active_record/associations.rb
@@ -0,0 +1,1630 @@
+require 'active_support/core_ext/enumerable'
+require 'active_support/core_ext/string/conversions'
+require 'active_support/core_ext/module/remove_method'
+require 'active_record/errors'
+
+module ActiveRecord
+ class AssociationNotFoundError < ConfigurationError #:nodoc:
+ def initialize(record, association_name)
+ super("Association named '#{association_name}' was not found on #{record.class.name}; perhaps you misspelled it?")
+ end
+ end
+
+ class InverseOfAssociationNotFoundError < ActiveRecordError #:nodoc:
+ def initialize(reflection, associated_class = nil)
+ super("Could not find the inverse association for #{reflection.name} (#{reflection.options[:inverse_of].inspect} in #{associated_class.nil? ? reflection.class_name : associated_class.name})")
+ end
+ end
+
+ class HasManyThroughAssociationNotFoundError < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name, reflection)
+ super("Could not find the association #{reflection.options[:through].inspect} in model #{owner_class_name}")
+ end
+ end
+
+ class HasManyThroughAssociationPolymorphicSourceError < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name, reflection, source_reflection)
+ super("Cannot have a has_many :through association '#{owner_class_name}##{reflection.name}' on the polymorphic object '#{source_reflection.class_name}##{source_reflection.name}' without 'source_type'. Try adding 'source_type: \"#{reflection.name.to_s.classify}\"' to 'has_many :through' definition.")
+ end
+ end
+
+ class HasManyThroughAssociationPolymorphicThroughError < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name, reflection)
+ super("Cannot have a has_many :through association '#{owner_class_name}##{reflection.name}' which goes through the polymorphic association '#{owner_class_name}##{reflection.through_reflection.name}'.")
+ end
+ end
+
+ class HasManyThroughAssociationPointlessSourceTypeError < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name, reflection, source_reflection)
+ super("Cannot have a has_many :through association '#{owner_class_name}##{reflection.name}' with a :source_type option if the '#{reflection.through_reflection.class_name}##{source_reflection.name}' is not polymorphic. Try removing :source_type on your association.")
+ end
+ end
+
+ class HasOneThroughCantAssociateThroughCollection < ActiveRecordError #:nodoc:
+ def initialize(owner_class_name, reflection, through_reflection)
+ super("Cannot have a has_one :through association '#{owner_class_name}##{reflection.name}' where the :through association '#{owner_class_name}##{through_reflection.name}' is a collection. Specify a has_one or belongs_to association in the :through option instead.")
+ end
+ end
+
+ class HasManyThroughSourceAssociationNotFoundError < ActiveRecordError #:nodoc:
+ def initialize(reflection)
+ through_reflection = reflection.through_reflection
+ source_reflection_names = reflection.source_reflection_names
+ source_associations = reflection.through_reflection.klass._reflections.keys
+ super("Could not find the source association(s) #{source_reflection_names.collect{ |a| a.inspect }.to_sentence(:two_words_connector => ' or ', :last_word_connector => ', or ', :locale => :en)} in model #{through_reflection.klass}. Try 'has_many #{reflection.name.inspect}, :through => #{through_reflection.name.inspect}, :source => <name>'. Is it one of #{source_associations.to_sentence(:two_words_connector => ' or ', :last_word_connector => ', or ', :locale => :en)}?")
+ end
+ end
+
+ class HasManyThroughCantAssociateThroughHasOneOrManyReflection < ActiveRecordError #:nodoc:
+ def initialize(owner, reflection)
+ super("Cannot modify association '#{owner.class.name}##{reflection.name}' because the source reflection class '#{reflection.source_reflection.class_name}' is associated to '#{reflection.through_reflection.class_name}' via :#{reflection.source_reflection.macro}.")
+ end
+ end
+
+ class HasManyThroughCantAssociateNewRecords < ActiveRecordError #:nodoc:
+ def initialize(owner, reflection)
+ super("Cannot associate new records through '#{owner.class.name}##{reflection.name}' on '#{reflection.source_reflection.class_name rescue nil}##{reflection.source_reflection.name rescue nil}'. Both records must have an id in order to create the has_many :through record associating them.")
+ end
+ end
+
+ class HasManyThroughCantDissociateNewRecords < ActiveRecordError #:nodoc:
+ def initialize(owner, reflection)
+ super("Cannot dissociate new records through '#{owner.class.name}##{reflection.name}' on '#{reflection.source_reflection.class_name rescue nil}##{reflection.source_reflection.name rescue nil}'. Both records must have an id in order to delete the has_many :through record associating them.")
+ end
+ end
+
+ class HasManyThroughNestedAssociationsAreReadonly < ActiveRecordError #:nodoc:
+ def initialize(owner, reflection)
+ super("Cannot modify association '#{owner.class.name}##{reflection.name}' because it goes through more than one other association.")
+ end
+ end
+
+ class EagerLoadPolymorphicError < ActiveRecordError #:nodoc:
+ def initialize(reflection)
+ super("Cannot eagerly load the polymorphic association #{reflection.name.inspect}")
+ end
+ end
+
+ class ReadOnlyAssociation < ActiveRecordError #:nodoc:
+ def initialize(reflection)
+ super("Cannot add to a has_many :through association. Try adding to #{reflection.through_reflection.name.inspect}.")
+ end
+ end
+
+ # This error is raised when trying to destroy a parent instance in a 1:N or 1:1 association
+ # (has_many, has_one) while at least one associated child record exists.
+ # For example, if @project.tasks.size > 0, DeleteRestrictionError will be raised when trying to destroy @project.
+ class DeleteRestrictionError < ActiveRecordError #:nodoc:
+ def initialize(name)
+ super("Cannot delete record because of dependent #{name}")
+ end
+ end
+
+ # See ActiveRecord::Associations::ClassMethods for documentation.
+ module Associations # :nodoc:
+ extend ActiveSupport::Autoload
+ extend ActiveSupport::Concern
+
+ # These classes will be loaded when associations are created.
+ # So there is no need to eager load them.
+ autoload :Association, 'active_record/associations/association'
+ autoload :SingularAssociation, 'active_record/associations/singular_association'
+ autoload :CollectionAssociation, 'active_record/associations/collection_association'
+ autoload :CollectionProxy, 'active_record/associations/collection_proxy'
+
+ autoload :BelongsToAssociation, 'active_record/associations/belongs_to_association'
+ autoload :BelongsToPolymorphicAssociation, 'active_record/associations/belongs_to_polymorphic_association'
+ autoload :HasManyAssociation, 'active_record/associations/has_many_association'
+ autoload :HasManyThroughAssociation, 'active_record/associations/has_many_through_association'
+ autoload :HasOneAssociation, 'active_record/associations/has_one_association'
+ autoload :HasOneThroughAssociation, 'active_record/associations/has_one_through_association'
+ autoload :ThroughAssociation, 'active_record/associations/through_association'
+
+ module Builder #:nodoc:
+ autoload :Association, 'active_record/associations/builder/association'
+ autoload :SingularAssociation, 'active_record/associations/builder/singular_association'
+ autoload :CollectionAssociation, 'active_record/associations/builder/collection_association'
+
+ autoload :BelongsTo, 'active_record/associations/builder/belongs_to'
+ autoload :HasOne, 'active_record/associations/builder/has_one'
+ autoload :HasMany, 'active_record/associations/builder/has_many'
+ autoload :HasAndBelongsToMany, 'active_record/associations/builder/has_and_belongs_to_many'
+ end
+
+ eager_autoload do
+ autoload :Preloader, 'active_record/associations/preloader'
+ autoload :JoinDependency, 'active_record/associations/join_dependency'
+ autoload :AssociationScope, 'active_record/associations/association_scope'
+ autoload :AliasTracker, 'active_record/associations/alias_tracker'
+ end
+
+ # Clears out the association cache.
+ def clear_association_cache #:nodoc:
+ @association_cache.clear if persisted?
+ end
+
+ # :nodoc:
+ attr_reader :association_cache
+
+ # Returns the association instance for the given name, instantiating it if it doesn't already exist
+ def association(name) #:nodoc:
+ association = association_instance_get(name)
+
+ if association.nil?
+ raise AssociationNotFoundError.new(self, name) unless reflection = self.class._reflect_on_association(name)
+ association = reflection.association_class.new(self, reflection)
+ association_instance_set(name, association)
+ end
+
+ association
+ end
+
+ private
+ # Returns the cached association instance for the given name, or +nil+ if none has been set.
+ def association_instance_get(name)
+ @association_cache[name]
+ end
+
+ # Set the specified association instance.
+ def association_instance_set(name, association)
+ @association_cache[name] = association
+ end
+
+ # \Associations are a set of macro-like class methods for tying objects together through
+ # foreign keys. They express relationships like "Project has one Project Manager"
+ # or "Project belongs to a Portfolio". Each macro adds a number of methods to the
+ # class which are specialized according to the collection or association symbol and the
+ # options hash. It works much the same way as Ruby's own <tt>attr*</tt>
+ # methods.
+ #
+ # class Project < ActiveRecord::Base
+ # belongs_to :portfolio
+ # has_one :project_manager
+ # has_many :milestones
+ # has_and_belongs_to_many :categories
+ # end
+ #
+ # The project class now has the following methods (and more) to ease the traversal and
+ # manipulation of its relationships:
+ # * <tt>Project#portfolio, Project#portfolio=(portfolio), Project#portfolio.nil?</tt>
+ # * <tt>Project#project_manager, Project#project_manager=(project_manager), Project#project_manager.nil?,</tt>
+ # * <tt>Project#milestones.empty?, Project#milestones.size, Project#milestones, Project#milestones<<(milestone),</tt>
+ # <tt>Project#milestones.delete(milestone), Project#milestones.destroy(milestone), Project#milestones.find(milestone_id),</tt>
+ # <tt>Project#milestones.build, Project#milestones.create</tt>
+ # * <tt>Project#categories.empty?, Project#categories.size, Project#categories, Project#categories<<(category1),</tt>
+ # <tt>Project#categories.delete(category1), Project#categories.destroy(category1)</tt>
+ #
+ # === A word of warning
+ #
+ # Don't create associations that have the same name as instance methods of
+ # <tt>ActiveRecord::Base</tt>. Since the association adds a method with that name to
+ # its model, it will override the inherited method and break things.
+ # For instance, +attributes+ and +connection+ would be bad choices for association names.
+ #
+ # == Auto-generated methods
+ # See also Instance Public methods below for more details.
+ #
+ # === Singular associations (one-to-one)
+ # | | belongs_to |
+ # generated methods | belongs_to | :polymorphic | has_one
+ # ----------------------------------+------------+--------------+---------
+ # other(force_reload=false) | X | X | X
+ # other=(other) | X | X | X
+ # build_other(attributes={}) | X | | X
+ # create_other(attributes={}) | X | | X
+ # create_other!(attributes={}) | X | | X
+ #
+ # === Collection associations (one-to-many / many-to-many)
+ # | | | has_many
+ # generated methods | habtm | has_many | :through
+ # ----------------------------------+-------+----------+----------
+ # others(force_reload=false) | X | X | X
+ # others=(other,other,...) | X | X | X
+ # other_ids | X | X | X
+ # other_ids=(id,id,...) | X | X | X
+ # others<< | X | X | X
+ # others.push | X | X | X
+ # others.concat | X | X | X
+ # others.build(attributes={}) | X | X | X
+ # others.create(attributes={}) | X | X | X
+ # others.create!(attributes={}) | X | X | X
+ # others.size | X | X | X
+ # others.length | X | X | X
+ # others.count | X | X | X
+ # others.sum(*args) | X | X | X
+ # others.empty? | X | X | X
+ # others.clear | X | X | X
+ # others.delete(other,other,...) | X | X | X
+ # others.delete_all | X | X | X
+ # others.destroy(other,other,...) | X | X | X
+ # others.destroy_all | X | X | X
+ # others.find(*args) | X | X | X
+ # others.exists? | X | X | X
+ # others.distinct | X | X | X
+ # others.uniq | X | X | X
+ # others.reset | X | X | X
+ #
+ # === Overriding generated methods
+ #
+ # Association methods are generated in a module that is included into the model class,
+ # which allows you to easily override with your own methods and call the original
+ # generated method with +super+. For example:
+ #
+ # class Car < ActiveRecord::Base
+ # belongs_to :owner
+ # belongs_to :old_owner
+ # def owner=(new_owner)
+ # self.old_owner = self.owner
+ # super
+ # end
+ # end
+ #
+ # If your model class is <tt>Project</tt>, the module is
+ # named <tt>Project::GeneratedFeatureMethods</tt>. The GeneratedFeatureMethods module is
+ # included in the model class immediately after the (anonymous) generated attributes methods
+ # module, meaning an association will override the methods for an attribute with the same name.
+ #
+ # == Cardinality and associations
+ #
+ # Active Record associations can be used to describe one-to-one, one-to-many and many-to-many
+ # relationships between models. Each model uses an association to describe its role in
+ # the relation. The +belongs_to+ association is always used in the model that has
+ # the foreign key.
+ #
+ # === One-to-one
+ #
+ # Use +has_one+ in the base, and +belongs_to+ in the associated model.
+ #
+ # class Employee < ActiveRecord::Base
+ # has_one :office
+ # end
+ # class Office < ActiveRecord::Base
+ # belongs_to :employee # foreign key - employee_id
+ # end
+ #
+ # === One-to-many
+ #
+ # Use +has_many+ in the base, and +belongs_to+ in the associated model.
+ #
+ # class Manager < ActiveRecord::Base
+ # has_many :employees
+ # end
+ # class Employee < ActiveRecord::Base
+ # belongs_to :manager # foreign key - manager_id
+ # end
+ #
+ # === Many-to-many
+ #
+ # There are two ways to build a many-to-many relationship.
+ #
+ # The first way uses a +has_many+ association with the <tt>:through</tt> option and a join model, so
+ # there are two stages of associations.
+ #
+ # class Assignment < ActiveRecord::Base
+ # belongs_to :programmer # foreign key - programmer_id
+ # belongs_to :project # foreign key - project_id
+ # end
+ # class Programmer < ActiveRecord::Base
+ # has_many :assignments
+ # has_many :projects, through: :assignments
+ # end
+ # class Project < ActiveRecord::Base
+ # has_many :assignments
+ # has_many :programmers, through: :assignments
+ # end
+ #
+ # For the second way, use +has_and_belongs_to_many+ in both models. This requires a join table
+ # that has no corresponding model or primary key.
+ #
+ # class Programmer < ActiveRecord::Base
+ # has_and_belongs_to_many :projects # foreign keys in the join table
+ # end
+ # class Project < ActiveRecord::Base
+ # has_and_belongs_to_many :programmers # foreign keys in the join table
+ # end
+ #
+ # Choosing which way to build a many-to-many relationship is not always simple.
+ # If you need to work with the relationship model as its own entity,
+ # use <tt>has_many :through</tt>. Use +has_and_belongs_to_many+ when working with legacy schemas or when
+ # you never work directly with the relationship itself.
+ #
+ # == Is it a +belongs_to+ or +has_one+ association?
+ #
+ # Both express a 1-1 relationship. The difference is mostly where to place the foreign
+ # key, which goes on the table for the class declaring the +belongs_to+ relationship.
+ #
+ # class User < ActiveRecord::Base
+ # # I reference an account.
+ # belongs_to :account
+ # end
+ #
+ # class Account < ActiveRecord::Base
+ # # One user references me.
+ # has_one :user
+ # end
+ #
+ # The tables for these classes could look something like:
+ #
+ # CREATE TABLE users (
+ # id int(11) NOT NULL auto_increment,
+ # account_id int(11) default NULL,
+ # name varchar default NULL,
+ # PRIMARY KEY (id)
+ # )
+ #
+ # CREATE TABLE accounts (
+ # id int(11) NOT NULL auto_increment,
+ # name varchar default NULL,
+ # PRIMARY KEY (id)
+ # )
+ #
+ # == Unsaved objects and associations
+ #
+ # You can manipulate objects and associations before they are saved to the database, but
+ # there is some special behavior you should be aware of, mostly involving the saving of
+ # associated objects.
+ #
+ # You can set the <tt>:autosave</tt> option on a <tt>has_one</tt>, <tt>belongs_to</tt>,
+ # <tt>has_many</tt>, or <tt>has_and_belongs_to_many</tt> association. Setting it
+ # to +true+ will _always_ save the members, whereas setting it to +false+ will
+ # _never_ save the members. More details about the <tt>:autosave</tt> option are available at
+ # AutosaveAssociation.
+ #
+ # === One-to-one associations
+ #
+ # * Assigning an object to a +has_one+ association automatically saves that object and
+ # the object being replaced (if there is one), in order to update their foreign
+ # keys - except if the parent object is unsaved (<tt>new_record? == true</tt>).
+ # * If either of these saves fails (due to one of the objects being invalid), an
+ # <tt>ActiveRecord::RecordNotSaved</tt> exception is raised and the assignment is
+ # cancelled.
+ # * If you wish to assign an object to a +has_one+ association without saving it,
+ # use the <tt>build_association</tt> method (documented below). The object being
+ # replaced will still be saved to update its foreign key.
+ # * Assigning an object to a +belongs_to+ association does not save the object, since
+ # the foreign key field belongs on the parent. It does not save the parent either.
+ #
+ # === Collections
+ #
+ # * Adding an object to a collection (+has_many+ or +has_and_belongs_to_many+) automatically
+ # saves that object, except if the parent object (the owner of the collection) is not yet
+ # stored in the database (see the example following this list).
+ # * If saving any of the objects being added to a collection (via <tt>push</tt> or similar)
+ # fails, then <tt>push</tt> returns +false+.
+ # * If saving fails while replacing the collection (via <tt>association=</tt>), an
+ # <tt>ActiveRecord::RecordNotSaved</tt> exception is raised and the assignment is
+ # cancelled.
+ # * You can add an object to a collection without automatically saving it by using the
+ # <tt>collection.build</tt> method (documented below).
+ # * All unsaved (<tt>new_record? == true</tt>) members of the collection are automatically
+ # saved when the parent is saved.
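+ #
+ # For example, here is a sketch that reuses the Firm and Client models shown later in this
+ # file, assuming each has a +name+ attribute:
+ #
+ #   firm = Firm.new(name: "37signals")        # the parent is not yet saved
+ #   firm.clients << Client.new(name: "Jane")  # the new client is not saved here either
+ #   firm.save                                 # saves the firm, then the pending client
+ #
+ #   firm.clients.build(name: "Bob")           # instantiates a client without saving it
+ #   firm.save                                 # the built client is saved along with the firm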
+ #
+ # == Customizing the query
+ #
+ # \Associations are built from <tt>Relation</tt>s, and you can use the <tt>Relation</tt> syntax
+ # to customize them. For example, to add a condition:
+ #
+ # class Blog < ActiveRecord::Base
+ # has_many :published_posts, -> { where published: true }, class_name: 'Post'
+ # end
+ #
+ # Inside the <tt>-> { ... }</tt> block you can use all of the usual <tt>Relation</tt> methods.
+ #
+ # === Accessing the owner object
+ #
+ # Sometimes it is useful to have access to the owner object when building the query. The owner
+ # is passed as a parameter to the block. For example, the following association would find all
+ # events that occur on the user's birthday:
+ #
+ # class User < ActiveRecord::Base
+ # has_many :birthday_events, ->(user) { where starts_on: user.birthday }, class_name: 'Event'
+ # end
+ #
+ # Note: Joining, eager loading and preloading of these associations are not fully possible.
+ # These operations happen before instance creation and the scope will be called with a +nil+ argument.
+ # This can lead to unexpected behavior and is deprecated.
+ #
+ # == Association callbacks
+ #
+ # Similar to the normal callbacks that hook into the life cycle of an Active Record object,
+ # you can also define callbacks that get triggered when you add an object to or remove an
+ # object from an association collection.
+ #
+ # class Project
+ # has_and_belongs_to_many :developers, after_add: :evaluate_velocity
+ #
+ # def evaluate_velocity(developer)
+ # ...
+ # end
+ # end
+ #
+ # It's possible to stack callbacks by passing them as an array. Example:
+ #
+ # class Project
+ # has_and_belongs_to_many :developers,
+ # after_add: [:evaluate_velocity, Proc.new { |p, d| p.shipping_date = Time.now}]
+ # end
+ #
+ # Possible callbacks are: +before_add+, +after_add+, +before_remove+ and +after_remove+.
+ #
+ # Should any of the +before_add+ callbacks throw an exception, the object does not get
+ # added to the collection. Same with the +before_remove+ callbacks; if an exception is
+ # thrown the object doesn't get removed.
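+ #
+ # For example (a sketch; the +check_limit+ callback and the limit of five are made up for
+ # illustration):
+ #
+ #   class Project
+ #     has_and_belongs_to_many :developers, before_add: :check_limit
+ #
+ #     def check_limit(developer)
+ #       raise "team is full" if developers.size >= 5
+ #     end
+ #   end
+ #
+ # Once five developers are associated, <tt>project.developers << developer</tt> raises
+ # "team is full" and the sixth developer is not added to the collection.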
+ #
+ # == Association extensions
+ #
+ # The proxy objects that control the access to associations can be extended through anonymous
+ # modules. This is especially beneficial for adding new finders, creators, and other
+ # factory-type methods that are only used as part of this association.
+ #
+ # class Account < ActiveRecord::Base
+ # has_many :people do
+ # def find_or_create_by_name(name)
+ # first_name, last_name = name.split(" ", 2)
+ # find_or_create_by(first_name: first_name, last_name: last_name)
+ # end
+ # end
+ # end
+ #
+ # person = Account.first.people.find_or_create_by_name("David Heinemeier Hansson")
+ # person.first_name # => "David"
+ # person.last_name # => "Heinemeier Hansson"
+ #
+ # If you need to share the same extensions between many associations, you can use a named
+ # extension module.
+ #
+ # module FindOrCreateByNameExtension
+ # def find_or_create_by_name(name)
+ # first_name, last_name = name.split(" ", 2)
+ # find_or_create_by(first_name: first_name, last_name: last_name)
+ # end
+ # end
+ #
+ # class Account < ActiveRecord::Base
+ # has_many :people, -> { extending FindOrCreateByNameExtension }
+ # end
+ #
+ # class Company < ActiveRecord::Base
+ # has_many :people, -> { extending FindOrCreateByNameExtension }
+ # end
+ #
+ # Some extensions can only be made to work with knowledge of the association's internals.
+ # Extensions can access relevant state using the following methods (where +items+ is the
+ # name of the association):
+ #
+ # * <tt>record.association(:items).owner</tt> - Returns the object the association is part of.
+ # * <tt>record.association(:items).reflection</tt> - Returns the reflection object that describes the association.
+ # * <tt>record.association(:items).target</tt> - Returns the associated object for +belongs_to+ and +has_one+, or
+ # the collection of associated objects for +has_many+ and +has_and_belongs_to_many+.
+ #
+ # However, inside the actual extension code, you will not have access to the <tt>record</tt> as
+ # above. In this case, you can access <tt>proxy_association</tt>. For example,
+ # <tt>record.association(:items)</tt> and <tt>record.items.proxy_association</tt> will return
+ # the same object, allowing you to make calls like <tt>proxy_association.owner</tt> inside
+ # association extensions.
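+ #
+ # For example, an extension method can scope the collection using the owner (a sketch; the
+ # +named_after_owner+ method and the account's +name+ attribute are made up for illustration):
+ #
+ #   class Account < ActiveRecord::Base
+ #     has_many :people do
+ #       def named_after_owner
+ #         where(first_name: proxy_association.owner.name)
+ #       end
+ #     end
+ #   end
+ #
+ #   Account.first.people.named_after_owner # people whose first name matches the account's name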
+ #
+ # == Association Join Models
+ #
+ # Has Many associations can be configured with the <tt>:through</tt> option to use an
+ # explicit join model to retrieve the data. This operates similarly to a
+ # +has_and_belongs_to_many+ association. The advantage is that you're able to add validations,
+ # callbacks, and extra attributes on the join model. Consider the following schema:
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :authorships
+ # has_many :books, through: :authorships
+ # end
+ #
+ # class Authorship < ActiveRecord::Base
+ # belongs_to :author
+ # belongs_to :book
+ # end
+ #
+ # @author = Author.first
+ # @author.authorships.collect { |a| a.book } # selects all books that the author's authorships belong to
+ # @author.books # selects all books by using the Authorship join model
+ #
+ # You can also go through a +has_many+ association on the join model:
+ #
+ # class Firm < ActiveRecord::Base
+ # has_many :clients
+ # has_many :invoices, through: :clients
+ # end
+ #
+ # class Client < ActiveRecord::Base
+ # belongs_to :firm
+ # has_many :invoices
+ # end
+ #
+ # class Invoice < ActiveRecord::Base
+ # belongs_to :client
+ # end
+ #
+ # @firm = Firm.first
+ # @firm.clients.flat_map { |c| c.invoices } # select all invoices for all clients of the firm
+ # @firm.invoices # selects all invoices by going through the Client join model
+ #
+ # Similarly you can go through a +has_one+ association on the join model:
+ #
+ # class Group < ActiveRecord::Base
+ # has_many :users
+ # has_many :avatars, through: :users
+ # end
+ #
+ # class User < ActiveRecord::Base
+ # belongs_to :group
+ # has_one :avatar
+ # end
+ #
+ # class Avatar < ActiveRecord::Base
+ # belongs_to :user
+ # end
+ #
+ # @group = Group.first
+ # @group.users.collect { |u| u.avatar }.compact # select all avatars for all users in the group
+ # @group.avatars # selects all avatars by going through the User join model.
+ #
+ # An important caveat with going through +has_one+ or +has_many+ associations on the
+ # join model is that these associations are *read-only*. For example, the following
+ # would not work following the previous example:
+ #
+ # @group.avatars << Avatar.new # this would work if User belonged_to Avatar rather than the other way around
+ # @group.avatars.delete(@group.avatars.last) # so would this
+ #
+ # == Setting Inverses
+ #
+ # If you are using a +belongs_to+ on the join model, it is a good idea to set the
+ # <tt>:inverse_of</tt> option on the +belongs_to+, which will mean that the following example
+ # works correctly (where <tt>tags</tt> is a +has_many+ <tt>:through</tt> association):
+ #
+ # @post = Post.first
+ # @tag = @post.tags.build name: "ruby"
+ # @tag.save
+ #
+      # The last line ought to save the through record (a <tt>Tagging</tt>). This will only work if the
+ # <tt>:inverse_of</tt> is set:
+ #
+      #   class Tagging < ActiveRecord::Base
+ # belongs_to :post
+ # belongs_to :tag, inverse_of: :taggings
+ # end
+ #
+      # If you do not set the <tt>:inverse_of</tt> option, the association will
+ # do its best to match itself up with the correct inverse. Automatic
+ # inverse detection only works on <tt>has_many</tt>, <tt>has_one</tt>, and
+ # <tt>belongs_to</tt> associations.
+ #
+ # Extra options on the associations, as defined in the
+ # <tt>AssociationReflection::INVALID_AUTOMATIC_INVERSE_OPTIONS</tt> constant, will
+ # also prevent the association's inverse from being found automatically.
+ #
+ # The automatic guessing of the inverse association uses a heuristic based
+ # on the name of the class, so it may not work for all associations,
+ # especially the ones with non-standard names.
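+      #
+      # As a sketch of the kind of standard naming the heuristic can match, the
+      # following pair is detected automatically:
+      #
+      #   class Post < ActiveRecord::Base
+      #     has_many :comments    # inverse detected as Comment#post
+      #   end
+      #
+      #   class Comment < ActiveRecord::Base
+      #     belongs_to :post      # inverse detected as Post#comments
+      #   end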
+ #
+ # You can turn off the automatic detection of inverse associations by setting
+ # the <tt>:inverse_of</tt> option to <tt>false</tt> like so:
+ #
+      #   class Tagging < ActiveRecord::Base
+ # belongs_to :tag, inverse_of: false
+ # end
+ #
+ # == Nested \Associations
+ #
+ # You can actually specify *any* association with the <tt>:through</tt> option, including an
+ # association which has a <tt>:through</tt> option itself. For example:
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :posts
+ # has_many :comments, through: :posts
+ # has_many :commenters, through: :comments
+ # end
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :comments
+ # end
+ #
+ # class Comment < ActiveRecord::Base
+ # belongs_to :commenter
+ # end
+ #
+ # @author = Author.first
+ # @author.commenters # => People who commented on posts written by the author
+ #
+      # An equivalent way of setting up this association would be:
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :posts
+ # has_many :commenters, through: :posts
+ # end
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :comments
+ # has_many :commenters, through: :comments
+ # end
+ #
+ # class Comment < ActiveRecord::Base
+ # belongs_to :commenter
+ # end
+ #
+      # When using a nested association, you will not be able to modify the association because there
+ # is not enough information to know what modification to make. For example, if you tried to
+ # add a <tt>Commenter</tt> in the example above, there would be no way to tell how to set up the
+ # intermediate <tt>Post</tt> and <tt>Comment</tt> objects.
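+      #
+      # For instance, a write through the nested association raises (a sketch
+      # based on the models above):
+      #
+      #   @author.commenters << Commenter.new
+      #   # => raises ActiveRecord::HasManyThroughNestedAssociationsAreReadonly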
+ #
+ # == Polymorphic \Associations
+ #
+      # Polymorphic associations on models are not restricted in what types of models they
+ # can be associated with. Rather, they specify an interface that a +has_many+ association
+ # must adhere to.
+ #
+ # class Asset < ActiveRecord::Base
+ # belongs_to :attachable, polymorphic: true
+ # end
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :assets, as: :attachable # The :as option specifies the polymorphic interface to use.
+ # end
+ #
+ # @asset.attachable = @post
+ #
+ # This works by using a type column in addition to a foreign key to specify the associated
+ # record. In the Asset example, you'd need an +attachable_id+ integer column and an
+ # +attachable_type+ string column.
+ #
+ # Using polymorphic associations in combination with single table inheritance (STI) is
+ # a little tricky. In order for the associations to work as expected, ensure that you
+ # store the base model for the STI models in the type column of the polymorphic
+ # association. To continue with the asset example above, suppose there are guest posts
+ # and member posts that use the posts table for STI. In this case, there must be a +type+
+ # column in the posts table.
+ #
+ # Note: The <tt>attachable_type=</tt> method is being called when assigning an +attachable+.
+ # The +class_name+ of the +attachable+ is passed as a String.
+ #
+ # class Asset < ActiveRecord::Base
+ # belongs_to :attachable, polymorphic: true
+ #
+ # def attachable_type=(class_name)
+ # super(class_name.constantize.base_class.to_s)
+ # end
+ # end
+ #
+ # class Post < ActiveRecord::Base
+      #     # because we store "Post" in attachable_type, dependent: :destroy will now work
+ # has_many :assets, as: :attachable, dependent: :destroy
+ # end
+ #
+ # class GuestPost < Post
+ # end
+ #
+ # class MemberPost < Post
+ # end
+ #
+ # == Caching
+ #
+ # All of the methods are built on a simple caching principle that will keep the result
+ # of the last query around unless specifically instructed not to. The cache is even
+      # shared across methods to make it cheaper to use the macro-added methods without
+      # worrying too much about performance on the first go.
+ #
+ # project.milestones # fetches milestones from the database
+ # project.milestones.size # uses the milestone cache
+ # project.milestones.empty? # uses the milestone cache
+ # project.milestones(true).size # fetches milestones from the database
+ # project.milestones # uses the milestone cache
+ #
+ # == Eager loading of associations
+ #
+ # Eager loading is a way to find objects of a certain class and a number of named associations.
+      # This is one of the easiest ways to prevent the dreaded N+1 problem in which fetching 100
+ # posts that each need to display their author triggers 101 database queries. Through the
+ # use of eager loading, the number of queries will be reduced from 101 to 2.
+ #
+ # class Post < ActiveRecord::Base
+ # belongs_to :author
+ # has_many :comments
+ # end
+ #
+ # Consider the following loop using the class above:
+ #
+ # Post.all.each do |post|
+ # puts "Post: " + post.title
+ # puts "Written by: " + post.author.name
+ # puts "Last comment on: " + post.comments.first.created_on
+ # end
+ #
+ # To iterate over these one hundred posts, we'll generate 201 database queries. Let's
+ # first just optimize it for retrieving the author:
+ #
+ # Post.includes(:author).each do |post|
+ #
+ # This references the name of the +belongs_to+ association that also used the <tt>:author</tt>
+ # symbol. After loading the posts, find will collect the +author_id+ from each one and load
+ # all the referenced authors with one query. Doing so will cut down the number of queries
+ # from 201 to 102.
+ #
+ # We can improve upon the situation further by referencing both associations in the finder with:
+ #
+ # Post.includes(:author, :comments).each do |post|
+ #
+ # This will load all comments with a single query. This reduces the total number of queries
+ # to 3. More generally the number of queries will be 1 plus the number of associations
+ # named (except if some of the associations are polymorphic +belongs_to+ - see below).
+ #
+ # To include a deep hierarchy of associations, use a hash:
+ #
+ # Post.includes(:author, {comments: {author: :gravatar}}).each do |post|
+ #
+ # That'll grab not only all the comments but all their authors and gravatar pictures.
+ # You can mix and match symbols, arrays and hashes in any combination to describe the
+ # associations you want to load.
+ #
+ # All of this power shouldn't fool you into thinking that you can pull out huge amounts
+ # of data with no performance penalty just because you've reduced the number of queries.
+ # The database still needs to send all the data to Active Record and it still needs to
+ # be processed. So it's no catch-all for performance problems, but it's a great way to
+      # cut down on the number of queries in a situation such as the one described above.
+ #
+ # Since only one table is loaded at a time, conditions or orders cannot reference tables
+ # other than the main one. If this is the case Active Record falls back to the previously
+ # used LEFT OUTER JOIN based strategy. For example
+ #
+ # Post.includes([:author, :comments]).where(['comments.approved = ?', true])
+ #
+ # This will result in a single SQL query with joins along the lines of:
+ # <tt>LEFT OUTER JOIN comments ON comments.post_id = posts.id</tt> and
+ # <tt>LEFT OUTER JOIN authors ON authors.id = posts.author_id</tt>. Note that using conditions
+ # like this can have unintended consequences.
+ # In the above example posts with no approved comments are not returned at all, because
+ # the conditions apply to the SQL statement as a whole and not just to the association.
+ #
+ # You must disambiguate column references for this fallback to happen, for example
+ # <tt>order: "author.name DESC"</tt> will work but <tt>order: "name DESC"</tt> will not.
+ #
+ # If you want to load all posts (including posts with no approved comments) then write
+ # your own LEFT OUTER JOIN query using ON
+ #
+ # Post.joins("LEFT OUTER JOIN comments ON comments.post_id = posts.id AND comments.approved = '1'")
+ #
+ # In this case it is usually more natural to include an association which has conditions defined on it:
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :approved_comments, -> { where approved: true }, class_name: 'Comment'
+ # end
+ #
+ # Post.includes(:approved_comments)
+ #
+ # This will load posts and eager load the +approved_comments+ association, which contains
+ # only those comments that have been approved.
+ #
+ # If you eager load an association with a specified <tt>:limit</tt> option, it will be ignored,
+ # returning all the associated objects:
+ #
+ # class Picture < ActiveRecord::Base
+ # has_many :most_recent_comments, -> { order('id DESC').limit(10) }, class_name: 'Comment'
+ # end
+ #
+ # Picture.includes(:most_recent_comments).first.most_recent_comments # => returns all associated comments.
+ #
+ # Eager loading is supported with polymorphic associations.
+ #
+ # class Address < ActiveRecord::Base
+ # belongs_to :addressable, polymorphic: true
+ # end
+ #
+ # A call that tries to eager load the addressable model
+ #
+ # Address.includes(:addressable)
+ #
+ # This will execute one query to load the addresses and load the addressables with one
+ # query per addressable type.
+ # For example if all the addressables are either of class Person or Company then a total
+      # of 3 queries will be executed. The list of addressable types to load is determined based
+      # on the addresses already loaded. This is not supported if Active Record has to fall back
+ # to the previous implementation of eager loading and will raise <tt>ActiveRecord::EagerLoadPolymorphicError</tt>.
+ # The reason is that the parent model's type is a column value so its corresponding table
+ # name cannot be put in the +FROM+/+JOIN+ clauses of that query.
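+      #
+      # The queries executed would look roughly like this (a sketch; the exact
+      # SQL depends on the adapter):
+      #
+      #   Address.includes(:addressable).to_a
+      #   # SELECT "addresses".* FROM "addresses"
+      #   # SELECT "people".* FROM "people" WHERE "people"."id" IN (...)
+      #   # SELECT "companies".* FROM "companies" WHERE "companies"."id" IN (...)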
+ #
+ # == Table Aliasing
+ #
+ # Active Record uses table aliasing in the case that a table is referenced multiple times
+ # in a join. If a table is referenced only once, the standard table name is used. The
+ # second time, the table is aliased as <tt>#{reflection_name}_#{parent_table_name}</tt>.
+ # Indexes are appended for any more successive uses of the table name.
+ #
+ # Post.joins(:comments)
+ # # => SELECT ... FROM posts INNER JOIN comments ON ...
+ # Post.joins(:special_comments) # STI
+ # # => SELECT ... FROM posts INNER JOIN comments ON ... AND comments.type = 'SpecialComment'
+ # Post.joins(:comments, :special_comments) # special_comments is the reflection name, posts is the parent table name
+ # # => SELECT ... FROM posts INNER JOIN comments ON ... INNER JOIN comments special_comments_posts
+ #
+ # Acts as tree example:
+ #
+ # TreeMixin.joins(:children)
+ # # => SELECT ... FROM mixins INNER JOIN mixins childrens_mixins ...
+ # TreeMixin.joins(children: :parent)
+ # # => SELECT ... FROM mixins INNER JOIN mixins childrens_mixins ...
+ # INNER JOIN parents_mixins ...
+ # TreeMixin.joins(children: {parent: :children})
+ # # => SELECT ... FROM mixins INNER JOIN mixins childrens_mixins ...
+ # INNER JOIN parents_mixins ...
+ # INNER JOIN mixins childrens_mixins_2
+ #
+ # Has and Belongs to Many join tables use the same idea, but add a <tt>_join</tt> suffix:
+ #
+ # Post.joins(:categories)
+ # # => SELECT ... FROM posts INNER JOIN categories_posts ... INNER JOIN categories ...
+ # Post.joins(categories: :posts)
+ # # => SELECT ... FROM posts INNER JOIN categories_posts ... INNER JOIN categories ...
+ # INNER JOIN categories_posts posts_categories_join INNER JOIN posts posts_categories
+ # Post.joins(categories: {posts: :categories})
+ # # => SELECT ... FROM posts INNER JOIN categories_posts ... INNER JOIN categories ...
+ # INNER JOIN categories_posts posts_categories_join INNER JOIN posts posts_categories
+ # INNER JOIN categories_posts categories_posts_join INNER JOIN categories categories_posts_2
+ #
+      # If you wish to specify your own custom joins using the <tt>joins</tt> method, those table
+ # names will take precedence over the eager associations:
+ #
+ # Post.joins(:comments).joins("inner join comments ...")
+      #   # => SELECT ... FROM posts INNER JOIN comments comments_posts ON ... INNER JOIN comments ...
+ # Post.joins(:comments, :special_comments).joins("inner join comments ...")
+ # # => SELECT ... FROM posts INNER JOIN comments comments_posts ON ...
+ # INNER JOIN comments special_comments_posts ...
+ # INNER JOIN comments ...
+ #
+      # Table aliases are automatically truncated to the maximum length of table identifiers
+      # allowed by the specific database.
+ #
+ # == Modules
+ #
+ # By default, associations will look for objects within the current module scope. Consider:
+ #
+ # module MyApplication
+ # module Business
+ # class Firm < ActiveRecord::Base
+ # has_many :clients
+ # end
+ #
+ # class Client < ActiveRecord::Base; end
+ # end
+ # end
+ #
+ # When <tt>Firm#clients</tt> is called, it will in turn call
+ # <tt>MyApplication::Business::Client.find_all_by_firm_id(firm.id)</tt>.
+ # If you want to associate with a class in another module scope, this can be done by
+ # specifying the complete class name.
+ #
+ # module MyApplication
+ # module Business
+ # class Firm < ActiveRecord::Base; end
+ # end
+ #
+ # module Billing
+ # class Account < ActiveRecord::Base
+ # belongs_to :firm, class_name: "MyApplication::Business::Firm"
+ # end
+ # end
+ # end
+ #
+ # == Bi-directional associations
+ #
+ # When you specify an association there is usually an association on the associated model
+ # that specifies the same relationship in reverse. For example, with the following models:
+ #
+ # class Dungeon < ActiveRecord::Base
+ # has_many :traps
+ # has_one :evil_wizard
+ # end
+ #
+ # class Trap < ActiveRecord::Base
+ # belongs_to :dungeon
+ # end
+ #
+ # class EvilWizard < ActiveRecord::Base
+ # belongs_to :dungeon
+ # end
+ #
+ # The +traps+ association on +Dungeon+ and the +dungeon+ association on +Trap+ are
+ # the inverse of each other and the inverse of the +dungeon+ association on +EvilWizard+
+ # is the +evil_wizard+ association on +Dungeon+ (and vice-versa). By default,
+ # Active Record doesn't know anything about these inverse relationships and so no object
+ # loading optimization is possible. For example:
+ #
+ # d = Dungeon.first
+ # t = d.traps.first
+ # d.level == t.dungeon.level # => true
+ # d.level = 10
+ # d.level == t.dungeon.level # => false
+ #
+ # The +Dungeon+ instances +d+ and <tt>t.dungeon</tt> in the above example refer to
+ # the same object data from the database, but are actually different in-memory copies
+ # of that data. Specifying the <tt>:inverse_of</tt> option on associations lets you tell
+ # Active Record about inverse relationships and it will optimise object loading. For
+ # example, if we changed our model definitions to:
+ #
+ # class Dungeon < ActiveRecord::Base
+ # has_many :traps, inverse_of: :dungeon
+ # has_one :evil_wizard, inverse_of: :dungeon
+ # end
+ #
+ # class Trap < ActiveRecord::Base
+ # belongs_to :dungeon, inverse_of: :traps
+ # end
+ #
+ # class EvilWizard < ActiveRecord::Base
+ # belongs_to :dungeon, inverse_of: :evil_wizard
+ # end
+ #
+ # Then, from our code snippet above, +d+ and <tt>t.dungeon</tt> are actually the same
+ # in-memory instance and our final <tt>d.level == t.dungeon.level</tt> will return +true+.
+ #
+ # There are limitations to <tt>:inverse_of</tt> support:
+ #
+ # * does not work with <tt>:through</tt> associations.
+ # * does not work with <tt>:polymorphic</tt> associations.
+ # * for +belongs_to+ associations +has_many+ inverse associations are ignored.
+ #
+ # == Deleting from associations
+ #
+ # === Dependent associations
+ #
+ # +has_many+, +has_one+ and +belongs_to+ associations support the <tt>:dependent</tt> option.
+ # This allows you to specify that associated records should be deleted when the owner is
+ # deleted.
+ #
+ # For example:
+ #
+      #   class Author < ActiveRecord::Base
+ # has_many :posts, dependent: :destroy
+ # end
+ # Author.find(1).destroy # => Will destroy all of the author's posts, too
+ #
+ # The <tt>:dependent</tt> option can have different values which specify how the deletion
+ # is done. For more information, see the documentation for this option on the different
+ # specific association types. When no option is given, the behavior is to do nothing
+ # with the associated records when destroying a record.
+ #
+ # Note that <tt>:dependent</tt> is implemented using Rails' callback
+ # system, which works by processing callbacks in order. Therefore, other
+ # callbacks declared either before or after the <tt>:dependent</tt> option
+ # can affect what it does.
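+      #
+      # A sketch of how declaration order interacts with <tt>dependent: :destroy</tt>
+      # (the callback name is illustrative):
+      #
+      #   class Author < ActiveRecord::Base
+      #     before_destroy :log_post_count   # runs before the posts are destroyed
+      #     has_many :posts, dependent: :destroy
+      #
+      #     private
+      #       def log_post_count
+      #         puts posts.count
+      #       end
+      #   end
+      #
+      # Declaring the +before_destroy+ after the association instead would run it
+      # once the posts have already been destroyed.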
+ #
+ # === Delete or destroy?
+ #
+ # +has_many+ and +has_and_belongs_to_many+ associations have the methods <tt>destroy</tt>,
+ # <tt>delete</tt>, <tt>destroy_all</tt> and <tt>delete_all</tt>.
+ #
+ # For +has_and_belongs_to_many+, <tt>delete</tt> and <tt>destroy</tt> are the same: they
+ # cause the records in the join table to be removed.
+ #
+ # For +has_many+, <tt>destroy</tt> and <tt>destroy_all</tt> will always call the <tt>destroy</tt> method of the
+ # record(s) being removed so that callbacks are run. However <tt>delete</tt> and <tt>delete_all</tt> will either
+ # do the deletion according to the strategy specified by the <tt>:dependent</tt> option, or
+ # if no <tt>:dependent</tt> option is given, then it will follow the default strategy.
+ # The default strategy is <tt>:nullify</tt> (set the foreign keys to <tt>nil</tt>), except for
+ # +has_many+ <tt>:through</tt>, where the default strategy is <tt>delete_all</tt> (delete
+ # the join records, without running their callbacks).
+ #
+ # There is also a <tt>clear</tt> method which is the same as <tt>delete_all</tt>, except that
+ # it returns the association rather than the records which have been deleted.
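+      #
+      # A short sketch, assuming a plain +has_many+ with no <tt>:dependent</tt>
+      # option (so the default <tt>:nullify</tt> strategy applies):
+      #
+      #   class Post < ActiveRecord::Base
+      #     has_many :comments
+      #   end
+      #
+      #   post.comments.delete(comment)    # sets comment.post_id to NULL, no callbacks on comment
+      #   post.comments.destroy(comment)   # calls comment.destroy, running its callbacks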
+ #
+ # === What gets deleted?
+ #
+ # There is a potential pitfall here: +has_and_belongs_to_many+ and +has_many+ <tt>:through</tt>
+ # associations have records in join tables, as well as the associated records. So when we
+ # call one of these deletion methods, what exactly should be deleted?
+ #
+ # The answer is that it is assumed that deletion on an association is about removing the
+ # <i>link</i> between the owner and the associated object(s), rather than necessarily the
+ # associated objects themselves. So with +has_and_belongs_to_many+ and +has_many+
+ # <tt>:through</tt>, the join records will be deleted, but the associated records won't.
+ #
+ # This makes sense if you think about it: if you were to call <tt>post.tags.delete(Tag.find_by(name: 'food'))</tt>
+ # you would want the 'food' tag to be unlinked from the post, rather than for the tag itself
+ # to be removed from the database.
+ #
+ # However, there are examples where this strategy doesn't make sense. For example, suppose
+ # a person has many projects, and each project has many tasks. If we deleted one of a person's
+ # tasks, we would probably not want the project to be deleted. In this scenario, the delete method
+ # won't actually work: it can only be used if the association on the join model is a
+ # +belongs_to+. In other situations you are expected to perform operations directly on
+ # either the associated records or the <tt>:through</tt> association.
+ #
+ # With a regular +has_many+ there is no distinction between the "associated records"
+ # and the "link", so there is only one choice for what gets deleted.
+ #
+ # With +has_and_belongs_to_many+ and +has_many+ <tt>:through</tt>, if you want to delete the
+ # associated records themselves, you can always do something along the lines of
+ # <tt>person.tasks.each(&:destroy)</tt>.
+ #
+ # == Type safety with <tt>ActiveRecord::AssociationTypeMismatch</tt>
+ #
+ # If you attempt to assign an object to an association that doesn't match the inferred
+ # or specified <tt>:class_name</tt>, you'll get an <tt>ActiveRecord::AssociationTypeMismatch</tt>.
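+      #
+      # For example (a sketch reusing the Post and Author models from earlier sections):
+      #
+      #   post.author = Comment.new
+      #   # => raises ActiveRecord::AssociationTypeMismatch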
+ #
+ # == Options
+ #
+      # All of the association macros can be specialized through options. This makes it
+      # possible to handle cases more complex than the simple and guessable ones.
+ module ClassMethods
+ # Specifies a one-to-many association. The following methods for retrieval and query of
+ # collections of associated objects will be added:
+ #
+ # +collection+ is a placeholder for the symbol passed as the +name+ argument, so
+ # <tt>has_many :clients</tt> would add among others <tt>clients.empty?</tt>.
+ #
+ # [collection(force_reload = false)]
+ # Returns an array of all the associated objects.
+ # An empty array is returned if none are found.
+ # [collection<<(object, ...)]
+ # Adds one or more objects to the collection by setting their foreign keys to the collection's primary key.
+ # Note that this operation instantly fires update SQL without waiting for the save or update call on the
+ # parent object, unless the parent object is a new record.
+ # [collection.delete(object, ...)]
+ # Removes one or more objects from the collection by setting their foreign keys to +NULL+.
+ # Objects will be in addition destroyed if they're associated with <tt>dependent: :destroy</tt>,
+ # and deleted if they're associated with <tt>dependent: :delete_all</tt>.
+ #
+ # If the <tt>:through</tt> option is used, then the join records are deleted (rather than
+ # nullified) by default, but you can specify <tt>dependent: :destroy</tt> or
+ # <tt>dependent: :nullify</tt> to override this.
+ # [collection.destroy(object, ...)]
+ # Removes one or more objects from the collection by running <tt>destroy</tt> on
+ # each record, regardless of any dependent option, ensuring callbacks are run.
+ #
+ # If the <tt>:through</tt> option is used, then the join records are destroyed
+ # instead, not the objects themselves.
+ # [collection=objects]
+      #   Replaces the collection's content by deleting and adding objects as appropriate. If the <tt>:through</tt>
+      #   option is used, callbacks in the join models are triggered except destroy callbacks, since deletion is
+      #   direct.
+ # [collection_singular_ids]
+ # Returns an array of the associated objects' ids
+ # [collection_singular_ids=ids]
+ # Replace the collection with the objects identified by the primary keys in +ids+. This
+ # method loads the models and calls <tt>collection=</tt>. See above.
+ # [collection.clear]
+ # Removes every object from the collection. This destroys the associated objects if they
+ # are associated with <tt>dependent: :destroy</tt>, deletes them directly from the
+ # database if <tt>dependent: :delete_all</tt>, otherwise sets their foreign keys to +NULL+.
+      #   If the <tt>:through</tt> option is used, no destroy callbacks are invoked on the join models.
+      #   Join models are directly deleted.
+ # [collection.empty?]
+ # Returns +true+ if there are no associated objects.
+ # [collection.size]
+ # Returns the number of associated objects.
+ # [collection.find(...)]
+ # Finds an associated object according to the same rules as <tt>ActiveRecord::Base.find</tt>.
+ # [collection.exists?(...)]
+ # Checks whether an associated object with the given conditions exists.
+ # Uses the same rules as <tt>ActiveRecord::Base.exists?</tt>.
+ # [collection.build(attributes = {}, ...)]
+ # Returns one or more new objects of the collection type that have been instantiated
+ # with +attributes+ and linked to this object through a foreign key, but have not yet
+ # been saved.
+ # [collection.create(attributes = {})]
+ # Returns a new object of the collection type that has been instantiated
+ # with +attributes+, linked to this object through a foreign key, and that has already
+ # been saved (if it passed the validation). *Note*: This only works if the base model
+ # already exists in the DB, not if it is a new (unsaved) record!
+ # [collection.create!(attributes = {})]
+ # Does the same as <tt>collection.create</tt>, but raises <tt>ActiveRecord::RecordInvalid</tt>
+ # if the record is invalid.
+ #
+ # === Example
+ #
+ # A <tt>Firm</tt> class declares <tt>has_many :clients</tt>, which will add:
+ # * <tt>Firm#clients</tt> (similar to <tt>Client.where(firm_id: id)</tt>)
+ # * <tt>Firm#clients<<</tt>
+ # * <tt>Firm#clients.delete</tt>
+ # * <tt>Firm#clients.destroy</tt>
+ # * <tt>Firm#clients=</tt>
+ # * <tt>Firm#client_ids</tt>
+ # * <tt>Firm#client_ids=</tt>
+ # * <tt>Firm#clients.clear</tt>
+ # * <tt>Firm#clients.empty?</tt> (similar to <tt>firm.clients.size == 0</tt>)
+ # * <tt>Firm#clients.size</tt> (similar to <tt>Client.count "firm_id = #{id}"</tt>)
+ # * <tt>Firm#clients.find</tt> (similar to <tt>Client.where(firm_id: id).find(id)</tt>)
+ # * <tt>Firm#clients.exists?(name: 'ACME')</tt> (similar to <tt>Client.exists?(name: 'ACME', firm_id: firm.id)</tt>)
+ # * <tt>Firm#clients.build</tt> (similar to <tt>Client.new("firm_id" => id)</tt>)
+ # * <tt>Firm#clients.create</tt> (similar to <tt>c = Client.new("firm_id" => id); c.save; c</tt>)
+ # * <tt>Firm#clients.create!</tt> (similar to <tt>c = Client.new("firm_id" => id); c.save!</tt>)
+ # The declaration can also include an +options+ hash to specialize the behavior of the association.
+ #
+ # === Options
+ # [:class_name]
+ # Specify the class name of the association. Use it only if that name can't be inferred
+ # from the association name. So <tt>has_many :products</tt> will by default be linked
+ # to the Product class, but if the real class name is SpecialProduct, you'll have to
+ # specify it with this option.
+ # [:foreign_key]
+ # Specify the foreign key used for the association. By default this is guessed to be the name
+ # of this class in lower-case and "_id" suffixed. So a Person class that makes a +has_many+
+ # association will use "person_id" as the default <tt>:foreign_key</tt>.
+ # [:primary_key]
+ # Specify the method that returns the primary key used for the association. By default this is +id+.
+ # [:dependent]
+ # Controls what happens to the associated objects when
+ # their owner is destroyed. Note that these are implemented as
+ # callbacks, and Rails executes callbacks in order. Therefore, other
+ # similar callbacks may affect the <tt>:dependent</tt> behavior, and the
+ # <tt>:dependent</tt> behavior may affect other callbacks.
+ #
+ # * <tt>:destroy</tt> causes all the associated objects to also be destroyed.
+ # * <tt>:delete_all</tt> causes all the associated objects to be deleted directly from the database (so callbacks will not be executed).
+ # * <tt>:nullify</tt> causes the foreign keys to be set to +NULL+. Callbacks are not executed.
+ # * <tt>:restrict_with_exception</tt> causes an exception to be raised if there are any associated records.
+ # * <tt>:restrict_with_error</tt> causes an error to be added to the owner if there are any associated objects.
+ #
+ # If using with the <tt>:through</tt> option, the association on the join model must be
+ # a +belongs_to+, and the records which get deleted are the join records, rather than
+ # the associated records.
+ # [:counter_cache]
+      #   This option can be used to configure a custom-named <tt>:counter_cache</tt>. You only need this option
+      #   when you have customized the name of your <tt>:counter_cache</tt> on the <tt>belongs_to</tt> association.
+ # [:as]
+ # Specifies a polymorphic interface (See <tt>belongs_to</tt>).
+ # [:through]
+ # Specifies an association through which to perform the query. This can be any other type
+ # of association, including other <tt>:through</tt> associations. Options for <tt>:class_name</tt>,
+ # <tt>:primary_key</tt> and <tt>:foreign_key</tt> are ignored, as the association uses the
+ # source reflection.
+ #
+ # If the association on the join model is a +belongs_to+, the collection can be modified
+ # and the records on the <tt>:through</tt> model will be automatically created and removed
+ # as appropriate. Otherwise, the collection is read-only, so you should manipulate the
+ # <tt>:through</tt> association directly.
+ #
+ # If you are going to modify the association (rather than just read from it), then it is
+ # a good idea to set the <tt>:inverse_of</tt> option on the source association on the
+ # join model. This allows associated records to be built which will automatically create
+ # the appropriate join model records when they are saved. (See the 'Association Join Models'
+ # section above.)
+ # [:source]
+ # Specifies the source association name used by <tt>has_many :through</tt> queries.
+ # Only use it if the name cannot be inferred from the association.
+ # <tt>has_many :subscribers, through: :subscriptions</tt> will look for either <tt>:subscribers</tt> or
+ # <tt>:subscriber</tt> on Subscription, unless a <tt>:source</tt> is given.
+ # [:source_type]
+ # Specifies type of the source association used by <tt>has_many :through</tt> queries where the source
+ # association is a polymorphic +belongs_to+.
+ # [:validate]
+      #   If +false+, don't validate the associated objects when saving the parent object. +true+ by default.
+ # [:autosave]
+ # If true, always save the associated objects or destroy them if marked for destruction,
+ # when saving the parent object. If false, never save or destroy the associated objects.
+ # By default, only save associated objects that are new records. This option is implemented as a
+ # +before_save+ callback. Because callbacks are run in the order they are defined, associated objects
+ # may need to be explicitly saved in any user-defined +before_save+ callbacks.
+ #
+ # Note that <tt>accepts_nested_attributes_for</tt> sets <tt>:autosave</tt> to <tt>true</tt>.
+ # [:inverse_of]
+ # Specifies the name of the <tt>belongs_to</tt> association on the associated object
+ # that is the inverse of this <tt>has_many</tt> association. Does not work in combination
+ # with <tt>:through</tt> or <tt>:as</tt> options.
+ # See ActiveRecord::Associations::ClassMethods's overview on Bi-directional associations for more detail.
+ #
+ # Option examples:
+ # has_many :comments, -> { order "posted_on" }
+ # has_many :comments, -> { includes :author }
+ # has_many :people, -> { where("deleted = 0").order("name") }, class_name: "Person"
+ # has_many :tracks, -> { order "position" }, dependent: :destroy
+ # has_many :comments, dependent: :nullify
+ # has_many :tags, as: :taggable
+ # has_many :reports, -> { readonly }
+ # has_many :subscribers, through: :subscriptions, source: :user
+ def has_many(name, scope = nil, options = {}, &extension)
+ reflection = Builder::HasMany.build(self, name, scope, options, &extension)
+ Reflection.add_reflection self, name, reflection
+ end
+
+ # Specifies a one-to-one association with another class. This method should only be used
+ # if the other class contains the foreign key. If the current class contains the foreign key,
+ # then you should use +belongs_to+ instead. See also ActiveRecord::Associations::ClassMethods's overview
+ # on when to use +has_one+ and when to use +belongs_to+.
+ #
+ # The following methods for retrieval and query of a single associated object will be added:
+ #
+ # +association+ is a placeholder for the symbol passed as the +name+ argument, so
+ # <tt>has_one :manager</tt> would add among others <tt>manager.nil?</tt>.
+ #
+ # [association(force_reload = false)]
+ # Returns the associated object. +nil+ is returned if none is found.
+ # [association=(associate)]
+ # Assigns the associate object, extracts the primary key, sets it as the foreign key,
+ # and saves the associate object. To avoid database inconsistencies, permanently deletes an existing
+      #   associated object when assigning a new one, even if the new one isn't saved to the database.
+ # [build_association(attributes = {})]
+ # Returns a new object of the associated type that has been instantiated
+ # with +attributes+ and linked to this object through a foreign key, but has not
+ # yet been saved.
+ # [create_association(attributes = {})]
+ # Returns a new object of the associated type that has been instantiated
+ # with +attributes+, linked to this object through a foreign key, and that
+ # has already been saved (if it passed the validation).
+ # [create_association!(attributes = {})]
+ # Does the same as <tt>create_association</tt>, but raises <tt>ActiveRecord::RecordInvalid</tt>
+ # if the record is invalid.
+ #
+ # === Example
+ #
+ # An Account class declares <tt>has_one :beneficiary</tt>, which will add:
+ # * <tt>Account#beneficiary</tt> (similar to <tt>Beneficiary.where(account_id: id).first</tt>)
+ # * <tt>Account#beneficiary=(beneficiary)</tt> (similar to <tt>beneficiary.account_id = account.id; beneficiary.save</tt>)
+ # * <tt>Account#build_beneficiary</tt> (similar to <tt>Beneficiary.new("account_id" => id)</tt>)
+ # * <tt>Account#create_beneficiary</tt> (similar to <tt>b = Beneficiary.new("account_id" => id); b.save; b</tt>)
+ # * <tt>Account#create_beneficiary!</tt> (similar to <tt>b = Beneficiary.new("account_id" => id); b.save!; b</tt>)
+ #
+ # === Options
+ #
+ # The declaration can also include an +options+ hash to specialize the behavior of the association.
+ #
+ # Options are:
+ # [:class_name]
+ # Specify the class name of the association. Use it only if that name can't be inferred
+ # from the association name. So <tt>has_one :manager</tt> will by default be linked to the Manager class, but
+ # if the real class name is Person, you'll have to specify it with this option.
+ # [:dependent]
+ # Controls what happens to the associated object when
+ # its owner is destroyed:
+ #
+ # * <tt>:destroy</tt> causes the associated object to also be destroyed
+ # * <tt>:delete</tt> causes the associated object to be deleted directly from the database (so callbacks will not execute)
+ # * <tt>:nullify</tt> causes the foreign key to be set to +NULL+. Callbacks are not executed.
+ # * <tt>:restrict_with_exception</tt> causes an exception to be raised if there is an associated record
+ # * <tt>:restrict_with_error</tt> causes an error to be added to the owner if there is an associated object
+ # [:foreign_key]
+ # Specify the foreign key used for the association. By default this is guessed to be the name
+ # of this class in lower-case and "_id" suffixed. So a Person class that makes a +has_one+ association
+ # will use "person_id" as the default <tt>:foreign_key</tt>.
+ # [:primary_key]
+ # Specify the method that returns the primary key used for the association. By default this is +id+.
+ # [:as]
+ # Specifies a polymorphic interface (See <tt>belongs_to</tt>).
+ # [:through]
+ # Specifies a Join Model through which to perform the query. Options for <tt>:class_name</tt>,
+ # <tt>:primary_key</tt>, and <tt>:foreign_key</tt> are ignored, as the association uses the
+ # source reflection. You can only use a <tt>:through</tt> query through a <tt>has_one</tt>
+ # or <tt>belongs_to</tt> association on the join model.
+ # [:source]
+ # Specifies the source association name used by <tt>has_one :through</tt> queries.
+ # Only use it if the name cannot be inferred from the association.
+ # <tt>has_one :favorite, through: :favorites</tt> will look for a
+ # <tt>:favorite</tt> on Favorite, unless a <tt>:source</tt> is given.
+ # [:source_type]
+ # Specifies type of the source association used by <tt>has_one :through</tt> queries where the source
+ # association is a polymorphic +belongs_to+.
+ # [:validate]
+ # If +false+, don't validate the associated object when saving the parent object. +false+ by default.
+ # [:autosave]
+ # If true, always save the associated object or destroy it if marked for destruction,
+ # when saving the parent object. If false, never save or destroy the associated object.
+ # By default, only save the associated object if it's a new record.
+ #
+ # Note that <tt>accepts_nested_attributes_for</tt> sets <tt>:autosave</tt> to <tt>true</tt>.
+ # [:inverse_of]
+ # Specifies the name of the <tt>belongs_to</tt> association on the associated object
+ # that is the inverse of this <tt>has_one</tt> association. Does not work in combination
+ # with <tt>:through</tt> or <tt>:as</tt> options.
+ # See ActiveRecord::Associations::ClassMethods's overview on Bi-directional associations for more detail.
+ # [:required]
+ # When set to +true+, the association will also have its presence validated.
+ # This will validate the association itself, not the id. You can use
+ # +:inverse_of+ to avoid an extra query during validation.
+ #
+ # Option examples:
+ # has_one :credit_card, dependent: :destroy # destroys the associated credit card
+      #   has_one :credit_card, dependent: :nullify  # updates the associated record's foreign
+ # # key value to NULL rather than destroying it
+ # has_one :last_comment, -> { order 'posted_on' }, class_name: "Comment"
+ # has_one :project_manager, -> { where role: 'project_manager' }, class_name: "Person"
+ # has_one :attachment, as: :attachable
+      #   has_one :boss, -> { readonly }
+ # has_one :club, through: :membership
+ # has_one :primary_address, -> { where primary: true }, through: :addressables, source: :addressable
+ # has_one :credit_card, required: true
+ def has_one(name, scope = nil, options = {})
+ reflection = Builder::HasOne.build(self, name, scope, options)
+ Reflection.add_reflection self, name, reflection
+ end
+
+ # Specifies a one-to-one association with another class. This method should only be used
+ # if this class contains the foreign key. If the other class contains the foreign key,
+ # then you should use +has_one+ instead. See also ActiveRecord::Associations::ClassMethods's overview
+ # on when to use +has_one+ and when to use +belongs_to+.
+ #
+ # Methods will be added for retrieval and query for a single associated object, for which
+ # this object holds an id:
+ #
+ # +association+ is a placeholder for the symbol passed as the +name+ argument, so
+ # <tt>belongs_to :author</tt> would add among others <tt>author.nil?</tt>.
+ #
+ # [association(force_reload = false)]
+ # Returns the associated object. +nil+ is returned if none is found.
+ # [association=(associate)]
+ # Assigns the associate object, extracts the primary key, and sets it as the foreign key.
+ # [build_association(attributes = {})]
+ # Returns a new object of the associated type that has been instantiated
+ # with +attributes+ and linked to this object through a foreign key, but has not yet been saved.
+ # [create_association(attributes = {})]
+ # Returns a new object of the associated type that has been instantiated
+ # with +attributes+, linked to this object through a foreign key, and that
+ # has already been saved (if it passed the validation).
+ # [create_association!(attributes = {})]
+ # Does the same as <tt>create_association</tt>, but raises <tt>ActiveRecord::RecordInvalid</tt>
+ # if the record is invalid.
+ #
+ # === Example
+ #
+ # A Post class declares <tt>belongs_to :author</tt>, which will add:
+ # * <tt>Post#author</tt> (similar to <tt>Author.find(author_id)</tt>)
+ # * <tt>Post#author=(author)</tt> (similar to <tt>post.author_id = author.id</tt>)
+ # * <tt>Post#build_author</tt> (similar to <tt>post.author = Author.new</tt>)
+ # * <tt>Post#create_author</tt> (similar to <tt>post.author = Author.new; post.author.save; post.author</tt>)
+ # * <tt>Post#create_author!</tt> (similar to <tt>post.author = Author.new; post.author.save!; post.author</tt>)
+ # The declaration can also include an +options+ hash to specialize the behavior of the association.
+ #
+ # === Options
+ #
+ # [:class_name]
+ # Specify the class name of the association. Use it only if that name can't be inferred
+ # from the association name. So <tt>belongs_to :author</tt> will by default be linked to the Author class, but
+ # if the real class name is Person, you'll have to specify it with this option.
+ # [:foreign_key]
+ # Specify the foreign key used for the association. By default this is guessed to be the name
+ # of the association with an "_id" suffix. So a class that defines a <tt>belongs_to :person</tt>
+ # association will use "person_id" as the default <tt>:foreign_key</tt>. Similarly,
+ # <tt>belongs_to :favorite_person, class_name: "Person"</tt> will use a foreign key
+ # of "favorite_person_id".
+ # [:foreign_type]
+ # Specify the column used to store the associated object's type, if this is a polymorphic
+ # association. By default this is guessed to be the name of the association with a "_type"
+ # suffix. So a class that defines a <tt>belongs_to :taggable, polymorphic: true</tt>
+ # association will use "taggable_type" as the default <tt>:foreign_type</tt>.
+ # [:primary_key]
+ # Specify the method that returns the primary key of associated object used for the association.
+ # By default this is id.
+ # [:dependent]
+ # If set to <tt>:destroy</tt>, the associated object is destroyed when this object is. If set to
+ # <tt>:delete</tt>, the associated object is deleted *without* calling its destroy method.
+ # This option should not be specified when <tt>belongs_to</tt> is used in conjunction with
+ # a <tt>has_many</tt> relationship on another class because of the potential to leave
+ # orphaned records behind.
+ # [:counter_cache]
+ # Caches the number of belonging objects on the associate class through the use of +increment_counter+
+ # and +decrement_counter+. The counter cache is incremented when an object of this
+ # class is created and decremented when it's destroyed. This requires that a column
+ # named <tt>#{table_name}_count</tt> (such as +comments_count+ for a belonging Comment class)
+      #   is used on the associate class (such as a Post class) - that is, the migration for
+      #   <tt>#{table_name}_count</tt> is created on the associate class (such that <tt>Post.first.comments_count</tt>
+      #   will return the cached count; see note below). You can also specify a custom counter
+ # cache column by providing a column name instead of a +true+/+false+ value to this
+ # option (e.g., <tt>counter_cache: :my_custom_counter</tt>.)
+ # Note: Specifying a counter cache will add it to that model's list of readonly attributes
+ # using +attr_readonly+.
+ # [:polymorphic]
+ # Specify this association is a polymorphic association by passing +true+.
+ # Note: If you've enabled the counter cache, then you may want to add the counter cache attribute
+ # to the +attr_readonly+ list in the associated classes (e.g. <tt>class Post; attr_readonly :comments_count; end</tt>).
+ # [:validate]
+ # If +false+, don't validate the associated objects when saving the parent object. +false+ by default.
+ # [:autosave]
+ # If true, always save the associated object or destroy it if marked for destruction, when
+ # saving the parent object.
+ # If false, never save or destroy the associated object.
+ # By default, only save the associated object if it's a new record.
+ #
+ # Note that <tt>accepts_nested_attributes_for</tt> sets <tt>:autosave</tt> to <tt>true</tt>.
+ # [:touch]
+ # If true, the associated object will be touched (the updated_at/on attributes set to current time)
+ # when this record is either saved or destroyed. If you specify a symbol, that attribute
+ # will be updated with the current time in addition to the updated_at/on attribute.
+ # [:inverse_of]
+ # Specifies the name of the <tt>has_one</tt> or <tt>has_many</tt> association on the associated
+ # object that is the inverse of this <tt>belongs_to</tt> association. Does not work in
+      #   combination with the <tt>:polymorphic</tt> option.
+ # See ActiveRecord::Associations::ClassMethods's overview on Bi-directional associations for more detail.
+ # [:required]
+ # When set to +true+, the association will also have its presence validated.
+ # This will validate the association itself, not the id. You can use
+ # +:inverse_of+ to avoid an extra query during validation.
+ #
+ # Option examples:
+ # belongs_to :firm, foreign_key: "client_of"
+ # belongs_to :person, primary_key: "name", foreign_key: "person_name"
+ # belongs_to :author, class_name: "Person", foreign_key: "author_id"
+ # belongs_to :valid_coupon, ->(o) { where "discounts > #{o.payments_count}" },
+ # class_name: "Coupon", foreign_key: "coupon_id"
+ # belongs_to :attachable, polymorphic: true
+ # belongs_to :project, readonly: true
+ # belongs_to :post, counter_cache: true
+ # belongs_to :company, touch: true
+ # belongs_to :company, touch: :employees_last_updated_at
+ # belongs_to :company, required: true
+ def belongs_to(name, scope = nil, options = {})
+ reflection = Builder::BelongsTo.build(self, name, scope, options)
+ Reflection.add_reflection self, name, reflection
+ end
+
+ # Specifies a many-to-many relationship with another class. This associates two classes via an
+ # intermediate join table. Unless the join table is explicitly specified as an option, it is
+ # guessed using the lexical order of the class names. So a join between Developer and Project
+ # will give the default join table name of "developers_projects" because "D" precedes "P" alphabetically.
+ # Note that this precedence is calculated using the <tt><</tt> operator for String. This
+ # means that if the strings are of different lengths, and the strings are equal when compared
+ # up to the shortest length, then the longer string is considered of higher
+ # lexical precedence than the shorter one. For example, one would expect the tables "paper_boxes" and "papers"
+ # to generate a join table name of "papers_paper_boxes" because of the length of the name "paper_boxes",
+ # but it in fact generates a join table name of "paper_boxes_papers". Be aware of this caveat, and use the
+ # custom <tt>:join_table</tt> option if you need to.
+ # If your tables share a common prefix, it will only appear once at the beginning. For example,
+ # the tables "catalog_categories" and "catalog_products" generate a join table name of "catalog_categories_products".
+ #
+ # The join table should not have a primary key or a model associated with it. You must manually generate the
+ # join table with a migration such as this:
+ #
+ # class CreateDevelopersProjectsJoinTable < ActiveRecord::Migration
+ # def change
+ # create_table :developers_projects, id: false do |t|
+ # t.integer :developer_id
+ # t.integer :project_id
+ # end
+ # end
+ # end
+ #
+ # It's also a good idea to add indexes to each of those columns to speed up the joins process.
+ # However, in MySQL it is advised to add a compound index for both of the columns as MySQL only
+ # uses one index per table during the lookup.
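+      #
+      # For example, such a compound index could be added in a migration along
+      # these lines (a sketch; the migration name is illustrative):
+      #
+      #   class AddIndexToDevelopersProjects < ActiveRecord::Migration
+      #     def change
+      #       add_index :developers_projects, [:developer_id, :project_id]
+      #     end
+      #   end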
+ #
+ # Adds the following methods for retrieval and query:
+ #
+ # +collection+ is a placeholder for the symbol passed as the +name+ argument, so
+ # <tt>has_and_belongs_to_many :categories</tt> would add among others <tt>categories.empty?</tt>.
+ #
+ # [collection(force_reload = false)]
+ # Returns an array of all the associated objects.
+ # An empty array is returned if none are found.
+ # [collection<<(object, ...)]
+ # Adds one or more objects to the collection by creating associations in the join table
+ # (<tt>collection.push</tt> and <tt>collection.concat</tt> are aliases to this method).
+ # Note that this operation instantly fires update SQL without waiting for the save or update call on the
+ # parent object, unless the parent object is a new record.
+ # [collection.delete(object, ...)]
+ # Removes one or more objects from the collection by removing their associations from the join table.
+ # This does not destroy the objects.
+ # [collection.destroy(object, ...)]
+ # Removes one or more objects from the collection by running destroy on each association in the join table, overriding any dependent option.
+ # This does not destroy the objects.
+ # [collection=objects]
+ # Replaces the collection's content by deleting and adding objects as appropriate.
+ # [collection_singular_ids]
+ # Returns an array of the associated objects' ids.
+ # [collection_singular_ids=ids]
+ # Replace the collection by the objects identified by the primary keys in +ids+.
+ # [collection.clear]
+ # Removes every object from the collection. This does not destroy the objects.
+ # [collection.empty?]
+ # Returns +true+ if there are no associated objects.
+ # [collection.size]
+ # Returns the number of associated objects.
+ # [collection.find(id)]
+ # Finds an associated object responding to the +id+ and that
+ # meets the condition that it has to be associated with this object.
+ # Uses the same rules as <tt>ActiveRecord::Base.find</tt>.
+ # [collection.exists?(...)]
+ # Checks whether an associated object with the given conditions exists.
+ # Uses the same rules as <tt>ActiveRecord::Base.exists?</tt>.
+ # [collection.build(attributes = {})]
+ # Returns a new object of the collection type that has been instantiated
+ # with +attributes+ and linked to this object through the join table, but has not yet been saved.
+ # [collection.create(attributes = {})]
+ # Returns a new object of the collection type that has been instantiated
+ # with +attributes+, linked to this object through the join table, and that has already been
+ # saved (if it passed the validation).
+ #
+ # === Example
+ #
+ # A Developer class declares <tt>has_and_belongs_to_many :projects</tt>, which will add:
+ # * <tt>Developer#projects</tt>
+ # * <tt>Developer#projects<<</tt>
+ # * <tt>Developer#projects.delete</tt>
+ # * <tt>Developer#projects.destroy</tt>
+ # * <tt>Developer#projects=</tt>
+ # * <tt>Developer#project_ids</tt>
+ # * <tt>Developer#project_ids=</tt>
+ # * <tt>Developer#projects.clear</tt>
+ # * <tt>Developer#projects.empty?</tt>
+ # * <tt>Developer#projects.size</tt>
+ # * <tt>Developer#projects.find(id)</tt>
+ # * <tt>Developer#projects.exists?(...)</tt>
+ # * <tt>Developer#projects.build</tt> (similar to <tt>Project.new("developer_id" => id)</tt>)
+ # * <tt>Developer#projects.create</tt> (similar to <tt>c = Project.new("developer_id" => id); c.save; c</tt>)
+ # The declaration may include an +options+ hash to specialize the behavior of the association.
+ #
+ # === Options
+ #
+ # [:class_name]
+ # Specify the class name of the association. Use it only if that name can't be inferred
+ # from the association name. So <tt>has_and_belongs_to_many :projects</tt> will by default be linked to the
+ # Project class, but if the real class name is SuperProject, you'll have to specify it with this option.
+ # [:join_table]
+ # Specify the name of the join table if the default based on lexical order isn't what you want.
+ # <b>WARNING:</b> If you're overwriting the table name of either class, the +table_name+ method
+ # MUST be declared underneath any +has_and_belongs_to_many+ declaration in order to work.
+ # [:foreign_key]
+ # Specify the foreign key used for the association. By default this is guessed to be the name
+ # of this class in lower-case and "_id" suffixed. So a Person class that makes
+ # a +has_and_belongs_to_many+ association to Project will use "person_id" as the
+ # default <tt>:foreign_key</tt>.
+ # [:association_foreign_key]
+ # Specify the foreign key used for the association on the receiving side of the association.
+ # By default this is guessed to be the name of the associated class in lower-case and "_id" suffixed.
+ # So if a Person class makes a +has_and_belongs_to_many+ association to Project,
+ # the association will use "project_id" as the default <tt>:association_foreign_key</tt>.
+ # [:readonly]
+ # If true, all the associated objects are readonly through the association.
+ # [:validate]
+ # If +false+, don't validate the associated objects when saving the parent object. +true+ by default.
+ # [:autosave]
+ # If true, always save the associated objects or destroy them if marked for destruction, when
+ # saving the parent object.
+ # If false, never save or destroy the associated objects.
+ # By default, only save associated objects that are new records.
+ #
+ # Note that <tt>accepts_nested_attributes_for</tt> sets <tt>:autosave</tt> to <tt>true</tt>.
+ #
+ # Option examples:
+ # has_and_belongs_to_many :projects
+ # has_and_belongs_to_many :projects, -> { includes :milestones, :manager }
+ # has_and_belongs_to_many :nations, class_name: "Country"
+ # has_and_belongs_to_many :categories, join_table: "prods_cats"
+ # has_and_belongs_to_many :categories, -> { readonly }
+ def has_and_belongs_to_many(name, scope = nil, options = {}, &extension)
+ if scope.is_a?(Hash)
+ options = scope
+ scope = nil
+ end
+
+ habtm_reflection = ActiveRecord::Reflection::HasAndBelongsToManyReflection.new(name, scope, options, self)
+
+ builder = Builder::HasAndBelongsToMany.new name, self, options
+
+ join_model = builder.through_model
+
+ # FIXME: we should move this to the internal constants. Also people
+ # should never directly access this constant so I'm not happy about
+ # setting it.
+ const_set join_model.name, join_model
+
+ middle_reflection = builder.middle_reflection join_model
+
+ Builder::HasMany.define_callbacks self, middle_reflection
+ Reflection.add_reflection self, middle_reflection.name, middle_reflection
+ middle_reflection.parent_reflection = [name.to_s, habtm_reflection]
+
+ include Module.new {
+ class_eval <<-RUBY, __FILE__, __LINE__ + 1
+ def destroy_associations
+ association(:#{middle_reflection.name}).delete_all(:delete_all)
+ association(:#{name}).reset
+ super
+ end
+ RUBY
+ }
+
+ hm_options = {}
+ hm_options[:through] = middle_reflection.name
+ hm_options[:source] = join_model.right_reflection.name
+
+ [:before_add, :after_add, :before_remove, :after_remove, :autosave, :validate, :join_table].each do |k|
+ hm_options[k] = options[k] if options.key? k
+ end
+
+ has_many name, scope, hm_options, &extension
+ self._reflections[name.to_s].parent_reflection = [name.to_s, habtm_reflection]
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/alias_tracker.rb b/activerecord/lib/active_record/associations/alias_tracker.rb
new file mode 100644
index 0000000000..a6a1947148
--- /dev/null
+++ b/activerecord/lib/active_record/associations/alias_tracker.rb
@@ -0,0 +1,96 @@
+require 'active_support/core_ext/string/conversions'
+
+module ActiveRecord
+ module Associations
+ # Keeps track of table aliases for ActiveRecord::Associations::ClassMethods::JoinDependency and
+ # ActiveRecord::Associations::ThroughAssociationScope
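+ #
+ # A rough sketch (assuming a connection whose table_alias_for returns the
+ # name unchanged):
+ #
+ # tracker = AliasTracker.empty(connection)
+ # tracker.aliased_name_for("comments", "comments") # => "comments"
+ # tracker.aliased_name_for("comments", "comments") # => "comments_2"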
+ class AliasTracker # :nodoc:
+ attr_reader :aliases, :connection
+
+ def self.empty(connection)
+ new connection, Hash.new(0)
+ end
+
+ def self.create(connection, table_joins)
+ if table_joins.empty?
+ empty connection
+ else
+ aliases = Hash.new { |h,k|
+ h[k] = initial_count_for(connection, k, table_joins)
+ }
+ new connection, aliases
+ end
+ end
+
+ def self.initial_count_for(connection, name, table_joins)
+ # quoted_name should be downcased as some database adapters (Oracle) return quoted name in uppercase
+ quoted_name = connection.quote_table_name(name).downcase
+
+ counts = table_joins.map do |join|
+ if join.is_a?(Arel::Nodes::StringJoin)
+ # Table names + table aliases
+ join.left.downcase.scan(
+ /join(?:\s+\w+)?\s+(\S+\s+)?#{quoted_name}\son/
+ ).size
+ elsif join.respond_to? :left
+ join.left.table_name == name ? 1 : 0
+ else
+ # this branch is reached by two tests:
+ #
+ # activerecord/test/cases/associations/cascaded_eager_loading_test.rb:37
+ # with :posts
+ #
+ # activerecord/test/cases/associations/eager_test.rb:1133
+ # with :comments
+ #
+ 0
+ end
+ end
+
+ counts.sum
+ end
+
+ # table_joins is an array of arel joins which might conflict with the aliases we assign here
+ def initialize(connection, aliases)
+ @aliases = aliases
+ @connection = connection
+ end
+
+ def aliased_table_for(table_name, aliased_name)
+ table_alias = aliased_name_for(table_name, aliased_name)
+
+ if table_alias == table_name
+ Arel::Table.new(table_name)
+ else
+ Arel::Table.new(table_name).alias(table_alias)
+ end
+ end
+
+ def aliased_name_for(table_name, aliased_name)
+ if aliases[table_name].zero?
+ # If it's zero, we can have our table_name
+ aliases[table_name] = 1
+ table_name
+ else
+ # Otherwise, we need to use an alias
+ aliased_name = connection.table_alias_for(aliased_name)
+
+ # Update the count
+ aliases[aliased_name] += 1
+
+ if aliases[aliased_name] > 1
+ "#{truncate(aliased_name)}_#{aliases[aliased_name]}"
+ else
+ aliased_name
+ end
+ end
+ end
+
+ private
+
+ def truncate(name)
+ name.slice(0, connection.table_alias_length - 2)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/association.rb b/activerecord/lib/active_record/associations/association.rb
new file mode 100644
index 0000000000..f1c36cd047
--- /dev/null
+++ b/activerecord/lib/active_record/associations/association.rb
@@ -0,0 +1,253 @@
+require 'active_support/core_ext/array/wrap'
+
+module ActiveRecord
+ module Associations
+ # = Active Record Associations
+ #
+ # This is the root class of all associations ('+ Foo' signifies an included module Foo):
+ #
+ # Association
+ # SingularAssociation
+ # HasOneAssociation
+ # HasOneThroughAssociation + ThroughAssociation
+ # BelongsToAssociation
+ # BelongsToPolymorphicAssociation
+ # CollectionAssociation
+ # HasManyAssociation
+ # HasManyThroughAssociation + ThroughAssociation
+ class Association #:nodoc:
+ attr_reader :owner, :target, :reflection
+ attr_accessor :inversed
+
+ delegate :options, :to => :reflection
+
+ def initialize(owner, reflection)
+ reflection.check_validity!
+
+ @owner, @reflection = owner, reflection
+
+ reset
+ reset_scope
+ end
+
+ # Returns the name of the table of the associated class:
+ #
+ # post.comments.aliased_table_name # => "comments"
+ #
+ def aliased_table_name
+ klass.table_name
+ end
+
+ # Resets the \loaded flag to +false+ and sets the \target to +nil+.
+ def reset
+ @loaded = false
+ @target = nil
+ @stale_state = nil
+ @inversed = false
+ end
+
+ # Reloads the \target and returns +self+ on success.
+ def reload
+ reset
+ reset_scope
+ load_target
+ self unless target.nil?
+ end
+
+ # Has the \target been already \loaded?
+ def loaded?
+ @loaded
+ end
+
+ # Asserts the \target has been loaded, setting the \loaded flag to +true+.
+ def loaded!
+ @loaded = true
+ @stale_state = stale_state
+ @inversed = false
+ end
+
+ # The target is stale if the target no longer points to the record(s) that the
+ # relevant foreign_key(s) refers to. If stale, the association accessor method
+ # on the owner will reload the target. It's up to subclasses to implement the
+ # stale_state method if relevant.
+ #
+ # Note that if the target has not been loaded, it is not considered stale.
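+ #
+ # A rough sketch, assuming a Post model that belongs_to :author:
+ #
+ # post.author # loads the target
+ # post.author_id = other_author.id
+ # post.association(:author).stale_target? # => true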
+ def stale_target?
+ !inversed && loaded? && @stale_state != stale_state
+ end
+
+ # Sets the target of this association to <tt>\target</tt>, and the \loaded flag to +true+.
+ def target=(target)
+ @target = target
+ loaded!
+ end
+
+ def scope
+ target_scope.merge(association_scope)
+ end
+
+ # The scope for this association.
+ #
+ # Note that the association_scope is merged into the target_scope only when the
+ # scope method is called. This is because at that point the call may be surrounded
+ # by scope.scoping { ... } or with_scope { ... } etc., which affects the scope that
+ # actually gets built.
+ def association_scope
+ if klass
+ @association_scope ||= AssociationScope.scope(self, klass.connection)
+ end
+ end
+
+ def reset_scope
+ @association_scope = nil
+ end
+
+ # Set the inverse association, if possible
+ def set_inverse_instance(record)
+ if invertible_for?(record)
+ inverse = record.association(inverse_reflection_for(record).name)
+ inverse.target = owner
+ inverse.inversed = true
+ end
+ record
+ end
+
+ # Returns the class of the target. belongs_to polymorphic overrides this to look at the
+ # polymorphic_type field on the owner.
+ def klass
+ reflection.klass
+ end
+
+ # Can be overridden (e.g. in ThroughAssociation) to merge in other scopes (e.g. the
+ # through association's scope)
+ def target_scope
+ AssociationRelation.create(klass, klass.arel_table, self).merge!(klass.all)
+ end
+
+ # Loads the \target if needed and returns it.
+ #
+ # This method is abstract in the sense that it relies on +find_target+,
+ # which is expected to be provided by descendants.
+ #
+ # If the \target is already \loaded it is just returned. Thus, you can call
+ # +load_target+ unconditionally to get the \target.
+ #
+ # ActiveRecord::RecordNotFound is rescued within the method, and it is
+ # not reraised. The proxy is \reset and +nil+ is the return value.
+ def load_target
+ @target = find_target if (@stale_state && stale_target?) || find_target?
+
+ loaded! unless loaded?
+ target
+ rescue ActiveRecord::RecordNotFound
+ reset
+ end
+
+ def interpolate(sql, record = nil)
+ if sql.respond_to?(:to_proc)
+ owner.instance_exec(record, &sql)
+ else
+ sql
+ end
+ end
+
+ # We can't dump @reflection since it contains the scope proc
+ def marshal_dump
+ ivars = (instance_variables - [:@reflection]).map { |name| [name, instance_variable_get(name)] }
+ [@reflection.name, ivars]
+ end
+
+ def marshal_load(data)
+ reflection_name, ivars = data
+ ivars.each { |name, val| instance_variable_set(name, val) }
+ @reflection = @owner.class._reflect_on_association(reflection_name)
+ end
+
+ def initialize_attributes(record) #:nodoc:
+ skip_assign = [reflection.foreign_key, reflection.type].compact
+ attributes = create_scope.except(*(record.changed - skip_assign))
+ record.assign_attributes(attributes)
+ set_inverse_instance(record)
+ end
+
+ private
+
+ def find_target?
+ !loaded? && (!owner.new_record? || foreign_key_present?) && klass
+ end
+
+ def creation_attributes
+ attributes = {}
+
+ if (reflection.has_one? || reflection.collection?) && !options[:through]
+ attributes[reflection.foreign_key] = owner[reflection.active_record_primary_key]
+
+ if reflection.options[:as]
+ attributes[reflection.type] = owner.class.base_class.name
+ end
+ end
+
+ attributes
+ end
+
+ # Sets the owner attributes on the given record
+ def set_owner_attributes(record)
+ creation_attributes.each { |key, value| record[key] = value }
+ end
+
+ # Returns true if there is a foreign key present on the owner which
+ # references the target. This is used to determine whether we can load
+ # the target if the owner is currently a new record (and therefore
+ # without a key). If the owner is a new record then the foreign key must
+ # be present in order to load the target.
+ #
+ # Currently implemented by belongs_to (vanilla and polymorphic) and
+ # has_one/has_many :through associations which go through a belongs_to.
+ def foreign_key_present?
+ false
+ end
+
+ # Raises ActiveRecord::AssociationTypeMismatch unless +record+ is an
+ # instance of the class of the associated objects. Meant to be used as
+ # a sanity check when you are about to assign an associated record.
+ def raise_on_type_mismatch!(record)
+ unless record.is_a?(reflection.klass) || record.is_a?(reflection.class_name.constantize)
+ message = "#{reflection.class_name}(##{reflection.klass.object_id}) expected, got #{record.class}(##{record.class.object_id})"
+ raise ActiveRecord::AssociationTypeMismatch, message
+ end
+ end
+
+ # Can be redefined by subclasses, notably polymorphic belongs_to.
+ # The record parameter is necessary to support polymorphic inverses as we must check for
+ # the association in the specific class of the record.
+ def inverse_reflection_for(record)
+ reflection.inverse_of
+ end
+
+ # Returns true if inverse association on the given record needs to be set.
+ # This method is redefined by subclasses.
+ def invertible_for?(record)
+ foreign_key_for?(record) && inverse_reflection_for(record)
+ end
+
+ # Returns true if record contains the foreign_key
+ def foreign_key_for?(record)
+ record.has_attribute?(reflection.foreign_key)
+ end
+
+ # This should be implemented to return the values of the relevant key(s) on the owner,
+ # so that when stale_state is different from the value stored on the last find_target,
+ # the target is stale.
+ #
+ # This is only relevant to certain associations, which is why it returns nil by default.
+ def stale_state
+ end
+
+ def build_record(attributes)
+ reflection.build_association(attributes) do |record|
+ initialize_attributes(record)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/association_scope.rb b/activerecord/lib/active_record/associations/association_scope.rb
new file mode 100644
index 0000000000..519d4d8651
--- /dev/null
+++ b/activerecord/lib/active_record/associations/association_scope.rb
@@ -0,0 +1,182 @@
+module ActiveRecord
+ module Associations
+ class AssociationScope #:nodoc:
+ def self.scope(association, connection)
+ INSTANCE.scope association, connection
+ end
+
+ class BindSubstitution
+ def initialize(block)
+ @block = block
+ end
+
+ def bind_value(scope, column, value, alias_tracker)
+ substitute = alias_tracker.connection.substitute_at(
+ column, scope.bind_values.length)
+ scope.bind_values += [[column, @block.call(value)]]
+ substitute
+ end
+ end
+
+ def self.create(&block)
+ block = block ? block : lambda { |val| val }
+ new BindSubstitution.new(block)
+ end
+
+ def initialize(bind_substitution)
+ @bind_substitution = bind_substitution
+ end
+
+ INSTANCE = create
+
+ def scope(association, connection)
+ klass = association.klass
+ reflection = association.reflection
+ scope = klass.unscoped
+ owner = association.owner
+ alias_tracker = AliasTracker.empty connection
+
+ scope.extending! Array(reflection.options[:extend])
+ add_constraints(scope, owner, klass, reflection, alias_tracker)
+ end
+
+ def join_type
+ Arel::Nodes::InnerJoin
+ end
+
+ def self.get_bind_values(owner, chain)
+ bvs = []
+ chain.each_with_index do |reflection, i|
+ if reflection == chain.last
+ bvs << reflection.join_id_for(owner)
+ if reflection.type
+ bvs << owner.class.base_class.name
+ end
+ else
+ if reflection.type
+ bvs << chain[i + 1].klass.base_class.name
+ end
+ end
+ end
+ bvs
+ end
+
+ private
+
+ def construct_tables(chain, klass, refl, alias_tracker)
+ chain.map do |reflection|
+ alias_tracker.aliased_table_for(
+ table_name_for(reflection, klass, refl),
+ table_alias_for(reflection, refl, reflection != refl)
+ )
+ end
+ end
+
+ def table_alias_for(reflection, refl, join = false)
+ name = "#{reflection.plural_name}_#{alias_suffix(refl)}"
+ name << "_join" if join
+ name
+ end
+
+ def join(table, constraint)
+ table.create_join(table, table.create_on(constraint), join_type)
+ end
+
+ def column_for(table_name, column_name, alias_tracker)
+ columns = alias_tracker.connection.schema_cache.columns_hash(table_name)
+ columns[column_name]
+ end
+
+ def bind_value(scope, column, value, alias_tracker)
+ @bind_substitution.bind_value scope, column, value, alias_tracker
+ end
+
+ def bind(scope, table_name, column_name, value, tracker)
+ column = column_for table_name, column_name, tracker
+ bind_value scope, column, value, tracker
+ end
+
+ def add_constraints(scope, owner, assoc_klass, refl, tracker)
+ chain = refl.chain
+ scope_chain = refl.scope_chain
+
+ tables = construct_tables(chain, assoc_klass, refl, tracker)
+
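+ # Walk the reflection chain: the last link (nearest the owner) is constrained
+ # by the owner's key as a bind value, the other links become INNER JOINs
+ # between adjacent tables, and any extra scopes in scope_chain are merged in.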
+ chain.each_with_index do |reflection, i|
+ table, foreign_table = tables.shift, tables.first
+
+ join_keys = reflection.join_keys(assoc_klass)
+ key = join_keys.key
+ foreign_key = join_keys.foreign_key
+
+ if reflection == chain.last
+ bind_val = bind scope, table.table_name, key.to_s, owner[foreign_key], tracker
+ scope = scope.where(table[key].eq(bind_val))
+
+ if reflection.type
+ value = owner.class.base_class.name
+ bind_val = bind scope, table.table_name, reflection.type.to_s, value, tracker
+ scope = scope.where(table[reflection.type].eq(bind_val))
+ end
+ else
+ constraint = table[key].eq(foreign_table[foreign_key])
+
+ if reflection.type
+ value = chain[i + 1].klass.base_class.name
+ bind_val = bind scope, table.table_name, reflection.type.to_s, value, tracker
+ scope = scope.where(table[reflection.type].eq(bind_val))
+ end
+
+ scope = scope.joins(join(foreign_table, constraint))
+ end
+
+ is_first_chain = i == 0
+ klass = is_first_chain ? assoc_klass : reflection.klass
+
+ # Exclude the scope of the association itself, because that
+ # was already merged in the #scope method.
+ scope_chain[i].each do |scope_chain_item|
+ item = eval_scope(klass, scope_chain_item, owner)
+
+ if scope_chain_item == refl.scope
+ scope.merge! item.except(:where, :includes, :bind)
+ end
+
+ if is_first_chain
+ scope.includes! item.includes_values
+ end
+
+ scope.where_values += item.where_values
+ scope.bind_values += item.bind_values
+ scope.order_values |= item.order_values
+ end
+ end
+
+ scope
+ end
+
+ def alias_suffix(refl)
+ refl.name
+ end
+
+ def table_name_for(reflection, klass, refl)
+ if reflection == refl
+ # If this is a polymorphic belongs_to, we want to get the klass from the
+ # association because it depends on the polymorphic_type attribute of
+ # the owner
+ klass.table_name
+ else
+ reflection.table_name
+ end
+ end
+
+ def eval_scope(klass, scope, owner)
+ if scope.is_a?(Relation)
+ scope
+ else
+ klass.unscoped.instance_exec(owner, &scope)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/belongs_to_association.rb b/activerecord/lib/active_record/associations/belongs_to_association.rb
new file mode 100644
index 0000000000..81fdd681de
--- /dev/null
+++ b/activerecord/lib/active_record/associations/belongs_to_association.rb
@@ -0,0 +1,111 @@
+module ActiveRecord
+ # = Active Record Belongs To Association
+ module Associations
+ class BelongsToAssociation < SingularAssociation #:nodoc:
+
+ def handle_dependency
+ target.send(options[:dependent]) if load_target
+ end
+
+ def replace(record)
+ if record
+ raise_on_type_mismatch!(record)
+ update_counters(record)
+ replace_keys(record)
+ set_inverse_instance(record)
+ @updated = true
+ else
+ decrement_counters
+ remove_keys
+ end
+
+ self.target = record
+ end
+
+ def reset
+ super
+ @updated = false
+ end
+
+ def updated?
+ @updated
+ end
+
+ def decrement_counters # :nodoc:
+ with_cache_name { |name| decrement_counter name }
+ end
+
+ def increment_counters # :nodoc:
+ with_cache_name { |name| increment_counter name }
+ end
+
+ private
+
+ def find_target?
+ !loaded? && foreign_key_present? && klass
+ end
+
+ def with_cache_name
+ counter_cache_name = reflection.counter_cache_column
+ return unless counter_cache_name && owner.persisted?
+ yield counter_cache_name
+ end
+
+ def update_counters(record)
+ with_cache_name do |name|
+ return unless different_target? record
+ record.class.increment_counter(name, record.id)
+ decrement_counter name
+ end
+ end
+
+ def decrement_counter(counter_cache_name)
+ if foreign_key_present?
+ klass.decrement_counter(counter_cache_name, target_id)
+ end
+ end
+
+ def increment_counter(counter_cache_name)
+ if foreign_key_present?
+ klass.increment_counter(counter_cache_name, target_id)
+ end
+ end
+
+ # Checks whether the record is different from the current target, without loading it
+ def different_target?(record)
+ record.id != owner[reflection.foreign_key]
+ end
+
+ def replace_keys(record)
+ owner[reflection.foreign_key] = record[reflection.association_primary_key(record.class)]
+ end
+
+ def remove_keys
+ owner[reflection.foreign_key] = nil
+ end
+
+ def foreign_key_present?
+ owner[reflection.foreign_key]
+ end
+
+ # NOTE - for now, we're only supporting inverse setting from belongs_to back onto
+ # has_one associations.
+ def invertible_for?(record)
+ inverse = inverse_reflection_for(record)
+ inverse && inverse.has_one?
+ end
+
+ def target_id
+ if options[:primary_key]
+ owner.send(reflection.name).try(:id)
+ else
+ owner[reflection.foreign_key]
+ end
+ end
+
+ def stale_state
+ owner[reflection.foreign_key] && owner[reflection.foreign_key].to_s
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb b/activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb
new file mode 100644
index 0000000000..b710cf6bdb
--- /dev/null
+++ b/activerecord/lib/active_record/associations/belongs_to_polymorphic_association.rb
@@ -0,0 +1,40 @@
+module ActiveRecord
+ # = Active Record Belongs To Polymorphic Association
+ module Associations
+ class BelongsToPolymorphicAssociation < BelongsToAssociation #:nodoc:
+ def klass
+ type = owner[reflection.foreign_type]
+ type.presence && type.constantize
+ end
+
+ private
+
+ def replace_keys(record)
+ super
+ owner[reflection.foreign_type] = record.class.base_class.name
+ end
+
+ def remove_keys
+ super
+ owner[reflection.foreign_type] = nil
+ end
+
+ def different_target?(record)
+ super || record.class != klass
+ end
+
+ def inverse_reflection_for(record)
+ reflection.polymorphic_inverse_of(record.class)
+ end
+
+ def raise_on_type_mismatch!(record)
+ # A polymorphic association cannot have a type mismatch, by definition
+ end
+
+ def stale_state
+ foreign_key = super
+ foreign_key && [foreign_key.to_s, owner[reflection.foreign_type].to_s]
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/association.rb b/activerecord/lib/active_record/associations/builder/association.rb
new file mode 100644
index 0000000000..947d61ee7b
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/association.rb
@@ -0,0 +1,149 @@
+require 'active_support/core_ext/module/attribute_accessors'
+
+# This is the parent Association class which defines the variables
+# used by all associations.
+#
+# The hierarchy is defined as follows:
+# Association
+# - SingularAssociation
+# - BelongsToAssociation
+# - HasOneAssociation
+# - CollectionAssociation
+# - HasManyAssociation
+
+module ActiveRecord::Associations::Builder
+ class Association #:nodoc:
+ class << self
+ attr_accessor :extensions
+ # TODO: This class accessor is needed to make activerecord-deprecated_finders work.
+ # We can move it to a constant in 5.0.
+ attr_accessor :valid_options
+ end
+ self.extensions = []
+
+ self.valid_options = [:class_name, :class, :foreign_key, :validate]
+
+ attr_reader :name, :scope, :options
+
+ def self.build(model, name, scope, options, &block)
+ if model.dangerous_attribute_method?(name)
+ raise ArgumentError, "You tried to define an association named #{name} on the model #{model.name}, but " \
+ "this will conflict with a method #{name} already defined by Active Record. " \
+ "Please choose a different association name."
+ end
+
+ builder = create_builder model, name, scope, options, &block
+ reflection = builder.build(model)
+ define_accessors model, reflection
+ define_callbacks model, reflection
+ define_validations model, reflection
+ builder.define_extensions model
+ reflection
+ end
+
+ def self.create_builder(model, name, scope, options, &block)
+ raise ArgumentError, "association names must be a Symbol" unless name.kind_of?(Symbol)
+
+ new(model, name, scope, options, &block)
+ end
+
+ def initialize(model, name, scope, options)
+ # TODO: Move this to create_builder as soon as we drop support for activerecord-deprecated_finders.
+ if scope.is_a?(Hash)
+ options = scope
+ scope = nil
+ end
+
+ # TODO: Remove this model argument as soon as we drop support for activerecord-deprecated_finders.
+ @name = name
+ @scope = scope
+ @options = options
+
+ validate_options
+
+ if scope && scope.arity == 0
+ @scope = proc { instance_exec(&scope) }
+ end
+ end
+
+ def build(model)
+ ActiveRecord::Reflection.create(macro, name, scope, options, model)
+ end
+
+ def macro
+ raise NotImplementedError
+ end
+
+ def valid_options
+ Association.valid_options + Association.extensions.flat_map(&:valid_options)
+ end
+
+ def validate_options
+ options.assert_valid_keys(valid_options)
+ end
+
+ def define_extensions(model)
+ end
+
+ def self.define_callbacks(model, reflection)
+ if dependent = reflection.options[:dependent]
+ check_dependent_options(dependent)
+ add_destroy_callbacks(model, reflection)
+ end
+
+ Association.extensions.each do |extension|
+ extension.build model, reflection
+ end
+ end
+
+ # Defines the setter and getter methods for the association:
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :comments
+ # end
+ #
+ # Post.first.comments and Post.first.comments= methods are defined by this method...
+ def self.define_accessors(model, reflection)
+ mixin = model.generated_association_methods
+ name = reflection.name
+ define_readers(mixin, name)
+ define_writers(mixin, name)
+ end
+
+ def self.define_readers(mixin, name)
+ mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name}(*args)
+ association(:#{name}).reader(*args)
+ end
+ CODE
+ end
+
+ def self.define_writers(mixin, name)
+ mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name}=(value)
+ association(:#{name}).writer(value)
+ end
+ CODE
+ end
+
+ def self.define_validations(model, reflection)
+ # noop
+ end
+
+ def self.valid_dependent_options
+ raise NotImplementedError
+ end
+
+ private
+
+ def self.check_dependent_options(dependent)
+ unless valid_dependent_options.include? dependent
+ raise ArgumentError, "The :dependent option must be one of #{valid_dependent_options}, but is :#{dependent}"
+ end
+ end
+
+ def self.add_destroy_callbacks(model, reflection)
+ name = reflection.name
+ model.before_destroy lambda { |o| o.association(name).handle_dependency }
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/belongs_to.rb b/activerecord/lib/active_record/associations/builder/belongs_to.rb
new file mode 100644
index 0000000000..954ea3878a
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/belongs_to.rb
@@ -0,0 +1,116 @@
+module ActiveRecord::Associations::Builder
+ class BelongsTo < SingularAssociation #:nodoc:
+ def macro
+ :belongs_to
+ end
+
+ def valid_options
+ super + [:foreign_type, :polymorphic, :touch, :counter_cache]
+ end
+
+ def self.valid_dependent_options
+ [:destroy, :delete]
+ end
+
+ def self.define_callbacks(model, reflection)
+ super
+ add_counter_cache_callbacks(model, reflection) if reflection.options[:counter_cache]
+ add_touch_callbacks(model, reflection) if reflection.options[:touch]
+ end
+
+ def self.define_accessors(mixin, reflection)
+ super
+ add_counter_cache_methods mixin
+ end
+
+ private
+
+ def self.add_counter_cache_methods(mixin)
+ return if mixin.method_defined? :belongs_to_counter_cache_after_update
+
+ mixin.class_eval do
+ def belongs_to_counter_cache_after_update(reflection)
+ foreign_key = reflection.foreign_key
+ cache_column = reflection.counter_cache_column
+
+ if (@_after_create_counter_called ||= false)
+ @_after_create_counter_called = false
+ elsif attribute_changed?(foreign_key) && !new_record? && reflection.constructable?
+ model = reflection.klass
+ foreign_key_was = attribute_was foreign_key
+ foreign_key = attribute foreign_key
+
+ if foreign_key && model.respond_to?(:increment_counter)
+ model.increment_counter(cache_column, foreign_key)
+ end
+ if foreign_key_was && model.respond_to?(:decrement_counter)
+ model.decrement_counter(cache_column, foreign_key_was)
+ end
+ end
+ end
+ end
+ end
+
+ def self.add_counter_cache_callbacks(model, reflection)
+ cache_column = reflection.counter_cache_column
+
+ model.after_update lambda { |record|
+ record.belongs_to_counter_cache_after_update(reflection)
+ }
+
+ klass = reflection.class_name.safe_constantize
+ klass.attr_readonly cache_column if klass && klass.respond_to?(:attr_readonly)
+ end
+
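+ # Touches the previous owner (looked up via the old foreign key) as well as
+ # the current one; when the :touch option names a column, that column is
+ # passed on to +touch+.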
+ def self.touch_record(o, foreign_key, name, touch) # :nodoc:
+ old_foreign_id = o.changed_attributes[foreign_key]
+
+ if old_foreign_id
+ association = o.association(name)
+ reflection = association.reflection
+ if reflection.polymorphic?
+ klass = o.public_send("#{reflection.foreign_type}_was").constantize
+ else
+ klass = association.klass
+ end
+ old_record = klass.find_by(klass.primary_key => old_foreign_id)
+
+ if old_record
+ if touch != true
+ old_record.touch touch
+ else
+ old_record.touch
+ end
+ end
+ end
+
+ record = o.send name
+ if record && record.persisted?
+ if touch != true
+ record.touch touch
+ else
+ record.touch
+ end
+ end
+ end
+
+ def self.add_touch_callbacks(model, reflection)
+ foreign_key = reflection.foreign_key
+ n = reflection.name
+ touch = reflection.options[:touch]
+
+ callback = lambda { |record|
+ BelongsTo.touch_record(record, foreign_key, n, touch)
+ }
+
+ model.after_save callback, if: :changed?
+ model.after_touch callback
+ model.after_destroy callback
+ end
+
+ def self.add_destroy_callbacks(model, reflection)
+ name = reflection.name
+ model.after_destroy lambda { |o| o.association(name).handle_dependency }
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/collection_association.rb b/activerecord/lib/active_record/associations/builder/collection_association.rb
new file mode 100644
index 0000000000..bc15a49996
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/collection_association.rb
@@ -0,0 +1,91 @@
+# This class is inherited by the has_many and has_and_belongs_to_many association classes
+
+require 'active_record/associations'
+
+module ActiveRecord::Associations::Builder
+ class CollectionAssociation < Association #:nodoc:
+
+ CALLBACKS = [:before_add, :after_add, :before_remove, :after_remove]
+
+ def valid_options
+ super + [:table_name, :before_add,
+ :after_add, :before_remove, :after_remove, :extend]
+ end
+
+ attr_reader :block_extension
+
+ def initialize(model, name, scope, options)
+ super
+ @mod = nil
+ if block_given?
+ @mod = Module.new(&Proc.new)
+ @scope = wrap_scope @scope, @mod
+ end
+ end
+
+ def self.define_callbacks(model, reflection)
+ super
+ name = reflection.name
+ options = reflection.options
+ CALLBACKS.each { |callback_name|
+ define_callback(model, callback_name, name, options)
+ }
+ end
+
+ def define_extensions(model)
+ if @mod
+ extension_module_name = "#{model.name.demodulize}#{name.to_s.camelize}AssociationExtension"
+ model.parent.const_set(extension_module_name, @mod)
+ end
+ end
+
+ def self.define_callback(model, callback_name, name, options)
+ full_callback_name = "#{callback_name}_for_#{name}"
+
+ # TODO: why do I need method_defined? I think it's because of the inheritance chain
+ model.class_attribute full_callback_name unless model.method_defined?(full_callback_name)
+ callbacks = Array(options[callback_name.to_sym]).map do |callback|
+ case callback
+ when Symbol
+ ->(method, owner, record) { owner.send(callback, record) }
+ when Proc
+ ->(method, owner, record) { callback.call(owner, record) }
+ else
+ ->(method, owner, record) { callback.send(method, owner, record) }
+ end
+ end
+ model.send "#{full_callback_name}=", callbacks
+ end
+
+ # Defines the setter and getter methods for the collection_singular_ids.
+ def self.define_readers(mixin, name)
+ super
+
+ mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name.to_s.singularize}_ids
+ association(:#{name}).ids_reader
+ end
+ CODE
+ end
+
+ def self.define_writers(mixin, name)
+ super
+
+ mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name.to_s.singularize}_ids=(ids)
+ association(:#{name}).ids_writer(ids)
+ end
+ CODE
+ end
+
+ private
+
+ def wrap_scope(scope, mod)
+ if scope
+ proc { |owner| instance_exec(owner, &scope).extending(mod) }
+ else
+ proc { extending(mod) }
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb b/activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb
new file mode 100644
index 0000000000..34a555dfd4
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/has_and_belongs_to_many.rb
@@ -0,0 +1,124 @@
+module ActiveRecord::Associations::Builder
+ class HasAndBelongsToMany # :nodoc:
+ class JoinTableResolver
+ KnownTable = Struct.new :join_table
+
+ class KnownClass
+ def initialize(lhs_class, rhs_class_name)
+ @lhs_class = lhs_class
+ @rhs_class_name = rhs_class_name
+ @join_table = nil
+ end
+
+ def join_table
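+ # Default name: both table names sorted and joined with "_", e.g.
+ # "developers" and "projects" become "developers_projects"; a shared
+ # prefix such as "music.artists"/"music.records" collapses to
+ # "music.artists_records".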
+ @join_table ||= [@lhs_class.table_name, klass.table_name].sort.join("\0").gsub(/^(.*[._])(.+)\0\1(.+)/, '\1\2_\3').gsub("\0", "_")
+ end
+
+ private
+
+ def klass
+ @lhs_class.send(:compute_type, @rhs_class_name)
+ end
+ end
+
+ def self.build(lhs_class, name, options)
+ if options[:join_table]
+ KnownTable.new options[:join_table].to_s
+ else
+ class_name = options.fetch(:class_name) {
+ name.to_s.camelize.singularize
+ }
+ KnownClass.new lhs_class, class_name
+ end
+ end
+ end
+
+ attr_reader :lhs_model, :association_name, :options
+
+ def initialize(association_name, lhs_model, options)
+ @association_name = association_name
+ @lhs_model = lhs_model
+ @options = options
+ end
+
+ def through_model
+ habtm = JoinTableResolver.build lhs_model, association_name, options
+
+ join_model = Class.new(ActiveRecord::Base) {
+ class << self
+ attr_accessor :class_resolver
+ attr_accessor :name
+ attr_accessor :table_name_resolver
+ attr_accessor :left_reflection
+ attr_accessor :right_reflection
+ end
+
+ def self.table_name
+ table_name_resolver.join_table
+ end
+
+ def self.compute_type(class_name)
+ class_resolver.compute_type class_name
+ end
+
+ def self.add_left_association(name, options)
+ belongs_to name, options
+ self.left_reflection = _reflect_on_association(name)
+ end
+
+ def self.add_right_association(name, options)
+ rhs_name = name.to_s.singularize.to_sym
+ belongs_to rhs_name, options
+ self.right_reflection = _reflect_on_association(rhs_name)
+ end
+
+ }
+
+ join_model.name = "HABTM_#{association_name.to_s.camelize}"
+ join_model.table_name_resolver = habtm
+ join_model.class_resolver = lhs_model
+
+ join_model.add_left_association :left_side, class: lhs_model
+ join_model.add_right_association association_name, belongs_to_options(options)
+ join_model
+ end
+
+ def middle_reflection(join_model)
+ middle_name = [lhs_model.name.downcase.pluralize,
+ association_name].join('_').gsub(/::/, '_').to_sym
+ middle_options = middle_options join_model
+ hm_builder = HasMany.create_builder(lhs_model,
+ middle_name,
+ nil,
+ middle_options)
+ hm_builder.build lhs_model
+ end
+
+ private
+
+ def middle_options(join_model)
+ middle_options = {}
+ middle_options[:class] = join_model
+ middle_options[:source] = join_model.left_reflection.name
+ if options.key? :foreign_key
+ middle_options[:foreign_key] = options[:foreign_key]
+ end
+ middle_options
+ end
+
+ def belongs_to_options(options)
+ rhs_options = {}
+
+ if options.key? :class_name
+ rhs_options[:foreign_key] = options[:class_name].foreign_key
+ rhs_options[:class_name] = options[:class_name]
+ end
+
+ if options.key? :association_foreign_key
+ rhs_options[:foreign_key] = options[:association_foreign_key]
+ end
+
+ rhs_options
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/has_many.rb b/activerecord/lib/active_record/associations/builder/has_many.rb
new file mode 100644
index 0000000000..4c8c826f76
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/has_many.rb
@@ -0,0 +1,15 @@
+module ActiveRecord::Associations::Builder
+ class HasMany < CollectionAssociation #:nodoc:
+ def macro
+ :has_many
+ end
+
+ def valid_options
+ super + [:primary_key, :dependent, :as, :through, :source, :source_type, :inverse_of, :counter_cache, :join_table]
+ end
+
+ def self.valid_dependent_options
+ [:destroy, :delete_all, :nullify, :restrict_with_error, :restrict_with_exception]
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/has_one.rb b/activerecord/lib/active_record/associations/builder/has_one.rb
new file mode 100644
index 0000000000..c194c8ae9a
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/has_one.rb
@@ -0,0 +1,23 @@
+module ActiveRecord::Associations::Builder
+ class HasOne < SingularAssociation #:nodoc:
+ def macro
+ :has_one
+ end
+
+ def valid_options
+ valid = super + [:as]
+ valid += [:through, :source, :source_type] if options[:through]
+ valid
+ end
+
+ def self.valid_dependent_options
+ [:destroy, :delete, :nullify, :restrict_with_error, :restrict_with_exception]
+ end
+
+ private
+
+ def self.add_destroy_callbacks(model, reflection)
+ super unless reflection.options[:through]
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/builder/singular_association.rb b/activerecord/lib/active_record/associations/builder/singular_association.rb
new file mode 100644
index 0000000000..6e6dd7204c
--- /dev/null
+++ b/activerecord/lib/active_record/associations/builder/singular_association.rb
@@ -0,0 +1,38 @@
+# This class is inherited by the has_one and belongs_to association classes
+
+module ActiveRecord::Associations::Builder
+ class SingularAssociation < Association #:nodoc:
+ def valid_options
+ super + [:dependent, :primary_key, :inverse_of, :required]
+ end
+
+ def self.define_accessors(model, reflection)
+ super
+ define_constructors(model.generated_association_methods, reflection.name) if reflection.constructable?
+ end
+
+ # Defines the (build|create)_association methods for belongs_to or has_one association
+ def self.define_constructors(mixin, name)
+ mixin.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def build_#{name}(*args, &block)
+ association(:#{name}).build(*args, &block)
+ end
+
+ def create_#{name}(*args, &block)
+ association(:#{name}).create(*args, &block)
+ end
+
+ def create_#{name}!(*args, &block)
+ association(:#{name}).create!(*args, &block)
+ end
+ CODE
+ end
+
+ def self.define_validations(model, reflection)
+ super
+ if reflection.options[:required]
+ model.validates_presence_of reflection.name
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/collection_association.rb b/activerecord/lib/active_record/associations/collection_association.rb
new file mode 100644
index 0000000000..065a2cff01
--- /dev/null
+++ b/activerecord/lib/active_record/associations/collection_association.rb
@@ -0,0 +1,606 @@
+module ActiveRecord
+ module Associations
+ # = Active Record Association Collection
+ #
+ # CollectionAssociation is an abstract class that provides common stuff to
+ # ease the implementation of association proxies that represent
+ # collections. See the class hierarchy in Association.
+ #
+ # CollectionAssociation:
+ # HasManyAssociation => has_many
+ # HasManyThroughAssociation + ThroughAssociation => has_many :through
+ #
+ # CollectionAssociation class provides common methods to the collections
+ # defined by +has_and_belongs_to_many+, +has_many+ or +has_many+ with
+ # the <tt>:through</tt> option.
+ #
+ # You need to be careful with assumptions regarding the target: The proxy
+ # does not fetch records from the database until it needs them, but new
+ # ones created with +build+ are added to the target. So, the target may be
+ # non-empty and still lack children waiting to be read from the database.
+ # If you look directly at the database you cannot assume that's the entire
+ # collection because new records may have been added to the target, etc.
+ #
+ # If you need to work on all current children, new and existing records,
+ # +load_target+ and the +loaded+ flag are your friends.
+ class CollectionAssociation < Association #:nodoc:
+
+ # Implements the reader method, e.g. foo.items for Foo.has_many :items
+ def reader(force_reload = false)
+ if force_reload
+ klass.uncached { reload }
+ elsif stale_target?
+ reload
+ end
+
+ @proxy ||= CollectionProxy.create(klass, self)
+ end
+
+ # Implements the writer method, e.g. foo.items= for Foo.has_many :items
+ def writer(records)
+ replace(records)
+ end
+
+ # Implements the ids reader method, e.g. foo.item_ids for Foo.has_many :items
+ def ids_reader
+ if loaded?
+ load_target.map do |record|
+ record.send(reflection.association_primary_key)
+ end
+ else
+ column = "#{reflection.quoted_table_name}.#{reflection.association_primary_key}"
+ scope.pluck(column)
+ end
+ end
+
+ # Implements the ids writer method, e.g. foo.item_ids= for Foo.has_many :items
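+ # The ids are type cast using the primary key's type, and the records are
+ # assigned in the order the ids were given.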
+ def ids_writer(ids)
+ pk_type = reflection.primary_key_type
+ ids = Array(ids).reject { |id| id.blank? }
+ ids.map! { |i| pk_type.type_cast_from_user(i) }
+ replace(klass.find(ids).index_by { |r| r.id }.values_at(*ids))
+ end
+
+ def reset
+ super
+ @target = []
+ end
+
+ def select(*fields)
+ if block_given?
+ load_target.select.each { |e| yield e }
+ else
+ scope.select(*fields)
+ end
+ end
+
+ def find(*args)
+ if block_given?
+ load_target.find(*args) { |*block_args| yield(*block_args) }
+ else
+ if options[:inverse_of] && loaded?
+ args_flatten = args.flatten
+ raise RecordNotFound, "Couldn't find #{scope.klass.name} without an ID" if args_flatten.blank?
+ result = find_by_scan(*args)
+
+ result_size = Array(result).size
+ if !result || result_size != args_flatten.size
+ scope.raise_record_not_found_exception!(args_flatten, result_size, args_flatten.size)
+ else
+ result
+ end
+ else
+ scope.find(*args)
+ end
+ end
+ end
+
+ def first(*args)
+ first_nth_or_last(:first, *args)
+ end
+
+ def second(*args)
+ first_nth_or_last(:second, *args)
+ end
+
+ def third(*args)
+ first_nth_or_last(:third, *args)
+ end
+
+ def fourth(*args)
+ first_nth_or_last(:fourth, *args)
+ end
+
+ def fifth(*args)
+ first_nth_or_last(:fifth, *args)
+ end
+
+ def forty_two(*args)
+ first_nth_or_last(:forty_two, *args)
+ end
+
+ def last(*args)
+ first_nth_or_last(:last, *args)
+ end
+
+ def build(attributes = {}, &block)
+ if attributes.is_a?(Array)
+ attributes.collect { |attr| build(attr, &block) }
+ else
+ add_to_target(build_record(attributes)) do |record|
+ yield(record) if block_given?
+ end
+ end
+ end
+
+ def create(attributes = {}, &block)
+ _create_record(attributes, &block)
+ end
+
+ def create!(attributes = {}, &block)
+ _create_record(attributes, true, &block)
+ end
+
+ # Add +records+ to this association. Returns +self+ so method calls may
+ # be chained. Since << flattens its argument list and inserts each record,
+ # +push+ and +concat+ behave identically.
+ def concat(*records)
+ if owner.new_record?
+ load_target
+ concat_records(records)
+ else
+ transaction { concat_records(records) }
+ end
+ end
+
+ # Starts a transaction in the association class's database connection.
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :books
+ # end
+ #
+ # Author.first.books.transaction do
+ # # same effect as calling Book.transaction
+ # end
+ def transaction(*args)
+ reflection.klass.transaction(*args) do
+ yield
+ end
+ end
+
+ # Removes all records from the association without calling callbacks
+ # on the associated records. It honors the `:dependent` option. However,
+ # if the `:dependent` value is `:destroy`, the `:delete_all`
+ # deletion strategy for the association is applied.
+ #
+ # You can force a particular deletion strategy by passing a parameter.
+ #
+ # Example:
+ #
+ # @author.books.delete_all(:nullify)
+ # @author.books.delete_all(:delete_all)
+ #
+ # See delete for more info.
+ def delete_all(dependent = nil)
+ if dependent && ![:nullify, :delete_all].include?(dependent)
+ raise ArgumentError, "Valid values are :nullify or :delete_all"
+ end
+
+ dependent = if dependent
+ dependent
+ elsif options[:dependent] == :destroy
+ :delete_all
+ else
+ options[:dependent]
+ end
+
+ delete_or_nullify_all_records(dependent).tap do
+ reset
+ loaded!
+ end
+ end
+
+ # Destroy all the records from this association.
+ #
+ # See destroy for more info.
+ def destroy_all
+ destroy(load_target).tap do
+ reset
+ loaded!
+ end
+ end
+
+ # Count all records using SQL. Construct options and pass them with
+ # scope to the target class's +count+.
+ def count(column_name = nil, count_options = {})
+ # TODO: Remove count_options argument as soon as we remove support for
+ # activerecord-deprecated_finders.
+ column_name, count_options = nil, column_name if column_name.is_a?(Hash)
+
+ relation = scope
+ if association_scope.distinct_value
+ # This is needed because 'SELECT count(DISTINCT *)..' is not valid SQL.
+ column_name ||= reflection.klass.primary_key
+ relation = relation.distinct
+ end
+
+ value = relation.count(column_name)
+
+ limit = options[:limit]
+ offset = options[:offset]
+
+ if limit || offset
+ [ [value - offset.to_i, 0].max, limit.to_i ].min
+ else
+ value
+ end
+ end
+
+ # Removes +records+ from this association, calling the +before_remove+ and
+ # +after_remove+ callbacks.
+ #
+ # This method is abstract in the sense that +delete_records+ has to be
+ # provided by descendants. Note this method does not imply the records
+ # are actually removed from the database; that depends precisely on
+ # +delete_records+. They are in any case removed from the collection.
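+ #
+ # A minimal sketch, assuming an Author model with has_many :books:
+ #
+ # author.books.delete(book) # removed from the collection; database
+ # # handling depends on +delete_records+
+ # author.books.delete(book.id) # ids are looked up with +find+ first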
+ def delete(*records)
+ return if records.empty?
+ _options = records.extract_options!
+ dependent = _options[:dependent] || options[:dependent]
+
+ records = find(records) if records.any? { |record| record.kind_of?(Fixnum) || record.kind_of?(String) }
+ delete_or_destroy(records, dependent)
+ end
+
+ # Deletes the +records+ and removes them from this association, calling the
+ # +before_remove+, +after_remove+, +before_destroy+ and +after_destroy+ callbacks.
+ #
+ # Note that this method removes records from the database ignoring the
+ # +:dependent+ option.
+ def destroy(*records)
+ return if records.empty?
+ records = find(records) if records.any? { |record| record.kind_of?(Fixnum) || record.kind_of?(String) }
+ delete_or_destroy(records, :destroy)
+ end
+
+ # Returns the size of the collection by executing a SELECT COUNT(*)
+ # query if the collection hasn't been loaded, and calling
+ # <tt>collection.size</tt> if it has.
+ #
+ # If the collection has been already loaded +size+ and +length+ are
+ # equivalent. If not and you are going to need the records anyway
+ # +length+ will take one less query. Otherwise +size+ is more efficient.
+ #
+ # This method is abstract in the sense that it relies on
+ # +count_records+, which is a method descendants have to provide.
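+ #
+ # Illustrative, assuming a Person model with has_many :pets:
+ #
+ # person.pets.size # SELECT COUNT(*) when the collection is not loaded
+ # person.pets.length # loads the collection, then Array#size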
+ def size
+ if !find_target? || loaded?
+ if association_scope.distinct_value
+ target.uniq.size
+ else
+ target.size
+ end
+ elsif !loaded? && !association_scope.group_values.empty?
+ load_target.size
+ elsif !loaded? && !association_scope.distinct_value && target.is_a?(Array)
+ unsaved_records = target.select { |r| r.new_record? }
+ unsaved_records.size + count_records
+ else
+ count_records
+ end
+ end
+
+ # Returns the size of the collection calling +size+ on the target.
+ #
+ # If the collection has been already loaded +length+ and +size+ are
+ # equivalent. If not and you are going to need the records anyway this
+ # method will take one less query. Otherwise +size+ is more efficient.
+ def length
+ load_target.size
+ end
+
+ # Returns true if the collection is empty.
+ #
+ # If the collection has been loaded
+ # it is equivalent to <tt>collection.size.zero?</tt>. If the
+ # collection has not been loaded, it is equivalent to
+ # <tt>collection.exists?</tt>. If the collection has not already been
+ # loaded and you are going to fetch the records anyway it is better to
+ # check <tt>collection.length.zero?</tt>.
+ def empty?
+ if loaded?
+ size.zero?
+ else
+ @target.blank? && !scope.exists?
+ end
+ end
+
+ # Returns true if the collection is not empty.
+ # Equivalent to +!collection.empty?+.
+ def any?
+ if block_given?
+ load_target.any? { |*block_args| yield(*block_args) }
+ else
+ !empty?
+ end
+ end
+
+ # Returns true if the collection has more than 1 record.
+ # Equivalent to +collection.size > 1+.
+ def many?
+ if block_given?
+ load_target.many? { |*block_args| yield(*block_args) }
+ else
+ size > 1
+ end
+ end
+
+ def distinct
+ seen = {}
+ load_target.find_all do |record|
+ seen[record.id] = true unless seen.key?(record.id)
+ end
+ end
+ alias uniq distinct
+
+ # Replace this collection with +other_array+. This will perform a diff
+ # and delete/add only records that have changed.
+ def replace(other_array)
+ other_array.each { |val| raise_on_type_mismatch!(val) }
+ original_target = load_target.dup
+
+ if owner.new_record?
+ replace_records(other_array, original_target)
+ else
+ if other_array != original_target
+ transaction { replace_records(other_array, original_target) }
+ end
+ end
+ end
+
+ def include?(record)
+ if record.is_a?(reflection.klass)
+ if record.new_record?
+ include_in_memory?(record)
+ else
+ loaded? ? target.include?(record) : scope.exists?(record.id)
+ end
+ else
+ false
+ end
+ end
+
+ def load_target
+ if find_target?
+ @target = merge_target_lists(find_target, target)
+ end
+
+ loaded!
+ target
+ end
+
+ def add_to_target(record, skip_callbacks = false)
+ callback(:before_add, record) unless skip_callbacks
+ yield(record) if block_given?
+
+ if association_scope.distinct_value && index = @target.index(record)
+ @target[index] = record
+ else
+ @target << record
+ end
+
+ callback(:after_add, record) unless skip_callbacks
+ set_inverse_instance(record)
+
+ record
+ end
+
+ def scope(opts = {})
+ scope = super()
+ scope.none! if opts.fetch(:nullify, true) && null_scope?
+ scope
+ end
+
+ def null_scope?
+ owner.new_record? && !foreign_key_present?
+ end
+
+ private
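+ # Uses a cached prepared statement (StatementCache) when the reflection
+ # carries no extra scope; otherwise falls back to materializing the full
+ # association scope.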
+ def get_records
+ return scope.to_a if reflection.scope_chain.any?(&:any?)
+
+ conn = klass.connection
+ sc = reflection.association_scope_cache(conn, owner) do
+ StatementCache.create(conn) { |params|
+ as = AssociationScope.create { params.bind }
+ target_scope.merge as.scope(self, conn)
+ }
+ end
+
+ binds = AssociationScope.get_bind_values(owner, reflection.chain)
+ sc.execute binds, klass, klass.connection
+ end
+
+ def find_target
+ records = get_records
+ records.each { |record| set_inverse_instance(record) }
+ records
+ end
+
+ # We have some records loaded from the database (persisted) and some that are
+ # in-memory (memory). The same record may be represented in the persisted array
+ # and in the memory array.
+ #
+ # So the task of this method is to merge them according to the following rules:
+ #
+ # * The final array must not have duplicates
+ # * The order of the persisted array is to be preserved
+ # * Any changes made to attributes on objects in the memory array are to be preserved
+ # * Otherwise, attributes should have the value found in the database
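+ #
+ # Illustrative: if the persisted array holds post(id: 1, title: "db title")
+ # and the memory array holds the same post with the title changed to
+ # "new title", the merged result contains one record whose title is
+ # "new title", while unchanged attributes keep their database values.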
+ def merge_target_lists(persisted, memory)
+ return persisted if memory.empty?
+ return memory if persisted.empty?
+
+ persisted.map! do |record|
+ if mem_record = memory.delete(record)
+
+ ((record.attribute_names & mem_record.attribute_names) - mem_record.changes.keys).each do |name|
+ mem_record[name] = record[name]
+ end
+
+ mem_record
+ else
+ record
+ end
+ end
+
+ persisted + memory
+ end
+
+ def _create_record(attributes, raise = false, &block)
+ unless owner.persisted?
+ raise ActiveRecord::RecordNotSaved, "You cannot call create unless the parent is saved"
+ end
+
+ if attributes.is_a?(Array)
+ attributes.collect { |attr| _create_record(attr, raise, &block) }
+ else
+ transaction do
+ add_to_target(build_record(attributes)) do |record|
+ yield(record) if block_given?
+ insert_record(record, true, raise)
+ end
+ end
+ end
+ end
+
+ # Inserts the given record into the association collection; subclasses provide the implementation.
+ def insert_record(record, validate = true, raise = false)
+ raise NotImplementedError
+ end
+
+ def create_scope
+ scope.scope_for_create.stringify_keys
+ end
+
+ def delete_or_destroy(records, method)
+ records = records.flatten
+ records.each { |record| raise_on_type_mismatch!(record) }
+ existing_records = records.reject { |r| r.new_record? }
+
+ if existing_records.empty?
+ remove_records(existing_records, records, method)
+ else
+ transaction { remove_records(existing_records, records, method) }
+ end
+ end
+
+ def remove_records(existing_records, records, method)
+ records.each { |record| callback(:before_remove, record) }
+
+ delete_records(existing_records, method) if existing_records.any?
+ records.each { |record| target.delete(record) }
+
+ records.each { |record| callback(:after_remove, record) }
+ end
+
+ # Delete the given records from the association, using one of the methods :destroy,
+ # :delete_all or :nullify (or nil, in which case a default is used).
+ def delete_records(records, method)
+ raise NotImplementedError
+ end
+
+ def replace_records(new_target, original_target)
+ delete(target - new_target)
+
+ unless concat(new_target - target)
+ @target = original_target
+ raise RecordNotSaved, "Failed to replace #{reflection.name} because one or more of the " \
+ "new records could not be saved."
+ end
+
+ target
+ end
+
+ def concat_records(records, should_raise = false)
+ result = true
+
+ records.flatten.each do |record|
+ raise_on_type_mismatch!(record)
+ add_to_target(record) do |rec|
+ result &&= insert_record(rec, true, should_raise) unless owner.new_record?
+ end
+ end
+
+ result && records
+ end
+
+ def callback(method, record)
+ callbacks_for(method).each do |callback|
+ callback.call(method, owner, record)
+ end
+ end
+
+ def callbacks_for(callback_name)
+ full_callback_name = "#{callback_name}_for_#{reflection.name}"
+ owner.class.send(full_callback_name)
+ end
+
+ # Should we deal with assoc.first or assoc.last by issuing an independent query to
+ # the database, or by getting the target, and then taking the first/last item from that?
+ #
+ # If the args is just a non-empty options hash, go to the database.
+ #
+ # Otherwise, go to the database only if none of the following are true:
+ # * target already loaded
+ # * owner is new record
+ # * target contains new or changed record(s)
+ def fetch_first_nth_or_last_using_find?(args)
+ if args.first.is_a?(Hash)
+ true
+ else
+ !(loaded? ||
+ owner.new_record? ||
+ target.any? { |record| record.new_record? || record.changed? })
+ end
+ end
+
+ def include_in_memory?(record)
+ if reflection.is_a?(ActiveRecord::Reflection::ThroughReflection)
+ assoc = owner.association(reflection.through_reflection.name)
+ assoc.reader.any? { |source|
+ target = source.send(reflection.source_reflection.name)
+ target.respond_to?(:include?) ? target.include?(record) : target == record
+ } || target.include?(record)
+ else
+ target.include?(record)
+ end
+ end
+
+ # If the :inverse_of option has been
+ # specified, then #find scans the entire collection.
+ def find_by_scan(*args)
+ expects_array = args.first.kind_of?(Array)
+ ids = args.flatten.compact.map{ |arg| arg.to_s }.uniq
+
+ if ids.size == 1
+ id = ids.first
+ record = load_target.detect { |r| id == r.id.to_s }
+ expects_array ? [ record ] : record
+ else
+ load_target.select { |r| ids.include?(r.id.to_s) }
+ end
+ end
+
+ # Fetches the first/last using SQL if possible, otherwise from the target array.
+ def first_nth_or_last(type, *args)
+ args.shift if args.first.is_a?(Hash) && args.first.empty?
+
+ collection = fetch_first_nth_or_last_using_find?(args) ? scope : load_target
+ collection.send(type, *args).tap do |record|
+ set_inverse_instance record if record.is_a? ActiveRecord::Base
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/collection_proxy.rb b/activerecord/lib/active_record/associations/collection_proxy.rb
new file mode 100644
index 0000000000..84c8cfe72b
--- /dev/null
+++ b/activerecord/lib/active_record/associations/collection_proxy.rb
@@ -0,0 +1,1030 @@
+module ActiveRecord
+ module Associations
+ # Association proxies in Active Record are middlemen between the object that
+ # holds the association, known as the <tt>@owner</tt>, and the actual associated
+ # object, known as the <tt>@target</tt>. The kind of association any proxy is
+ # about is available in <tt>@reflection</tt>. That's an instance of the class
+ # ActiveRecord::Reflection::AssociationReflection.
+ #
+ # For example, given
+ #
+ # class Blog < ActiveRecord::Base
+ # has_many :posts
+ # end
+ #
+ # blog = Blog.first
+ #
+ # the association proxy in <tt>blog.posts</tt> has the object in +blog+ as
+ # <tt>@owner</tt>, the collection of its posts as <tt>@target</tt>, and
+ # the <tt>@reflection</tt> object represents a <tt>:has_many</tt> macro.
+ #
+ # This class delegates unknown methods to <tt>@target</tt> via
+ # <tt>method_missing</tt>.
+ #
+ # The <tt>@target</tt> object is not \loaded until needed. For example,
+ #
+ # blog.posts.count
+ #
+ # is computed directly through SQL and does not trigger by itself the
+ # instantiation of the actual post records.
+ class CollectionProxy < Relation
+ delegate(*(ActiveRecord::Calculations.public_instance_methods - [:count]), to: :scope)
+
+ def initialize(klass, association) #:nodoc:
+ @association = association
+ super klass, klass.arel_table
+ merge! association.scope(nullify: false)
+ end
+
+ def target
+ @association.target
+ end
+
+ def load_target
+ @association.load_target
+ end
+
+ # Returns +true+ if the association has been loaded, otherwise +false+.
+ #
+ # person.pets.loaded? # => false
+ # person.pets
+ # person.pets.loaded? # => true
+ def loaded?
+ @association.loaded?
+ end
+
+ # Works in two ways.
+ #
+ # *First:* Specify a subset of fields to be selected from the result set.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.select(:name)
+ # # => [
+ # # #<Pet id: nil, name: "Fancy-Fancy">,
+ # # #<Pet id: nil, name: "Spook">,
+ # # #<Pet id: nil, name: "Choo-Choo">
+ # # ]
+ #
+ # person.pets.select(:id, :name )
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy">,
+ # # #<Pet id: 2, name: "Spook">,
+ # # #<Pet id: 3, name: "Choo-Choo">
+ # # ]
+ #
+ # Be careful because this also means you're initializing a model
+ # object with only the fields that you've selected. If you attempt
+ # to access a field other than +id+ that is not in the initialized record you'll
+ # receive:
+ #
+ # person.pets.select(:name).first.person_id
+ # # => ActiveModel::MissingAttributeError: missing attribute: person_id
+ #
+ # *Second:* You can pass a block so it can be used just like Array#select.
+ # This loads the objects from the database for the scope and then
+ # filters them in Ruby using Array#select.
+ #
+ # person.pets.select { |pet| pet.name =~ /oo/ }
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.select(:name) { |pet| pet.name =~ /oo/ }
+ # # => [
+ # # #<Pet id: 2, name: "Spook">,
+ # # #<Pet id: 3, name: "Choo-Choo">
+ # # ]
+ def select(*fields, &block)
+ @association.select(*fields, &block)
+ end
+
+ # Finds an object in the collection responding to the +id+. Uses the same
+ # rules as <tt>ActiveRecord::Base.find</tt>. Raises <tt>ActiveRecord::RecordNotFound</tt>
+ # if the object cannot be found.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.find(1) # => #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>
+ # person.pets.find(4) # => ActiveRecord::RecordNotFound: Couldn't find Pet with id=4
+ #
+ # person.pets.find(2) { |pet| pet.name.downcase! }
+ # # => #<Pet id: 2, name: "fancy-fancy", person_id: 1>
+ #
+ # person.pets.find(2, 3)
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ def find(*args, &block)
+ @association.find(*args, &block)
+ end
+
+ # Returns the first record, or the first +n+ records, from the collection.
+ # If the collection is empty, the first form returns +nil+, and the second
+ # form returns an empty array.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.first # => #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>
+ #
+ # person.pets.first(2)
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>
+ # # ]
+ #
+ # another_person_without.pets # => []
+ # another_person_without.pets.first # => nil
+ # another_person_without.pets.first(3) # => []
+ def first(*args)
+ @association.first(*args)
+ end
+
+ # Same as +first+ except returns only the second record.
+ def second(*args)
+ @association.second(*args)
+ end
+
+ # Same as +first+ except returns only the third record.
+ def third(*args)
+ @association.third(*args)
+ end
+
+ # Same as +first+ except returns only the fourth record.
+ def fourth(*args)
+ @association.fourth(*args)
+ end
+
+ # Same as +first+ except returns only the fifth record.
+ def fifth(*args)
+ @association.fifth(*args)
+ end
+
+ # Same as +first+ except returns only the forty-second record.
+ # Also known as accessing "the reddit".
+ def forty_two(*args)
+ @association.forty_two(*args)
+ end
+
+ # Returns the last record, or the last +n+ records, from the collection.
+ # If the collection is empty, the first form returns +nil+, and the second
+ # form returns an empty array.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.last # => #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ #
+ # person.pets.last(2)
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # another_person_without.pets # => []
+ # another_person_without.pets.last # => nil
+ # another_person_without.pets.last(3) # => []
+ def last(*args)
+ @association.last(*args)
+ end
+
+ # Returns a new object of the collection type that has been instantiated
+ # with +attributes+ and linked to this object, but has not yet been saved.
+ # You can pass an array of attributes hashes; this will return an array
+ # with the new objects.
+ #
+ # class Person
+ # has_many :pets
+ # end
+ #
+ # person.pets.build
+ # # => #<Pet id: nil, name: nil, person_id: 1>
+ #
+ # person.pets.build(name: 'Fancy-Fancy')
+ # # => #<Pet id: nil, name: "Fancy-Fancy", person_id: 1>
+ #
+ # person.pets.build([{name: 'Spook'}, {name: 'Choo-Choo'}, {name: 'Brain'}])
+ # # => [
+ # # #<Pet id: nil, name: "Spook", person_id: 1>,
+ # # #<Pet id: nil, name: "Choo-Choo", person_id: 1>,
+ # # #<Pet id: nil, name: "Brain", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 5 # size of the collection
+ # person.pets.count # => 0 # count from database
+ def build(attributes = {}, &block)
+ @association.build(attributes, &block)
+ end
+ alias_method :new, :build
+
+ # Returns a new object of the collection type that has been instantiated with
+ # attributes, linked to this object and that has already been saved (if it
+ # passes the validations).
+ #
+ # class Person
+ # has_many :pets
+ # end
+ #
+ # person.pets.create(name: 'Fancy-Fancy')
+ # # => #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>
+ #
+ # person.pets.create([{name: 'Spook'}, {name: 'Choo-Choo'}])
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 3
+ # person.pets.count # => 3
+ #
+ # person.pets.find(1, 2, 3)
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ def create(attributes = {}, &block)
+ @association.create(attributes, &block)
+ end
+
+ # Like +create+, except that if the record is invalid, raises an exception.
+ #
+ # class Person
+ # has_many :pets
+ # end
+ #
+ # class Pet
+ # validates :name, presence: true
+ # end
+ #
+ # person.pets.create!(name: nil)
+ # # => ActiveRecord::RecordInvalid: Validation failed: Name can't be blank
+ def create!(attributes = {}, &block)
+ @association.create!(attributes, &block)
+ end
+
+ # Adds one or more records to the collection by setting their foreign keys
+ # to the association's primary key. Since << flattens its argument list and
+ # inserts each record, +push+ and +concat+ behave identically. Returns +self+
+ # so method calls may be chained.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.size # => 0
+ # person.pets.concat(Pet.new(name: 'Fancy-Fancy'))
+ # person.pets.concat(Pet.new(name: 'Spook'), Pet.new(name: 'Choo-Choo'))
+ # person.pets.size # => 3
+ #
+ # person.id # => 1
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.concat([Pet.new(name: 'Brain'), Pet.new(name: 'Benny')])
+ # person.pets.size # => 5
+ def concat(*records)
+ @association.concat(*records)
+ end
+
+ # Replaces this collection with +other_array+. This will perform a diff
+ # and delete/add only records that have changed.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [#<Pet id: 1, name: "Gorby", group: "cats", person_id: 1>]
+ #
+ # other_pets = [Pet.new(name: 'Puff', group: 'celebrities')]
+ #
+ # person.pets.replace(other_pets)
+ #
+ # person.pets
+ # # => [#<Pet id: 2, name: "Puff", group: "celebrities", person_id: 1>]
+ #
+ # If the supplied array has an incorrect association type, it raises
+ # an <tt>ActiveRecord::AssociationTypeMismatch</tt> error:
+ #
+ # person.pets.replace(["doo", "ggie", "gaga"])
+ # # => ActiveRecord::AssociationTypeMismatch: Pet expected, got String
+ def replace(other_array)
+ @association.replace(other_array)
+ end
+
+ # Deletes all the records from the collection. For +has_many+ associations,
+ # the deletion is done according to the strategy specified by the <tt>:dependent</tt>
+ # option.
+ #
+ # If no <tt>:dependent</tt> option is given, then it will follow the
+ # default strategy. The default strategy is <tt>:nullify</tt>. This
+ # sets the foreign keys to <tt>NULL</tt>. For +has_many+ <tt>:through</tt>,
+ # the default strategy is +delete_all+.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets # dependent: :nullify option by default
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete_all
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 0
+ # person.pets # => []
+ #
+ # Pet.find(1, 2, 3)
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: nil>,
+ # # #<Pet id: 2, name: "Spook", person_id: nil>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: nil>
+ # # ]
+ #
+ # If it is set to <tt>:destroy</tt> all the objects from the collection
+ # are removed by calling their +destroy+ method. See +destroy+ for more
+ # information.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets, dependent: :destroy
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete_all
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # Pet.find(1, 2, 3)
+ # # => ActiveRecord::RecordNotFound
+ #
+ # If it is set to <tt>:delete_all</tt>, all the objects are deleted
+ # *without* calling their +destroy+ method.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets, dependent: :delete_all
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete_all
+ #
+ # Pet.find(1, 2, 3)
+ # # => ActiveRecord::RecordNotFound
+ def delete_all(dependent = nil)
+ @association.delete_all(dependent)
+ end
+
+ # Destroys all the records from the collection, ignoring the +:dependent+
+ # option. Each record is instantiated and removed via +destroy+, so the
+ # +before_remove+, +after_remove+, +before_destroy+ and +after_destroy+
+ # callbacks are invoked.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.destroy_all
+ #
+ # person.pets.size # => 0
+ # person.pets # => []
+ #
+ # Pet.find(1) # => Couldn't find Pet with id=1
+ def destroy_all
+ @association.destroy_all
+ end
+
+ # Deletes the +records+ supplied and removes them from the collection. For
+ # +has_many+ associations, the deletion is done according to the strategy
+ # specified by the <tt>:dependent</tt> option. Returns an array with the
+ # deleted records.
+ #
+ # If no <tt>:dependent</tt> option is given, then it will follow the default
+ # strategy. The default strategy is <tt>:nullify</tt>. This sets the foreign
+ # keys to <tt>NULL</tt>. For +has_many+ <tt>:through</tt>, the default
+ # strategy is +delete_all+.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets # dependent: :nullify option by default
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete(Pet.find(1))
+ # # => [#<Pet id: 1, name: "Fancy-Fancy", person_id: 1>]
+ #
+ # person.pets.size # => 2
+ # person.pets
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # Pet.find(1)
+ # # => #<Pet id: 1, name: "Fancy-Fancy", person_id: nil>
+ #
+ # If it is set to <tt>:destroy</tt> all the +records+ are removed by calling
+ # their +destroy+ method. See +destroy+ for more information.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets, dependent: :destroy
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete(Pet.find(1), Pet.find(3))
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 1
+ # person.pets
+ # # => [#<Pet id: 2, name: "Spook", person_id: 1>]
+ #
+ # Pet.find(1, 3)
+ # # => ActiveRecord::RecordNotFound: Couldn't find all Pets with IDs (1, 3)
+ #
+ # If it is set to <tt>:delete_all</tt>, all the +records+ are deleted
+ # *without* calling their +destroy+ method.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets, dependent: :delete_all
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete(Pet.find(1))
+ # # => [#<Pet id: 1, name: "Fancy-Fancy", person_id: 1>]
+ #
+ # person.pets.size # => 2
+ # person.pets
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # Pet.find(1)
+ # # => ActiveRecord::RecordNotFound: Couldn't find Pet with id=1
+ #
+ # You can pass +Fixnum+ or +String+ values; it finds the records
+ # responding to the +id+ and executes delete on them.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.delete("1")
+ # # => [#<Pet id: 1, name: "Fancy-Fancy", person_id: 1>]
+ #
+ # person.pets.delete(2, 3)
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ def delete(*records)
+ @association.delete(*records)
+ end
+
+ # Destroys the +records+ supplied and removes them from the collection.
+ # This method will _always_ remove the records from the database, ignoring
+ # the +:dependent+ option. Returns an array with the removed records.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.destroy(Pet.find(1))
+ # # => [#<Pet id: 1, name: "Fancy-Fancy", person_id: 1>]
+ #
+ # person.pets.size # => 2
+ # person.pets
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.destroy(Pet.find(2), Pet.find(3))
+ # # => [
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 0
+ # person.pets # => []
+ #
+ # Pet.find(1, 2, 3) # => ActiveRecord::RecordNotFound: Couldn't find all Pets with IDs (1, 2, 3)
+ #
+ # You can pass +Fixnum+ or +String+ values; it finds the records
+ # responding to the +id+ and then deletes them from the database.
+ #
+ # person.pets.size # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 4, name: "Benny", person_id: 1>,
+ # # #<Pet id: 5, name: "Brain", person_id: 1>,
+ # # #<Pet id: 6, name: "Boss", person_id: 1>
+ # # ]
+ #
+ # person.pets.destroy("4")
+ # # => #<Pet id: 4, name: "Benny", person_id: 1>
+ #
+ # person.pets.size # => 2
+ # person.pets
+ # # => [
+ # # #<Pet id: 5, name: "Brain", person_id: 1>,
+ # # #<Pet id: 6, name: "Boss", person_id: 1>
+ # # ]
+ #
+ # person.pets.destroy(5, 6)
+ # # => [
+ # # #<Pet id: 5, name: "Brain", person_id: 1>,
+ # # #<Pet id: 6, name: "Boss", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 0
+ # person.pets # => []
+ #
+ # Pet.find(4, 5, 6) # => ActiveRecord::RecordNotFound: Couldn't find all Pets with IDs (4, 5, 6)
+ def destroy(*records)
+ @association.destroy(*records)
+ end
+
+ # Specifies whether the records should be unique or not.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.select(:name)
+ # # => [
+ # # #<Pet name: "Fancy-Fancy">,
+ # # #<Pet name: "Fancy-Fancy">
+ # # ]
+ #
+ # person.pets.select(:name).distinct
+ # # => [#<Pet name: "Fancy-Fancy">]
+ def distinct
+ @association.distinct
+ end
+ alias uniq distinct
+
+ # Count all records using SQL.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.count # => 3
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ def count(column_name = nil, options = {})
+ # TODO: Remove options argument as soon we remove support to
+ # activerecord-deprecated_finders.
+ @association.count(column_name, options)
+ end
+
+ # Returns the size of the collection. If the collection hasn't been loaded,
+ # it executes a <tt>SELECT COUNT(*)</tt> query. Otherwise it calls <tt>collection.size</tt>.
+ #
+ # If the collection has already been loaded, +size+ and +length+ are
+ # equivalent. If not, and you are going to need the records anyway,
+ # +length+ will take one less query. Otherwise +size+ is more efficient.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.size # => 3
+ # # executes something like SELECT COUNT(*) FROM "pets" WHERE "pets"."person_id" = 1
+ #
+ # person.pets # This will execute a SELECT * FROM query
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ #
+ # person.pets.size # => 3
+ # # Because the collection is already loaded, this will behave like
+ # # collection.size and no SQL count query is executed.
+ def size
+ @association.size
+ end
+
+ # Returns the size of the collection by calling +size+ on the target.
+ # If the collection has already been loaded, +length+ and +size+ are
+ # equivalent. If not, and you are going to need the records anyway, this
+ # method will take one less query. Otherwise +size+ is more efficient.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.length # => 3
+ # # executes something like SELECT "pets".* FROM "pets" WHERE "pets"."person_id" = 1
+ #
+ # # Because the collection is loaded, you can
+ # # call the collection with no additional queries:
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ def length
+ @association.length
+ end
+
+ # Returns +true+ if the collection is empty. If the collection has been
+ # loaded, it is equivalent to <tt>collection.size.zero?</tt>. If the
+ # collection has not been loaded, it is equivalent to
+ # <tt>collection.exists?</tt>. If the collection has not already been
+ # loaded and you are going to fetch the records anyway, it is better to
+ # check <tt>collection.length.zero?</tt>.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.count # => 1
+ # person.pets.empty? # => false
+ #
+ # person.pets.delete_all
+ #
+ # person.pets.count # => 0
+ # person.pets.empty? # => true
+ def empty?
+ @association.empty?
+ end
+
+ # Returns +true+ if the collection is not empty.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.count # => 0
+ # person.pets.any? # => false
+ #
+ # person.pets << Pet.new(name: 'Snoop')
+ # person.pets.count # => 1
+ # person.pets.any? # => true
+ #
+ # You can also pass a block to define criteria. The behavior
+ # is the same: it returns true if the collection based on the
+ # criteria is not empty.
+ #
+ # person.pets
+ # # => [#<Pet name: "Snoop", group: "dogs">]
+ #
+ # person.pets.any? do |pet|
+ # pet.group == 'cats'
+ # end
+ # # => false
+ #
+ # person.pets.any? do |pet|
+ # pet.group == 'dogs'
+ # end
+ # # => true
+ def any?(&block)
+ @association.any?(&block)
+ end
+
+ # Returns true if the collection has more than one record.
+ # Equivalent to <tt>collection.size > 1</tt>.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.count # => 1
+ # person.pets.many? # => false
+ #
+ # person.pets << Pet.new(name: 'Snoopy')
+ # person.pets.count # => 2
+ # person.pets.many? # => true
+ #
+ # You can also pass a block to define criteria. The
+ # behavior is the same: it returns true if the collection
+ # based on the criteria has more than one record.
+ #
+ # person.pets
+ # # => [
+ # # #<Pet name: "Gorby", group: "cats">,
+ # # #<Pet name: "Puff", group: "cats">,
+ # # #<Pet name: "Snoop", group: "dogs">
+ # # ]
+ #
+ # person.pets.many? do |pet|
+ # pet.group == 'dogs'
+ # end
+ # # => false
+ #
+ # person.pets.many? do |pet|
+ # pet.group == 'cats'
+ # end
+ # # => true
+ def many?(&block)
+ @association.many?(&block)
+ end
+
+ # Returns +true+ if the given object is present in the collection.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets # => [#<Pet id: 20, name: "Snoop">]
+ #
+ # person.pets.include?(Pet.find(20)) # => true
+ # person.pets.include?(Pet.find(21)) # => false
+ def include?(record)
+ !!@association.include?(record)
+ end
+
+ def arel
+ scope.arel
+ end
+
+ def proxy_association
+ @association
+ end
+
+ # We don't want this object to be put on the scoping stack, because
+ # that could create an infinite loop where we call an @association
+ # method, which gets the current scope, which is this object, which
+ # delegates to @association, and so on.
+ def scoping
+ @association.scope.scoping { yield }
+ end
+
+ # Returns a <tt>Relation</tt> object for the records in this association
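+ #
+ # A small illustrative sketch (reusing the +person+ with pets from the
+ # examples above); the returned scope can be chained like any relation:
+ #
+ # person.pets.scope.where(name: 'Snoop').to_a
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]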
+ def scope
+ @association.scope
+ end
+ alias spawn scope
+
+ # Equivalent to <tt>Array#==</tt>. Returns +true+ if the two arrays
+ # contain the same number of elements and if each element is equal
+ # to the corresponding element in the other array, otherwise returns
+ # +false+.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>
+ # # ]
+ #
+ # other = person.pets.to_ary
+ #
+ # person.pets == other
+ # # => true
+ #
+ # other = [Pet.new(id: 1), Pet.new(id: 2)]
+ #
+ # person.pets == other
+ # # => false
+ def ==(other)
+ load_target == other
+ end
+
+ # Returns a new array of objects from the collection. If the collection
+ # hasn't been loaded, it fetches the records from the database.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets
+ # # => [
+ # # #<Pet id: 4, name: "Benny", person_id: 1>,
+ # # #<Pet id: 5, name: "Brain", person_id: 1>,
+ # # #<Pet id: 6, name: "Boss", person_id: 1>
+ # # ]
+ #
+ # other_pets = person.pets.to_ary
+ # # => [
+ # # #<Pet id: 4, name: "Benny", person_id: 1>,
+ # # #<Pet id: 5, name: "Brain", person_id: 1>,
+ # # #<Pet id: 6, name: "Boss", person_id: 1>
+ # # ]
+ #
+ # other_pets.replace([Pet.new(name: 'BooGoo')])
+ #
+ # other_pets
+ # # => [#<Pet id: nil, name: "BooGoo", person_id: 1>]
+ #
+ # person.pets
+ # # This is not affected by replace
+ # # => [
+ # # #<Pet id: 4, name: "Benny", person_id: 1>,
+ # # #<Pet id: 5, name: "Brain", person_id: 1>,
+ # # #<Pet id: 6, name: "Boss", person_id: 1>
+ # # ]
+ def to_ary
+ load_target.dup
+ end
+ alias_method :to_a, :to_ary
+
+ # Adds one or more +records+ to the collection by setting their foreign keys
+ # to the association's primary key. Returns +self+, so several appends may be
+ # chained together.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets.size # => 0
+ # person.pets << Pet.new(name: 'Fancy-Fancy')
+ # person.pets << [Pet.new(name: 'Spook'), Pet.new(name: 'Choo-Choo')]
+ # person.pets.size # => 3
+ #
+ # person.id # => 1
+ # person.pets
+ # # => [
+ # # #<Pet id: 1, name: "Fancy-Fancy", person_id: 1>,
+ # # #<Pet id: 2, name: "Spook", person_id: 1>,
+ # # #<Pet id: 3, name: "Choo-Choo", person_id: 1>
+ # # ]
+ def <<(*records)
+ proxy_association.concat(records) && self
+ end
+ alias_method :push, :<<
+ alias_method :append, :<<
+
+ def prepend(*args)
+ raise NoMethodError, "prepend on association is not defined. Please use << or append"
+ end
+
+ # Equivalent to +delete_all+. The difference is that it returns +self+, instead
+ # of an array with the deleted objects, so methods can be chained. See
+ # +delete_all+ for more information.
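+ #
+ # A brief illustration (assuming the +person+ with three pets used in the
+ # +delete_all+ examples above):
+ #
+ # person.pets.clear # => []
+ # person.pets.size # => 0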
+ def clear
+ delete_all
+ self
+ end
+
+ # Reloads the collection from the database. Returns +self+.
+ # Equivalent to <tt>collection(true)</tt>.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets # fetches pets from the database
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ #
+ # person.pets # uses the pets cache
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ #
+ # person.pets.reload # fetches pets from the database
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ #
+ # person.pets(true) # fetches pets from the database
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ def reload
+ proxy_association.reload
+ self
+ end
+
+ # Unloads the association. Returns +self+.
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :pets
+ # end
+ #
+ # person.pets # fetches pets from the database
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ #
+ # person.pets # uses the pets cache
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ #
+ # person.pets.reset # clears the pets cache
+ #
+ # person.pets # fetches pets from the database
+ # # => [#<Pet id: 1, name: "Snoop", group: "dogs", person_id: 1>]
+ def reset
+ proxy_association.reset
+ proxy_association.reset_scope
+ self
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/has_many_association.rb b/activerecord/lib/active_record/associations/has_many_association.rb
new file mode 100644
index 0000000000..79c3d2b0f5
--- /dev/null
+++ b/activerecord/lib/active_record/associations/has_many_association.rb
@@ -0,0 +1,184 @@
+module ActiveRecord
+ # = Active Record Has Many Association
+ module Associations
+ # This is the proxy that handles a has many association.
+ #
+ # If the association has a <tt>:through</tt> option further specialization
+ # is provided by its child HasManyThroughAssociation.
+ class HasManyAssociation < CollectionAssociation #:nodoc:
+
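+ # Called when the owner is destroyed; enforces the :dependent option.
+ # :restrict_with_exception raises if the collection is not empty,
+ # :restrict_with_error adds an error to the owner instead, :destroy
+ # destroys every child record, and any other setting falls through to
+ # +delete_all+.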
+ def handle_dependency
+ case options[:dependent]
+ when :restrict_with_exception
+ raise ActiveRecord::DeleteRestrictionError.new(reflection.name) unless empty?
+
+ when :restrict_with_error
+ unless empty?
+ record = klass.human_attribute_name(reflection.name).downcase
+ owner.errors.add(:base, :"restrict_dependent_destroy.many", record: record)
+ false
+ end
+
+ else
+ if options[:dependent] == :destroy
+ # No point in executing the counter update since we're going to destroy the parent anyway
+ load_target.each { |t| t.destroyed_by_association = reflection }
+ destroy_all
+ else
+ delete_all
+ end
+ end
+ end
+
+ def insert_record(record, validate = true, raise = false)
+ set_owner_attributes(record)
+ set_inverse_instance(record)
+
+ if raise
+ record.save!(:validate => validate)
+ else
+ record.save(:validate => validate)
+ end
+ end
+
+ def empty?
+ if has_cached_counter?
+ size.zero?
+ else
+ super
+ end
+ end
+
+ private
+
+ # Returns the number of records in this collection.
+ #
+ # If the association has a counter cache it gets that value. Otherwise
+ # it will attempt to do a count via SQL, bounded to <tt>:limit</tt> if
+ # there's one. Some configuration options like :group make it impossible
+ # to do an SQL count; in those cases the array count will be used.
+ #
+ # That does not depend on whether the collection has already been loaded
+ # or not. The +size+ method is the one that takes the loaded flag into
+ # account and delegates to +count_records+ if needed.
+ #
+ # If the collection is empty the target is set to an empty array and
+ # the loaded flag is set to true as well.
+ def count_records
+ count = if has_cached_counter?
+ owner.read_attribute cached_counter_attribute_name
+ else
+ scope.count
+ end
+
+ # If there's nothing in the database and @target has no new records
+ # we are certain the current target is an empty array. This is a
+ # documented side-effect of the method that may avoid an extra SELECT.
+ @target ||= [] and loaded! if count == 0
+
+ [association_scope.limit_value, count].compact.min
+ end
+
+ def has_cached_counter?(reflection = reflection())
+ owner.attribute_present?(cached_counter_attribute_name(reflection))
+ end
+
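+ # Name of the counter cache column. By default it is derived from the
+ # association name, e.g. <tt>has_many :pets</tt> looks for a +pets_count+
+ # attribute on the owner, unless a :counter_cache option is given.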
+ def cached_counter_attribute_name(reflection = reflection())
+ options[:counter_cache] || "#{reflection.name}_count"
+ end
+
+ def update_counter(difference, reflection = reflection())
+ update_counter_in_database(difference, reflection)
+ update_counter_in_memory(difference, reflection)
+ end
+
+ def update_counter_in_database(difference, reflection = reflection())
+ if has_cached_counter?(reflection)
+ counter = cached_counter_attribute_name(reflection)
+ owner.class.update_counters(owner.id, counter => difference)
+ end
+ end
+
+ def update_counter_in_memory(difference, reflection = reflection())
+ if has_cached_counter?(reflection)
+ counter = cached_counter_attribute_name(reflection)
+ owner[counter] += difference
+ owner.changed_attributes.delete(counter) # eww
+ end
+ end
+
+ # This is a nasty situation we need to avoid:
+ #
+ # * An associated record is deleted via record.destroy
+ # * Hence the callbacks run, and they find a belongs_to on the record with a
+ # :counter_cache option which points back at our owner. So they update the
+ # counter cache.
+ # * In which case, we must make sure to *not* update the counter cache, or else
+ # it will be decremented twice.
+ #
+ # Hence this method.
+ def inverse_updates_counter_cache?(reflection = reflection())
+ counter_name = cached_counter_attribute_name(reflection)
+ inverse_updates_counter_named?(counter_name, reflection)
+ end
+
+ def inverse_updates_counter_named?(counter_name, reflection = reflection())
+ reflection.klass._reflections.values.any? { |inverse_reflection|
+ inverse_reflection.belongs_to? &&
+ inverse_reflection.counter_cache_column == counter_name
+ }
+ end
+
+ def delete_count(method, scope)
+ if method == :delete_all
+ scope.delete_all
+ else
+ scope.update_all(reflection.foreign_key => nil)
+ end
+ end
+
+ def delete_or_nullify_all_records(method)
+ count = delete_count(method, self.scope)
+ update_counter(-count)
+ end
+
+ # Deletes the records according to the <tt>:dependent</tt> option.
+ def delete_records(records, method)
+ if method == :destroy
+ records.each(&:destroy!)
+ update_counter(-records.length) unless inverse_updates_counter_cache?
+ else
+ scope = self.scope.where(reflection.klass.primary_key => records)
+ update_counter(-delete_count(method, scope))
+ end
+ end
+
+ def foreign_key_present?
+ if reflection.klass.primary_key
+ owner.attribute_present?(reflection.association_primary_key)
+ else
+ false
+ end
+ end
+
+ def concat_records(records, *)
+ update_counter_if_success(super, records.length)
+ end
+
+ def _create_record(attributes, *)
+ if attributes.is_a?(Array)
+ super
+ else
+ update_counter_if_success(super, 1)
+ end
+ end
+
+ def update_counter_if_success(saved_successfully, difference)
+ if saved_successfully
+ update_counter_in_memory(difference)
+ end
+ saved_successfully
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/has_many_through_association.rb b/activerecord/lib/active_record/associations/has_many_through_association.rb
new file mode 100644
index 0000000000..44c4436e95
--- /dev/null
+++ b/activerecord/lib/active_record/associations/has_many_through_association.rb
@@ -0,0 +1,235 @@
+module ActiveRecord
+ # = Active Record Has Many Through Association
+ module Associations
+ class HasManyThroughAssociation < HasManyAssociation #:nodoc:
+ include ThroughAssociation
+
+ def initialize(owner, reflection)
+ super
+
+ @through_records = {}
+ @through_association = nil
+ end
+
+ # Returns the size of the collection by executing a SELECT COUNT(*) query
+ # if the collection hasn't been loaded, and by calling collection.size if
+ # it has. If the collection will likely have a size greater than zero,
+ # and if fetching the collection will be needed afterwards, one less
+ # SELECT query will be generated by using #length instead.
+ def size
+ if has_cached_counter?
+ owner.read_attribute cached_counter_attribute_name(reflection)
+ elsif loaded?
+ target.size
+ else
+ super
+ end
+ end
+
+ def concat(*records)
+ unless owner.new_record?
+ records.flatten.each do |record|
+ raise_on_type_mismatch!(record)
+ end
+ end
+
+ super
+ end
+
+ def concat_records(records)
+ ensure_not_nested
+
+ records = super(records, true)
+
+ if owner.new_record? && records
+ records.flatten.each do |record|
+ build_through_record(record)
+ end
+ end
+
+ records
+ end
+
+ def insert_record(record, validate = true, raise = false)
+ ensure_not_nested
+
+ if record.new_record?
+ if raise
+ record.save!(:validate => validate)
+ else
+ return unless record.save(:validate => validate)
+ end
+ end
+
+ save_through_record(record)
+ if has_cached_counter? && !through_reflection_updates_counter_cache?
+ ActiveSupport::Deprecation.warn(<<-MESSAGE.strip_heredoc)
+ Automatic updating of counter caches on through associations has been
+ deprecated, and will be removed in Rails 5.0. Instead, please set the
+ appropriate counter_cache options on the has_many and belongs_to for
+ your associations to #{through_reflection.name}.
+ MESSAGE
+ update_counter_in_database(1)
+ end
+ record
+ end
+
+ private
+
+ def through_association
+ @through_association ||= owner.association(through_reflection.name)
+ end
+
+ # The through record (built with build_record) is temporarily cached
+ # so that it may be reused if insert_record is subsequently called.
+ #
+ # However, after insert_record has been called, the cache is cleared in
+ # order to allow multiple instances of the same record in an association.
+ def build_through_record(record)
+ @through_records[record.object_id] ||= begin
+ ensure_mutable
+
+ through_record = through_association.build(*options_for_through_record)
+ through_record.send("#{source_reflection.name}=", record)
+ through_record
+ end
+ end
+
+ def options_for_through_record
+ [through_scope_attributes]
+ end
+
+ def through_scope_attributes
+ scope.where_values_hash(through_association.reflection.name.to_s).
+ except!(through_association.reflection.foreign_key,
+ through_association.reflection.klass.inheritance_column)
+ end
+
+ def save_through_record(record)
+ build_through_record(record).save!
+ ensure
+ @through_records.delete(record.object_id)
+ end
+
+ def build_record(attributes)
+ ensure_not_nested
+
+ record = super(attributes)
+
+ inverse = source_reflection.inverse_of
+ if inverse
+ if inverse.collection?
+ record.send(inverse.name) << build_through_record(record)
+ elsif inverse.has_one?
+ record.send("#{inverse.name}=", build_through_record(record))
+ end
+ end
+
+ record
+ end
+
+ def target_reflection_has_associated_record?
+ !(through_reflection.belongs_to? && owner[through_reflection.foreign_key].blank?)
+ end
+
+ def update_through_counter?(method)
+ case method
+ when :destroy
+ !inverse_updates_counter_cache?(through_reflection)
+ when :nullify
+ false
+ else
+ true
+ end
+ end
+
+ def delete_or_nullify_all_records(method)
+ delete_records(load_target, method)
+ end
+
+ def delete_records(records, method)
+ ensure_not_nested
+
+ scope = through_association.scope
+ scope.where! construct_join_attributes(*records)
+
+ case method
+ when :destroy
+ if scope.klass.primary_key
+ count = scope.destroy_all.length
+ else
+ scope.to_a.each do |record|
+ record.run_callbacks :destroy
+ end
+
+ arel = scope.arel
+
+ stmt = Arel::DeleteManager.new arel.engine
+ stmt.from scope.klass.arel_table
+ stmt.wheres = arel.constraints
+
+ count = scope.klass.connection.delete(stmt, 'SQL', scope.bind_values)
+ end
+ when :nullify
+ count = scope.update_all(source_reflection.foreign_key => nil)
+ else
+ count = scope.delete_all
+ end
+
+ delete_through_records(records)
+
+ if source_reflection.options[:counter_cache] && method != :destroy
+ counter = source_reflection.counter_cache_column
+ klass.decrement_counter counter, records.map(&:id)
+ end
+
+ if through_reflection.collection? && update_through_counter?(method)
+ update_counter(-count, through_reflection)
+ end
+
+ update_counter(-count)
+ end
+
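+ # Returns the loaded join-model records whose join attributes point at
+ # +record+, so that they can be removed along with it.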
+ def through_records_for(record)
+ attributes = construct_join_attributes(record)
+ candidates = Array.wrap(through_association.target)
+ candidates.find_all do |c|
+ attributes.all? do |key, value|
+ c.public_send(key) == value
+ end
+ end
+ end
+
+ def delete_through_records(records)
+ records.each do |record|
+ through_records = through_records_for(record)
+
+ if through_reflection.collection?
+ through_records.each { |r| through_association.target.delete(r) }
+ else
+ if through_records.include?(through_association.target)
+ through_association.target = nil
+ end
+ end
+
+ @through_records.delete(record.object_id)
+ end
+ end
+
+ def find_target
+ return [] unless target_reflection_has_associated_record?
+ get_records
+ end
+
+ # NOTE - not sure that we can actually cope with inverses here
+ def invertible_for?(record)
+ false
+ end
+
+ def through_reflection_updates_counter_cache?
+ counter_name = cached_counter_attribute_name
+ inverse_updates_counter_named?(counter_name, through_reflection)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/has_one_association.rb b/activerecord/lib/active_record/associations/has_one_association.rb
new file mode 100644
index 0000000000..e6095d84dc
--- /dev/null
+++ b/activerecord/lib/active_record/associations/has_one_association.rb
@@ -0,0 +1,105 @@
+module ActiveRecord
+ # = Active Record Belongs To Has One Association
+ module Associations
+ class HasOneAssociation < SingularAssociation #:nodoc:
+
+ def handle_dependency
+ case options[:dependent]
+ when :restrict_with_exception
+ raise ActiveRecord::DeleteRestrictionError.new(reflection.name) if load_target
+
+ when :restrict_with_error
+ if load_target
+ record = klass.human_attribute_name(reflection.name).downcase
+ owner.errors.add(:base, :"restrict_dependent_destroy.one", record: record)
+ false
+ end
+
+ else
+ delete
+ end
+ end
+
+ def replace(record, save = true)
+ raise_on_type_mismatch!(record) if record
+ load_target
+
+ return self.target if !(target || record)
+
+ assigning_another_record = target != record
+ if assigning_another_record || record.changed?
+ save &&= owner.persisted?
+
+ transaction_if(save) do
+ remove_target!(options[:dependent]) if target && !target.destroyed? && assigning_another_record
+
+ if record
+ set_owner_attributes(record)
+ set_inverse_instance(record)
+
+ if save && !record.save
+ nullify_owner_attributes(record)
+ set_owner_attributes(target) if target
+ raise RecordNotSaved, "Failed to save the new associated #{reflection.name}."
+ end
+ end
+ end
+ end
+
+ self.target = record
+ end
+
+ def delete(method = options[:dependent])
+ if load_target
+ case method
+ when :delete
+ target.delete
+ when :destroy
+ target.destroy
+ when :nullify
+ target.update_columns(reflection.foreign_key => nil)
+ end
+ end
+ end
+
+ private
+
+ # The reason that the save param for replace is false, even for create (not just
+ # build), is that the setting of the foreign keys is actually handled by the scoping
+ # when the record is instantiated, so they are set straight away and do not need to
+ # be updated within replace.
+ def set_new_record(record)
+ replace(record, false)
+ end
+
+ def remove_target!(method)
+ case method
+ when :delete
+ target.delete
+ when :destroy
+ target.destroy
+ else
+ nullify_owner_attributes(target)
+
+ if target.persisted? && owner.persisted? && !target.save
+ set_owner_attributes(target)
+ raise RecordNotSaved, "Failed to remove the existing associated #{reflection.name}. " +
+ "The record failed to save after its foreign key was set to nil."
+ end
+ end
+ end
+
+ def nullify_owner_attributes(record)
+ record[reflection.foreign_key] = nil
+ end
+
+ def transaction_if(value)
+ if value
+ reflection.klass.transaction { yield }
+ else
+ yield
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/has_one_through_association.rb b/activerecord/lib/active_record/associations/has_one_through_association.rb
new file mode 100644
index 0000000000..08e0ec691f
--- /dev/null
+++ b/activerecord/lib/active_record/associations/has_one_through_association.rb
@@ -0,0 +1,36 @@
+module ActiveRecord
+ # = Active Record Has One Through Association
+ module Associations
+ class HasOneThroughAssociation < HasOneAssociation #:nodoc:
+ include ThroughAssociation
+
+ def replace(record)
+ create_through_record(record)
+ self.target = record
+ end
+
+ private
+
+ def create_through_record(record)
+ ensure_not_nested
+
+ through_proxy = owner.association(through_reflection.name)
+ through_record = through_proxy.send(:load_target)
+
+ if through_record && !record
+ through_record.destroy
+ elsif record
+ attributes = construct_join_attributes(record)
+
+ if through_record
+ through_record.update(attributes)
+ elsif owner.new_record?
+ through_proxy.build(attributes)
+ else
+ through_proxy.create(attributes)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/join_dependency.rb b/activerecord/lib/active_record/associations/join_dependency.rb
new file mode 100644
index 0000000000..ec5c189cd3
--- /dev/null
+++ b/activerecord/lib/active_record/associations/join_dependency.rb
@@ -0,0 +1,273 @@
+module ActiveRecord
+ module Associations
+ class JoinDependency # :nodoc:
+ autoload :JoinBase, 'active_record/associations/join_dependency/join_base'
+ autoload :JoinAssociation, 'active_record/associations/join_dependency/join_association'
+
+ class Aliases # :nodoc:
+ def initialize(tables)
+ @tables = tables
+ @alias_cache = tables.each_with_object({}) { |table,h|
+ h[table.node] = table.columns.each_with_object({}) { |column,i|
+ i[column.name] = column.alias
+ }
+ }
+ @name_and_alias_cache = tables.each_with_object({}) { |table,h|
+ h[table.node] = table.columns.map { |column|
+ [column.name, column.alias]
+ }
+ }
+ end
+
+ def columns
+ @tables.flat_map { |t| t.column_aliases }
+ end
+
+ # An array of [column_name, alias] pairs for the table
+ def column_aliases(node)
+ @name_and_alias_cache[node]
+ end
+
+ def column_alias(node, column)
+ @alias_cache[node][column]
+ end
+
+ class Table < Struct.new(:node, :columns)
+ def table
+ Arel::Nodes::TableAlias.new node.table, node.aliased_table_name
+ end
+
+ def column_aliases
+ t = table
+ columns.map { |column| t[column.name].as Arel.sql column.alias }
+ end
+ end
+ Column = Struct.new(:name, :alias)
+ end
+
+ attr_reader :alias_tracker, :base_klass, :join_root
+
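+ # Normalizes the associations argument (symbol, array or hash) into a
+ # nested hash of association names. For example (illustrative input):
+ #
+ # make_tree([:books, { author: :avatar }])
+ # # => { books: {}, author: { avatar: {} } }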
+ def self.make_tree(associations)
+ hash = {}
+ walk_tree associations, hash
+ hash
+ end
+
+ def self.walk_tree(associations, hash)
+ case associations
+ when Symbol, String
+ hash[associations.to_sym] ||= {}
+ when Array
+ associations.each do |assoc|
+ walk_tree assoc, hash
+ end
+ when Hash
+ associations.each do |k,v|
+ cache = hash[k] ||= {}
+ walk_tree v, cache
+ end
+ else
+ raise ConfigurationError, associations.inspect
+ end
+ end
+
+ # base is the base class on which operation is taking place.
+ # associations is the list of associations which are joined using hash, symbol or array.
+ # joins is the list of all string join commands and arel nodes.
+ #
+ # Example :
+ #
+ # class Physician < ActiveRecord::Base
+ # has_many :appointments
+ # has_many :patients, through: :appointments
+ # end
+ #
+ # If I execute `@physician.patients.to_a` then
+ # base # => Physician
+ # associations # => []
+ # joins # => [#<Arel::Nodes::InnerJoin: ...]
+ #
+ # However if I execute `Physician.joins(:appointments).to_a` then
+ # base # => Physician
+ # associations # => [:appointments]
+ # joins # => []
+ #
+ def initialize(base, associations, joins)
+ @alias_tracker = AliasTracker.create(base.connection, joins)
+ @alias_tracker.aliased_name_for(base.table_name, base.table_name) # Updates the count for base.table_name to 1
+ tree = self.class.make_tree associations
+ @join_root = JoinBase.new base, build(tree, base)
+ @join_root.children.each { |child| construct_tables! @join_root, child }
+ end
+
+ def reflections
+ join_root.drop(1).map!(&:reflection)
+ end
+
+ def join_constraints(outer_joins)
+ joins = join_root.children.flat_map { |child|
+ make_inner_joins join_root, child
+ }
+
+ joins.concat outer_joins.flat_map { |oj|
+ if join_root.match? oj.join_root
+ walk join_root, oj.join_root
+ else
+ oj.join_root.children.flat_map { |child|
+ make_outer_joins oj.join_root, child
+ }
+ end
+ }
+ end
+
+ def aliases
+ Aliases.new join_root.each_with_index.map { |join_part,i|
+ columns = join_part.column_names.each_with_index.map { |column_name,j|
+ Aliases::Column.new column_name, "t#{i}_r#{j}"
+ }
+ Aliases::Table.new(join_part, columns)
+ }
+ end
+
+ def instantiate(result_set, aliases)
+ primary_key = aliases.column_alias(join_root, join_root.primary_key)
+
+ seen = Hash.new { |h,parent_klass|
+ h[parent_klass] = Hash.new { |i,parent_id|
+ i[parent_id] = Hash.new { |j,child_klass| j[child_klass] = {} }
+ }
+ }
+
+ model_cache = Hash.new { |h,klass| h[klass] = {} }
+ parents = model_cache[join_root]
+ column_aliases = aliases.column_aliases join_root
+
+ result_set.each { |row_hash|
+ parent = parents[row_hash[primary_key]] ||= join_root.instantiate(row_hash, column_aliases)
+ construct(parent, join_root, row_hash, result_set, seen, model_cache, aliases)
+ }
+
+ parents.values
+ end
+
+ private
+
+ def make_constraints(parent, child, tables, join_type)
+ chain = child.reflection.chain
+ foreign_table = parent.table
+ foreign_klass = parent.base_klass
+ child.join_constraints(foreign_table, foreign_klass, child, join_type, tables, child.reflection.scope_chain, chain)
+ end
+
+ def make_outer_joins(parent, child)
+ tables = table_aliases_for(parent, child)
+ join_type = Arel::Nodes::OuterJoin
+ info = make_constraints parent, child, tables, join_type
+
+ [info] + child.children.flat_map { |c| make_outer_joins(child, c) }
+ end
+
+ def make_inner_joins(parent, child)
+ tables = child.tables
+ join_type = Arel::Nodes::InnerJoin
+ info = make_constraints parent, child, tables, join_type
+
+ [info] + child.children.flat_map { |c| make_inner_joins(child, c) }
+ end
+
+ def table_aliases_for(parent, node)
+ node.reflection.chain.map { |reflection|
+ alias_tracker.aliased_table_for(
+ reflection.table_name,
+ table_alias_for(reflection, parent, reflection != node.reflection)
+ )
+ }
+ end
+
+ def construct_tables!(parent, node)
+ node.tables = table_aliases_for(parent, node)
+ node.children.each { |child| construct_tables! node, child }
+ end
+
+ def table_alias_for(reflection, parent, join)
+ name = "#{reflection.plural_name}_#{parent.table_name}"
+ name << "_join" if join
+ name
+ end
+
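+ # Pairs each child of +right+ with a matching child of +left+: matched
+ # pairs are walked recursively, while unmatched children of +right+
+ # become outer joins.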
+ def walk(left, right)
+ intersection, missing = right.children.map { |node1|
+ [left.children.find { |node2| node1.match? node2 }, node1]
+ }.partition(&:first)
+
+ ojs = missing.flat_map { |_,n| make_outer_joins left, n }
+ intersection.flat_map { |l,r| walk l, r }.concat ojs
+ end
+
+ def find_reflection(klass, name)
+ klass._reflect_on_association(name) or
+ raise ConfigurationError, "Association named '#{ name }' was not found on #{ klass.name }; perhaps you misspelled it?"
+ end
+
+ def build(associations, base_klass)
+ associations.map do |name, right|
+ reflection = find_reflection base_klass, name
+ reflection.check_validity!
+ reflection.check_eager_loadable!
+
+ if reflection.polymorphic?
+ raise EagerLoadPolymorphicError.new(reflection)
+ end
+
+ JoinAssociation.new reflection, build(right, reflection.klass)
+ end
+ end
+
+ def construct(ar_parent, parent, row, rs, seen, model_cache, aliases)
+ primary_id = ar_parent.id
+
+ parent.children.each do |node|
+ if node.reflection.collection?
+ other = ar_parent.association(node.reflection.name)
+ other.loaded!
+ else
+ if ar_parent.association_cache.key?(node.reflection.name)
+ model = ar_parent.association(node.reflection.name).target
+ construct(model, node, row, rs, seen, model_cache, aliases)
+ next
+ end
+ end
+
+ key = aliases.column_alias(node, node.primary_key)
+ id = row[key]
+ next if id.nil?
+
+ model = seen[parent.base_klass][primary_id][node.base_klass][id]
+
+ if model
+ construct(model, node, row, rs, seen, model_cache, aliases)
+ else
+ model = construct_model(ar_parent, node, row, model_cache, id, aliases)
+ seen[parent.base_klass][primary_id][node.base_klass][id] = model
+ construct(model, node, row, rs, seen, model_cache, aliases)
+ end
+ end
+ end
+
+ def construct_model(record, node, row, model_cache, id, aliases)
+ model = model_cache[node][id] ||= node.instantiate(row,
+ aliases.column_aliases(node))
+ other = record.association(node.reflection.name)
+
+ if node.reflection.collection?
+ other.target.push(model)
+ else
+ other.target = model
+ end
+
+ other.set_inverse_instance(model)
+ model
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/join_dependency/join_association.rb b/activerecord/lib/active_record/associations/join_dependency/join_association.rb
new file mode 100644
index 0000000000..c3bbdccad8
--- /dev/null
+++ b/activerecord/lib/active_record/associations/join_dependency/join_association.rb
@@ -0,0 +1,122 @@
+require 'active_record/associations/join_dependency/join_part'
+
+module ActiveRecord
+ module Associations
+ class JoinDependency # :nodoc:
+ class JoinAssociation < JoinPart # :nodoc:
+ # The reflection of the association represented
+ attr_reader :reflection
+
+ attr_accessor :tables
+
+ def initialize(reflection, children)
+ super(reflection.klass, children)
+
+ @reflection = reflection
+ @tables = nil
+ end
+
+ def match?(other)
+ return true if self == other
+ super && reflection == other.reflection
+ end
+
+ JoinInformation = Struct.new :joins, :binds
+
+ def join_constraints(foreign_table, foreign_klass, node, join_type, tables, scope_chain, chain)
+ joins = []
+ bind_values = []
+ tables = tables.reverse
+
+ scope_chain_index = 0
+ scope_chain = scope_chain.reverse
+
+ # The chain starts with the target table, but we want to end with it here (makes
+ # more sense in this context), so we reverse
+ chain.reverse_each do |reflection|
+ table = tables.shift
+ klass = reflection.klass
+
+ join_keys = reflection.join_keys(klass)
+ key = join_keys.key
+ foreign_key = join_keys.foreign_key
+
+ constraint = build_constraint(klass, table, key, foreign_table, foreign_key)
+
+ scope_chain_items = scope_chain[scope_chain_index].map do |item|
+ if item.is_a?(Relation)
+ item
+ else
+ ActiveRecord::Relation.create(klass, table).instance_exec(node, &item)
+ end
+ end
+ scope_chain_index += 1
+
+ scope_chain_items.concat [klass.send(:build_default_scope, ActiveRecord::Relation.create(klass, table))].compact
+
+ rel = scope_chain_items.inject(scope_chain_items.shift) do |left, right|
+ left.merge right
+ end
+
+ if rel && !rel.arel.constraints.empty?
+ bind_values.concat rel.bind_values
+ constraint = constraint.and rel.arel.constraints
+ end
+
+ if reflection.type
+ value = foreign_klass.base_class.name
+ column = klass.columns_hash[reflection.type.to_s]
+
+ substitute = klass.connection.substitute_at(column, bind_values.length)
+ bind_values.push [column, value]
+ constraint = constraint.and table[reflection.type].eq substitute
+ end
+
+ joins << table.create_join(table, table.create_on(constraint), join_type)
+
+ # The current table in this iteration becomes the foreign table in the next
+ foreign_table, foreign_klass = table, klass
+ end
+
+ JoinInformation.new joins, bind_values
+ end
+
+ # Builds equality condition.
+ #
+ # Example:
+ #
+ # class Physician < ActiveRecord::Base
+ # has_many :appointments
+ # end
+ #
+ # If I execute `Physician.joins(:appointments).to_a` then
+ # klass # => Physician
+ # table # => #<Arel::Table @name="appointments" ...>
+ # key # => physician_id
+ # foreign_table # => #<Arel::Table @name="physicians" ...>
+ # foreign_key # => id
+ #
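+ # The returned constraint corresponds roughly to the SQL fragment
+ # <tt>appointments.physician_id = physicians.id</tt>, plus a type
+ # condition when the model uses single table inheritance.
+ #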
+ def build_constraint(klass, table, key, foreign_table, foreign_key)
+ constraint = table[key].eq(foreign_table[foreign_key])
+
+ if klass.finder_needs_type_condition?
+ constraint = table.create_and([
+ constraint,
+ klass.send(:type_condition, table)
+ ])
+ end
+
+ constraint
+ end
+
+ def table
+ tables.first
+ end
+
+ def aliased_table_name
+ table.table_alias || table.name
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/join_dependency/join_base.rb b/activerecord/lib/active_record/associations/join_dependency/join_base.rb
new file mode 100644
index 0000000000..3a26c25737
--- /dev/null
+++ b/activerecord/lib/active_record/associations/join_dependency/join_base.rb
@@ -0,0 +1,22 @@
+require 'active_record/associations/join_dependency/join_part'
+
+module ActiveRecord
+ module Associations
+ class JoinDependency # :nodoc:
+ class JoinBase < JoinPart # :nodoc:
+ def match?(other)
+ return true if self == other
+ super && base_klass == other.base_klass
+ end
+
+ def table
+ base_klass.arel_table
+ end
+
+ def aliased_table_name
+ base_klass.table_name
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/join_dependency/join_part.rb b/activerecord/lib/active_record/associations/join_dependency/join_part.rb
new file mode 100644
index 0000000000..91e1c6a9d7
--- /dev/null
+++ b/activerecord/lib/active_record/associations/join_dependency/join_part.rb
@@ -0,0 +1,72 @@
+module ActiveRecord
+ module Associations
+ class JoinDependency # :nodoc:
+ # A JoinPart represents a part of a JoinDependency. It is inherited
+ # by JoinBase and JoinAssociation. A JoinBase represents the Active Record which
+ # everything else is being joined onto. A JoinAssociation represents an association which
+ # is joining to the base. A JoinAssociation may result in more than one actual join
+ # operation (for example, a has_and_belongs_to_many JoinAssociation would result in
+ # two: one for the join table and one for the target table).
+ class JoinPart # :nodoc:
+ include Enumerable
+
+ # The Active Record class which this join part is 'about'; for a JoinBase
+ # this is the actual base model, and for a JoinAssociation this is the target model
+ # of the association.
+ attr_reader :base_klass, :children
+
+ delegate :table_name, :column_names, :primary_key, :to => :base_klass
+
+ def initialize(base_klass, children)
+ @base_klass = base_klass
+ @column_names_with_alias = nil
+ @children = children
+ end
+
+ def name
+ reflection.name
+ end
+
+ def match?(other)
+ self.class == other.class
+ end
+
+ def each(&block)
+ yield self
+ children.each { |child| child.each(&block) }
+ end
+
+ # An Arel::Table for the active_record
+ def table
+ raise NotImplementedError
+ end
+
+ # The alias for the active_record's table
+ def aliased_table_name
+ raise NotImplementedError
+ end
+
+ def extract_record(row, column_names_with_alias)
+ # This code is performance critical as it is called per row.
+ # see: https://github.com/rails/rails/pull/12185
+ hash = {}
+
+ index = 0
+ length = column_names_with_alias.length
+
+ while index < length
+ column_name, alias_name = column_names_with_alias[index]
+ hash[column_name] = row[alias_name]
+ index += 1
+ end
+
+ hash
+ end
+
+ def instantiate(row, aliases)
+ base_klass.instantiate(extract_record(row, aliases))
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader.rb b/activerecord/lib/active_record/associations/preloader.rb
new file mode 100644
index 0000000000..7519fec10a
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader.rb
@@ -0,0 +1,193 @@
+module ActiveRecord
+ module Associations
+ # Implements the details of eager loading of Active Record associations.
+ #
+ # Note that 'eager loading' and 'preloading' are actually the same thing.
+ # However, there are two different eager loading strategies.
+ #
+ # The first one is by using table joins. This was the only strategy available
+ # prior to Rails 2.1. Suppose that you have an Author model with columns
+ # 'name' and 'age', and a Book model with columns 'name' and 'sales'. Using
+ # this strategy, Active Record would try to retrieve all data for an author
+ # and all of its books via a single query:
+ #
+ # SELECT * FROM authors
+ # LEFT OUTER JOIN books ON authors.id = books.author_id
+ # WHERE authors.name = 'Ken Akamatsu'
+ #
+ # However, this could result in many rows that contain redundant data. After
+ # having received the first row, we already have enough data to instantiate
+ # the Author object. In all subsequent rows, only the data for the joined
+ # 'books' table is useful; the joined 'authors' data is just redundant, and
+ # processing this redundant data takes memory and CPU time. The problem
+ # quickly becomes worse and worse as the level of eager loading increases
+ # (i.e. if Active Record is to eager load the associations' associations as
+ # well).
+ #
+ # The second strategy is to use multiple database queries, one for each
+ # level of association. Since Rails 2.1, this is the default strategy. In
+ # situations where a table join is necessary (e.g. when the +:conditions+
+ # option references an association's column), it will fall back to the table
+ # join strategy.
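+ #
+ # Using the Author and Book example above, the second strategy would issue
+ # roughly the following queries (a sketch; the exact SQL depends on the adapter):
+ #
+ # SELECT * FROM authors WHERE authors.name = 'Ken Akamatsu'
+ # SELECT * FROM books WHERE books.author_id IN (1)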
+ class Preloader #:nodoc:
+ extend ActiveSupport::Autoload
+
+ eager_autoload do
+ autoload :Association, 'active_record/associations/preloader/association'
+ autoload :SingularAssociation, 'active_record/associations/preloader/singular_association'
+ autoload :CollectionAssociation, 'active_record/associations/preloader/collection_association'
+ autoload :ThroughAssociation, 'active_record/associations/preloader/through_association'
+
+ autoload :HasMany, 'active_record/associations/preloader/has_many'
+ autoload :HasManyThrough, 'active_record/associations/preloader/has_many_through'
+ autoload :HasOne, 'active_record/associations/preloader/has_one'
+ autoload :HasOneThrough, 'active_record/associations/preloader/has_one_through'
+ autoload :BelongsTo, 'active_record/associations/preloader/belongs_to'
+ end
+
+ # Eager loads the named associations for the given Active Record record(s).
+ #
+ # In this description, 'association name' shall refer to the name passed
+ # to an association creation method. For example, a model that specifies
+ # <tt>belongs_to :author</tt>, <tt>has_many :buyers</tt> has association
+ # names +:author+ and +:buyers+.
+ #
+ # == Parameters
+ # +records+ is an array of ActiveRecord::Base. This array need not be flat,
+ # i.e. +records+ itself may also contain arrays of records. In any case,
+ # this method will preload all the associations' records by
+ # flattening +records+.
+ #
+ # +associations+ specifies one or more associations that you want to
+ # preload. It may be:
+ # - a Symbol or a String which specifies a single association name. For
+ # example, specifying +:books+ allows this method to preload all books
+ # for an Author.
+ # - an Array which specifies multiple association names. This array
+ # is processed recursively. For example, specifying <tt>[:avatar, :books]</tt>
+ # allows this method to preload an author's avatar as well as all of his
+ # books.
+ # - a Hash which specifies multiple association names, as well as
+ # association names for the to-be-preloaded association objects. For
+ # example, specifying <tt>{ author: :avatar }</tt> will preload a
+ # book's author, as well as that author's avatar.
+ #
+ # +associations+ has the same format as the +:include+ option for
+ # <tt>ActiveRecord::Base.find</tt>. So +associations+ could look like this:
+ #
+ # :books
+ # [ :books, :author ]
+ # { author: :avatar }
+ # [ :books, { author: :avatar } ]
+
+ NULL_RELATION = Struct.new(:values, :bind_values).new({}, [])
+
+ def preload(records, associations, preload_scope = nil)
+ records = Array.wrap(records).compact.uniq
+ associations = Array.wrap(associations)
+ preload_scope = preload_scope || NULL_RELATION
+
+ if records.empty?
+ []
+ else
+ associations.flat_map { |association|
+ preloaders_on association, records, preload_scope
+ }
+ end
+ end
+
+ private
+
+ def preloaders_on(association, records, scope)
+ case association
+ when Hash
+ preloaders_for_hash(association, records, scope)
+ when Symbol
+ preloaders_for_one(association, records, scope)
+ when String
+ preloaders_for_one(association.to_sym, records, scope)
+ else
+ raise ArgumentError, "#{association.inspect} was not recognised for preload"
+ end
+ end
+
+ def preloaders_for_hash(association, records, scope)
+ association.flat_map { |parent, child|
+ loaders = preloaders_for_one parent, records, scope
+
+ recs = loaders.flat_map(&:preloaded_records).uniq
+ loaders.concat Array.wrap(child).flat_map { |assoc|
+ preloaders_on assoc, recs, scope
+ }
+ loaders
+ }
+ end
+
+ # Not all records have the same class, so group them and preload each group on the
+ # reflection itself, so that if various subclasses share the same association we do
+ # not split them unnecessarily.
+ #
+ # Additionally, polymorphic belongs_to associations can have multiple associated
+ # classes, depending on the polymorphic_type field. So we group by the classes as
+ # well.
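+ #
+ # For example (model names illustrative), preloading a polymorphic
+ # +:commentable+ association for comments that point at both Posts and
+ # Images yields one loader per [reflection, Post] and [reflection, Image] pair.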
+ def preloaders_for_one(association, records, scope)
+ grouped_records(association, records).flat_map do |reflection, klasses|
+ klasses.map do |rhs_klass, rs|
+ loader = preloader_for(reflection, rs, rhs_klass).new(rhs_klass, rs, reflection, scope)
+ loader.run self
+ loader
+ end
+ end
+ end
+
+ def grouped_records(association, records)
+ h = {}
+ records.each do |record|
+ next unless record
+ assoc = record.association(association)
+ klasses = h[assoc.reflection] ||= {}
+ (klasses[assoc.klass] ||= []) << record
+ end
+ h
+ end
+
+ class AlreadyLoaded
+ attr_reader :owners, :reflection
+
+ def initialize(klass, owners, reflection, preload_scope)
+ @owners = owners
+ @reflection = reflection
+ end
+
+ def run(preloader); end
+
+ def preloaded_records
+ owners.flat_map { |owner| owner.association(reflection.name).target }
+ end
+ end
+
+ class NullPreloader
+ def self.new(klass, owners, reflection, preload_scope); self; end
+ def self.run(preloader); end
+ end
+
+ def preloader_for(reflection, owners, rhs_klass)
+ return NullPreloader unless rhs_klass
+
+ if owners.first.association(reflection.name).loaded?
+ return AlreadyLoaded
+ end
+ reflection.check_preloadable!
+
+ case reflection.macro
+ when :has_many
+ reflection.options[:through] ? HasManyThrough : HasMany
+ when :has_one
+ reflection.options[:through] ? HasOneThrough : HasOne
+ when :belongs_to
+ BelongsTo
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/association.rb b/activerecord/lib/active_record/associations/preloader/association.rb
new file mode 100644
index 0000000000..c0639742be
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/association.rb
@@ -0,0 +1,167 @@
+module ActiveRecord
+ module Associations
+ class Preloader
+ class Association #:nodoc:
+ attr_reader :owners, :reflection, :preload_scope, :model, :klass
+ attr_reader :preloaded_records
+
+ def initialize(klass, owners, reflection, preload_scope)
+ @klass = klass
+ @owners = owners
+ @reflection = reflection
+ @preload_scope = preload_scope
+ @model = owners.first && owners.first.class
+ @scope = nil
+ @owners_by_key = nil
+ @preloaded_records = []
+ end
+
+ def run(preloader)
+ preload(preloader)
+ end
+
+ def preload(preloader)
+ raise NotImplementedError
+ end
+
+ def scope
+ @scope ||= build_scope
+ end
+
+ def records_for(ids)
+ query_scope(ids)
+ end
+
+ def query_scope(ids)
+ scope.where(association_key.in(ids))
+ end
+
+ def table
+ klass.arel_table
+ end
+
+ # The name of the key on the associated records
+ def association_key_name
+ raise NotImplementedError
+ end
+
+ # This is overridden by HABTM as the condition should be on the foreign_key column in
+ # the join table
+ def association_key
+ table[association_key_name]
+ end
+
+ # The name of the key on the model which declares the association
+ def owner_key_name
+ raise NotImplementedError
+ end
+
+ def owners_by_key
+ @owners_by_key ||= if key_conversion_required?
+ owners.group_by do |owner|
+ owner[owner_key_name].to_s
+ end
+ else
+ owners.group_by do |owner|
+ owner[owner_key_name]
+ end
+ end
+ end
+
+ def options
+ reflection.options
+ end
+
+ private
+
+ def associated_records_by_owner(preloader)
+ owners_map = owners_by_key
+ owner_keys = owners_map.keys.compact
+
+ # Each record may have multiple owners, and vice-versa
+ records_by_owner = owners.each_with_object({}) do |owner,h|
+ h[owner] = []
+ end
+
+ if owner_keys.any?
+ # Some databases impose a limit on the number of ids in a list (in Oracle it's 1000)
+ # Make several smaller queries if necessary or make one query if the adapter supports it
+ sliced = owner_keys.each_slice(klass.connection.in_clause_length || owner_keys.size)
+
+ records = load_slices sliced
+ records.each do |record, owner_key|
+ owners_map[owner_key].each do |owner|
+ records_by_owner[owner] << record
+ end
+ end
+ end
+
+ records_by_owner
+ end
+
+ def key_conversion_required?
+ association_key_type != owner_key_type
+ end
+
+ def association_key_type
+ @klass.type_for_attribute(association_key_name.to_s).type
+ end
+
+ def owner_key_type
+ @model.type_for_attribute(owner_key_name.to_s).type
+ end
+
+ def load_slices(slices)
+ @preloaded_records = slices.flat_map { |slice|
+ records_for(slice)
+ }
+
+ @preloaded_records.map { |record|
+ key = record[association_key_name]
+ key = key.to_s if key_conversion_required?
+
+ [record, key]
+ }
+ end
+
+ def reflection_scope
+ @reflection_scope ||= reflection.scope ? klass.unscoped.instance_exec(nil, &reflection.scope) : klass.unscoped
+ end
+
+ def build_scope
+ scope = klass.unscoped
+
+ values = reflection_scope.values
+ reflection_binds = reflection_scope.bind_values
+ preload_values = preload_scope.values
+ preload_binds = preload_scope.bind_values
+
+ scope.where_values = Array(values[:where]) + Array(preload_values[:where])
+ scope.references_values = Array(values[:references]) + Array(preload_values[:references])
+ scope.bind_values = (reflection_binds + preload_binds)
+
+ scope._select! preload_values[:select] || values[:select] || table[Arel.star]
+ scope.includes! preload_values[:includes] || values[:includes]
+
+ if preload_values.key? :order
+ scope.order! preload_values[:order]
+ else
+ if values.key? :order
+ scope.order! values[:order]
+ end
+ end
+
+ if preload_values[:readonly] || values[:readonly]
+ scope.readonly!
+ end
+
+ if options[:as]
+ scope.where!(klass.table_name => { reflection.type => model.base_class.sti_name })
+ end
+
+ klass.default_scoped.merge(scope)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/belongs_to.rb b/activerecord/lib/active_record/associations/preloader/belongs_to.rb
new file mode 100644
index 0000000000..5091d4717a
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/belongs_to.rb
@@ -0,0 +1,17 @@
+module ActiveRecord
+ module Associations
+ class Preloader
+ class BelongsTo < SingularAssociation #:nodoc:
+
+ def association_key_name
+ reflection.options[:primary_key] || klass && klass.primary_key
+ end
+
+ def owner_key_name
+ reflection.foreign_key
+ end
+
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/collection_association.rb b/activerecord/lib/active_record/associations/preloader/collection_association.rb
new file mode 100644
index 0000000000..5adffcd831
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/collection_association.rb
@@ -0,0 +1,24 @@
+module ActiveRecord
+ module Associations
+ class Preloader
+ class CollectionAssociation < Association #:nodoc:
+
+ private
+
+ def build_scope
+ super.order(preload_scope.values[:order] || reflection_scope.values[:order])
+ end
+
+ def preload(preloader)
+ associated_records_by_owner(preloader).each do |owner, records|
+ association = owner.association(reflection.name)
+ association.loaded!
+ association.target.concat(records)
+ records.each { |record| association.set_inverse_instance(record) }
+ end
+ end
+
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/has_many.rb b/activerecord/lib/active_record/associations/preloader/has_many.rb
new file mode 100644
index 0000000000..3ea91a8c11
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/has_many.rb
@@ -0,0 +1,17 @@
+module ActiveRecord
+ module Associations
+ class Preloader
+ class HasMany < CollectionAssociation #:nodoc:
+
+ def association_key_name
+ reflection.foreign_key
+ end
+
+ def owner_key_name
+ reflection.active_record_primary_key
+ end
+
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/has_many_through.rb b/activerecord/lib/active_record/associations/preloader/has_many_through.rb
new file mode 100644
index 0000000000..7b37b5942d
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/has_many_through.rb
@@ -0,0 +1,19 @@
+module ActiveRecord
+ module Associations
+ class Preloader
+ class HasManyThrough < CollectionAssociation #:nodoc:
+ include ThroughAssociation
+
+ def associated_records_by_owner(preloader)
+ records_by_owner = super
+
+ if reflection_scope.distinct_value
+ records_by_owner.each_value { |records| records.uniq! }
+ end
+
+ records_by_owner
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/has_one.rb b/activerecord/lib/active_record/associations/preloader/has_one.rb
new file mode 100644
index 0000000000..24728e9f01
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/has_one.rb
@@ -0,0 +1,23 @@
+module ActiveRecord
+ module Associations
+ class Preloader
+ class HasOne < SingularAssociation #:nodoc:
+
+ def association_key_name
+ reflection.foreign_key
+ end
+
+ def owner_key_name
+ reflection.active_record_primary_key
+ end
+
+ private
+
+ def build_scope
+ super.order(preload_scope.values[:order] || reflection_scope.values[:order])
+ end
+
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/has_one_through.rb b/activerecord/lib/active_record/associations/preloader/has_one_through.rb
new file mode 100644
index 0000000000..f063f85574
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/has_one_through.rb
@@ -0,0 +1,9 @@
+module ActiveRecord
+ module Associations
+ class Preloader
+ class HasOneThrough < SingularAssociation #:nodoc:
+ include ThroughAssociation
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/singular_association.rb b/activerecord/lib/active_record/associations/preloader/singular_association.rb
new file mode 100644
index 0000000000..f60647a81e
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/singular_association.rb
@@ -0,0 +1,21 @@
+module ActiveRecord
+ module Associations
+ class Preloader
+ class SingularAssociation < Association #:nodoc:
+
+ private
+
+ def preload(preloader)
+ associated_records_by_owner(preloader).each do |owner, associated_records|
+ record = associated_records.first
+
+ association = owner.association(reflection.name)
+ association.target = record
+ association.set_inverse_instance(record) if record
+ end
+ end
+
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/preloader/through_association.rb b/activerecord/lib/active_record/associations/preloader/through_association.rb
new file mode 100644
index 0000000000..1fed7f74e7
--- /dev/null
+++ b/activerecord/lib/active_record/associations/preloader/through_association.rb
@@ -0,0 +1,95 @@
+module ActiveRecord
+ module Associations
+ class Preloader
+ module ThroughAssociation #:nodoc:
+ def through_reflection
+ reflection.through_reflection
+ end
+
+ def source_reflection
+ reflection.source_reflection
+ end
+
+ def associated_records_by_owner(preloader)
+ preloader.preload(owners,
+ through_reflection.name,
+ through_scope)
+
+ through_records = owners.map do |owner|
+ association = owner.association through_reflection.name
+
+ [owner, Array(association.reader)]
+ end
+
+ reset_association owners, through_reflection.name
+
+ middle_records = through_records.flat_map { |(_,rec)| rec }
+
+ preloaders = preloader.preload(middle_records,
+ source_reflection.name,
+ reflection_scope)
+
+ @preloaded_records = preloaders.flat_map(&:preloaded_records)
+
+ middle_to_pl = preloaders.each_with_object({}) do |pl,h|
+ pl.owners.each { |middle|
+ h[middle] = pl
+ }
+ end
+
+ record_offset = {}
+ @preloaded_records.each_with_index do |record,i|
+ record_offset[record] = i
+ end
+
+ through_records.each_with_object({}) { |(lhs,center),records_by_owner|
+ pl_to_middle = center.group_by { |record| middle_to_pl[record] }
+
+ records_by_owner[lhs] = pl_to_middle.flat_map do |pl, middles|
+ rhs_records = middles.flat_map { |r|
+ association = r.association source_reflection.name
+
+ association.reader
+ }.compact
+
+ rhs_records.sort_by { |rhs| record_offset[rhs] }
+ end
+ }
+ end
+
+ private
+
+ def reset_association(owners, association_name)
+ should_reset = (through_scope != through_reflection.klass.unscoped) ||
+ (reflection.options[:source_type] && through_reflection.collection?)
+
+ # Don't cache the association - we would only be caching a subset
+ if should_reset
+ owners.each { |owner|
+ owner.association(association_name).reset
+ }
+ end
+ end
+
+ def through_scope
+ scope = through_reflection.klass.unscoped
+
+ if options[:source_type]
+ scope.where! reflection.foreign_type => options[:source_type]
+ else
+ unless reflection_scope.where_values.empty?
+ scope.includes_values = Array(reflection_scope.values[:includes] || options[:source])
+ scope.where_values = reflection_scope.values[:where]
+ end
+
+ scope.references! reflection_scope.values[:references]
+ scope = scope.order reflection_scope.values[:order] if scope.eager_loading?
+ end
+
+ scope
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/singular_association.rb b/activerecord/lib/active_record/associations/singular_association.rb
new file mode 100644
index 0000000000..f2e3a4e40f
--- /dev/null
+++ b/activerecord/lib/active_record/associations/singular_association.rb
@@ -0,0 +1,80 @@
+module ActiveRecord
+ module Associations
+ class SingularAssociation < Association #:nodoc:
+ # Implements the reader method, e.g. foo.bar for Foo.has_one :bar
+ def reader(force_reload = false)
+ if force_reload
+ klass.uncached { reload }
+ elsif !loaded? || stale_target?
+ reload
+ end
+
+ target
+ end
+
+ # Implements the writer method, e.g. foo.bar= for Foo.belongs_to :bar
+ def writer(record)
+ replace(record)
+ end
+
+ def create(attributes = {}, &block)
+ _create_record(attributes, &block)
+ end
+
+ def create!(attributes = {}, &block)
+ _create_record(attributes, true, &block)
+ end
+
+ def build(attributes = {})
+ record = build_record(attributes)
+ yield(record) if block_given?
+ set_new_record(record)
+ record
+ end
+
+ private
+
+ def create_scope
+ scope.scope_for_create.stringify_keys.except(klass.primary_key)
+ end
+
+ def get_records
+ return scope.limit(1).to_a if reflection.scope_chain.any?(&:any?)
+
+ conn = klass.connection
+ sc = reflection.association_scope_cache(conn, owner) do
+ StatementCache.create(conn) { |params|
+ as = AssociationScope.create { params.bind }
+ target_scope.merge(as.scope(self, conn)).limit(1)
+ }
+ end
+
+ binds = AssociationScope.get_bind_values(owner, reflection.chain)
+ sc.execute binds, klass, klass.connection
+ end
+
+ def find_target
+ if record = get_records.first
+ set_inverse_instance record
+ end
+ end
+
+ def replace(record)
+ raise NotImplementedError, "Subclasses must implement a replace(record) method"
+ end
+
+ def set_new_record(record)
+ replace(record)
+ end
+
+ def _create_record(attributes, raise_error = false)
+ record = build_record(attributes)
+ yield(record) if block_given?
+ saved = record.save
+ set_new_record(record)
+ raise RecordInvalid.new(record) if !saved && raise_error
+ record
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/associations/through_association.rb b/activerecord/lib/active_record/associations/through_association.rb
new file mode 100644
index 0000000000..611d471e62
--- /dev/null
+++ b/activerecord/lib/active_record/associations/through_association.rb
@@ -0,0 +1,92 @@
+module ActiveRecord
+ # = Active Record Through Association
+ module Associations
+ module ThroughAssociation #:nodoc:
+
+ delegate :source_reflection, :through_reflection, :to => :reflection
+
+ protected
+
+ # We merge in these scopes for two reasons:
+ #
+ # 1. To get the default_scope conditions for any of the other reflections in the chain
+ # 2. To get the type conditions for any STI models in the chain
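+ #
+ # For example (a sketch; model names illustrative): with
+ # has_many :comments, through: :posts, where Post has a default_scope of
+ # where(published: true), the merged target scope for the comments
+ # association also carries that published condition.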
+ def target_scope
+ scope = super
+ reflection.chain.drop(1).each do |reflection|
+ relation = reflection.klass.all
+ relation.merge!(reflection.scope) if reflection.scope
+
+ scope.merge!(
+ relation.except(:select, :create_with, :includes, :preload, :joins, :eager_load)
+ )
+ end
+ scope
+ end
+
+ private
+
+ # Construct attributes for :through pointing to owner and associate. This is used by the
+ # methods which create and delete records on the association.
+ #
+ # We only support indirectly modifying through associations which have a belongs_to source.
+ # This is the "has_many :tags, through: :taggings" situation, where the join model
+ # typically has a belongs_to on both sides. In other words, associations which could also
+ # be represented as has_and_belongs_to_many associations.
+ #
+ # We do not support creating/deleting records on the association where the source has
+ # some other type, because this opens up a whole can of worms, and in basically any
+ # situation it is more natural for the user to just create or modify their join records
+ # directly as required.
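+ #
+ # For example (a sketch): with has_many :tags, through: :taggings, where
+ # Tagging belongs_to :tag, the join attributes for a single Tag record are
+ # simply { tag: <that Tag> }, which is then used to build or destroy the
+ # corresponding Tagging.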
+ def construct_join_attributes(*records)
+ ensure_mutable
+
+ if source_reflection.association_primary_key(reflection.klass) == reflection.klass.primary_key
+ join_attributes = { source_reflection.name => records }
+ else
+ join_attributes = {
+ source_reflection.foreign_key =>
+ records.map { |record|
+ record.send(source_reflection.association_primary_key(reflection.klass))
+ }
+ }
+ end
+
+ if options[:source_type]
+ join_attributes[source_reflection.foreign_type] =
+ records.map { |record| record.class.base_class.name }
+ end
+
+ if records.count == 1
+ Hash[join_attributes.map { |k, v| [k, v.first] }]
+ else
+ join_attributes
+ end
+ end
+
+ # Note: this does not capture all cases, for example it would be crazy to try to
+ # properly support stale-checking for nested associations.
+ def stale_state
+ if through_reflection.belongs_to?
+ owner[through_reflection.foreign_key] && owner[through_reflection.foreign_key].to_s
+ end
+ end
+
+ def foreign_key_present?
+ through_reflection.belongs_to? && !owner[through_reflection.foreign_key].nil?
+ end
+
+ def ensure_mutable
+ unless source_reflection.belongs_to?
+ raise HasManyThroughCantAssociateThroughHasOneOrManyReflection.new(owner, reflection)
+ end
+ end
+
+ def ensure_not_nested
+ if reflection.nested?
+ raise HasManyThroughNestedAssociationsAreReadonly.new(owner, reflection)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute.rb b/activerecord/lib/active_record/attribute.rb
new file mode 100644
index 0000000000..6d38224830
--- /dev/null
+++ b/activerecord/lib/active_record/attribute.rb
@@ -0,0 +1,120 @@
+module ActiveRecord
+ class Attribute # :nodoc:
+ class << self
+ def from_database(name, value, type)
+ FromDatabase.new(name, value, type)
+ end
+
+ def from_user(name, value, type)
+ FromUser.new(name, value, type)
+ end
+
+ def null(name)
+ Null.new(name)
+ end
+
+ def uninitialized(name, type)
+ Uninitialized.new(name, type)
+ end
+ end
+
+ attr_reader :name, :value_before_type_cast, :type
+
+ # This method should not be called directly.
+ # Use #from_database or #from_user instead.
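+ #
+ # A usage sketch (Type::Integer here stands in for whatever cast type the
+ # caller supplies):
+ #
+ # attribute = Attribute.from_database(:age, "30", Type::Integer.new)
+ # attribute.value # => 30
+ # attribute.value_before_type_cast # => "30"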
+ def initialize(name, value_before_type_cast, type)
+ @name = name
+ @value_before_type_cast = value_before_type_cast
+ @type = type
+ end
+
+ def value
+ # `defined?` is cheaper than `||=` when we get back falsy values
+ @value = type_cast(value_before_type_cast) unless defined?(@value)
+ @value
+ end
+
+ def value_for_database
+ type.type_cast_for_database(value)
+ end
+
+ def changed_from?(old_value)
+ type.changed?(old_value, value, value_before_type_cast)
+ end
+
+ def changed_in_place_from?(old_value)
+ type.changed_in_place?(old_value, value)
+ end
+
+ def with_value_from_user(value)
+ self.class.from_user(name, value, type)
+ end
+
+ def with_value_from_database(value)
+ self.class.from_database(name, value, type)
+ end
+
+ def type_cast
+ raise NotImplementedError
+ end
+
+ def initialized?
+ true
+ end
+
+ protected
+
+ def initialize_dup(other)
+ if defined?(@value) && @value.duplicable?
+ @value = @value.dup
+ end
+ end
+
+ class FromDatabase < Attribute # :nodoc:
+ def type_cast(value)
+ type.type_cast_from_database(value)
+ end
+ end
+
+ class FromUser < Attribute # :nodoc:
+ def type_cast(value)
+ type.type_cast_from_user(value)
+ end
+ end
+
+ class Null < Attribute # :nodoc:
+ def initialize(name)
+ super(name, nil, Type::Value.new)
+ end
+
+ def value
+ nil
+ end
+
+ def with_value_from_database(value)
+ raise ActiveModel::MissingAttributeError, "can't write unknown attribute `#{name}`"
+ end
+ alias_method :with_value_from_user, :with_value_from_database
+ end
+
+ class Uninitialized < Attribute # :nodoc:
+ def initialize(name, type)
+ super(name, nil, type)
+ end
+
+ def value
+ if block_given?
+ yield name
+ end
+ end
+
+ def value_for_database
+ end
+
+ def initialized?
+ false
+ end
+ end
+ private_constant :FromDatabase, :FromUser, :Null, :Uninitialized
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_assignment.rb b/activerecord/lib/active_record/attribute_assignment.rb
new file mode 100644
index 0000000000..2887db3bf7
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_assignment.rb
@@ -0,0 +1,212 @@
+require 'active_model/forbidden_attributes_protection'
+
+module ActiveRecord
+ module AttributeAssignment
+ extend ActiveSupport::Concern
+ include ActiveModel::ForbiddenAttributesProtection
+
+ # Allows you to set all the attributes by passing in a hash of attributes with
+ # keys matching the attribute names (which again match the column names).
+ #
+ # If the passed hash responds to the <tt>permitted?</tt> method and the return value
+ # of this method is +false+, an <tt>ActiveModel::ForbiddenAttributesError</tt>
+ # exception is raised.
+ #
+ # cat = Cat.new(name: "Gorby", status: "yawning")
+ # cat.attributes # => { "name" => "Gorby", "status" => "yawning", "created_at" => nil, "updated_at" => nil}
+ # cat.assign_attributes(status: "sleeping")
+ # cat.attributes # => { "name" => "Gorby", "status" => "sleeping", "created_at" => nil, "updated_at" => nil }
+ #
+ # New attributes will be persisted in the database when the object is saved.
+ #
+ # Aliased to <tt>attributes=</tt>.
+ def assign_attributes(new_attributes)
+ if !new_attributes.respond_to?(:stringify_keys)
+ raise ArgumentError, "When assigning attributes, you must pass a hash as an argument."
+ end
+ return if new_attributes.blank?
+
+ attributes = new_attributes.stringify_keys
+ multi_parameter_attributes = []
+ nested_parameter_attributes = []
+
+ attributes = sanitize_for_mass_assignment(attributes)
+
+ attributes.each do |k, v|
+ if k.include?("(")
+ multi_parameter_attributes << [ k, v ]
+ elsif v.is_a?(Hash)
+ nested_parameter_attributes << [ k, v ]
+ else
+ _assign_attribute(k, v)
+ end
+ end
+
+ assign_nested_parameter_attributes(nested_parameter_attributes) unless nested_parameter_attributes.empty?
+ assign_multiparameter_attributes(multi_parameter_attributes) unless multi_parameter_attributes.empty?
+ end
+
+ alias attributes= assign_attributes
+
+ private
+
+ def _assign_attribute(k, v)
+ public_send("#{k}=", v)
+ rescue NoMethodError
+ if respond_to?("#{k}=")
+ raise
+ else
+ raise UnknownAttributeError.new(self, k)
+ end
+ end
+
+ # Assign any deferred nested attributes after the base attributes have been set.
+ def assign_nested_parameter_attributes(pairs)
+ pairs.each { |k, v| _assign_attribute(k, v) }
+ end
+
+ # Instantiates objects for all attribute classes that need more than one constructor parameter. This is done
+ # by calling new on the column type or aggregation type (through composed_of) object with these parameters.
+ # So having the pairs written_on(1) = "2004", written_on(2) = "6", written_on(3) = "24" will instantiate
+ # written_on (a date type) with Date.new("2004", "6", "24"). You can also specify a typecast character in the
+ # parentheses to have the parameters typecasted before they're used in the constructor. Use i for Fixnum and
+ # f for Float. If all the values for a given attribute are empty, the attribute will be set to +nil+.
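+ #
+ # A sketch of the incoming pairs (attribute name and values illustrative):
+ #
+ # [["written_on(1i)", "2004"], ["written_on(2i)", "6"], ["written_on(3i)", "24"]]
+ #
+ # which, after typecasting, is assembled into written_on = Date.new(2004, 6, 24).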
+ def assign_multiparameter_attributes(pairs)
+ execute_callstack_for_multiparameter_attributes(
+ extract_callstack_for_multiparameter_attributes(pairs)
+ )
+ end
+
+ def execute_callstack_for_multiparameter_attributes(callstack)
+ errors = []
+ callstack.each do |name, values_with_empty_parameters|
+ begin
+ send("#{name}=", MultiparameterAttribute.new(self, name, values_with_empty_parameters).read_value)
+ rescue => ex
+ errors << AttributeAssignmentError.new("error on assignment #{values_with_empty_parameters.values.inspect} to #{name} (#{ex.message})", ex, name)
+ end
+ end
+ unless errors.empty?
+ error_descriptions = errors.map { |ex| ex.message }.join(",")
+ raise MultiparameterAssignmentErrors.new(errors), "#{errors.size} error(s) on assignment of multiparameter attributes [#{error_descriptions}]"
+ end
+ end
+
+ def extract_callstack_for_multiparameter_attributes(pairs)
+ attributes = {}
+
+ pairs.each do |(multiparameter_name, value)|
+ attribute_name = multiparameter_name.split("(").first
+ attributes[attribute_name] ||= {}
+
+ parameter_value = value.empty? ? nil : type_cast_attribute_value(multiparameter_name, value)
+ attributes[attribute_name][find_parameter_position(multiparameter_name)] ||= parameter_value
+ end
+
+ attributes
+ end
+
+ def type_cast_attribute_value(multiparameter_name, value)
+ multiparameter_name =~ /\([0-9]*([if])\)/ ? value.send("to_" + $1) : value
+ end
+
+ def find_parameter_position(multiparameter_name)
+ multiparameter_name.scan(/\(([0-9]*).*\)/).first.first.to_i
+ end
+
+ class MultiparameterAttribute #:nodoc:
+ attr_reader :object, :name, :values, :cast_type
+
+ def initialize(object, name, values)
+ @object = object
+ @name = name
+ @values = values
+ end
+
+ def read_value
+ return if values.values.compact.empty?
+
+ @cast_type = object.type_for_attribute(name)
+ klass = cast_type.klass
+
+ if klass == Time
+ read_time
+ elsif klass == Date
+ read_date
+ else
+ read_other
+ end
+ end
+
+ private
+
+ def instantiate_time_object(set_values)
+ if object.class.send(:create_time_zone_conversion_attribute?, name, cast_type)
+ Time.zone.local(*set_values)
+ else
+ Time.send(object.class.default_timezone, *set_values)
+ end
+ end
+
+ def read_time
+ # If the column is a :time (and not :date or :datetime) there is no need to
+ # validate whether the year/month/day fields are present
+ if cast_type.type == :time
+ # if the column is a time set the values to their defaults as January 1, 1970, but only if they're nil
+ { 1 => 1970, 2 => 1, 3 => 1 }.each do |key,value|
+ values[key] ||= value
+ end
+ else
+ # else column is a timestamp, so if Date bits were not provided, error
+ validate_required_parameters!([1,2,3])
+
+ # If Date bits were provided but blank, then return nil
+ return if blank_date_parameter?
+ end
+
+ max_position = extract_max_param(6)
+ set_values = values.values_at(*(1..max_position))
+ # If Time bits are not there, then default to 0
+ (3..5).each { |i| set_values[i] = set_values[i].presence || 0 }
+ instantiate_time_object(set_values)
+ end
+
+ def read_date
+ return if blank_date_parameter?
+ set_values = values.values_at(1,2,3)
+ begin
+ Date.new(*set_values)
+ rescue ArgumentError # if Date.new raises an exception on an invalid date
+ instantiate_time_object(set_values).to_date # we instantiate Time object and convert it back to a date thus using Time's logic in handling invalid dates
+ end
+ end
+
+ def read_other
+ max_position = extract_max_param
+ positions = (1..max_position)
+ validate_required_parameters!(positions)
+
+ values.slice(*positions)
+ end
+
+ # Checks whether some blank date parameter exists. Note that this is different
+ # from the validate_required_parameters! method, since it just checks for blank
+ # positions instead of missing ones, and does not raise in case one blank position
+ # exists. The caller is responsible for handling the case of this returning true.
+ def blank_date_parameter?
+ (1..3).any? { |position| values[position].blank? }
+ end
+
+ # If some position is not provided, it raises an ArgumentError for the missing parameter.
+ def validate_required_parameters!(positions)
+ if missing_parameter = positions.detect { |position| !values.key?(position) }
+ raise ArgumentError.new("Missing Parameter - #{name}(#{missing_parameter})")
+ end
+ end
+
+ def extract_max_param(upper_cap = 100)
+ [values.keys.max, upper_cap].min
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_decorators.rb b/activerecord/lib/active_record/attribute_decorators.rb
new file mode 100644
index 0000000000..5b96623b6e
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_decorators.rb
@@ -0,0 +1,66 @@
+module ActiveRecord
+ module AttributeDecorators # :nodoc:
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :attribute_type_decorations, instance_accessor: false # :internal:
+ self.attribute_type_decorations = TypeDecorator.new
+ end
+
+ module ClassMethods # :nodoc:
+ def decorate_attribute_type(column_name, decorator_name, &block)
+ matcher = ->(name, _) { name == column_name.to_s }
+ key = "_#{column_name}_#{decorator_name}"
+ decorate_matching_attribute_types(matcher, key, &block)
+ end
+
+ def decorate_matching_attribute_types(matcher, decorator_name, &block)
+ clear_caches_calculated_from_columns
+ decorator_name = decorator_name.to_s
+
+ # Create new hashes so we don't modify parent classes
+ self.attribute_type_decorations = attribute_type_decorations.merge(decorator_name => [matcher, block])
+ end
+
+ private
+
+ def add_user_provided_columns(*)
+ super.map do |column|
+ decorated_type = attribute_type_decorations.apply(column.name, column.cast_type)
+ column.with_type(decorated_type)
+ end
+ end
+ end
+
+ class TypeDecorator # :nodoc:
+ delegate :clear, to: :@decorations
+
+ def initialize(decorations = {})
+ @decorations = decorations
+ end
+
+ def merge(*args)
+ TypeDecorator.new(@decorations.merge(*args))
+ end
+
+ def apply(name, type)
+ decorations = decorators_for(name, type)
+ decorations.inject(type) do |new_type, block|
+ block.call(new_type)
+ end
+ end
+
+ private
+
+ def decorators_for(name, type)
+ matching(name, type).map(&:last)
+ end
+
+ def matching(name, type)
+ @decorations.values.select do |(matcher, _)|
+ matcher.call(name, type)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods.rb b/activerecord/lib/active_record/attribute_methods.rb
new file mode 100644
index 0000000000..a2bb78dfcc
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods.rb
@@ -0,0 +1,433 @@
+require 'active_support/core_ext/enumerable'
+require 'mutex_m'
+require 'thread_safe'
+
+module ActiveRecord
+ # = Active Record Attribute Methods
+ module AttributeMethods
+ extend ActiveSupport::Concern
+ include ActiveModel::AttributeMethods
+
+ included do
+ initialize_generated_modules
+ include Read
+ include Write
+ include BeforeTypeCast
+ include Query
+ include PrimaryKey
+ include TimeZoneConversion
+ include Dirty
+ include Serialization
+
+ delegate :column_for_attribute, to: :class
+ end
+
+ AttrNames = Module.new {
+ def self.set_name_cache(name, value)
+ const_name = "ATTR_#{name}"
+ unless const_defined? const_name
+ const_set const_name, value.dup.freeze
+ end
+ end
+ }
+
+ BLACKLISTED_CLASS_METHODS = %w(private public protected)
+
+ class AttributeMethodCache
+ def initialize
+ @module = Module.new
+ @method_cache = ThreadSafe::Cache.new
+ end
+
+ def [](name)
+ @method_cache.compute_if_absent(name) do
+ safe_name = name.unpack('h*').first
+ temp_method = "__temp__#{safe_name}"
+ ActiveRecord::AttributeMethods::AttrNames.set_name_cache safe_name, name
+ @module.module_eval method_body(temp_method, safe_name), __FILE__, __LINE__
+ @module.instance_method temp_method
+ end
+ end
+
+ private
+
+ # Override this method in subclasses to define the method body.
+ def method_body(method_name, const_name)
+ raise NotImplementedError, "Subclasses must implement a method_body(method_name, const_name) method."
+ end
+ end
+
+ module ClassMethods
+ def inherited(child_class) #:nodoc:
+ child_class.initialize_generated_modules
+ super
+ end
+
+ def initialize_generated_modules # :nodoc:
+ @generated_attribute_methods = Module.new { extend Mutex_m }
+ @attribute_methods_generated = false
+ include @generated_attribute_methods
+ end
+
+ # Generates all the attribute-related methods for columns in the database:
+ # accessors, mutators and query methods.
+ def define_attribute_methods # :nodoc:
+ return false if @attribute_methods_generated
+ # Use a mutex; we don't want two threads simultaneously trying to define
+ # attribute methods.
+ generated_attribute_methods.synchronize do
+ return false if @attribute_methods_generated
+ superclass.define_attribute_methods unless self == base_class
+ super(column_names)
+ @attribute_methods_generated = true
+ end
+ true
+ end
+
+ def undefine_attribute_methods # :nodoc:
+ generated_attribute_methods.synchronize do
+ super if @attribute_methods_generated
+ @attribute_methods_generated = false
+ end
+ end
+
+ # Raises an <tt>ActiveRecord::DangerousAttributeError</tt> exception when an
+ # \Active \Record method is defined in the model; otherwise returns +false+.
+ #
+ # class Person < ActiveRecord::Base
+ # def save
+ # 'already defined by Active Record'
+ # end
+ # end
+ #
+ # Person.instance_method_already_implemented?(:save)
+ # # => ActiveRecord::DangerousAttributeError: save is defined by ActiveRecord
+ #
+ # Person.instance_method_already_implemented?(:name)
+ # # => false
+ def instance_method_already_implemented?(method_name)
+ if dangerous_attribute_method?(method_name)
+ raise DangerousAttributeError, "#{method_name} is defined by Active Record"
+ end
+
+ if superclass == Base
+ super
+ else
+ # If B < A and A defines its own attribute method, then we don't want to overwrite that.
+ defined = method_defined_within?(method_name, superclass, superclass.generated_attribute_methods)
+ base_defined = Base.method_defined?(method_name) || Base.private_method_defined?(method_name)
+ defined && !base_defined || super
+ end
+ end
+
+ # A method name is 'dangerous' if it is already (re)defined by Active Record, but
+ # not by any ancestors. (So 'puts' is not dangerous but 'save' is.)
+ def dangerous_attribute_method?(name) # :nodoc:
+ method_defined_within?(name, Base)
+ end
+
+ def method_defined_within?(name, klass, superklass = klass.superclass) # :nodoc:
+ if klass.method_defined?(name) || klass.private_method_defined?(name)
+ if superklass.method_defined?(name) || superklass.private_method_defined?(name)
+ klass.instance_method(name).owner != superklass.instance_method(name).owner
+ else
+ true
+ end
+ else
+ false
+ end
+ end
+
+ # A class method is 'dangerous' if it is already (re)defined by Active Record, but
+ # not by any ancestors. (So 'puts' is not dangerous but 'new' is.)
+ def dangerous_class_method?(method_name)
+ BLACKLISTED_CLASS_METHODS.include?(method_name.to_s) || class_method_defined_within?(method_name, Base)
+ end
+
+ def class_method_defined_within?(name, klass, superklass = klass.superclass) # :nodoc:
+ if klass.respond_to?(name, true)
+ if superklass.respond_to?(name, true)
+ klass.method(name).owner != superklass.method(name).owner
+ else
+ true
+ end
+ else
+ false
+ end
+ end
+
+ # Returns +true+ if +attribute+ is an attribute method and the table exists,
+ # +false+ otherwise.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # Person.attribute_method?('name') # => true
+ # Person.attribute_method?(:age=) # => true
+ # Person.attribute_method?(:nothing) # => false
+ def attribute_method?(attribute)
+ super || (table_exists? && column_names.include?(attribute.to_s.sub(/=$/, '')))
+ end
+
+ # Returns an array of column names as strings if it's not an abstract class and
+ # the table exists. Otherwise it returns an empty array.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # Person.attribute_names
+ # # => ["id", "created_at", "updated_at", "name", "age"]
+ def attribute_names
+ @attribute_names ||= if !abstract_class? && table_exists?
+ column_names
+ else
+ []
+ end
+ end
+
+ # Returns the column object for the named attribute.
+ # Returns nil if the named attribute does not exist.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # person = Person.new
+ # person.column_for_attribute(:name) # the result depends on the ConnectionAdapter
+ # # => #<ActiveRecord::ConnectionAdapters::Column:0x007ff4ab083980 @name="name", @sql_type="varchar(255)", @null=true, ...>
+ #
+ # person.column_for_attribute(:nothing)
+ # # => nil
+ def column_for_attribute(name)
+ column = columns_hash[name.to_s]
+ if column.nil?
+ ActiveSupport::Deprecation.warn(<<-MESSAGE.strip_heredoc)
+ `column_for_attribute` will return a null object for non-existent columns
+ in Rails 5.0. Use `has_attribute?` if you need to check for an
+ attribute's existence.
+ MESSAGE
+ end
+ column
+ end
+ end
+
+ # A Person object with a name attribute can ask <tt>person.respond_to?(:name)</tt>,
+ # <tt>person.respond_to?(:name=)</tt>, and <tt>person.respond_to?(:name?)</tt>
+ # which will all return +true+. It also defines the attribute methods if they have
+ # not been generated.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # person = Person.new
+ # person.respond_to?(:name) # => true
+ # person.respond_to?(:name=) # => true
+ # person.respond_to?(:name?) # => true
+ # person.respond_to?('age') # => true
+ # person.respond_to?('age=') # => true
+ # person.respond_to?('age?') # => true
+ # person.respond_to?(:nothing) # => false
+ def respond_to?(name, include_private = false)
+ return false unless super
+ name = name.to_s
+
+ # If the result is true then check for the select case.
+ # For queries selecting a subset of columns, return false for unselected columns.
+ # We check defined?(@attributes) not to issue warnings if called on objects that
+ # have been allocated but not yet initialized.
+ if defined?(@attributes) && self.class.column_names.include?(name)
+ return has_attribute?(name)
+ end
+
+ return true
+ end
+
+ # Returns +true+ if the given attribute is in the attributes hash, otherwise +false+.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # person = Person.new
+ # person.has_attribute?(:name) # => true
+ # person.has_attribute?('age') # => true
+ # person.has_attribute?(:nothing) # => false
+ def has_attribute?(attr_name)
+ @attributes.key?(attr_name.to_s)
+ end
+
+ # Returns an array of names for the attributes available on this object.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # person = Person.new
+ # person.attribute_names
+ # # => ["id", "created_at", "updated_at", "name", "age"]
+ def attribute_names
+ @attributes.keys
+ end
+
+ # Returns a hash of all the attributes with their names as keys and the values of the attributes as values.
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # person = Person.create(name: 'Francesco', age: 22)
+ # person.attributes
+ # # => {"id"=>3, "created_at"=>Sun, 21 Oct 2012 04:53:04, "updated_at"=>Sun, 21 Oct 2012 04:53:04, "name"=>"Francesco", "age"=>22}
+ def attributes
+ @attributes.to_hash
+ end
+
+ # Returns an <tt>#inspect</tt>-like string for the value of the
+ # attribute +attr_name+. String attributes are truncated up to 50
+ # characters, Date and Time attributes are returned in the
+ # <tt>:db</tt> format, Array attributes are truncated up to 10 values.
+ # Other attributes return the value of <tt>#inspect</tt> without
+ # modification.
+ #
+ # person = Person.create!(name: 'David Heinemeier Hansson ' * 3)
+ #
+ # person.attribute_for_inspect(:name)
+ # # => "\"David Heinemeier Hansson David Heinemeier Hansson ...\""
+ #
+ # person.attribute_for_inspect(:created_at)
+ # # => "\"2012-10-22 00:15:07\""
+ #
+ # person.attribute_for_inspect(:tag_ids)
+ # # => "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, ...]"
+ def attribute_for_inspect(attr_name)
+ value = read_attribute(attr_name)
+
+ if value.is_a?(String) && value.length > 50
+ "#{value[0, 50]}...".inspect
+ elsif value.is_a?(Date) || value.is_a?(Time)
+ %("#{value.to_s(:db)}")
+ elsif value.is_a?(Array) && value.size > 10
+ inspected = value.first(10).inspect
+ %(#{inspected[0...-1]}, ...])
+ else
+ value.inspect
+ end
+ end
+
+ # Returns +true+ if the specified +attribute+ has been set by the user or by a
+ # database load and is neither +nil+ nor <tt>empty?</tt> (the latter only applies
+ # to objects that respond to <tt>empty?</tt>, most notably Strings). Otherwise, +false+.
+ # Note that it always returns +true+ with boolean attributes.
+ #
+ # class Task < ActiveRecord::Base
+ # end
+ #
+ # task = Task.new(title: '', is_done: false)
+ # task.attribute_present?(:title) # => false
+ # task.attribute_present?(:is_done) # => true
+ # task.title = 'Buy milk'
+ # task.is_done = true
+ # task.attribute_present?(:title) # => true
+ # task.attribute_present?(:is_done) # => true
+ def attribute_present?(attribute)
+ value = read_attribute(attribute)
+ !value.nil? && !(value.respond_to?(:empty?) && value.empty?)
+ end
+
+ # Returns the value of the attribute identified by <tt>attr_name</tt> after it has been typecast (for example,
+ # "2004-12-12" in a date column is cast to a date object, like Date.new(2004, 12, 12)). It raises
+ # <tt>ActiveModel::MissingAttributeError</tt> if the identified attribute is missing.
+ #
+ # Note: +:id+ is always present.
+ #
+ # Alias for the <tt>read_attribute</tt> method.
+ #
+ # class Person < ActiveRecord::Base
+ # belongs_to :organization
+ # end
+ #
+ # person = Person.new(name: 'Francesco', age: '22')
+ # person[:name] # => "Francesco"
+ # person[:age] # => 22
+ #
+ # person = Person.select('id').first
+ # person[:name] # => ActiveModel::MissingAttributeError: missing attribute: name
+ # person[:organization_id] # => ActiveModel::MissingAttributeError: missing attribute: organization_id
+ def [](attr_name)
+ read_attribute(attr_name) { |n| missing_attribute(n, caller) }
+ end
+
+ # Updates the attribute identified by <tt>attr_name</tt> with the specified +value+.
+ # (Alias for the protected <tt>write_attribute</tt> method).
+ #
+ # class Person < ActiveRecord::Base
+ # end
+ #
+ # person = Person.new
+ # person[:age] = '22'
+ # person[:age] # => 22
+ # person[:age].class # => Fixnum
+ def []=(attr_name, value)
+ write_attribute(attr_name, value)
+ end
+
+ protected
+
+ def clone_attribute_value(reader_method, attribute_name) # :nodoc:
+ value = send(reader_method, attribute_name)
+ value.duplicable? ? value.clone : value
+ rescue TypeError, NoMethodError
+ value
+ end
+
+ def arel_attributes_with_values_for_create(attribute_names) # :nodoc:
+ arel_attributes_with_values(attributes_for_create(attribute_names))
+ end
+
+ def arel_attributes_with_values_for_update(attribute_names) # :nodoc:
+ arel_attributes_with_values(attributes_for_update(attribute_names))
+ end
+
+ def attribute_method?(attr_name) # :nodoc:
+ # We check defined? because Syck calls respond_to? before actually calling initialize.
+ defined?(@attributes) && @attributes.key?(attr_name)
+ end
+
+ private
+
+ # Returns a Hash of the Arel::Attributes and attribute values that have been
+ # typecasted for use in an Arel insert/update method.
+ def arel_attributes_with_values(attribute_names)
+ attrs = {}
+ arel_table = self.class.arel_table
+
+ attribute_names.each do |name|
+ attrs[arel_table[name]] = typecasted_attribute_value(name)
+ end
+ attrs
+ end
+
+ # Filters readonly attributes from the attribute names.
+ def attributes_for_update(attribute_names)
+ attribute_names.reject do |name|
+ readonly_attribute?(name)
+ end
+ end
+
+ # Filters out the primary keys from the attribute names when the primary
+ # key is to be generated (e.g. the id attribute has no value).
+ def attributes_for_create(attribute_names)
+ attribute_names.reject do |name|
+ pk_attribute?(name) && id.nil?
+ end
+ end
+
+ def readonly_attribute?(name)
+ self.class.readonly_attributes.include?(name)
+ end
+
+ def pk_attribute?(name)
+ name == self.class.primary_key
+ end
+
+ def typecasted_attribute_value(name)
+ read_attribute(name)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/before_type_cast.rb b/activerecord/lib/active_record/attribute_methods/before_type_cast.rb
new file mode 100644
index 0000000000..fd61febd57
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/before_type_cast.rb
@@ -0,0 +1,71 @@
+module ActiveRecord
+ module AttributeMethods
+ # = Active Record Attribute Methods Before Type Cast
+ #
+ # <tt>ActiveRecord::AttributeMethods::BeforeTypeCast</tt> provides a way to
+ # read the value of the attributes before typecasting and deserialization.
+ #
+ # class Task < ActiveRecord::Base
+ # end
+ #
+ # task = Task.new(id: '1', completed_on: '2012-10-21')
+ # task.id # => 1
+ # task.completed_on # => Sun, 21 Oct 2012
+ #
+ # task.attributes_before_type_cast
+ # # => {"id"=>"1", "completed_on"=>"2012-10-21", ... }
+ # task.read_attribute_before_type_cast('id') # => "1"
+ # task.read_attribute_before_type_cast('completed_on') # => "2012-10-21"
+ #
+ # In addition to #read_attribute_before_type_cast and #attributes_before_type_cast,
+ # it declares a method for all attributes with the <tt>*_before_type_cast</tt>
+ # suffix.
+ #
+ # task.id_before_type_cast # => "1"
+ # task.completed_on_before_type_cast # => "2012-10-21"
+ module BeforeTypeCast
+ extend ActiveSupport::Concern
+
+ included do
+ attribute_method_suffix "_before_type_cast"
+ end
+
+ # Returns the value of the attribute identified by +attr_name+ before
+ # typecasting and deserialization.
+ #
+ # class Task < ActiveRecord::Base
+ # end
+ #
+ # task = Task.new(id: '1', completed_on: '2012-10-21')
+ # task.read_attribute('id') # => 1
+ # task.read_attribute_before_type_cast('id') # => '1'
+ # task.read_attribute('completed_on') # => Sun, 21 Oct 2012
+ # task.read_attribute_before_type_cast('completed_on') # => "2012-10-21"
+ # task.read_attribute_before_type_cast(:completed_on) # => "2012-10-21"
+ def read_attribute_before_type_cast(attr_name)
+ @attributes[attr_name.to_s].value_before_type_cast
+ end
+
+ # Returns a hash of attributes before typecasting and deserialization.
+ #
+ # class Task < ActiveRecord::Base
+ # end
+ #
+ # task = Task.new(title: nil, is_done: true, completed_on: '2012-10-21')
+ # task.attributes
+ # # => {"id"=>nil, "title"=>nil, "is_done"=>true, "completed_on"=>Sun, 21 Oct 2012, "created_at"=>nil, "updated_at"=>nil}
+ # task.attributes_before_type_cast
+ # # => {"id"=>nil, "title"=>nil, "is_done"=>true, "completed_on"=>"2012-10-21", "created_at"=>nil, "updated_at"=>nil}
+ def attributes_before_type_cast
+ @attributes.values_before_type_cast
+ end
+
+ private
+
+ # Handle *_before_type_cast for method_missing.
+ def attribute_before_type_cast(attribute_name)
+ read_attribute_before_type_cast(attribute_name)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/dirty.rb b/activerecord/lib/active_record/attribute_methods/dirty.rb
new file mode 100644
index 0000000000..b58295a106
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/dirty.rb
@@ -0,0 +1,167 @@
+require 'active_support/core_ext/module/attribute_accessors'
+
+module ActiveRecord
+ module AttributeMethods
+ module Dirty # :nodoc:
+ extend ActiveSupport::Concern
+
+ include ActiveModel::Dirty
+
+ included do
+ if self < ::ActiveRecord::Timestamp
+ raise "You cannot include Dirty after Timestamp"
+ end
+
+ class_attribute :partial_writes, instance_writer: false
+ self.partial_writes = true
+ end
+
+ # Attempts to +save+ the record and clears changed attributes if successful.
+ def save(*)
+ if status = super
+ changes_applied
+ end
+ status
+ end
+
+ # Attempts to <tt>save!</tt> the record and clears changed attributes if successful.
+ def save!(*)
+ super.tap do
+ changes_applied
+ end
+ end
+
+ # Reloads the record and clears changed attributes.
+ def reload(*)
+ super.tap do
+ clear_changes_information
+ end
+ end
+
+ def initialize_dup(other) # :nodoc:
+ super
+ calculate_changes_from_defaults
+ end
+
+ def changed?
+ super || changed_in_place.any?
+ end
+
+ def changed
+ super | changed_in_place
+ end
+
+ def attribute_changed?(attr_name, options = {})
+ result = super
+ # We can't change "from" something in place. Only setters can define
+ # "from" and "to"
+ result ||= changed_in_place?(attr_name) unless options.key?(:from)
+ result
+ end
+
+ def changes_applied
+ super
+ store_original_raw_attributes
+ end
+
+ def clear_changes_information
+ super
+ original_raw_attributes.clear
+ end
+
+ private
+
+ def calculate_changes_from_defaults
+ @changed_attributes = nil
+ self.class.column_defaults.each do |attr, orig_value|
+ changed_attributes[attr] = orig_value if _field_changed?(attr, orig_value)
+ end
+ end
+
+ # Wrap write_attribute to remember original attribute value.
+ def write_attribute(attr, value)
+ attr = attr.to_s
+
+ old_value = old_attribute_value(attr)
+
+ result = super
+ store_original_raw_attribute(attr)
+ save_changed_attribute(attr, old_value)
+ result
+ end
+
+ def raw_write_attribute(attr, value)
+ attr = attr.to_s
+
+ result = super
+ original_raw_attributes[attr] = value
+ result
+ end
+
+ def save_changed_attribute(attr, old_value)
+ if attribute_changed?(attr)
+ changed_attributes.delete(attr) unless _field_changed?(attr, old_value)
+ else
+ changed_attributes[attr] = old_value if _field_changed?(attr, old_value)
+ end
+ end
+
+ def old_attribute_value(attr)
+ if attribute_changed?(attr)
+ changed_attributes[attr]
+ else
+ clone_attribute_value(:read_attribute, attr)
+ end
+ end
+
+ def _update_record(*)
+ partial_writes? ? super(keys_for_partial_write) : super
+ end
+
+ def _create_record(*)
+ partial_writes? ? super(keys_for_partial_write) : super
+ end
+
+ # Serialized attributes should always be written in case they've been
+ # changed in place.
+ def keys_for_partial_write
+ changed
+ end
+
+ def _field_changed?(attr, old_value)
+ @attributes[attr].changed_from?(old_value)
+ end
+
+ def changed_in_place
+ self.class.attribute_names.select do |attr_name|
+ changed_in_place?(attr_name)
+ end
+ end
+
+ def changed_in_place?(attr_name)
+ old_value = original_raw_attribute(attr_name)
+ @attributes[attr_name].changed_in_place_from?(old_value)
+ end
+
+ def original_raw_attribute(attr_name)
+ original_raw_attributes.fetch(attr_name) do
+ read_attribute_before_type_cast(attr_name)
+ end
+ end
+
+ def original_raw_attributes
+ @original_raw_attributes ||= {}
+ end
+
+ def store_original_raw_attribute(attr_name)
+ original_raw_attributes[attr_name] = @attributes[attr_name].value_for_database
+ end
+
+ def store_original_raw_attributes
+ attribute_names.each do |attr|
+ store_original_raw_attribute(attr)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/primary_key.rb b/activerecord/lib/active_record/attribute_methods/primary_key.rb
new file mode 100644
index 0000000000..9bd333bbac
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/primary_key.rb
@@ -0,0 +1,127 @@
+require 'set'
+
+module ActiveRecord
+ module AttributeMethods
+ module PrimaryKey
+ extend ActiveSupport::Concern
+
+ # Returns this record's primary key value wrapped in an Array if one is
+ # available.
+ def to_key
+ sync_with_transaction_state
+ key = self.id
+ [key] if key
+ end
+
+ # Returns the primary key value.
+ def id
+ if pk = self.class.primary_key
+ sync_with_transaction_state
+ read_attribute(pk)
+ end
+ end
+
+ # Sets the primary key value.
+ def id=(value)
+ sync_with_transaction_state
+ write_attribute(self.class.primary_key, value) if self.class.primary_key
+ end
+
+ # Queries the primary key value.
+ def id?
+ sync_with_transaction_state
+ query_attribute(self.class.primary_key)
+ end
+
+ # Returns the primary key value before type cast.
+ def id_before_type_cast
+ sync_with_transaction_state
+ read_attribute_before_type_cast(self.class.primary_key)
+ end
+
+ # Returns the primary key previous value.
+ def id_was
+ sync_with_transaction_state
+ attribute_was(self.class.primary_key)
+ end
+
+ protected
+
+ def attribute_method?(attr_name)
+ attr_name == 'id' || super
+ end
+
+ module ClassMethods
+ def define_method_attribute(attr_name)
+ super
+
+ if attr_name == primary_key && attr_name != 'id'
+ generated_attribute_methods.send(:alias_method, :id, primary_key)
+ end
+ end
+
+ ID_ATTRIBUTE_METHODS = %w(id id= id? id_before_type_cast id_was).to_set
+
+ def dangerous_attribute_method?(method_name)
+ super && !ID_ATTRIBUTE_METHODS.include?(method_name)
+ end
+
+ # Defines the primary key field -- can be overridden in subclasses.
+ # Overwriting will negate any effect of the +primary_key_prefix_type+
+ # setting, though.
+ def primary_key
+ @primary_key = reset_primary_key unless defined? @primary_key
+ @primary_key
+ end
+
+ # Returns a quoted version of the primary key name, used to construct
+ # SQL statements.
+ def quoted_primary_key
+ @quoted_primary_key ||= connection.quote_column_name(primary_key)
+ end
+
+ def reset_primary_key #:nodoc:
+ if self == base_class
+ self.primary_key = get_primary_key(base_class.name)
+ else
+ self.primary_key = base_class.primary_key
+ end
+ end
+
+ def get_primary_key(base_name) #:nodoc:
+ if base_name && primary_key_prefix_type == :table_name
+ base_name.foreign_key(false)
+ elsif base_name && primary_key_prefix_type == :table_name_with_underscore
+ base_name.foreign_key
+ else
+ if ActiveRecord::Base != self && table_exists?
+ connection.schema_cache.primary_keys(table_name)
+ else
+ 'id'
+ end
+ end
+ end
+
+ # Sets the name of the primary key column.
+ #
+ # class Project < ActiveRecord::Base
+ # self.primary_key = 'sysid'
+ # end
+ #
+ # You can also define the +primary_key+ method yourself:
+ #
+ # class Project < ActiveRecord::Base
+ # def self.primary_key
+ # 'foo_' + super
+ # end
+ # end
+ #
+ # Project.primary_key # => "foo_id"
+ def primary_key=(value)
+ @primary_key = value && value.to_s
+ @quoted_primary_key = nil
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/query.rb b/activerecord/lib/active_record/attribute_methods/query.rb
new file mode 100644
index 0000000000..0f9723febb
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/query.rb
@@ -0,0 +1,40 @@
+module ActiveRecord
+ module AttributeMethods
+ module Query
+ extend ActiveSupport::Concern
+
+ included do
+ attribute_method_suffix "?"
+ end
+
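+      # Returns +true+ if the attribute is considered present: +nil+, +false+
+      # and blank values return +false+, and numeric attributes return +false+
+      # when they are zero.
+      #
+      # A brief sketch (the model and its columns are assumptions):
+      #
+      #   user = User.new(name: "David", visits: 0)
+      #   user.query_attribute(:name)   # => true
+      #   user.query_attribute(:visits) # => false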
+ def query_attribute(attr_name)
+ value = read_attribute(attr_name) { |n| missing_attribute(n, caller) }
+
+ case value
+ when true then true
+ when false, nil then false
+ else
+ column = self.class.columns_hash[attr_name]
+ if column.nil?
+ if Numeric === value || value !~ /[^0-9]/
+ !value.to_i.zero?
+ else
+ return false if ActiveRecord::ConnectionAdapters::Column::FALSE_VALUES.include?(value)
+ !value.blank?
+ end
+ elsif column.number?
+ !value.zero?
+ else
+ !value.blank?
+ end
+ end
+ end
+
+ private
+ # Handle *? for method_missing.
+ def attribute?(attribute_name)
+ query_attribute(attribute_name)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/read.rb b/activerecord/lib/active_record/attribute_methods/read.rb
new file mode 100644
index 0000000000..10869dfc1e
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/read.rb
@@ -0,0 +1,97 @@
+require 'active_support/core_ext/module/method_transplanting'
+
+module ActiveRecord
+ module AttributeMethods
+ module Read
+ ReaderMethodCache = Class.new(AttributeMethodCache) {
+ private
+ # We want to generate the methods via module_eval rather than
+ # define_method, because define_method is slower on dispatch.
+ # Evaluating many similar methods may use more memory as the instruction
+ # sequences are duplicated and cached (in MRI). define_method may
+ # be slower on dispatch, but if you're careful about the closure
+ # created, then define_method will consume much less memory.
+ #
+      # But sometimes the database might return columns with
+      # characters that are not allowed in normal method names (like
+      # 'my_column(omg)'). So to work around this we first define the
+      # method with a __temp__ identifier, and then use alias_method to
+      # rename it to what we want.
+ #
+ # We are also defining a constant to hold the frozen string of
+ # the attribute name. Using a constant means that we do not have
+ # to allocate an object on each call to the attribute method.
+ # Making it frozen means that it doesn't get duped when used to
+ # key the @attributes in read_attribute.
+ def method_body(method_name, const_name)
+ <<-EOMETHOD
+ def #{method_name}
+ name = ::ActiveRecord::AttributeMethods::AttrNames::ATTR_#{const_name}
+ read_attribute(name) { |n| missing_attribute(n, caller) }
+ end
+ EOMETHOD
+ end
+ }.new
+
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ [:cache_attributes, :cached_attributes, :cache_attribute?].each do |method_name|
+ define_method method_name do |*|
+ cached_attributes_deprecation_warning(method_name)
+ true
+ end
+ end
+
+ protected
+
+ def cached_attributes_deprecation_warning(method_name)
+ ActiveSupport::Deprecation.warn(<<-MESSAGE.strip_heredoc)
+ Calling `#{method_name}` is no longer necessary. All attributes are cached.
+ MESSAGE
+ end
+
+ if Module.methods_transplantable?
+ def define_method_attribute(name)
+ method = ReaderMethodCache[name]
+ generated_attribute_methods.module_eval { define_method name, method }
+ end
+ else
+ def define_method_attribute(name)
+ safe_name = name.unpack('h*').first
+ temp_method = "__temp__#{safe_name}"
+
+ ActiveRecord::AttributeMethods::AttrNames.set_name_cache safe_name, name
+
+ generated_attribute_methods.module_eval <<-STR, __FILE__, __LINE__ + 1
+ def #{temp_method}
+ name = ::ActiveRecord::AttributeMethods::AttrNames::ATTR_#{safe_name}
+ read_attribute(name) { |n| missing_attribute(n, caller) }
+ end
+ STR
+
+ generated_attribute_methods.module_eval do
+ alias_method name, temp_method
+ undef_method temp_method
+ end
+ end
+ end
+ end
+
+ # Returns the value of the attribute identified by <tt>attr_name</tt> after
+ # it has been typecast (for example, "2004-12-12" in a date column is cast
+ # to a date object, like Date.new(2004, 12, 12)).
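+    #
+    # A brief sketch (a Topic model with a +title+ column is assumed):
+    #
+    #   topic = Topic.new(title: "Budget")
+    #   topic.read_attribute(:title)  # => "Budget"
+    #   topic.read_attribute('title') # => "Budget"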
+ def read_attribute(attr_name, &block)
+ name = attr_name.to_s
+ name = self.class.primary_key if name == 'id'
+ @attributes.fetch_value(name, &block)
+ end
+
+ private
+
+ def attribute(attribute_name)
+ read_attribute(attribute_name)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/serialization.rb b/activerecord/lib/active_record/attribute_methods/serialization.rb
new file mode 100644
index 0000000000..264ce2bdfa
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/serialization.rb
@@ -0,0 +1,70 @@
+module ActiveRecord
+ module AttributeMethods
+ module Serialization
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # If you have an attribute that needs to be saved to the database as an
+ # object, and retrieved as the same object, then specify the name of that
+ # attribute using this method and it will be handled automatically. The
+ # serialization is done through YAML. If +class_name+ is specified, the
+ # serialized object must be of that class on retrieval or
+ # <tt>SerializationTypeMismatch</tt> will be raised.
+ #
+ # A notable side effect of serialized attributes is that the model will
+ # be updated on every save, even if it is not dirty.
+ #
+ # ==== Parameters
+ #
+ # * +attr_name+ - The field name that should be serialized.
+ # * +class_name_or_coder+ - Optional, a coder object, which responds to `.load` / `.dump`
+ # or a class name that the object type should be equal to.
+ #
+ # ==== Example
+ #
+ # # Serialize a preferences attribute.
+ # class User < ActiveRecord::Base
+ # serialize :preferences
+ # end
+ #
+ # # Serialize preferences using JSON as coder.
+ # class User < ActiveRecord::Base
+ # serialize :preferences, JSON
+ # end
+ #
+ # # Serialize preferences as Hash using YAML coder.
+ # class User < ActiveRecord::Base
+ # serialize :preferences, Hash
+ # end
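+      #
+      #   # A hedged sketch of a custom coder: any object that responds to
+      #   # +load+ and +dump+ can be given (the coder and column names here
+      #   # are illustrative, not part of Active Record).
+      #   class CommaSeparatedList
+      #     def self.dump(value)
+      #       value.join(",") if value
+      #     end
+      #
+      #     def self.load(value)
+      #       value.to_s.split(",")
+      #     end
+      #   end
+      #
+      #   class User < ActiveRecord::Base
+      #     serialize :tags, CommaSeparatedList
+      #   end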
+ def serialize(attr_name, class_name_or_coder = Object)
+ # When ::JSON is used, force it to go through the Active Support JSON encoder
+ # to ensure special objects (e.g. Active Record models) are dumped correctly
+ # using the #as_json hook.
+ coder = if class_name_or_coder == ::JSON
+ Coders::JSON
+ elsif [:load, :dump].all? { |x| class_name_or_coder.respond_to?(x) }
+ class_name_or_coder
+ else
+ Coders::YAMLColumn.new(class_name_or_coder)
+ end
+
+ decorate_attribute_type(attr_name, :serialize) do |type|
+ Type::Serialized.new(type, coder)
+ end
+ end
+
+ def serialized_attributes
+ ActiveSupport::Deprecation.warn(<<-WARNING.strip_heredoc)
+ `serialized_attributes` is deprecated without replacement, and will
+ be removed in Rails 5.0.
+ WARNING
+ @serialized_attributes ||= Hash[
+ columns.select { |t| t.cast_type.is_a?(Type::Serialized) }.map { |c|
+ [c.name, c.cast_type.coder]
+ }
+ ]
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb b/activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb
new file mode 100644
index 0000000000..f439bd1ffe
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/time_zone_conversion.rb
@@ -0,0 +1,63 @@
+module ActiveRecord
+ module AttributeMethods
+ module TimeZoneConversion
+ class TimeZoneConverter < SimpleDelegator # :nodoc:
+ def type_cast_from_database(value)
+ convert_time_to_time_zone(super)
+ end
+
+ def type_cast_from_user(value)
+ if value.is_a?(Array)
+ value.map { |v| type_cast_from_user(v) }
+ elsif value.respond_to?(:in_time_zone)
+ value.in_time_zone
+ end
+ end
+
+ def convert_time_to_time_zone(value)
+ if value.is_a?(Array)
+ value.map { |v| convert_time_to_time_zone(v) }
+ elsif value.acts_like?(:time)
+ value.in_time_zone
+ else
+ value
+ end
+ end
+ end
+
+ extend ActiveSupport::Concern
+
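+      # A hedged configuration sketch (the initializer path and model name are
+      # assumptions, not part of this file):
+      #
+      #   # config/initializers/time_zone.rb
+      #   ActiveRecord::Base.time_zone_aware_attributes = true
+      #
+      #   class User < ActiveRecord::Base
+      #     self.skip_time_zone_conversion_for_attributes = [:last_seen_at]
+      #   end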
+ included do
+ mattr_accessor :time_zone_aware_attributes, instance_writer: false
+ self.time_zone_aware_attributes = false
+
+ class_attribute :skip_time_zone_conversion_for_attributes, instance_writer: false
+ self.skip_time_zone_conversion_for_attributes = []
+ end
+
+ module ClassMethods
+ private
+
+ def inherited(subclass)
+ # We need to apply this decorator here, rather than on module inclusion. The closure
+ # created by the matcher would otherwise evaluate for `ActiveRecord::Base`, not the
+ # sub class being decorated. As such, changes to `time_zone_aware_attributes`, or
+ # `skip_time_zone_conversion_for_attributes` would not be picked up.
+ subclass.class_eval do
+ matcher = ->(name, type) { create_time_zone_conversion_attribute?(name, type) }
+ decorate_matching_attribute_types(matcher, :_time_zone_conversion) do |type|
+ TimeZoneConverter.new(type)
+ end
+ end
+ super
+ end
+
+ def create_time_zone_conversion_attribute?(name, cast_type)
+ time_zone_aware_attributes &&
+ !self.skip_time_zone_conversion_for_attributes.include?(name.to_sym) &&
+ (:datetime == cast_type.type)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_methods/write.rb b/activerecord/lib/active_record/attribute_methods/write.rb
new file mode 100644
index 0000000000..b3c8209a74
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_methods/write.rb
@@ -0,0 +1,83 @@
+require 'active_support/core_ext/module/method_transplanting'
+
+module ActiveRecord
+ module AttributeMethods
+ module Write
+ WriterMethodCache = Class.new(AttributeMethodCache) {
+ private
+
+ def method_body(method_name, const_name)
+ <<-EOMETHOD
+ def #{method_name}(value)
+ name = ::ActiveRecord::AttributeMethods::AttrNames::ATTR_#{const_name}
+ write_attribute(name, value)
+ end
+ EOMETHOD
+ end
+ }.new
+
+ extend ActiveSupport::Concern
+
+ included do
+ attribute_method_suffix "="
+ end
+
+ module ClassMethods
+ protected
+
+ if Module.methods_transplantable?
+ def define_method_attribute=(name)
+ method = WriterMethodCache[name]
+ generated_attribute_methods.module_eval {
+ define_method "#{name}=", method
+ }
+ end
+ else
+ def define_method_attribute=(name)
+ safe_name = name.unpack('h*').first
+ ActiveRecord::AttributeMethods::AttrNames.set_name_cache safe_name, name
+
+ generated_attribute_methods.module_eval <<-STR, __FILE__, __LINE__ + 1
+ def __temp__#{safe_name}=(value)
+ name = ::ActiveRecord::AttributeMethods::AttrNames::ATTR_#{safe_name}
+ write_attribute(name, value)
+ end
+ alias_method #{(name + '=').inspect}, :__temp__#{safe_name}=
+ undef_method :__temp__#{safe_name}=
+ STR
+ end
+ end
+ end
+
+ # Updates the attribute identified by <tt>attr_name</tt> with the
+ # specified +value+. Empty strings for fixnum and float columns are
+ # turned into +nil+.
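+    #
+    # A brief sketch (a Topic model with a +title+ column is assumed):
+    #
+    #   topic = Topic.new
+    #   topic.write_attribute(:title, "Budget")
+    #   topic.title # => "Budget"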
+ def write_attribute(attr_name, value)
+ write_attribute_with_type_cast(attr_name, value, true)
+ end
+
+ def raw_write_attribute(attr_name, value)
+ write_attribute_with_type_cast(attr_name, value, false)
+ end
+
+ private
+ # Handle *= for method_missing.
+ def attribute=(attribute_name, value)
+ write_attribute(attribute_name, value)
+ end
+
+ def write_attribute_with_type_cast(attr_name, value, should_type_cast)
+ attr_name = attr_name.to_s
+ attr_name = self.class.primary_key if attr_name == 'id' && self.class.primary_key
+
+ if should_type_cast
+ @attributes.write_from_user(attr_name, value)
+ else
+ @attributes.write_from_database(attr_name, value)
+ end
+
+ value
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_set.rb b/activerecord/lib/active_record/attribute_set.rb
new file mode 100644
index 0000000000..98ac63c7e1
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_set.rb
@@ -0,0 +1,77 @@
+require 'active_record/attribute_set/builder'
+
+module ActiveRecord
+ class AttributeSet # :nodoc:
+ delegate :keys, to: :initialized_attributes
+
+ def initialize(attributes)
+ @attributes = attributes
+ end
+
+ def [](name)
+ attributes[name] || Attribute.null(name)
+ end
+
+ def values_before_type_cast
+ attributes.transform_values(&:value_before_type_cast)
+ end
+
+ def to_hash
+ initialized_attributes.transform_values(&:value)
+ end
+ alias_method :to_h, :to_hash
+
+ def key?(name)
+ attributes.key?(name) && self[name].initialized?
+ end
+
+ def fetch_value(name, &block)
+ self[name].value(&block)
+ end
+
+ def write_from_database(name, value)
+ attributes[name] = self[name].with_value_from_database(value)
+ end
+
+ def write_from_user(name, value)
+ attributes[name] = self[name].with_value_from_user(value)
+ end
+
+ def freeze
+ @attributes.freeze
+ super
+ end
+
+ def initialize_dup(_)
+ @attributes = attributes.transform_values(&:dup)
+ super
+ end
+
+ def initialize_clone(_)
+ @attributes = attributes.clone
+ super
+ end
+
+ def reset(key)
+ if key?(key)
+ write_from_database(key, nil)
+ end
+ end
+
+ def ensure_initialized(key)
+ unless self[key].initialized?
+ write_from_database(key, nil)
+ end
+ end
+
+ protected
+
+ attr_reader :attributes
+
+ private
+
+ def initialized_attributes
+ attributes.select { |_, attr| attr.initialized? }
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attribute_set/builder.rb b/activerecord/lib/active_record/attribute_set/builder.rb
new file mode 100644
index 0000000000..1e146a07da
--- /dev/null
+++ b/activerecord/lib/active_record/attribute_set/builder.rb
@@ -0,0 +1,32 @@
+module ActiveRecord
+ class AttributeSet # :nodoc:
+ class Builder # :nodoc:
+ attr_reader :types
+
+ def initialize(types)
+ @types = types
+ end
+
+ def build_from_database(values = {}, additional_types = {})
+ attributes = build_attributes_from_values(values, additional_types)
+ add_uninitialized_attributes(attributes)
+ AttributeSet.new(attributes)
+ end
+
+ private
+
+ def build_attributes_from_values(values, additional_types)
+ values.each_with_object({}) do |(name, value), hash|
+ type = additional_types.fetch(name, types[name])
+ hash[name] = Attribute.from_database(name, value, type)
+ end
+ end
+
+ def add_uninitialized_attributes(attributes)
+ types.except(*attributes.keys).each do |name, type|
+ attributes[name] = Attribute.uninitialized(name, type)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/attributes.rb b/activerecord/lib/active_record/attributes.rb
new file mode 100644
index 0000000000..890a1314d9
--- /dev/null
+++ b/activerecord/lib/active_record/attributes.rb
@@ -0,0 +1,122 @@
+module ActiveRecord
+ module Attributes # :nodoc:
+ extend ActiveSupport::Concern
+
+ Type = ActiveRecord::Type
+
+ included do
+ class_attribute :user_provided_columns, instance_accessor: false # :internal:
+ self.user_provided_columns = {}
+ end
+
+ module ClassMethods # :nodoc:
+      # Defines or overrides an attribute on this model. This allows customization of
+ # Active Record's type casting behavior, as well as adding support for user defined
+ # types.
+ #
+ # +name+ The name of the methods to define attribute methods for, and the column which
+ # this will persist to.
+ #
+ # +cast_type+ A type object that contains information about how to type cast the value.
+ # See the examples section for more information.
+ #
+ # ==== Options
+ # The options hash accepts the following options:
+ #
+ # +default+ is the default value that the column should use on a new record.
+ #
+ # ==== Examples
+ #
+ # The type detected by Active Record can be overridden.
+ #
+ # # db/schema.rb
+ # create_table :store_listings, force: true do |t|
+ # t.decimal :price_in_cents
+ # end
+ #
+ # # app/models/store_listing.rb
+ # class StoreListing < ActiveRecord::Base
+ # end
+ #
+ # store_listing = StoreListing.new(price_in_cents: '10.1')
+ #
+ # # before
+ # store_listing.price_in_cents # => BigDecimal.new(10.1)
+ #
+ # class StoreListing < ActiveRecord::Base
+ # attribute :price_in_cents, Type::Integer.new
+ # end
+ #
+ # # after
+ # store_listing.price_in_cents # => 10
+ #
+ # Users may also define their own custom types, as long as they respond to the methods
+ # defined on the value type. The `type_cast` method on your type object will be called
+ # with values both from the database, and from your controllers. See
+ # `ActiveRecord::Attributes::Type::Value` for the expected API. It is recommended that your
+ # type objects inherit from an existing type, or the base value type.
+ #
+ # class MoneyType < ActiveRecord::Type::Integer
+ # def type_cast(value)
+ # if value.include?('$')
+ # price_in_dollars = value.gsub(/\$/, '').to_f
+ # price_in_dollars * 100
+ # else
+ # value.to_i
+ # end
+ # end
+ # end
+ #
+ # class StoreListing < ActiveRecord::Base
+ # attribute :price_in_cents, MoneyType.new
+ # end
+ #
+ # store_listing = StoreListing.new(price_in_cents: '$10.00')
+ # store_listing.price_in_cents # => 1000
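+      #
+      #   # A hedged sketch of the +default+ option (the column name is
+      #   # illustrative):
+      #   class StoreListing < ActiveRecord::Base
+      #     attribute :my_string, Type::String.new, default: "new default"
+      #   end
+      #
+      #   StoreListing.new.my_string # => "new default"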
+ def attribute(name, cast_type, options = {})
+ name = name.to_s
+ clear_caches_calculated_from_columns
+ # Assign a new hash to ensure that subclasses do not share a hash
+ self.user_provided_columns = user_provided_columns.merge(name => connection.new_column(name, options[:default], cast_type))
+ end
+
+ # Returns an array of column objects for the table associated with this class.
+ def columns
+ @columns ||= add_user_provided_columns(connection.schema_cache.columns(table_name))
+ end
+
+ # Returns a hash of column objects for the table associated with this class.
+ def columns_hash
+ @columns_hash ||= Hash[columns.map { |c| [c.name, c] }]
+ end
+
+ def reset_column_information # :nodoc:
+ super
+ clear_caches_calculated_from_columns
+ end
+
+ private
+
+ def add_user_provided_columns(schema_columns)
+ existing_columns = schema_columns.map do |column|
+ user_provided_columns[column.name] || column
+ end
+
+ existing_column_names = existing_columns.map(&:name)
+ new_columns = user_provided_columns.except(*existing_column_names).values
+
+ existing_columns + new_columns
+ end
+
+ def clear_caches_calculated_from_columns
+ @attributes_builder = nil
+ @column_names = nil
+ @column_types = nil
+ @columns = nil
+ @columns_hash = nil
+ @content_columns = nil
+ @default_attributes = nil
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/autosave_association.rb b/activerecord/lib/active_record/autosave_association.rb
new file mode 100644
index 0000000000..dd92e29199
--- /dev/null
+++ b/activerecord/lib/active_record/autosave_association.rb
@@ -0,0 +1,437 @@
+module ActiveRecord
+ # = Active Record Autosave Association
+ #
+ # +AutosaveAssociation+ is a module that takes care of automatically saving
+ # associated records when their parent is saved. In addition to saving, it
+ # also destroys any associated records that were marked for destruction.
+ # (See +mark_for_destruction+ and <tt>marked_for_destruction?</tt>).
+ #
+  # The saving of the parent and its associations, as well as the destruction of
+  # marked associations, all happen inside a transaction. This should never leave
+  # the database in an inconsistent state.
+ #
+ # If validations for any of the associations fail, their error messages will
+ # be applied to the parent.
+ #
+  # Note that this also means that associations marked for destruction won't
+  # be destroyed directly. They will, however, still be marked for destruction.
+ #
+  # Note that <tt>autosave: false</tt> is not the same as not declaring <tt>:autosave</tt>.
+  # When the <tt>:autosave</tt> option is not present, new association records are
+  # saved but updated association records are not saved.
+ #
+ # == Validation
+ #
+ # Children records are validated unless <tt>:validate</tt> is +false+.
+ #
+ # == Callbacks
+ #
+  # An association with the autosave option defines several callbacks on your
+  # model (before_save, after_create, after_update). Please note that
+  # callbacks are executed in the order they were defined in the
+  # model. You should avoid modifying the association content before the
+  # autosave callbacks are executed. Placing your callbacks after
+  # associations is usually a good practice.
+ #
+ # === One-to-one Example
+ #
+ # class Post < ActiveRecord::Base
+ # has_one :author, autosave: true
+ # end
+ #
+ # Saving changes to the parent and its associated model can now be performed
+ # automatically _and_ atomically:
+ #
+ # post = Post.find(1)
+ # post.title # => "The current global position of migrating ducks"
+ # post.author.name # => "alloy"
+ #
+ # post.title = "On the migration of ducks"
+ # post.author.name = "Eloy Duran"
+ #
+ # post.save
+ # post.reload
+ # post.title # => "On the migration of ducks"
+ # post.author.name # => "Eloy Duran"
+ #
+ # Destroying an associated model, as part of the parent's save action, is as
+ # simple as marking it for destruction:
+ #
+ # post.author.mark_for_destruction
+ # post.author.marked_for_destruction? # => true
+ #
+ # Note that the model is _not_ yet removed from the database:
+ #
+ # id = post.author.id
+ # Author.find_by(id: id).nil? # => false
+ #
+ # post.save
+ # post.reload.author # => nil
+ #
+ # Now it _is_ removed from the database:
+ #
+ # Author.find_by(id: id).nil? # => true
+ #
+ # === One-to-many Example
+ #
+ # When <tt>:autosave</tt> is not declared new children are saved when their parent is saved:
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :comments # :autosave option is not declared
+ # end
+ #
+ # post = Post.new(title: 'ruby rocks')
+ # post.comments.build(body: 'hello world')
+ # post.save # => saves both post and comment
+ #
+ # post = Post.create(title: 'ruby rocks')
+ # post.comments.build(body: 'hello world')
+ # post.save # => saves both post and comment
+ #
+ # post = Post.create(title: 'ruby rocks')
+ # post.comments.create(body: 'hello world')
+ # post.save # => saves both post and comment
+ #
+ # When <tt>:autosave</tt> is true all children are saved, no matter whether they
+ # are new records or not:
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :comments, autosave: true
+ # end
+ #
+ # post = Post.create(title: 'ruby rocks')
+ # post.comments.create(body: 'hello world')
+ # post.comments[0].body = 'hi everyone'
+ # post.comments.build(body: "good morning.")
+ # post.title += "!"
+ # post.save # => saves both post and comments.
+ #
+ # Destroying one of the associated models as part of the parent's save action
+ # is as simple as marking it for destruction:
+ #
+  #   post.comments # => [#<Comment id: 1, ...>, #<Comment id: 2, ...>]
+ # post.comments[1].mark_for_destruction
+ # post.comments[1].marked_for_destruction? # => true
+ # post.comments.length # => 2
+ #
+ # Note that the model is _not_ yet removed from the database:
+ #
+ # id = post.comments.last.id
+ # Comment.find_by(id: id).nil? # => false
+ #
+ # post.save
+ # post.reload.comments.length # => 1
+ #
+ # Now it _is_ removed from the database:
+ #
+ # Comment.find_by(id: id).nil? # => true
+
+ module AutosaveAssociation
+ extend ActiveSupport::Concern
+
+ module AssociationBuilderExtension #:nodoc:
+ def self.build(model, reflection)
+ model.send(:add_autosave_association_callbacks, reflection)
+ end
+
+ def self.valid_options
+ [ :autosave ]
+ end
+ end
+
+ included do
+ Associations::Builder::Association.extensions << AssociationBuilderExtension
+ end
+
+ module ClassMethods
+ private
+
+ def define_non_cyclic_method(name, &block)
+ return if method_defined?(name)
+ define_method(name) do |*args|
+ result = true; @_already_called ||= {}
+ # Loop prevention for validation of associations
+ unless @_already_called[name]
+ begin
+ @_already_called[name]=true
+ result = instance_eval(&block)
+ ensure
+ @_already_called[name]=false
+ end
+ end
+
+ result
+ end
+ end
+
+ # Adds validation and save callbacks for the association as specified by
+ # the +reflection+.
+ #
+ # For performance reasons, we don't check whether to validate at runtime.
+ # However the validation and callback methods are lazy and those methods
+ # get created when they are invoked for the very first time. However,
+ # this can change, for instance, when using nested attributes, which is
+ # called _after_ the association has been defined. Since we don't want
+ # the callbacks to get defined multiple times, there are guards that
+ # check if the save or validation methods have already been defined
+ # before actually defining them.
+ def add_autosave_association_callbacks(reflection)
+ save_method = :"autosave_associated_records_for_#{reflection.name}"
+ validation_method = :"validate_associated_records_for_#{reflection.name}"
+ collection = reflection.collection?
+
+ if collection
+ before_save :before_save_collection_association
+
+ define_non_cyclic_method(save_method) { save_collection_association(reflection) }
+ after_save save_method
+ elsif reflection.has_one?
+ define_method(save_method) { save_has_one_association(reflection) } unless method_defined?(save_method)
+ # Configures two callbacks instead of a single after_save so that
+ # the model may rely on their execution order relative to its
+ # own callbacks.
+ #
+ # For example, given that after_creates run before after_saves, if
+ # we configured instead an after_save there would be no way to fire
+ # a custom after_create callback after the child association gets
+ # created.
+ after_create save_method
+ after_update save_method
+ else
+ define_non_cyclic_method(save_method) { save_belongs_to_association(reflection) }
+ before_save save_method
+ end
+
+ if reflection.validate? && !method_defined?(validation_method)
+ method = (collection ? :validate_collection_association : :validate_single_association)
+ define_non_cyclic_method(validation_method) { send(method, reflection) }
+ validate validation_method
+ end
+ end
+ end
+
+ # Reloads the attributes of the object as usual and clears <tt>marked_for_destruction</tt> flag.
+ def reload(options = nil)
+ @marked_for_destruction = false
+ @destroyed_by_association = nil
+ super
+ end
+
+  # Marks this record to be destroyed as part of the parent's save transaction.
+  # This does _not_ actually destroy the record instantly; rather, the child record will be destroyed
+  # when <tt>parent.save</tt> is called.
+ #
+ # Only useful if the <tt>:autosave</tt> option on the parent is enabled for this associated model.
+ def mark_for_destruction
+ @marked_for_destruction = true
+ end
+
+  # Returns whether or not this record will be destroyed as part of the parent's save transaction.
+ #
+ # Only useful if the <tt>:autosave</tt> option on the parent is enabled for this associated model.
+ def marked_for_destruction?
+ @marked_for_destruction
+ end
+
+  # Records the association that is being destroyed and is destroying this
+  # record in the process.
+ def destroyed_by_association=(reflection)
+ @destroyed_by_association = reflection
+ end
+
+ # Returns the association for the parent being destroyed.
+ #
+ # Used to avoid updating the counter cache unnecessarily.
+ def destroyed_by_association
+ @destroyed_by_association
+ end
+
+ # Returns whether or not this record has been changed in any way (including whether
+ # any of its nested autosave associations are likewise changed)
+ def changed_for_autosave?
+ new_record? || changed? || marked_for_destruction? || nested_records_changed_for_autosave?
+ end
+
+ private
+
+    # Returns the records of an association collection that should be validated
+    # or saved. If +autosave+ is +false+ only new records will be returned,
+    # unless the parent is/was a new record itself.
+ def associated_records_to_validate_or_save(association, new_record, autosave)
+ if new_record
+ association && association.target
+ elsif autosave
+ association.target.find_all { |record| record.changed_for_autosave? }
+ else
+ association.target.find_all { |record| record.new_record? }
+ end
+ end
+
+    # Go through nested autosave associations that are loaded in memory (without loading
+    # any new ones), and return true if any are changed for autosave.
+ def nested_records_changed_for_autosave?
+ self.class._reflections.values.any? do |reflection|
+ if reflection.options[:autosave]
+ association = association_instance_get(reflection.name)
+ association && Array.wrap(association.target).any? { |a| a.changed_for_autosave? }
+ end
+ end
+ end
+
+ # Validate the association if <tt>:validate</tt> or <tt>:autosave</tt> is
+ # turned on for the association.
+ def validate_single_association(reflection)
+ association = association_instance_get(reflection.name)
+ record = association && association.reader
+ association_valid?(reflection, record) if record
+ end
+
+ # Validate the associated records if <tt>:validate</tt> or
+ # <tt>:autosave</tt> is turned on for the association specified by
+ # +reflection+.
+ def validate_collection_association(reflection)
+ if association = association_instance_get(reflection.name)
+ if records = associated_records_to_validate_or_save(association, new_record?, reflection.options[:autosave])
+ records.each { |record| association_valid?(reflection, record) }
+ end
+ end
+ end
+
+ # Returns whether or not the association is valid and applies any errors to
+ # the parent, <tt>self</tt>, if it wasn't. Skips any <tt>:autosave</tt>
+ # enabled records if they're marked_for_destruction? or destroyed.
+ def association_valid?(reflection, record)
+ return true if record.destroyed? || record.marked_for_destruction?
+
+ validation_context = self.validation_context unless [:create, :update].include?(self.validation_context)
+ unless valid = record.valid?(validation_context)
+ if reflection.options[:autosave]
+ record.errors.each do |attribute, message|
+ attribute = "#{reflection.name}.#{attribute}"
+ errors[attribute] << message
+ errors[attribute].uniq!
+ end
+ else
+ errors.add(reflection.name)
+ end
+ end
+ valid
+ end
+
+    # Used as a before_save callback to remember whether the parent was a new
+    # record before the save, which is needed when saving collection associations.
+ def before_save_collection_association
+ @new_record_before_save = new_record?
+ true
+ end
+
+ # Saves any new associated records, or all loaded autosave associations if
+ # <tt>:autosave</tt> is enabled on the association.
+ #
+ # In addition, it destroys all children that were marked for destruction
+ # with mark_for_destruction.
+ #
+ # This all happens inside a transaction, _if_ the Transactions module is included into
+ # ActiveRecord::Base after the AutosaveAssociation module, which it does by default.
+ def save_collection_association(reflection)
+ if association = association_instance_get(reflection.name)
+ autosave = reflection.options[:autosave]
+
+ if records = associated_records_to_validate_or_save(association, @new_record_before_save, autosave)
+
+ if autosave
+ records_to_destroy = records.select(&:marked_for_destruction?)
+ records_to_destroy.each { |record| association.destroy(record) }
+ records -= records_to_destroy
+ end
+
+ records.each do |record|
+ next if record.destroyed?
+
+ saved = true
+
+ if autosave != false && (@new_record_before_save || record.new_record?)
+ if autosave
+ saved = association.insert_record(record, false)
+ else
+ association.insert_record(record) unless reflection.nested?
+ end
+ elsif autosave
+ saved = record.save(:validate => false)
+ end
+
+ raise ActiveRecord::Rollback unless saved
+ end
+ @new_record_before_save = false
+ end
+
+ # reconstruct the scope now that we know the owner's id
+ association.reset_scope if association.respond_to?(:reset_scope)
+ end
+ end
+
+ # Saves the associated record if it's new or <tt>:autosave</tt> is enabled
+ # on the association.
+ #
+ # In addition, it will destroy the association if it was marked for
+ # destruction with mark_for_destruction.
+ #
+ # This all happens inside a transaction, _if_ the Transactions module is included into
+ # ActiveRecord::Base after the AutosaveAssociation module, which it does by default.
+ def save_has_one_association(reflection)
+ association = association_instance_get(reflection.name)
+ record = association && association.load_target
+
+ if record && !record.destroyed?
+ autosave = reflection.options[:autosave]
+
+ if autosave && record.marked_for_destruction?
+ record.destroy
+ elsif autosave != false
+ key = reflection.options[:primary_key] ? send(reflection.options[:primary_key]) : id
+
+ if (autosave && record.changed_for_autosave?) || new_record? || record_changed?(reflection, record, key)
+ unless reflection.through_reflection
+ record[reflection.foreign_key] = key
+ end
+
+ saved = record.save(:validate => !autosave)
+ raise ActiveRecord::Rollback if !saved && autosave
+ saved
+ end
+ end
+ end
+ end
+
+    # Returns true if the record is new, or if its foreign key differs from +key+ or has changed.
+ def record_changed?(reflection, record, key)
+ record.new_record? || record[reflection.foreign_key] != key || record.attribute_changed?(reflection.foreign_key)
+ end
+
+ # Saves the associated record if it's new or <tt>:autosave</tt> is enabled.
+ #
+ # In addition, it will destroy the association if it was marked for destruction.
+ def save_belongs_to_association(reflection)
+ association = association_instance_get(reflection.name)
+ record = association && association.load_target
+ if record && !record.destroyed?
+ autosave = reflection.options[:autosave]
+
+ if autosave && record.marked_for_destruction?
+ self[reflection.foreign_key] = nil
+ record.destroy
+ elsif autosave != false
+ saved = record.save(:validate => !autosave) if record.new_record? || (autosave && record.changed_for_autosave?)
+
+ if association.updated?
+ association_id = record.send(reflection.options[:primary_key] || :id)
+ self[reflection.foreign_key] = association_id
+ association.loaded!
+ end
+
+ saved if autosave
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/base.rb b/activerecord/lib/active_record/base.rb
new file mode 100644
index 0000000000..f978fbd0a4
--- /dev/null
+++ b/activerecord/lib/active_record/base.rb
@@ -0,0 +1,317 @@
+require 'yaml'
+require 'set'
+require 'active_support/benchmarkable'
+require 'active_support/dependencies'
+require 'active_support/descendants_tracker'
+require 'active_support/time'
+require 'active_support/core_ext/module/attribute_accessors'
+require 'active_support/core_ext/class/delegating_attributes'
+require 'active_support/core_ext/array/extract_options'
+require 'active_support/core_ext/hash/deep_merge'
+require 'active_support/core_ext/hash/slice'
+require 'active_support/core_ext/hash/transform_values'
+require 'active_support/core_ext/string/behavior'
+require 'active_support/core_ext/kernel/singleton_class'
+require 'active_support/core_ext/module/introspection'
+require 'active_support/core_ext/object/duplicable'
+require 'active_support/core_ext/class/subclasses'
+require 'arel'
+require 'active_record/attribute_decorators'
+require 'active_record/errors'
+require 'active_record/log_subscriber'
+require 'active_record/explain_subscriber'
+require 'active_record/relation/delegation'
+require 'active_record/attributes'
+
+module ActiveRecord #:nodoc:
+ # = Active Record
+ #
+ # Active Record objects don't specify their attributes directly, but rather infer them from
+ # the table definition with which they're linked. Adding, removing, and changing attributes
+ # and their type is done directly in the database. Any change is instantly reflected in the
+ # Active Record objects. The mapping that binds a given Active Record class to a certain
+ # database table will happen automatically in most common cases, but can be overwritten for the uncommon ones.
+ #
+ # See the mapping rules in table_name and the full example in link:files/activerecord/README_rdoc.html for more insight.
+ #
+ # == Creation
+ #
+ # Active Records accept constructor parameters either in a hash or as a block. The hash
+ # method is especially useful when you're receiving the data from somewhere else, like an
+ # HTTP request. It works like this:
+ #
+ # user = User.new(name: "David", occupation: "Code Artist")
+ # user.name # => "David"
+ #
+ # You can also use block initialization:
+ #
+ # user = User.new do |u|
+ # u.name = "David"
+ # u.occupation = "Code Artist"
+ # end
+ #
+ # And of course you can just create a bare object and specify the attributes after the fact:
+ #
+ # user = User.new
+ # user.name = "David"
+ # user.occupation = "Code Artist"
+ #
+ # == Conditions
+ #
+ # Conditions can either be specified as a string, array, or hash representing the WHERE-part of an SQL statement.
+ # The array form is to be used when the condition input is tainted and requires sanitization. The string form can
+ # be used for statements that don't involve tainted data. The hash form works much like the array form, except
+ # only equality and range is possible. Examples:
+ #
+ # class User < ActiveRecord::Base
+ # def self.authenticate_unsafely(user_name, password)
+ # where("user_name = '#{user_name}' AND password = '#{password}'").first
+ # end
+ #
+ # def self.authenticate_safely(user_name, password)
+ # where("user_name = ? AND password = ?", user_name, password).first
+ # end
+ #
+ # def self.authenticate_safely_simply(user_name, password)
+ # where(user_name: user_name, password: password).first
+ # end
+ # end
+ #
+ # The <tt>authenticate_unsafely</tt> method inserts the parameters directly into the query
+ # and is thus susceptible to SQL-injection attacks if the <tt>user_name</tt> and +password+
+ # parameters come directly from an HTTP request. The <tt>authenticate_safely</tt> and
+ # <tt>authenticate_safely_simply</tt> both will sanitize the <tt>user_name</tt> and +password+
+ # before inserting them in the query, which will ensure that an attacker can't escape the
+ # query and fake the login (or worse).
+ #
+ # When using multiple parameters in the conditions, it can easily become hard to read exactly
+ # what the fourth or fifth question mark is supposed to represent. In those cases, you can
+ # resort to named bind variables instead. That's done by replacing the question marks with
+ # symbols and supplying a hash with values for the matching symbol keys:
+ #
+ # Company.where(
+ # "id = :id AND name = :name AND division = :division AND created_at > :accounting_date",
+ # { id: 3, name: "37signals", division: "First", accounting_date: '2005-01-01' }
+ # ).first
+ #
+ # Similarly, a simple hash without a statement will generate conditions based on equality with the SQL AND
+ # operator. For instance:
+ #
+ # Student.where(first_name: "Harvey", status: 1)
+ # Student.where(params[:student])
+ #
+ # A range may be used in the hash to use the SQL BETWEEN operator:
+ #
+ # Student.where(grade: 9..12)
+ #
+ # An array may be used in the hash to use the SQL IN operator:
+ #
+ # Student.where(grade: [9,11,12])
+ #
+ # When joining tables, nested hashes or keys written in the form 'table_name.column_name'
+ # can be used to qualify the table name of a particular condition. For instance:
+ #
+ # Student.joins(:schools).where(schools: { category: 'public' })
+ # Student.joins(:schools).where('schools.category' => 'public' )
+ #
+ # == Overwriting default accessors
+ #
+ # All column values are automatically available through basic accessors on the Active Record
+ # object, but sometimes you want to specialize this behavior. This can be done by overwriting
+ # the default accessors (using the same name as the attribute) and calling
+ # <tt>read_attribute(attr_name)</tt> and <tt>write_attribute(attr_name, value)</tt> to actually
+ # change things.
+ #
+ # class Song < ActiveRecord::Base
+ # # Uses an integer of seconds to hold the length of the song
+ #
+ # def length=(minutes)
+ # write_attribute(:length, minutes.to_i * 60)
+ # end
+ #
+ # def length
+ # read_attribute(:length) / 60
+ # end
+ # end
+ #
+ # You can alternatively use <tt>self[:attribute]=(value)</tt> and <tt>self[:attribute]</tt>
+ # instead of <tt>write_attribute(:attribute, value)</tt> and <tt>read_attribute(:attribute)</tt>.
+ #
+ # == Attribute query methods
+ #
+ # In addition to the basic accessors, query methods are also automatically available on the Active Record object.
+ # Query methods allow you to test whether an attribute value is present.
+ # For numeric values, present is defined as non-zero.
+ #
+ # For example, an Active Record User with the <tt>name</tt> attribute has a <tt>name?</tt> method that you can call
+ # to determine whether the user has a name:
+ #
+ # user = User.new(name: "David")
+ # user.name? # => true
+ #
+ # anonymous = User.new(name: "")
+ # anonymous.name? # => false
+ #
+ # == Accessing attributes before they have been typecasted
+ #
+ # Sometimes you want to be able to read the raw attribute data without having the column-determined
+ # typecast run its course first. That can be done by using the <tt><attribute>_before_type_cast</tt>
+ # accessors that all attributes have. For example, if your Account model has a <tt>balance</tt> attribute,
+ # you can call <tt>account.balance_before_type_cast</tt> or <tt>account.id_before_type_cast</tt>.
+ #
+ # This is especially useful in validation situations where the user might supply a string for an
+ # integer field and you want to display the original string back in an error message. Accessing the
+ # attribute normally would typecast the string to 0, which isn't what you want.
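+  #
+  #   # A brief sketch (an Account model with an integer +balance+ column is assumed):
+  #   account = Account.new(balance: "not a number")
+  #   account.balance                  # => 0
+  #   account.balance_before_type_cast # => "not a number"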
+ #
+ # == Dynamic attribute-based finders
+ #
+ # Dynamic attribute-based finders are a mildly deprecated way of getting (and/or creating) objects
+ # by simple queries without turning to SQL. They work by appending the name of an attribute
+ # to <tt>find_by_</tt> like <tt>Person.find_by_user_name</tt>.
+ # Instead of writing <tt>Person.find_by(user_name: user_name)</tt>, you can use
+ # <tt>Person.find_by_user_name(user_name)</tt>.
+ #
+ # It's possible to add an exclamation point (!) on the end of the dynamic finders to get them to raise an
+ # <tt>ActiveRecord::RecordNotFound</tt> error if they do not return any records,
+ # like <tt>Person.find_by_last_name!</tt>.
+ #
+ # It's also possible to use multiple attributes in the same find by separating them with "_and_".
+ #
+ # Person.find_by(user_name: user_name, password: password)
+ # Person.find_by_user_name_and_password(user_name, password) # with dynamic finder
+ #
+ # It's even possible to call these dynamic finder methods on relations and named scopes.
+ #
+ # Payment.order("created_on").find_by_amount(50)
+ #
+ # == Saving arrays, hashes, and other non-mappable objects in text columns
+ #
+ # Active Record can serialize any object in text columns using YAML. To do so, you must
+ # specify this with a call to the class method +serialize+.
+ # This makes it possible to store arrays, hashes, and other non-mappable objects without doing
+ # any additional work.
+ #
+ # class User < ActiveRecord::Base
+ # serialize :preferences
+ # end
+ #
+  #   user = User.create(preferences: { "background" => "black", "display" => "large" })
+  #   User.find(user.id).preferences # => { "background" => "black", "display" => "large" }
+ #
+ # You can also specify a class option as the second parameter that'll raise an exception
+ # if a serialized object is retrieved as a descendant of a class not in the hierarchy.
+ #
+ # class User < ActiveRecord::Base
+ # serialize :preferences, Hash
+ # end
+ #
+ # user = User.create(preferences: %w( one two three ))
+ # User.find(user.id).preferences # raises SerializationTypeMismatch
+ #
+ # When you specify a class option, the default value for that attribute will be a new
+ # instance of that class.
+ #
+ # class User < ActiveRecord::Base
+ # serialize :preferences, OpenStruct
+ # end
+ #
+ # user = User.new
+ # user.preferences.theme_color = "red"
+ #
+ #
+ # == Single table inheritance
+ #
+ # Active Record allows inheritance by storing the name of the class in a
+ # column that is named "type" by default. See ActiveRecord::Inheritance for
+ # more details.
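+  #
+  #   # A minimal STI sketch (the table and model names are illustrative):
+  #   class Vehicle < ActiveRecord::Base; end
+  #   class Car < Vehicle; end
+  #
+  #   Car.create.type # => "Car"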
+ #
+ # == Connection to multiple databases in different models
+ #
+ # Connections are usually created through ActiveRecord::Base.establish_connection and retrieved
+ # by ActiveRecord::Base.connection. All classes inheriting from ActiveRecord::Base will use this
+ # connection. But you can also set a class-specific connection. For example, if Course is an
+ # ActiveRecord::Base, but resides in a different database, you can just say <tt>Course.establish_connection</tt>
+ # and Course and all of its subclasses will use this connection instead.
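+  #
+  #   # A hedged sketch; the connection spec keys mirror config/database.yml:
+  #   class Course < ActiveRecord::Base
+  #     establish_connection adapter: "sqlite3", database: "db/courses.sqlite3"
+  #   end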
+ #
+ # This feature is implemented by keeping a connection pool in ActiveRecord::Base that is
+ # a Hash indexed by the class. If a connection is requested, the retrieve_connection method
+ # will go up the class-hierarchy until a connection is found in the connection pool.
+ #
+ # == Exceptions
+ #
+ # * ActiveRecordError - Generic error class and superclass of all other errors raised by Active Record.
+ # * AdapterNotSpecified - The configuration hash used in <tt>establish_connection</tt> didn't include an
+ # <tt>:adapter</tt> key.
+ # * AdapterNotFound - The <tt>:adapter</tt> key used in <tt>establish_connection</tt> specified a
+ # non-existent adapter
+ # (or a bad spelling of an existing one).
+ # * AssociationTypeMismatch - The object assigned to the association wasn't of the type
+ # specified in the association definition.
+ # * AttributeAssignmentError - An error occurred while doing a mass assignment through the
+ # <tt>attributes=</tt> method.
+ # You can inspect the +attribute+ property of the exception object to determine which attribute
+ # triggered the error.
+ # * ConnectionNotEstablished - No connection has been established. Use <tt>establish_connection</tt>
+ # before querying.
+ # * MultiparameterAssignmentErrors - Collection of errors that occurred during a mass assignment using the
+ # <tt>attributes=</tt> method. The +errors+ property of this exception contains an array of
+ # AttributeAssignmentError
+ # objects that should be inspected to determine which attributes triggered the errors.
+ # * RecordInvalid - raised by save! and create! when the record is invalid.
+ # * RecordNotFound - No record responded to the +find+ method. Either the row with the given ID doesn't exist
+ # or the row didn't meet the additional restrictions. Some +find+ calls do not raise this exception to signal
+ # nothing was found, please check its documentation for further details.
+ # * SerializationTypeMismatch - The serialized object wasn't of the class specified as the second parameter.
+ # * StatementInvalid - The database server rejected the SQL statement. The precise error is added in the message.
+ #
+ # *Note*: The attributes listed are class-level attributes (accessible from both the class and instance level).
+ # So it's possible to assign a logger to the class through <tt>Base.logger=</tt> which will then be used by all
+ # instances in the current object space.
+ class Base
+ extend ActiveModel::Naming
+
+ extend ActiveSupport::Benchmarkable
+ extend ActiveSupport::DescendantsTracker
+
+ extend ConnectionHandling
+ extend QueryCache::ClassMethods
+ extend Querying
+ extend Translation
+ extend DynamicMatchers
+ extend Explain
+ extend Enum
+ extend Delegation::DelegateCache
+
+ include Core
+ include Persistence
+ include ReadonlyAttributes
+ include ModelSchema
+ include Inheritance
+ include Scoping
+ include Sanitization
+ include AttributeAssignment
+ include ActiveModel::Conversion
+ include Integration
+ include Validations
+ include CounterCache
+ include Attributes
+ include AttributeDecorators
+ include Locking::Optimistic
+ include Locking::Pessimistic
+ include AttributeMethods
+ include Callbacks
+ include Timestamp
+ include Associations
+ include ActiveModel::SecurePassword
+ include AutosaveAssociation
+ include NestedAttributes
+ include Aggregations
+ include Transactions
+ include NoTouching
+ include Reflection
+ include Serialization
+ include Store
+ end
+
+ ActiveSupport.run_load_hooks(:active_record, Base)
+end
diff --git a/activerecord/lib/active_record/callbacks.rb b/activerecord/lib/active_record/callbacks.rb
new file mode 100644
index 0000000000..5955673b42
--- /dev/null
+++ b/activerecord/lib/active_record/callbacks.rb
@@ -0,0 +1,313 @@
+module ActiveRecord
+ # = Active Record Callbacks
+ #
+ # Callbacks are hooks into the life cycle of an Active Record object that allow you to trigger logic
+ # before or after an alteration of the object state. This can be used to make sure that associated and
+ # dependent objects are deleted when +destroy+ is called (by overwriting +before_destroy+) or to massage attributes
+ # before they're validated (by overwriting +before_validation+). As an example of the callbacks initiated, consider
+ # the <tt>Base#save</tt> call for a new record:
+ #
+ # * (-) <tt>save</tt>
+ # * (-) <tt>valid</tt>
+ # * (1) <tt>before_validation</tt>
+ # * (-) <tt>validate</tt>
+ # * (2) <tt>after_validation</tt>
+ # * (3) <tt>before_save</tt>
+ # * (4) <tt>before_create</tt>
+ # * (-) <tt>create</tt>
+ # * (5) <tt>after_create</tt>
+ # * (6) <tt>after_save</tt>
+ # * (7) <tt>after_commit</tt>
+ #
+ # Also, an <tt>after_rollback</tt> callback can be configured to be triggered whenever a rollback is issued.
+ # Check out <tt>ActiveRecord::Transactions</tt> for more details about <tt>after_commit</tt> and
+ # <tt>after_rollback</tt>.
+ #
+ # Additionally, an <tt>after_touch</tt> callback is triggered whenever an
+ # object is touched.
+ #
+ # Lastly an <tt>after_find</tt> and <tt>after_initialize</tt> callback is triggered for each object that
+ # is found and instantiated by a finder, with <tt>after_initialize</tt> being triggered after new objects
+ # are instantiated as well.
+ #
+ # There are nineteen callbacks in total, which give you immense power to react and prepare for each state in the
+ # Active Record life cycle. The sequence for calling <tt>Base#save</tt> for an existing record is similar,
+ # except that each <tt>_create</tt> callback is replaced by the corresponding <tt>_update</tt> callback.
+ #
+ # Examples:
+ # class CreditCard < ActiveRecord::Base
+ # # Strip everything but digits, so the user can specify "555 234 34" or
+ # # "5552-3434" and both will mean "55523434"
+ # before_validation(on: :create) do
+ # self.number = number.gsub(/[^0-9]/, "") if attribute_present?("number")
+ # end
+ # end
+ #
+ # class Subscription < ActiveRecord::Base
+ # before_create :record_signup
+ #
+ # private
+ # def record_signup
+ # self.signed_up_on = Date.today
+ # end
+ # end
+ #
+ # class Firm < ActiveRecord::Base
+ # # Destroys the associated clients and people when the firm is destroyed
+ # before_destroy { |record| Person.destroy_all "firm_id = #{record.id}" }
+ # before_destroy { |record| Client.destroy_all "client_of = #{record.id}" }
+ # end
+ #
+ # == Inheritable callback queues
+ #
+ # Besides the overwritable callback methods, it's also possible to register callbacks through the
+ # use of the callback macros. Their main advantage is that the macros add behavior into a callback
+ # queue that is kept intact down through an inheritance hierarchy.
+ #
+ # class Topic < ActiveRecord::Base
+ # before_destroy :destroy_author
+ # end
+ #
+ # class Reply < Topic
+ # before_destroy :destroy_readers
+ # end
+ #
+ # Now, when <tt>Topic#destroy</tt> is run only +destroy_author+ is called. When <tt>Reply#destroy</tt> is
+ # run, both +destroy_author+ and +destroy_readers+ are called. Contrast this to the following situation
+ # where the +before_destroy+ method is overridden:
+ #
+ # class Topic < ActiveRecord::Base
+ # def before_destroy() destroy_author end
+ # end
+ #
+ # class Reply < Topic
+ # def before_destroy() destroy_readers end
+ # end
+ #
+ # In that case, <tt>Reply#destroy</tt> would only run +destroy_readers+ and _not_ +destroy_author+.
+ # So, use the callback macros when you want to ensure that a certain callback is called for the entire
+ # hierarchy, and use the regular overwritable methods when you want to leave it up to each descendant
+ # to decide whether they want to call +super+ and trigger the inherited callbacks.
+ #
+ # *IMPORTANT:* In order for inheritance to work for the callback queues, you must specify the
+ # callbacks before specifying the associations. Otherwise, you might trigger the loading of a
+ # child before the parent has registered the callbacks and they won't be inherited.
+ #
+ # == Types of callbacks
+ #
+ # There are four types of callbacks accepted by the callback macros: Method references (symbol), callback objects,
+ # inline methods (using a proc), and inline eval methods (using a string). Method references and callback objects
+ # are the recommended approaches, inline methods using a proc are sometimes appropriate (such as for
+ # creating mix-ins), and inline eval methods are deprecated.
+ #
+ # The method reference callbacks work by specifying a protected or private method available in the object, like this:
+ #
+ # class Topic < ActiveRecord::Base
+ # before_destroy :delete_parents
+ #
+ # private
+ # def delete_parents
+ # self.class.delete_all "parent_id = #{id}"
+ # end
+ # end
+ #
+  # Callback objects have methods named after the callback, which are called with the record as the only parameter, such as:
+ #
+ # class BankAccount < ActiveRecord::Base
+ # before_save EncryptionWrapper.new
+ # after_save EncryptionWrapper.new
+ # after_initialize EncryptionWrapper.new
+ # end
+ #
+ # class EncryptionWrapper
+ # def before_save(record)
+ # record.credit_card_number = encrypt(record.credit_card_number)
+ # end
+ #
+ # def after_save(record)
+ # record.credit_card_number = decrypt(record.credit_card_number)
+ # end
+ #
+ # alias_method :after_initialize, :after_save
+ #
+ # private
+ # def encrypt(value)
+ # # Secrecy is committed
+ # end
+ #
+ # def decrypt(value)
+ # # Secrecy is unveiled
+ # end
+ # end
+ #
+  # So you specify the object you want messaged on a given callback. When that callback is triggered, the method named
+  # after the callback is messaged on that object. You can make these callbacks more flexible by passing in other
+ # initialization data such as the name of the attribute to work with:
+ #
+ # class BankAccount < ActiveRecord::Base
+ # before_save EncryptionWrapper.new("credit_card_number")
+ # after_save EncryptionWrapper.new("credit_card_number")
+ # after_initialize EncryptionWrapper.new("credit_card_number")
+ # end
+ #
+ # class EncryptionWrapper
+ # def initialize(attribute)
+ # @attribute = attribute
+ # end
+ #
+ # def before_save(record)
+ # record.send("#{@attribute}=", encrypt(record.send("#{@attribute}")))
+ # end
+ #
+ # def after_save(record)
+ # record.send("#{@attribute}=", decrypt(record.send("#{@attribute}")))
+ # end
+ #
+ # alias_method :after_initialize, :after_save
+ #
+ # private
+ # def encrypt(value)
+ # # Secrecy is committed
+ # end
+ #
+ # def decrypt(value)
+ # # Secrecy is unveiled
+ # end
+ # end
+ #
+ # The callback macros usually accept a symbol for the method they're supposed to run, but you can also
+ # pass a "method string", which will then be evaluated within the binding of the callback. Example:
+ #
+ # class Topic < ActiveRecord::Base
+ # before_destroy 'self.class.delete_all "parent_id = #{id}"'
+ # end
+ #
+ # Notice that single quotes (') are used so the <tt>#{id}</tt> part isn't evaluated until the callback
+ # is triggered. Also note that these inline callbacks can be stacked just like the regular ones:
+ #
+ # class Topic < ActiveRecord::Base
+ # before_destroy 'self.class.delete_all "parent_id = #{id}"',
+ # 'puts "Evaluated after parents are destroyed"'
+ # end
+ #
+ # == <tt>before_validation*</tt> returning statements
+ #
+ # If the return value of a +before_validation+ callback can be evaluated to +false+, the process will be
+ # aborted and <tt>Base#save</tt> will return +false+. If <tt>Base#save!</tt> is called it will raise an
+ # ActiveRecord::RecordInvalid exception. Nothing will be appended to the errors object.
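+ #
+ # A minimal sketch of this behavior (the +Comment+ model and +spam?+ helper are assumptions for illustration):
+ #
+ # class Comment < ActiveRecord::Base
+ # before_validation :reject_spam
+ #
+ # private
+ # def reject_spam
+ # false if spam? # halts the chain; save returns false, no errors are added
+ # end
+ # end
+ #
+ # comment.save # => false when +spam?+ is true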
+ #
+ # == Canceling callbacks
+ #
+ # If a <tt>before_*</tt> callback returns +false+, all the later callbacks and the associated action are
+ # cancelled. If an <tt>after_*</tt> callback returns +false+, all the later callbacks are cancelled.
+ # Callbacks are generally run in the order they are defined, with the exception of callbacks defined as
+ # methods on the model, which are called last.
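+ #
+ # For example, a <tt>before_*</tt> callback can cancel the whole action (sketch only; +locked?+ is an assumed helper):
+ #
+ # class Topic < ActiveRecord::Base
+ # before_destroy :ensure_unlocked
+ #
+ # private
+ # def ensure_unlocked
+ # false if locked? # cancels the destroy and all later callbacks
+ # end
+ # end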
+ #
+ # == Ordering callbacks
+ #
+ # Sometimes application code requires that callbacks execute in a specific order. For example, a +before_destroy+
+ # callback (+log_children+ in this case) should be executed before the children get destroyed by the
+ # <tt>dependent: :destroy</tt> option.
+ #
+ # Let's look at the code below:
+ #
+ # class Topic < ActiveRecord::Base
+ # has_many :children, dependent: :destroy
+ #
+ # before_destroy :log_children
+ #
+ # private
+ # def log_children
+ # # Child processing
+ # end
+ # end
+ #
+ # In this case, the problem is that when the +before_destroy+ callback is executed, the children are not available
+ # because the <tt>dependent: :destroy</tt> callback gets executed first. You can use the +prepend+ option on the +before_destroy+ callback to avoid this.
+ #
+ # class Topic < ActiveRecord::Base
+ # has_many :children, dependent: :destroy
+ #
+ # before_destroy :log_children, prepend: true
+ #
+ # private
+ # def log_children
+ # # Child processing
+ # end
+ # end
+ #
+ # This way, the +before_destroy+ callback gets executed before the <tt>dependent: :destroy</tt> cleanup is triggered, and the data is still available.
+ #
+ # == Transactions
+ #
+ # The entire callback chain of a +save+, <tt>save!</tt>, or +destroy+ call runs
+ # within a transaction. That includes <tt>after_*</tt> hooks. If everything
+ # goes fine a COMMIT is executed once the chain has been completed.
+ #
+ # If a <tt>before_*</tt> callback cancels the action a ROLLBACK is issued. You
+ # can also trigger a ROLLBACK raising an exception in any of the callbacks,
+ # including <tt>after_*</tt> hooks. Note, however, that in that case the client
+ # needs to be aware of it because an ordinary +save+ will raise such exception
+ # instead of quietly returning +false+.
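+ #
+ # For illustration only (the +Order+ model and +WarehouseClient+ service are assumptions):
+ #
+ # class Order < ActiveRecord::Base
+ # after_save :push_to_warehouse
+ #
+ # private
+ # def push_to_warehouse
+ # raise "warehouse unreachable" unless WarehouseClient.push(self)
+ # end
+ # end
+ #
+ # If +push_to_warehouse+ raises, +save+ raises the same exception and the surrounding transaction is rolled back,
+ # so the INSERT or UPDATE is discarded even though it had already been executed.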
+ #
+ # == Debugging callbacks
+ #
+ # The callback chain is accessible via the <tt>_*_callbacks</tt> method on an object. ActiveModel Callbacks support
+ # <tt>:before</tt>, <tt>:after</tt> and <tt>:around</tt> as values for the <tt>kind</tt> property. The <tt>kind</tt> property
+ # defines what part of the chain the callback runs in.
+ #
+ # To find all callbacks in the before_save callback chain:
+ #
+ # Topic._save_callbacks.select { |cb| cb.kind.eql?(:before) }
+ #
+ # Returns an array of callback objects that form the before_save chain.
+ #
+ # To further check if the before_save chain contains a proc defined as <tt>rest_when_dead</tt> use the <tt>filter</tt> property of the callback object:
+ #
+ # Topic._save_callbacks.select { |cb| cb.kind.eql?(:before) }.collect(&:filter).include?(:rest_when_dead)
+ #
+ # Returns true or false depending on whether the proc is contained in the before_save callback chain on a Topic model.
+ #
+ module Callbacks
+ extend ActiveSupport::Concern
+
+ CALLBACKS = [
+ :after_initialize, :after_find, :after_touch, :before_validation, :after_validation,
+ :before_save, :around_save, :after_save, :before_create, :around_create,
+ :after_create, :before_update, :around_update, :after_update,
+ :before_destroy, :around_destroy, :after_destroy, :after_commit, :after_rollback
+ ]
+
+ module ClassMethods
+ include ActiveModel::Callbacks
+ end
+
+ included do
+ include ActiveModel::Validations::Callbacks
+
+ define_model_callbacks :initialize, :find, :touch, :only => :after
+ define_model_callbacks :save, :create, :update, :destroy
+ end
+
+ def destroy #:nodoc:
+ run_callbacks(:destroy) { super }
+ end
+
+ def touch(*) #:nodoc:
+ run_callbacks(:touch) { super }
+ end
+
+ private
+
+ def create_or_update #:nodoc:
+ run_callbacks(:save) { super }
+ end
+
+ def _create_record #:nodoc:
+ run_callbacks(:create) { super }
+ end
+
+ def _update_record(*) #:nodoc:
+ run_callbacks(:update) { super }
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/coders/json.rb b/activerecord/lib/active_record/coders/json.rb
new file mode 100644
index 0000000000..75d3bfe625
--- /dev/null
+++ b/activerecord/lib/active_record/coders/json.rb
@@ -0,0 +1,13 @@
+module ActiveRecord
+ module Coders # :nodoc:
+ class JSON # :nodoc:
+ def self.dump(obj)
+ ActiveSupport::JSON.encode(obj)
+ end
+
+ def self.load(json)
+ ActiveSupport::JSON.decode(json) unless json.nil?
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/coders/yaml_column.rb b/activerecord/lib/active_record/coders/yaml_column.rb
new file mode 100644
index 0000000000..d3d7396c91
--- /dev/null
+++ b/activerecord/lib/active_record/coders/yaml_column.rb
@@ -0,0 +1,38 @@
+require 'yaml'
+
+module ActiveRecord
+ module Coders # :nodoc:
+ class YAMLColumn # :nodoc:
+
+ attr_accessor :object_class
+
+ def initialize(object_class = Object)
+ @object_class = object_class
+ end
+
+ def dump(obj)
+ return if obj.nil?
+
+ unless obj.is_a?(object_class)
+ raise SerializationTypeMismatch,
+ "Attribute was supposed to be a #{object_class}, but was a #{obj.class}. -- #{obj.inspect}"
+ end
+ YAML.dump obj
+ end
+
+ def load(yaml)
+ return object_class.new if object_class != Object && yaml.nil?
+ return yaml unless yaml.is_a?(String) && yaml =~ /^---/
+ obj = YAML.load(yaml)
+
+ unless obj.is_a?(object_class) || obj.nil?
+ raise SerializationTypeMismatch,
+ "Attribute was supposed to be a #{object_class}, but was a #{obj.class}"
+ end
+ obj ||= object_class.new if object_class != Object
+
+ obj
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb b/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
new file mode 100644
index 0000000000..a5fa9d6adc
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/connection_pool.rb
@@ -0,0 +1,657 @@
+require 'thread'
+require 'thread_safe'
+require 'monitor'
+require 'set'
+
+module ActiveRecord
+ # Raised when a connection could not be obtained within the connection
+ # acquisition timeout period because all connections in the pool
+ # are in use.
+ class ConnectionTimeoutError < ConnectionNotEstablished
+ end
+
+ module ConnectionAdapters
+ # Connection pool base class for managing Active Record database
+ # connections.
+ #
+ # == Introduction
+ #
+ # A connection pool synchronizes thread access to a limited number of
+ # database connections. The basic idea is that each thread checks out a
+ # database connection from the pool, uses that connection, and checks the
+ # connection back in. ConnectionPool is completely thread-safe, and will
+ # ensure that a connection cannot be used by two threads at the same time,
+ # as long as ConnectionPool's contract is correctly followed. It will also
+ # handle cases in which there are more threads than connections: if all
+ # connections have been checked out, and a thread tries to checkout a
+ # connection anyway, then ConnectionPool will wait until some other thread
+ # has checked in a connection.
+ #
+ # == Obtaining (checking out) a connection
+ #
+ # Connections can be obtained and used from a connection pool in several
+ # ways:
+ #
+ # 1. Simply use ActiveRecord::Base.connection as with Active Record 2.1 and
+ # earlier (pre-connection-pooling). Eventually, when you're done with
+ # the connection(s) and wish it to be returned to the pool, you call
+ # ActiveRecord::Base.clear_active_connections!. This will be the
+ # default behavior for Active Record when used in conjunction with
+ # Action Pack's request handling cycle.
+ # 2. Manually check out a connection from the pool with
+ # ActiveRecord::Base.connection_pool.checkout. You are responsible for
+ # returning this connection to the pool when finished by calling
+ # ActiveRecord::Base.connection_pool.checkin(connection).
+ # 3. Use ActiveRecord::Base.connection_pool.with_connection(&block), which
+ # obtains a connection, yields it as the sole argument to the block,
+ # and returns it to the pool after the block completes.
+ #
+ # Connections in the pool are actually AbstractAdapter objects (or objects
+ # compatible with AbstractAdapter's interface).
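+ #
+ # As a sketch of the third approach (the +posts+ table is assumed for illustration):
+ #
+ # ActiveRecord::Base.connection_pool.with_connection do |conn|
+ # conn.execute("SELECT COUNT(*) FROM posts")
+ # end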
+ #
+ # == Options
+ #
+ # There are several connection-pooling-related options that you can add to
+ # your database connection configuration:
+ #
+ # * +pool+: number indicating size of connection pool (default 5)
+ # * +checkout_timeout+: number of seconds to block and wait for a connection
+ # before giving up and raising a timeout error (default 5 seconds).
+ # * +reaping_frequency+: frequency in seconds to periodically run the
+ # Reaper, which attempts to find and recover connections from dead
+ # threads, which can occur if a programmer forgets to close a
+ # connection at the end of a thread or a thread dies unexpectedly.
+ # Regardless of this setting, the Reaper will be invoked before every
+ # blocking wait. (Default nil, which means don't schedule the Reaper).
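+ #
+ # For example, a <tt>database.yml</tt> entry using these options might look like this
+ # (adapter and values are illustrative):
+ #
+ # production:
+ # adapter: postgresql
+ # database: my_application_production
+ # pool: 10
+ # checkout_timeout: 2
+ # reaping_frequency: 10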
+ class ConnectionPool
+ # Threadsafe, fair, FIFO queue. Meant to be used by ConnectionPool
+ # with which it shares a Monitor. But could be a generic Queue.
+ #
+ # The Queue in stdlib's 'thread' could replace this class except
+ # stdlib's doesn't support waiting with a timeout.
+ class Queue
+ def initialize(lock = Monitor.new)
+ @lock = lock
+ @cond = @lock.new_cond
+ @num_waiting = 0
+ @queue = []
+ end
+
+ # Test if any threads are currently waiting on the queue.
+ def any_waiting?
+ synchronize do
+ @num_waiting > 0
+ end
+ end
+
+ # Returns the number of threads currently waiting on this
+ # queue.
+ def num_waiting
+ synchronize do
+ @num_waiting
+ end
+ end
+
+ # Add +element+ to the queue. Never blocks.
+ def add(element)
+ synchronize do
+ @queue.push element
+ @cond.signal
+ end
+ end
+
+ # If +element+ is in the queue, remove and return it, or nil.
+ def delete(element)
+ synchronize do
+ @queue.delete(element)
+ end
+ end
+
+ # Remove all elements from the queue.
+ def clear
+ synchronize do
+ @queue.clear
+ end
+ end
+
+ # Remove the head of the queue.
+ #
+ # If +timeout+ is not given, remove and return the head of the
+ # queue if the number of available elements is strictly
+ # greater than the number of threads currently waiting (that
+ # is, don't jump ahead in line). Otherwise, return nil.
+ #
+ # If +timeout+ is given, block if there is no element
+ # available, waiting up to +timeout+ seconds for an element to
+ # become available.
+ #
+ # Raises:
+ # - ConnectionTimeoutError if +timeout+ is given and no element
+ # becomes available after +timeout+ seconds.
+ def poll(timeout = nil)
+ synchronize do
+ if timeout
+ no_wait_poll || wait_poll(timeout)
+ else
+ no_wait_poll
+ end
+ end
+ end
+
+ private
+
+ def synchronize(&block)
+ @lock.synchronize(&block)
+ end
+
+ # Test if the queue currently contains any elements.
+ def any?
+ !@queue.empty?
+ end
+
+ # A thread can remove an element from the queue without
+ # waiting if and only if the number of currently available
+ # connections is strictly greater than the number of waiting
+ # threads.
+ def can_remove_no_wait?
+ @queue.size > @num_waiting
+ end
+
+ # Removes and returns the head of the queue if possible, or nil.
+ def remove
+ @queue.shift
+ end
+
+ # Remove and return the head of the queue if the number of
+ # available elements is strictly greater than the number of
+ # threads currently waiting. Otherwise, return nil.
+ def no_wait_poll
+ remove if can_remove_no_wait?
+ end
+
+ # Waits on the queue up to +timeout+ seconds, then removes and
+ # returns the head of the queue.
+ def wait_poll(timeout)
+ @num_waiting += 1
+
+ t0 = Time.now
+ elapsed = 0
+ loop do
+ @cond.wait(timeout - elapsed)
+
+ return remove if any?
+
+ elapsed = Time.now - t0
+ if elapsed >= timeout
+ msg = 'could not obtain a database connection within %0.3f seconds (waited %0.3f seconds)' %
+ [timeout, elapsed]
+ raise ConnectionTimeoutError, msg
+ end
+ end
+ ensure
+ @num_waiting -= 1
+ end
+ end
+
+ # Every +frequency+ seconds, the reaper will call +reap+ on +pool+.
+ # A reaper instantiated with a nil frequency will never reap the
+ # connection pool.
+ #
+ # Configure the frequency by setting "reaping_frequency" in your
+ # database yaml file.
+ class Reaper
+ attr_reader :pool, :frequency
+
+ def initialize(pool, frequency)
+ @pool = pool
+ @frequency = frequency
+ end
+
+ def run
+ return unless frequency
+ Thread.new(frequency, pool) { |t, p|
+ while true
+ sleep t
+ p.reap
+ end
+ }
+ end
+ end
+
+ include MonitorMixin
+
+ attr_accessor :automatic_reconnect, :checkout_timeout
+ attr_reader :spec, :connections, :size, :reaper
+
+ # Creates a new ConnectionPool object. +spec+ is a ConnectionSpecification
+ # object which describes database connection information (e.g. adapter,
+ # host name, username, password, etc), as well as the maximum size for
+ # this ConnectionPool.
+ #
+ # The default ConnectionPool maximum size is 5.
+ def initialize(spec)
+ super()
+
+ @spec = spec
+
+ @checkout_timeout = spec.config[:checkout_timeout] || 5
+ @reaper = Reaper.new self, spec.config[:reaping_frequency]
+ @reaper.run
+
+ # default max pool size to 5
+ @size = (spec.config[:pool] && spec.config[:pool].to_i) || 5
+
+ # The cache of reserved connections mapped to threads
+ @reserved_connections = ThreadSafe::Cache.new(:initial_capacity => @size)
+
+ @connections = []
+ @automatic_reconnect = true
+
+ @available = Queue.new self
+ end
+
+ # Retrieve the connection associated with the current thread, or call
+ # #checkout to obtain one if necessary.
+ #
+ # #connection can be called any number of times; the connection is
+ # held in a hash keyed by the thread id.
+ def connection
+ # this is correctly implemented double-checked locking
+ # (ThreadSafe::Cache's lookups have volatile semantics)
+ @reserved_connections[current_connection_id] || synchronize do
+ @reserved_connections[current_connection_id] ||= checkout
+ end
+ end
+
+ # Is there an open connection that is being used for the current thread?
+ def active_connection?
+ synchronize do
+ @reserved_connections.fetch(current_connection_id) {
+ return false
+ }.in_use?
+ end
+ end
+
+ # Signal that the thread is finished with the current connection.
+ # #release_connection releases the connection-thread association
+ # and returns the connection to the pool.
+ def release_connection(with_id = current_connection_id)
+ synchronize do
+ conn = @reserved_connections.delete(with_id)
+ checkin conn if conn
+ end
+ end
+
+ # If a connection already exists yield it to the block. If no connection
+ # exists checkout a connection, yield it to the block, and checkin the
+ # connection when finished.
+ def with_connection
+ connection_id = current_connection_id
+ fresh_connection = true unless active_connection?
+ yield connection
+ ensure
+ release_connection(connection_id) if fresh_connection
+ end
+
+ # Returns true if a connection has already been opened.
+ def connected?
+ synchronize { @connections.any? }
+ end
+
+ # Disconnects all connections in the pool, and clears the pool.
+ def disconnect!
+ synchronize do
+ @reserved_connections.clear
+ @connections.each do |conn|
+ checkin conn
+ conn.disconnect!
+ end
+ @connections = []
+ @available.clear
+ end
+ end
+
+ # Checks in all connections, disconnects and removes those that require
+ # reloading, and rebuilds the queue of available connections.
+ def clear_reloadable_connections!
+ synchronize do
+ @reserved_connections.clear
+ @connections.each do |conn|
+ checkin conn
+ conn.disconnect! if conn.requires_reloading?
+ end
+ @connections.delete_if do |conn|
+ conn.requires_reloading?
+ end
+ @available.clear
+ @connections.each do |conn|
+ @available.add conn
+ end
+ end
+ end
+
+ # Check-out a database connection from the pool, indicating that you want
+ # to use it. You should call #checkin when you no longer need this.
+ #
+ # This is done by either returning and leasing an existing connection, or by
+ # creating a new connection and leasing it.
+ #
+ # If all connections are leased and the pool is at capacity (meaning the
+ # number of currently leased connections is greater than or equal to the
+ # size limit set), an ActiveRecord::ConnectionTimeoutError exception will be raised.
+ #
+ # Returns: an AbstractAdapter object.
+ #
+ # Raises:
+ # - ConnectionTimeoutError: no connection can be obtained from the pool.
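+ #
+ # A minimal usage sketch (+pool+ stands for any ConnectionPool instance):
+ #
+ # conn = pool.checkout
+ # conn.execute("SELECT 1")
+ # pool.checkin(conn)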
+ def checkout
+ synchronize do
+ conn = acquire_connection
+ conn.lease
+ checkout_and_verify(conn)
+ end
+ end
+
+ # Check-in a database connection back into the pool, indicating that you
+ # no longer need this connection.
+ #
+ # +conn+: an AbstractAdapter object, which was obtained earlier by
+ # calling +checkout+ on this pool.
+ def checkin(conn)
+ synchronize do
+ owner = conn.owner
+
+ conn.run_callbacks :checkin do
+ conn.expire
+ end
+
+ release owner
+
+ @available.add conn
+ end
+ end
+
+ # Remove a connection from the connection pool. The connection will
+ # remain open and active but will no longer be managed by this pool.
+ def remove(conn)
+ synchronize do
+ @connections.delete conn
+ @available.delete conn
+
+ release conn.owner
+
+ @available.add checkout_new_connection if @available.any_waiting?
+ end
+ end
+
+ # Recover lost connections for the pool. A lost connection can occur if
+ # a programmer forgets to checkin a connection at the end of a thread
+ # or a thread dies unexpectedly.
+ def reap
+ stale_connections = synchronize do
+ @connections.select do |conn|
+ conn.in_use? && !conn.owner.alive?
+ end
+ end
+
+ stale_connections.each do |conn|
+ synchronize do
+ if conn.active?
+ conn.reset!
+ checkin conn
+ else
+ remove conn
+ end
+ end
+ end
+ end
+
+ private
+
+ # Acquire a connection by one of 1) immediately removing one
+ # from the queue of available connections, 2) creating a new
+ # connection if the pool is not at capacity, 3) waiting on the
+ # queue for a connection to become available.
+ #
+ # Raises:
+ # - ConnectionTimeoutError if a connection could not be acquired
+ def acquire_connection
+ if conn = @available.poll
+ conn
+ elsif @connections.size < @size
+ checkout_new_connection
+ else
+ reap
+ @available.poll(@checkout_timeout)
+ end
+ end
+
+ def release(owner)
+ thread_id = owner.object_id
+
+ @reserved_connections.delete thread_id
+ end
+
+ def new_connection
+ Base.send(spec.adapter_method, spec.config)
+ end
+
+ def current_connection_id #:nodoc:
+ Base.connection_id ||= Thread.current.object_id
+ end
+
+ def checkout_new_connection
+ raise ConnectionNotEstablished unless @automatic_reconnect
+
+ c = new_connection
+ c.pool = self
+ @connections << c
+ c
+ end
+
+ def checkout_and_verify(c)
+ c.run_callbacks :checkout do
+ c.verify!
+ end
+ c
+ end
+ end
+
+ # ConnectionHandler is a collection of ConnectionPool objects. It is used
+ # for keeping separate connection pools for Active Record models that connect
+ # to different databases.
+ #
+ # For example, suppose that you have 5 models, with the following hierarchy:
+ #
+ # class Author < ActiveRecord::Base
+ # end
+ #
+ # class BankAccount < ActiveRecord::Base
+ # end
+ #
+ # class Book < ActiveRecord::Base
+ # establish_connection "library_db"
+ # end
+ #
+ # class ScaryBook < Book
+ # end
+ #
+ # class GoodBook < Book
+ # end
+ #
+ # And a database.yml that looked like this:
+ #
+ # development:
+ # database: my_application
+ # host: localhost
+ #
+ # library_db:
+ # database: library
+ # host: some.library.org
+ #
+ # Your primary database in the development environment is "my_application"
+ # but the Book model connects to a separate database called "library_db"
+ # (this can even be a database on a different machine).
+ #
+ # Book, ScaryBook and GoodBook will all use the same connection pool to
+ # "library_db" while Author, BankAccount, and any other models you create
+ # will use the default connection pool to "my_application".
+ #
+ # The various connection pools are managed by a single instance of
+ # ConnectionHandler accessible via ActiveRecord::Base.connection_handler.
+ # All Active Record models use this handler to determine the connection pool that they
+ # should use.
+ class ConnectionHandler
+ def initialize
+ # These caches are keyed by klass.name, NOT klass. Keying them by klass
+ # alone would lead to memory leaks in development mode as all previous
+ # instances of the class would stay in memory.
+ @owner_to_pool = ThreadSafe::Cache.new(:initial_capacity => 2) do |h,k|
+ h[k] = ThreadSafe::Cache.new(:initial_capacity => 2)
+ end
+ @class_to_pool = ThreadSafe::Cache.new(:initial_capacity => 2) do |h,k|
+ h[k] = ThreadSafe::Cache.new
+ end
+ end
+
+ def connection_pool_list
+ owner_to_pool.values.compact
+ end
+
+ def connection_pools
+ ActiveSupport::Deprecation.warn(
+ "In the next release, this will return the same as #connection_pool_list. " \
+ "(An array of pools, rather than a hash mapping specs to pools.)"
+ )
+ Hash[connection_pool_list.map { |pool| [pool.spec, pool] }]
+ end
+
+ def establish_connection(owner, spec)
+ @class_to_pool.clear
+ raise RuntimeError, "Anonymous class is not allowed." unless owner.name
+ owner_to_pool[owner.name] = ConnectionAdapters::ConnectionPool.new(spec)
+ end
+
+ # Returns true if there are any active connections among the connection
+ # pools that the ConnectionHandler is managing.
+ def active_connections?
+ connection_pool_list.any?(&:active_connection?)
+ end
+
+ # Returns any connections in use by the current thread back to the pool,
+ # and also returns connections to the pool cached by threads that are no
+ # longer alive.
+ def clear_active_connections!
+ connection_pool_list.each(&:release_connection)
+ end
+
+ # Clears reloadable connections on all pools managed by this handler.
+ def clear_reloadable_connections!
+ connection_pool_list.each(&:clear_reloadable_connections!)
+ end
+
+ def clear_all_connections!
+ connection_pool_list.each(&:disconnect!)
+ end
+
+ # Locate the connection of the nearest super class. This can be an
+ # active or defined connection: if it is the latter, it will be
+ # opened and set as the active connection for the class it was defined
+ # for (not necessarily the current class).
+ def retrieve_connection(klass) #:nodoc:
+ pool = retrieve_connection_pool(klass)
+ raise ConnectionNotEstablished, "No connection pool for #{klass}" unless pool
+ conn = pool.connection
+ raise ConnectionNotEstablished, "No connection for #{klass} in connection pool" unless conn
+ conn
+ end
+
+ # Returns true if a connection that's accessible to this class has
+ # already been opened.
+ def connected?(klass)
+ conn = retrieve_connection_pool(klass)
+ conn && conn.connected?
+ end
+
+ # Remove the connection for this class. This will close the active
+ # connection and the defined connection (if they exist). The result
+ # can be used as an argument for establish_connection, for easily
+ # re-establishing the connection.
+ def remove_connection(owner)
+ if pool = owner_to_pool.delete(owner.name)
+ @class_to_pool.clear
+ pool.automatic_reconnect = false
+ pool.disconnect!
+ pool.spec.config
+ end
+ end
+
+ # Retrieving the connection pool happens a lot so we cache it in @class_to_pool.
+ # This makes retrieving the connection pool O(1) once the process is warm.
+ # When a connection is established or removed, we invalidate the cache.
+ #
+ # Ideally we would use #fetch here, as class_to_pool[klass] may sometimes be nil.
+ # However, benchmarking (https://gist.github.com/jonleighton/3552829) showed that
+ # #fetch is significantly slower than #[]. So in the nil case, no caching will
+ # take place, but that's ok since the nil case is not the common one that we wish
+ # to optimise for.
+ def retrieve_connection_pool(klass)
+ class_to_pool[klass.name] ||= begin
+ until pool = pool_for(klass)
+ klass = klass.superclass
+ break unless klass <= Base
+ end
+
+ class_to_pool[klass.name] = pool
+ end
+ end
+
+ private
+
+ def owner_to_pool
+ @owner_to_pool[Process.pid]
+ end
+
+ def class_to_pool
+ @class_to_pool[Process.pid]
+ end
+
+ def pool_for(owner)
+ owner_to_pool.fetch(owner.name) {
+ if ancestor_pool = pool_from_any_process_for(owner)
+ # A connection was established in an ancestor process that must have
+ # subsequently forked. We can't reuse the connection, but we can copy
+ # the specification and establish a new connection with it.
+ establish_connection owner, ancestor_pool.spec
+ else
+ owner_to_pool[owner.name] = nil
+ end
+ }
+ end
+
+ def pool_from_any_process_for(owner)
+ owner_to_pool = @owner_to_pool.values.find { |v| v[owner.name] }
+ owner_to_pool && owner_to_pool[owner.name]
+ end
+ end
+
+ class ConnectionManagement
+ def initialize(app)
+ @app = app
+ end
+
+ def call(env)
+ testing = env.key?('rack.test')
+
+ response = @app.call(env)
+ response[2] = ::Rack::BodyProxy.new(response[2]) do
+ ActiveRecord::Base.clear_active_connections! unless testing
+ end
+
+ response
+ rescue Exception
+ ActiveRecord::Base.clear_active_connections! unless testing
+ raise
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb b/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb
new file mode 100644
index 0000000000..c0a2111571
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/database_limits.rb
@@ -0,0 +1,67 @@
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ module DatabaseLimits
+
+ # Returns the maximum length of a table alias.
+ def table_alias_length
+ 255
+ end
+
+ # Returns the maximum length of a column name.
+ def column_name_length
+ 64
+ end
+
+ # Returns the maximum length of a table name.
+ def table_name_length
+ 64
+ end
+
+ # Returns the maximum allowed length for an index name. This
+ # limit is enforced by Rails and is less than or equal to
+ # <tt>index_name_length</tt>. The gap between the two allows
+ # internal Rails operations to use prefixes in temporary
+ # operations.
+ def allowed_index_name_length
+ index_name_length
+ end
+
+ # Returns the maximum length of an index name.
+ def index_name_length
+ 64
+ end
+
+ # Returns the maximum number of columns per table.
+ def columns_per_table
+ 1024
+ end
+
+ # Returns the maximum number of indexes per table.
+ def indexes_per_table
+ 16
+ end
+
+ # Returns the maximum number of columns in a multicolumn index.
+ def columns_per_multicolumn_index
+ 16
+ end
+
+ # Returns the maximum number of elements in an IN (x,y,z) clause.
+ # nil means no limit.
+ def in_clause_length
+ nil
+ end
+
+ # Returns the maximum length of an SQL query.
+ def sql_query_length
+ 1048575
+ end
+
+ # Returns maximum number of joins in a single query.
+ def joins_per_query
+ 256
+ end
+
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb b/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
new file mode 100644
index 0000000000..98e96099cb
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/database_statements.rb
@@ -0,0 +1,371 @@
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ module DatabaseStatements
+ def initialize
+ super
+ reset_transaction
+ end
+
+ # Converts an arel AST to SQL
+ def to_sql(arel, binds = [])
+ if arel.respond_to?(:ast)
+ collected = visitor.accept(arel.ast, collector)
+ collected.compile(binds.dup, self)
+ else
+ arel
+ end
+ end
+
+ # This is used in the StatementCache object. It returns an object that
+ # can be used to query the database repeatedly.
+ def cacheable_query(arel) # :nodoc:
+ if prepared_statements
+ ActiveRecord::StatementCache.query visitor, arel.ast
+ else
+ ActiveRecord::StatementCache.partial_query visitor, arel.ast, collector
+ end
+ end
+
+ # Returns an ActiveRecord::Result instance.
+ def select_all(arel, name = nil, binds = [])
+ arel, binds = binds_from_relation arel, binds
+ select(to_sql(arel, binds), name, binds)
+ end
+
+ # Returns a record hash with the column names as keys and column values
+ # as values.
+ def select_one(arel, name = nil, binds = [])
+ select_all(arel, name, binds).first
+ end
+
+ # Returns a single value from a record
+ def select_value(arel, name = nil, binds = [])
+ if result = select_one(arel, name, binds)
+ result.values.first
+ end
+ end
+
+ # Returns an array of the values of the first column in a select:
+ # select_values("SELECT id FROM companies LIMIT 3") => [1,2,3]
+ def select_values(arel, name = nil)
+ arel, binds = binds_from_relation arel, []
+ select_rows(to_sql(arel, binds), name, binds).map(&:first)
+ end
+
+ # Returns an array of arrays containing the field values.
+ # Order is the same as that returned by +columns+.
+ def select_rows(sql, name = nil, binds = [])
+ end
+ undef_method :select_rows
+
+ # Executes the SQL statement in the context of this connection.
+ def execute(sql, name = nil)
+ end
+ undef_method :execute
+
+ # Executes +sql+ statement in the context of this connection using
+ # +binds+ as the bind substitutes. +name+ is logged along with
+ # the executed +sql+ statement.
+ def exec_query(sql, name = 'SQL', binds = [])
+ end
+
+ # Executes insert +sql+ statement in the context of this connection using
+ # +binds+ as the bind substitutes. +name+ is logged along with
+ # the executed +sql+ statement.
+ def exec_insert(sql, name, binds, pk = nil, sequence_name = nil)
+ exec_query(sql, name, binds)
+ end
+
+ # Executes delete +sql+ statement in the context of this connection using
+ # +binds+ as the bind substitutes. +name+ is logged along with
+ # the executed +sql+ statement.
+ def exec_delete(sql, name, binds)
+ exec_query(sql, name, binds)
+ end
+
+ # Executes update +sql+ statement in the context of this connection using
+ # +binds+ as the bind substitutes. +name+ is logged along with
+ # the executed +sql+ statement.
+ def exec_update(sql, name, binds)
+ exec_query(sql, name, binds)
+ end
+
+ # Returns the last auto-generated ID from the affected table.
+ #
+ # +id_value+ will be returned unless the value is nil, in
+ # which case the database will attempt to calculate the last inserted
+ # id and return that value.
+ #
+ # If the next id was calculated in advance (as in Oracle), it should be
+ # passed in as +id_value+.
+ def insert(arel, name = nil, pk = nil, id_value = nil, sequence_name = nil, binds = [])
+ sql, binds = sql_for_insert(to_sql(arel, binds), pk, id_value, sequence_name, binds)
+ value = exec_insert(sql, name, binds, pk, sequence_name)
+ id_value || last_inserted_id(value)
+ end
+
+ # Executes the update statement and returns the number of rows affected.
+ def update(arel, name = nil, binds = [])
+ exec_update(to_sql(arel, binds), name, binds)
+ end
+
+ # Executes the delete statement and returns the number of rows affected.
+ def delete(arel, name = nil, binds = [])
+ exec_delete(to_sql(arel, binds), name, binds)
+ end
+
+ # Returns +true+ when the connection adapter supports prepared statement
+ # caching, otherwise returns +false+
+ def supports_statement_cache?
+ false
+ end
+
+ # Runs the given block in a database transaction, and returns the result
+ # of the block.
+ #
+ # == Nested transactions support
+ #
+ # Most databases don't support true nested transactions. At the time of
+ # writing, the only database we're aware of that supports true nested
+ # transactions is MS-SQL.
+ #
+ # In order to get around this problem, #transaction will emulate the effect
+ # of nested transactions, by using savepoints:
+ # http://dev.mysql.com/doc/refman/5.0/en/savepoint.html
+ # Savepoints are supported by MySQL and PostgreSQL. SQLite3 version >= '3.6.8'
+ # supports savepoints.
+ #
+ # It is safe to call this method if a database transaction is already open,
+ # i.e. if #transaction is called within another #transaction block. In case
+ # of a nested call, #transaction will behave as follows:
+ #
+ # - The block will be run without doing anything. All database statements
+ # that happen within the block are effectively appended to the already
+ # open database transaction.
+ # - However, if +:requires_new+ is set, the block will be wrapped in a
+ # database savepoint acting as a sub-transaction.
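+ #
+ # For example (+User+ is an illustrative model):
+ #
+ # User.transaction do
+ # User.create!(username: 'Kotori')
+ # User.transaction(requires_new: true) do
+ # User.create!(username: 'Nemu')
+ # raise ActiveRecord::Rollback # rolls back to the savepoint only
+ # end
+ # end
+ #
+ # Here only the inner record is discarded; the outer transaction still commits 'Kotori'.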
+ #
+ # === Caveats
+ #
+ # MySQL doesn't support DDL transactions. If you perform a DDL operation,
+ # then any created savepoints will be automatically released. For example,
+ # if you've created a savepoint, then you execute a CREATE TABLE statement,
+ # then the savepoint that was created will be automatically released.
+ #
+ # This means that, on MySQL, you shouldn't execute DDL operations inside
+ # a #transaction call that you know might create a savepoint. Otherwise,
+ # #transaction will raise exceptions when it tries to release the
+ # already-automatically-released savepoints:
+ #
+ # Model.connection.transaction do # BEGIN
+ # Model.connection.transaction(requires_new: true) do # CREATE SAVEPOINT active_record_1
+ # Model.connection.create_table(...)
+ # # active_record_1 now automatically released
+ # end # RELEASE SAVEPOINT active_record_1 <--- BOOM! database error!
+ # end
+ #
+ # == Transaction isolation
+ #
+ # If your database supports setting the isolation level for a transaction, you can set
+ # it like so:
+ #
+ # Post.transaction(isolation: :serializable) do
+ # # ...
+ # end
+ #
+ # Valid isolation levels are:
+ #
+ # * <tt>:read_uncommitted</tt>
+ # * <tt>:read_committed</tt>
+ # * <tt>:repeatable_read</tt>
+ # * <tt>:serializable</tt>
+ #
+ # You should consult the documentation for your database to understand the
+ # semantics of these different levels:
+ #
+ # * http://www.postgresql.org/docs/9.1/static/transaction-iso.html
+ # * https://dev.mysql.com/doc/refman/5.0/en/set-transaction.html
+ #
+ # An <tt>ActiveRecord::TransactionIsolationError</tt> will be raised if:
+ #
+ # * The adapter does not support setting the isolation level
+ # * You are joining an existing open transaction
+ # * You are creating a nested (savepoint) transaction
+ #
+ # The mysql, mysql2 and postgresql adapters support setting the transaction
+ # isolation level. However, support is disabled for MySQL versions below 5,
+ # because they are affected by a bug[http://bugs.mysql.com/bug.php?id=39170]
+ # which means the isolation level gets persisted outside the transaction.
+ def transaction(options = {})
+ options.assert_valid_keys :requires_new, :joinable, :isolation
+
+ if !options[:requires_new] && current_transaction.joinable?
+ if options[:isolation]
+ raise ActiveRecord::TransactionIsolationError, "cannot set isolation when joining a transaction"
+ end
+ yield
+ else
+ transaction_manager.within_new_transaction(options) { yield }
+ end
+ rescue ActiveRecord::Rollback
+ # rollbacks are silently swallowed
+ end
+
+ attr_reader :transaction_manager #:nodoc:
+
+ delegate :within_new_transaction, :open_transactions, :current_transaction, :begin_transaction, :commit_transaction, :rollback_transaction, to: :transaction_manager
+
+ def transaction_open?
+ current_transaction.open?
+ end
+
+ def reset_transaction #:nodoc:
+ @transaction_manager = TransactionManager.new(self)
+ end
+
+ # Register a record with the current transaction so that its after_commit and after_rollback callbacks
+ # can be called.
+ def add_transaction_record(record)
+ current_transaction.add_record(record)
+ end
+
+ # Begins the transaction (and turns off auto-committing).
+ def begin_db_transaction() end
+
+ def transaction_isolation_levels
+ {
+ read_uncommitted: "READ UNCOMMITTED",
+ read_committed: "READ COMMITTED",
+ repeatable_read: "REPEATABLE READ",
+ serializable: "SERIALIZABLE"
+ }
+ end
+
+ # Begins the transaction with the isolation level set. Raises an error by
+ # default; adapters that support setting the isolation level should implement
+ # this method.
+ def begin_isolated_db_transaction(isolation)
+ raise ActiveRecord::TransactionIsolationError, "adapter does not support setting transaction isolation"
+ end
+
+ # Commits the transaction (and turns on auto-committing).
+ def commit_db_transaction() end
+
+ # Rolls back the transaction (and turns on auto-committing). Must be
+ # done if the transaction block raises an exception or returns false.
+ def rollback_db_transaction() end
+
+ def default_sequence_name(table, column)
+ nil
+ end
+
+ # Set the sequence to the max value of the table's column.
+ def reset_sequence!(table, column, sequence = nil)
+ # Do nothing by default. Implement for PostgreSQL, Oracle, ...
+ end
+
+ # Inserts the given fixture into the table. Overridden in adapters that require
+ # something beyond a simple insert (eg. Oracle).
+ def insert_fixture(fixture, table_name)
+ columns = schema_cache.columns_hash(table_name)
+
+ key_list = []
+ value_list = fixture.map do |name, value|
+ key_list << quote_column_name(name)
+ quote(value, columns[name])
+ end
+
+ execute "INSERT INTO #{quote_table_name(table_name)} (#{key_list.join(', ')}) VALUES (#{value_list.join(', ')})", 'Fixture Insert'
+ end
+
+ def empty_insert_statement_value
+ "DEFAULT VALUES"
+ end
+
+ def limited_update_conditions(where_sql, quoted_table_name, quoted_primary_key)
+ "WHERE #{quoted_primary_key} IN (SELECT #{quoted_primary_key} FROM #{quoted_table_name} #{where_sql})"
+ end
+
+ # Sanitizes the given LIMIT parameter in order to prevent SQL injection.
+ #
+ # The +limit+ may be anything that can evaluate to a string via #to_s. It
+ # should look like an integer, or a comma-delimited list of integers, or
+ # an Arel SQL literal.
+ #
+ # Returns Integer and Arel::Nodes::SqlLiteral limits as is.
+ # Returns the sanitized limit parameter, either as an integer, or as a
+ # string which contains a comma-delimited list of integers.
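+ #
+ # For example (values are illustrative):
+ #
+ # sanitize_limit(10) # => 10
+ # sanitize_limit("10") # => 10
+ # sanitize_limit("10,20") # => Arel SQL literal "10,20"
+ # sanitize_limit("10 OR 1=1") # raises ArgumentError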
+ def sanitize_limit(limit)
+ if limit.is_a?(Integer) || limit.is_a?(Arel::Nodes::SqlLiteral)
+ limit
+ elsif limit.to_s.include?(',')
+ Arel.sql limit.to_s.split(',').map{ |i| Integer(i) }.join(',')
+ else
+ Integer(limit)
+ end
+ end
+
+ # The default strategy for an UPDATE with joins is to use a subquery. This doesn't work
+ # on MySQL (even when aliasing the tables), but MySQL allows using JOIN directly in
+ # an UPDATE statement, so in the MySQL adapters we redefine this to do that.
+ def join_to_update(update, select) #:nodoc:
+ key = update.key
+ subselect = subquery_for(key, select)
+
+ update.where key.in(subselect)
+ end
+
+ def join_to_delete(delete, select, key) #:nodoc:
+ subselect = subquery_for(key, select)
+
+ delete.where key.in(subselect)
+ end
+
+ protected
+
+ # Returns a subquery for the given key using the join information.
+ def subquery_for(key, select)
+ subselect = select.clone
+ subselect.projections = [key]
+ subselect
+ end
+
+ # Returns an ActiveRecord::Result instance.
+ def select(sql, name = nil, binds = [])
+ end
+ undef_method :select
+
+ # Returns the last auto-generated ID from the affected table.
+ def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
+ execute(sql, name)
+ id_value
+ end
+
+ # Executes the update statement and returns the number of rows affected.
+ def update_sql(sql, name = nil)
+ execute(sql, name)
+ end
+
+ # Executes the delete statement and returns the number of rows affected.
+ def delete_sql(sql, name = nil)
+ update_sql(sql, name)
+ end
+
+ def sql_for_insert(sql, pk, id_value, sequence_name, binds)
+ [sql, binds]
+ end
+
+ def last_inserted_id(result)
+ row = result.rows.first
+ row && row.first
+ end
+
+ def binds_from_relation(relation, binds)
+ if relation.is_a?(Relation) && binds.empty?
+ relation, binds = relation.arel, relation.bind_values
+ end
+ [relation, binds]
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb b/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb
new file mode 100644
index 0000000000..4a4506c7f5
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/query_cache.rb
@@ -0,0 +1,95 @@
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ module QueryCache
+ class << self
+ def included(base) #:nodoc:
+ dirties_query_cache base, :insert, :update, :delete
+ end
+
+ def dirties_query_cache(base, *method_names)
+ method_names.each do |method_name|
+ base.class_eval <<-end_code, __FILE__, __LINE__ + 1
+ def #{method_name}(*)
+ clear_query_cache if @query_cache_enabled
+ super
+ end
+ end_code
+ end
+ end
+ end
+
+ attr_reader :query_cache, :query_cache_enabled
+
+ def initialize(*)
+ super
+ @query_cache = Hash.new { |h,sql| h[sql] = {} }
+ @query_cache_enabled = false
+ end
+
+ # Enable the query cache within the block.
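+ #
+ # A usage sketch (the +Post+ model is an assumption for illustration):
+ #
+ # ActiveRecord::Base.connection.cache do
+ # Post.first # hits the database
+ # Post.first # served from the query cache
+ # end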
+ def cache
+ old, @query_cache_enabled = @query_cache_enabled, true
+ yield
+ ensure
+ @query_cache_enabled = old
+ clear_query_cache unless @query_cache_enabled
+ end
+
+ def enable_query_cache!
+ @query_cache_enabled = true
+ end
+
+ def disable_query_cache!
+ @query_cache_enabled = false
+ end
+
+ # Disable the query cache within the block.
+ def uncached
+ old, @query_cache_enabled = @query_cache_enabled, false
+ yield
+ ensure
+ @query_cache_enabled = old
+ end
+
+ # Clears the query cache.
+ #
+ # One reason you may wish to call this method explicitly is between queries
+ # that ask the database to randomize results. Otherwise the cache would see
+ # the same SQL query and repeatedly return the same result each time, silently
+ # undermining the randomness you were expecting.
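+ #
+ # For example:
+ #
+ # ActiveRecord::Base.connection.clear_query_cache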
+ def clear_query_cache
+ @query_cache.clear
+ end
+
+ def select_all(arel, name = nil, binds = [])
+ if @query_cache_enabled && !locked?(arel)
+ arel, binds = binds_from_relation arel, binds
+ sql = to_sql(arel, binds)
+ cache_sql(sql, binds) { super(sql, name, binds) }
+ else
+ super
+ end
+ end
+
+ private
+
+ def cache_sql(sql, binds)
+ result =
+ if @query_cache[sql].key?(binds)
+ ActiveSupport::Notifications.instrument("sql.active_record",
+ :sql => sql, :binds => binds, :name => "CACHE", :connection_id => object_id)
+ @query_cache[sql][binds]
+ else
+ @query_cache[sql][binds] = yield
+ end
+ result.dup
+ end
+
+ # If arel is locked this is a SELECT ... FOR UPDATE or somesuch. Such
+ # queries should not be cached.
+ def locked?(arel)
+ arel.respond_to?(:locked) && arel.locked
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb b/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb
new file mode 100644
index 0000000000..eb88845913
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/quoting.rb
@@ -0,0 +1,133 @@
+require 'active_support/core_ext/big_decimal/conversions'
+
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ module Quoting
+ # Quotes the column value to help prevent
+ # {SQL injection attacks}[http://en.wikipedia.org/wiki/SQL_injection].
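+ #
+ # For example, when no column is given (values are illustrative):
+ #
+ # quote("O'Brien") # => "'O''Brien'"
+ # quote(nil) # => "NULL"
+ # quote(true) # => "'t'"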
+ def quote(value, column = nil)
+ # records are quoted as their primary key
+ return value.quoted_id if value.respond_to?(:quoted_id)
+
+ if column
+ value = column.cast_type.type_cast_for_database(value)
+ end
+
+ _quote(value)
+ end
+
+ # Cast a +value+ to a type that the database understands. For example,
+ # SQLite does not understand dates, so this method will convert a Date
+ # to a String.
+ def type_cast(value, column)
+ if value.respond_to?(:quoted_id) && value.respond_to?(:id)
+ return value.id
+ end
+
+ if column
+ value = column.cast_type.type_cast_for_database(value)
+ end
+
+ _type_cast(value)
+ rescue TypeError
+ to_type = column ? " to #{column.type}" : ""
+ raise TypeError, "can't cast #{value.class}#{to_type}"
+ end
+
+ # Quotes a string, escaping any ' (single quote) and \ (backslash)
+ # characters.
+ def quote_string(s)
+ s.gsub(/\\/, '\&\&').gsub(/'/, "''") # ' (for ruby-mode)
+ end
+
+ # Quotes the column name. Defaults to no quoting.
+ def quote_column_name(column_name)
+ column_name
+ end
+
+ # Quotes the table name. Defaults to column name quoting.
+ def quote_table_name(table_name)
+ quote_column_name(table_name)
+ end
+
+ # Override to return the quoted table name for assignment. Defaults to
+ # table quoting.
+ #
+ # This works for mysql and mysql2 where table.column can be used to
+ # resolve ambiguity.
+ #
+ # We override this in the sqlite3 and postgresql adapters to use only
+ # the column name (as per syntax requirements).
+ def quote_table_name_for_assignment(table, attr)
+ quote_table_name("#{table}.#{attr}")
+ end
+
+ def quoted_true
+ "'t'"
+ end
+
+ def unquoted_true
+ 't'
+ end
+
+ def quoted_false
+ "'f'"
+ end
+
+ def unquoted_false
+ 'f'
+ end
+
+ def quoted_date(value)
+ if value.acts_like?(:time)
+ zone_conversion_method = ActiveRecord::Base.default_timezone == :utc ? :getutc : :getlocal
+
+ if value.respond_to?(zone_conversion_method)
+ value = value.send(zone_conversion_method)
+ end
+ end
+
+ value.to_s(:db)
+ end
+
+ private
+
+ def types_which_need_no_typecasting
+ [nil, Numeric, String]
+ end
+
+ def _quote(value)
+ case value
+ when String, ActiveSupport::Multibyte::Chars, Type::Binary::Data
+ "'#{quote_string(value.to_s)}'"
+ when true then quoted_true
+ when false then quoted_false
+ when nil then "NULL"
+ # BigDecimals need to be put in a non-normalized form and quoted.
+ when BigDecimal then value.to_s('F')
+ when Numeric, ActiveSupport::Duration then value.to_s
+ when Date, Time then "'#{quoted_date(value)}'"
+ when Symbol then "'#{quote_string(value.to_s)}'"
+ when Class then "'#{value.to_s}'"
+ else
+ "'#{quote_string(YAML.dump(value))}'"
+ end
+ end
+
+ def _type_cast(value)
+ case value
+ when Symbol, ActiveSupport::Multibyte::Chars, Type::Binary::Data
+ value.to_s
+ when true then unquoted_true
+ when false then unquoted_false
+ # BigDecimals need to be put in a non-normalized form and quoted.
+ when BigDecimal then value.to_s('F')
+ when Date, Time then quoted_date(value)
+ when *types_which_need_no_typecasting
+ value
+ else raise TypeError
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb b/activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb
new file mode 100644
index 0000000000..25c17ce971
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/savepoints.rb
@@ -0,0 +1,21 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module Savepoints #:nodoc:
+ def supports_savepoints?
+ true
+ end
+
+ def create_savepoint(name = current_savepoint_name)
+ execute("SAVEPOINT #{name}")
+ end
+
+ def rollback_to_savepoint(name = current_savepoint_name)
+ execute("ROLLBACK TO SAVEPOINT #{name}")
+ end
+
+ def release_savepoint(name = current_savepoint_name)
+ execute("RELEASE SAVEPOINT #{name}")
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb
new file mode 100644
index 0000000000..adad6cd542
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb
@@ -0,0 +1,125 @@
+module ActiveRecord
+ module ConnectionAdapters
+ class AbstractAdapter
+ class SchemaCreation # :nodoc:
+ def initialize(conn)
+ @conn = conn
+ @cache = {}
+ end
+
+ def accept(o)
+ m = @cache[o.class] ||= "visit_#{o.class.name.split('::').last}"
+ send m, o
+ end
+
+ def visit_AddColumn(o)
+ sql_type = type_to_sql(o.type, o.limit, o.precision, o.scale)
+ sql = "ADD #{quote_column_name(o.name)} #{sql_type}"
+ add_column_options!(sql, column_options(o))
+ end
+
+ private
+
+ def visit_AlterTable(o)
+ sql = "ALTER TABLE #{quote_table_name(o.name)} "
+ sql << o.adds.map { |col| visit_AddColumn col }.join(' ')
+ sql << o.foreign_key_adds.map { |fk| visit_AddForeignKey fk }.join(' ')
+ sql << o.foreign_key_drops.map { |fk| visit_DropForeignKey fk }.join(' ')
+ end
+
+ def visit_ColumnDefinition(o)
+ sql_type = type_to_sql(o.type, o.limit, o.precision, o.scale)
+ column_sql = "#{quote_column_name(o.name)} #{sql_type}"
+ add_column_options!(column_sql, column_options(o)) unless o.primary_key?
+ column_sql
+ end
+
+ def visit_TableDefinition(o)
+ create_sql = "CREATE#{' TEMPORARY' if o.temporary} TABLE "
+ create_sql << "#{quote_table_name(o.name)} "
+ create_sql << "(#{o.columns.map { |c| accept c }.join(', ')}) " unless o.as
+ create_sql << "#{o.options}"
+ create_sql << " AS #{@conn.to_sql(o.as)}" if o.as
+ create_sql
+ end
+
+ def visit_AddForeignKey(o)
+ sql = <<-SQL.strip_heredoc
+ ADD CONSTRAINT #{quote_column_name(o.name)}
+ FOREIGN KEY (#{quote_column_name(o.column)})
+ REFERENCES #{quote_table_name(o.to_table)} (#{quote_column_name(o.primary_key)})
+ SQL
+ sql << " #{action_sql('DELETE', o.on_delete)}" if o.on_delete
+ sql << " #{action_sql('UPDATE', o.on_update)}" if o.on_update
+ sql
+ end
+
+ def visit_DropForeignKey(name)
+ "DROP CONSTRAINT #{quote_column_name(name)}"
+ end
+
+ def column_options(o)
+ column_options = {}
+ column_options[:null] = o.null unless o.null.nil?
+ column_options[:default] = o.default unless o.default.nil?
+ column_options[:column] = o
+ column_options[:first] = o.first
+ column_options[:after] = o.after
+ column_options
+ end
+
+ def quote_column_name(name)
+ @conn.quote_column_name name
+ end
+
+ def quote_table_name(name)
+ @conn.quote_table_name name
+ end
+
+ def type_to_sql(type, limit, precision, scale)
+ @conn.type_to_sql type.to_sym, limit, precision, scale
+ end
+
+ def add_column_options!(sql, options)
+ sql << " DEFAULT #{quote_value(options[:default], options[:column])}" if options_include_default?(options)
+ # must explicitly check for :null to allow change_column to work on migrations
+ if options[:null] == false
+ sql << " NOT NULL"
+ end
+ if options[:auto_increment] == true
+ sql << " AUTO_INCREMENT"
+ end
+ sql
+ end
+
+ def quote_value(value, column)
+ column.sql_type ||= type_to_sql(column.type, column.limit, column.precision, column.scale)
+ column.cast_type ||= type_for_column(column)
+
+ @conn.quote(value, column)
+ end
+
+ def options_include_default?(options)
+ options.include?(:default) && !(options[:null] == false && options[:default].nil?)
+ end
+
+ def action_sql(action, dependency)
+ case dependency
+ when :nullify then "ON #{action} SET NULL"
+ when :cascade then "ON #{action} CASCADE"
+ when :restrict then "ON #{action} RESTRICT"
+ else
+ raise ArgumentError, <<-MSG.strip_heredoc
+ '#{dependency}' is not supported for :on_update or :on_delete.
+ Supported values are: :nullify, :cascade, :restrict
+ MSG
+ end
+ end
+
+ def type_for_column(column)
+ @conn.lookup_cast_type(column.sql_type)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
new file mode 100644
index 0000000000..e44ccb7d81
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_definitions.rb
@@ -0,0 +1,564 @@
+require 'date'
+require 'set'
+require 'bigdecimal'
+require 'bigdecimal/util'
+
+module ActiveRecord
+ module ConnectionAdapters #:nodoc:
+ # Abstract representation of an index definition on a table. Instances of
+ # this type are typically created and returned by methods in database
+ # adapters. e.g. ActiveRecord::ConnectionAdapters::AbstractMysqlAdapter#indexes
+ class IndexDefinition < Struct.new(:table, :name, :unique, :columns, :lengths, :orders, :where, :type, :using) #:nodoc:
+ end
+
+ # Abstract representation of a column definition. Instances of this type
+ # are typically created by methods in TableDefinition, and added to the
+ # +columns+ attribute of said TableDefinition object, in order to be used
+ # for generating a number of table creation or table changing SQL statements.
+ class ColumnDefinition < Struct.new(:name, :type, :limit, :precision, :scale, :default, :null, :first, :after, :primary_key, :sql_type, :cast_type) #:nodoc:
+
+ def primary_key?
+ primary_key || type.to_sym == :primary_key
+ end
+ end
+
+ class ChangeColumnDefinition < Struct.new(:column, :type, :options) #:nodoc:
+ end
+
+ class ForeignKeyDefinition < Struct.new(:from_table, :to_table, :options) #:nodoc:
+ def name
+ options[:name]
+ end
+
+ def column
+ options[:column]
+ end
+
+ def primary_key
+ options[:primary_key] || default_primary_key
+ end
+
+ def on_delete
+ options[:on_delete]
+ end
+
+ def on_update
+ options[:on_update]
+ end
+
+ def custom_primary_key?
+ options[:primary_key] != default_primary_key
+ end
+
+ private
+ def default_primary_key
+ "id"
+ end
+ end
+
+ # Represents the schema of an SQL table in an abstract way. This class
+ # provides methods for manipulating the schema representation.
+ #
+ # Inside migration files, the +t+ object in +create_table+
+ # is actually of this type:
+ #
+ # class SomeMigration < ActiveRecord::Migration
+ # def up
+ # create_table :foo do |t|
+ # puts t.class # => "ActiveRecord::ConnectionAdapters::TableDefinition"
+ # end
+ # end
+ #
+ # def down
+ # ...
+ # end
+ # end
+ #
+ # The table definition's columns are stored as ColumnDefinition objects in the +columns+ attribute.
+ class TableDefinition
+ # An array of ColumnDefinition objects, representing the column changes
+ # that have been defined.
+ attr_accessor :indexes
+ attr_reader :name, :temporary, :options, :as
+
+ def initialize(types, name, temporary, options, as = nil)
+ @columns_hash = {}
+ @indexes = {}
+ @native = types
+ @temporary = temporary
+ @options = options
+ @as = as
+ @name = name
+ end
+
+ def columns; @columns_hash.values; end
+
+ # Appends a primary key definition to the table definition.
+ # Can be called multiple times, but this is probably not a good idea.
+ def primary_key(name, type = :primary_key, options = {})
+ column(name, type, options.merge(:primary_key => true))
+ end
+
+ # Returns a ColumnDefinition for the column with name +name+.
+ def [](name)
+ @columns_hash[name.to_s]
+ end
+
+ # Instantiates a new column for the table.
+ # The +type+ parameter is normally one of the migrations native types,
+ # which is one of the following:
+ # <tt>:primary_key</tt>, <tt>:string</tt>, <tt>:text</tt>,
+ # <tt>:integer</tt>, <tt>:float</tt>, <tt>:decimal</tt>,
+ # <tt>:datetime</tt>, <tt>:timestamp</tt>, <tt>:time</tt>,
+ # <tt>:date</tt>, <tt>:binary</tt>, <tt>:boolean</tt>.
+ #
+ # You may use a type not in this list as long as it is supported by your
+ # database (for example, "polygon" in MySQL), but this will not be database
+ # agnostic and should usually be avoided.
+ #
+ # Available options are (none of these exists by default):
+ # * <tt>:limit</tt> -
+ # Requests a maximum column length. This is the number of characters for <tt>:string</tt> and
+ # <tt>:text</tt> columns and the number of bytes for <tt>:binary</tt> and <tt>:integer</tt> columns.
+ # * <tt>:default</tt> -
+ # The column's default value. Use nil for NULL.
+ # * <tt>:null</tt> -
+ # Allows or disallows +NULL+ values in the column. This option could
+ # have been named <tt>:null_allowed</tt>.
+ # * <tt>:precision</tt> -
+ # Specifies the precision for a <tt>:decimal</tt> column.
+ # * <tt>:scale</tt> -
+ # Specifies the scale for a <tt>:decimal</tt> column.
+ # * <tt>:index</tt> -
+ # Create an index for the column. Can be either <tt>true</tt> or an options hash.
+ #
+ # Note: The precision is the total number of significant digits
+ # and the scale is the number of digits that can be stored following
+ # the decimal point. For example, the number 123.45 has a precision of 5
+ # and a scale of 2. A decimal with a precision of 5 and a scale of 2 can
+ # range from -999.99 to 999.99.
+ #
+ # Please be aware of different RDBMS implementations behavior with
+ # <tt>:decimal</tt> columns:
+ # * The SQL standard says the default scale should be 0, <tt>:scale</tt> <=
+ # <tt>:precision</tt>, and makes no comments about the requirements of
+ # <tt>:precision</tt>.
+ # * MySQL: <tt>:precision</tt> [1..63], <tt>:scale</tt> [0..30].
+ # Default is (10,0).
+ # * PostgreSQL: <tt>:precision</tt> [1..infinity],
+ # <tt>:scale</tt> [0..infinity]. No default.
+ # * SQLite2: Any <tt>:precision</tt> and <tt>:scale</tt> may be used.
+ # Internal storage as strings. No default.
+ # * SQLite3: No restrictions on <tt>:precision</tt> and <tt>:scale</tt>,
+ # but the maximum supported <tt>:precision</tt> is 16. No default.
+ # * Oracle: <tt>:precision</tt> [1..38], <tt>:scale</tt> [-84..127].
+ # Default is (38,0).
+ # * DB2: <tt>:precision</tt> [1..63], <tt>:scale</tt> [0..62].
+ # Default unknown.
+      # * SQL Server: <tt>:precision</tt> [1..38], <tt>:scale</tt> [0..38].
+      #   Default (38,0).
+ #
+ # This method returns <tt>self</tt>.
+ #
+ # == Examples
+ # # Assuming +td+ is an instance of TableDefinition
+ # td.column(:granted, :boolean)
+      #   # => granted BOOLEAN
+ #
+ # td.column(:picture, :binary, limit: 2.megabytes)
+ # # => picture BLOB(2097152)
+ #
+ # td.column(:sales_stage, :string, limit: 20, default: 'new', null: false)
+ # # => sales_stage VARCHAR(20) DEFAULT 'new' NOT NULL
+ #
+ # td.column(:bill_gates_money, :decimal, precision: 15, scale: 2)
+ # # => bill_gates_money DECIMAL(15,2)
+ #
+ # td.column(:sensor_reading, :decimal, precision: 30, scale: 20)
+ # # => sensor_reading DECIMAL(30,20)
+ #
+ # # While <tt>:scale</tt> defaults to zero on most databases, it
+ # # probably wouldn't hurt to include it.
+ # td.column(:huge_integer, :decimal, precision: 30)
+ # # => huge_integer DECIMAL(30)
+ #
+ # # Defines a column with a database-specific type.
+ # td.column(:foo, 'polygon')
+ # # => foo polygon
+ #
+ # == Short-hand examples
+ #
+ # Instead of calling +column+ directly, you can also work with the short-hand definitions for the default types.
+ # They use the type as the method name instead of as a parameter and allow for multiple columns to be defined
+ # in a single statement.
+ #
+ # What can be written like this with the regular calls to column:
+ #
+ # create_table :products do |t|
+ # t.column :shop_id, :integer
+ # t.column :creator_id, :integer
+ # t.column :item_number, :string
+ # t.column :name, :string, default: "Untitled"
+ # t.column :value, :string, default: "Untitled"
+ # t.column :created_at, :datetime
+ # t.column :updated_at, :datetime
+ # end
+ # add_index :products, :item_number
+ #
+ # can also be written as follows using the short-hand:
+ #
+ # create_table :products do |t|
+ # t.integer :shop_id, :creator_id
+ # t.string :item_number, index: true
+ # t.string :name, :value, default: "Untitled"
+ # t.timestamps
+ # end
+ #
+ # There's a short-hand method for each of the type values declared at the top. And then there's
+ # TableDefinition#timestamps that'll add +created_at+ and +updated_at+ as datetimes.
+ #
+ # TableDefinition#references will add an appropriately-named _id column, plus a corresponding _type
+ # column if the <tt>:polymorphic</tt> option is supplied. If <tt>:polymorphic</tt> is a hash of
+ # options, these will be used when creating the <tt>_type</tt> column. The <tt>:index</tt> option
+ # will also create an index, similar to calling <tt>add_index</tt>. So what can be written like this:
+ #
+ # create_table :taggings do |t|
+ # t.integer :tag_id, :tagger_id, :taggable_id
+ # t.string :tagger_type
+ # t.string :taggable_type, default: 'Photo'
+ # end
+ # add_index :taggings, :tag_id, name: 'index_taggings_on_tag_id'
+ # add_index :taggings, [:tagger_id, :tagger_type]
+ #
+ # Can also be written as follows using references:
+ #
+ # create_table :taggings do |t|
+ # t.references :tag, index: { name: 'index_taggings_on_tag_id' }
+ # t.references :tagger, polymorphic: true, index: true
+ # t.references :taggable, polymorphic: { default: 'Photo' }
+ # end
+ def column(name, type, options = {})
+ name = name.to_s
+ type = type.to_sym
+
+ if primary_key_column_name == name
+ raise ArgumentError, "you can't redefine the primary key column '#{name}'. To define a custom primary key, pass { id: false } to create_table."
+ end
+
+ index_options = options.delete(:index)
+ index(name, index_options.is_a?(Hash) ? index_options : {}) if index_options
+ @columns_hash[name] = new_column_definition(name, type, options)
+ self
+ end
+
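+      # Removes the column +name+ from the table definition, if it has been added.
+      #
+      #   td.remove_column(:title)
+      #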
+ def remove_column(name)
+ @columns_hash.delete name.to_s
+ end
+
+ [:string, :text, :integer, :float, :decimal, :datetime, :timestamp, :time, :date, :binary, :boolean].each do |column_type|
+ define_method column_type do |*args|
+ options = args.extract_options!
+ column_names = args
+ column_names.each { |name| column(name, column_type, options) }
+ end
+ end
+
+      # Adds index options to the indexes hash, keyed by column name.
+      # This is primarily used to track indexes that need to be created after the table is created.
+ #
+ # index(:account_id, name: 'index_projects_on_account_id')
+ def index(column_name, options = {})
+ indexes[column_name] = options
+ end
+
+ # Appends <tt>:datetime</tt> columns <tt>:created_at</tt> and
+ # <tt>:updated_at</tt> to the table.
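+      #
+      #   t.timestamps
+      #   t.timestamps(null: false)  # any options are forwarded to both columns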
+ def timestamps(*args)
+ options = args.extract_options!
+ column(:created_at, :datetime, options)
+ column(:updated_at, :datetime, options)
+ end
+
+      # Adds a reference. Optionally adds a +type+ column, if the <tt>:polymorphic</tt> option is provided.
+      # Both <tt>references</tt> and <tt>belongs_to</tt> are acceptable. The reference column will be an +integer+
+      # by default; the <tt>:type</tt> option can be used to specify a different type.
+ #
+ # t.references(:user)
+ # t.references(:user, type: "string")
+ # t.belongs_to(:supplier, polymorphic: true)
+ #
+ # See SchemaStatements#add_reference
+ def references(*args)
+ options = args.extract_options!
+ polymorphic = options.delete(:polymorphic)
+ index_options = options.delete(:index)
+ type = options.delete(:type) || :integer
+ args.each do |col|
+ column("#{col}_id", type, options)
+ column("#{col}_type", :string, polymorphic.is_a?(Hash) ? polymorphic : options) if polymorphic
+ index(polymorphic ? %w(id type).map { |t| "#{col}_#{t}" } : "#{col}_id", index_options.is_a?(Hash) ? index_options : {}) if index_options
+ end
+ end
+ alias :belongs_to :references
+
+ def new_column_definition(name, type, options) # :nodoc:
+ type = aliased_types[type] || type
+ column = create_column_definition name, type
+ limit = options.fetch(:limit) do
+ native[type][:limit] if native[type].is_a?(Hash)
+ end
+
+ column.limit = limit
+ column.array = options[:array] if column.respond_to?(:array)
+ column.precision = options[:precision]
+ column.scale = options[:scale]
+ column.default = options[:default]
+ column.null = options[:null]
+ column.first = options[:first]
+ column.after = options[:after]
+ column.primary_key = type == :primary_key || options[:primary_key]
+ column
+ end
+
+ private
+ def create_column_definition(name, type)
+ ColumnDefinition.new name, type
+ end
+
+ def primary_key_column_name
+ primary_key_column = columns.detect { |c| c.primary_key? }
+ primary_key_column && primary_key_column.name
+ end
+
+ def native
+ @native
+ end
+
+ def aliased_types
+ HashWithIndifferentAccess.new(
+ timestamp: :datetime,
+ )
+ end
+ end
+
+ class AlterTable # :nodoc:
+ attr_reader :adds
+ attr_reader :foreign_key_adds
+ attr_reader :foreign_key_drops
+
+ def initialize(td)
+ @td = td
+ @adds = []
+ @foreign_key_adds = []
+ @foreign_key_drops = []
+ end
+
+ def name; @td.name; end
+
+ def add_foreign_key(to_table, options)
+ @foreign_key_adds << ForeignKeyDefinition.new(name, to_table, options)
+ end
+
+ def drop_foreign_key(name)
+ @foreign_key_drops << name
+ end
+
+ def add_column(name, type, options)
+ name = name.to_s
+ type = type.to_sym
+ @adds << @td.new_column_definition(name, type, options)
+ end
+ end
+
+    # Represents an SQL table in an abstract way for updating an existing table.
+    # See also TableDefinition and SchemaStatements#create_table.
+ #
+ # Available transformations are:
+ #
+ # change_table :table do |t|
+ # t.column
+ # t.index
+ # t.rename_index
+ # t.timestamps
+ # t.change
+ # t.change_default
+ # t.rename
+ # t.references
+ # t.belongs_to
+ # t.string
+ # t.text
+ # t.integer
+ # t.float
+ # t.decimal
+ # t.datetime
+ # t.timestamp
+ # t.time
+ # t.date
+ # t.binary
+ # t.boolean
+ # t.remove
+ # t.remove_references
+ # t.remove_belongs_to
+ # t.remove_index
+ # t.remove_timestamps
+ # end
+ #
+ class Table
+ def initialize(table_name, base)
+ @table_name = table_name
+ @base = base
+ end
+
+ # Adds a new column to the named table.
+ # See TableDefinition#column for details of the options you can use.
+ #
+ # ====== Creating a simple column
+ # t.column(:name, :string)
+ def column(column_name, type, options = {})
+ @base.add_column(@table_name, column_name, type, options)
+ end
+
+ # Checks to see if a column exists. See SchemaStatements#column_exists?
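+      #
+      #   t.column_exists?(:title)
+      #   t.column_exists?(:title, :string)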
+ def column_exists?(column_name, type = nil, options = {})
+ @base.column_exists?(@table_name, column_name, type, options)
+ end
+
+ # Adds a new index to the table. +column_name+ can be a single Symbol, or
+ # an Array of Symbols. See SchemaStatements#add_index
+ #
+ # ====== Creating a simple index
+ # t.index(:name)
+ # ====== Creating a unique index
+ # t.index([:branch_id, :party_id], unique: true)
+ # ====== Creating a named index
+ # t.index([:branch_id, :party_id], unique: true, name: 'by_branch_party')
+ def index(column_name, options = {})
+ @base.add_index(@table_name, column_name, options)
+ end
+
+ # Checks to see if an index exists. See SchemaStatements#index_exists?
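+      #
+      #   t.index_exists?(:branch_id)
+      #   t.index_exists?([:branch_id, :party_id], unique: true)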
+ def index_exists?(column_name, options = {})
+ @base.index_exists?(@table_name, column_name, options)
+ end
+
+ # Renames the given index on the table.
+ #
+ # t.rename_index(:user_id, :account_id)
+ def rename_index(index_name, new_index_name)
+ @base.rename_index(@table_name, index_name, new_index_name)
+ end
+
+ # Adds timestamps (+created_at+ and +updated_at+) columns to the table. See SchemaStatements#add_timestamps
+ #
+ # t.timestamps
+ def timestamps
+ @base.add_timestamps(@table_name)
+ end
+
+ # Changes the column's definition according to the new options.
+ # See TableDefinition#column for details of the options you can use.
+ #
+ # t.change(:name, :string, limit: 80)
+ # t.change(:description, :text)
+ def change(column_name, type, options = {})
+ @base.change_column(@table_name, column_name, type, options)
+ end
+
+ # Sets a new default value for a column. See SchemaStatements#change_column_default
+ #
+ # t.change_default(:qualification, 'new')
+ # t.change_default(:authorized, 1)
+ def change_default(column_name, default)
+ @base.change_column_default(@table_name, column_name, default)
+ end
+
+ # Removes the column(s) from the table definition.
+ #
+ # t.remove(:qualification)
+ # t.remove(:qualification, :experience)
+ def remove(*column_names)
+ @base.remove_columns(@table_name, *column_names)
+ end
+
+ # Removes the given index from the table.
+ #
+ # ====== Remove the index_table_name_on_column in the table_name table
+ # t.remove_index :column
+ # ====== Remove the index named index_table_name_on_branch_id in the table_name table
+ # t.remove_index column: :branch_id
+ # ====== Remove the index named index_table_name_on_branch_id_and_party_id in the table_name table
+ # t.remove_index column: [:branch_id, :party_id]
+ # ====== Remove the index named by_branch_party in the table_name table
+ # t.remove_index name: :by_branch_party
+ def remove_index(options = {})
+ @base.remove_index(@table_name, options)
+ end
+
+ # Removes the timestamp columns (+created_at+ and +updated_at+) from the table.
+ #
+ # t.remove_timestamps
+ def remove_timestamps
+ @base.remove_timestamps(@table_name)
+ end
+
+ # Renames a column.
+ #
+ # t.rename(:description, :name)
+ def rename(column_name, new_column_name)
+ @base.rename_column(@table_name, column_name, new_column_name)
+ end
+
+      # Adds a reference. Optionally adds a +type+ column, if the <tt>:polymorphic</tt> option is provided.
+      # Both <tt>references</tt> and <tt>belongs_to</tt> are acceptable. The reference column will be an +integer+
+      # by default; the <tt>:type</tt> option can be used to specify a different type.
+ #
+ # t.references(:user)
+ # t.references(:user, type: "string")
+ # t.belongs_to(:supplier, polymorphic: true)
+ #
+ # See SchemaStatements#add_reference
+ def references(*args)
+ options = args.extract_options!
+ args.each do |ref_name|
+ @base.add_reference(@table_name, ref_name, options)
+ end
+ end
+ alias :belongs_to :references
+
+ # Removes a reference. Optionally removes a +type+ column.
+ # <tt>remove_references</tt> and <tt>remove_belongs_to</tt> are acceptable.
+ #
+ # t.remove_references(:user)
+ # t.remove_belongs_to(:supplier, polymorphic: true)
+ #
+ # See SchemaStatements#remove_reference
+ def remove_references(*args)
+ options = args.extract_options!
+ args.each do |ref_name|
+ @base.remove_reference(@table_name, ref_name, options)
+ end
+ end
+ alias :remove_belongs_to :remove_references
+
+      # Adds a column or columns of a specified type.
+ #
+ # t.string(:goat)
+ # t.string(:goat, :sheep)
+ [:string, :text, :integer, :float, :decimal, :datetime, :timestamp, :time, :date, :binary, :boolean].each do |column_type|
+ define_method column_type do |*args|
+ options = args.extract_options!
+ args.each do |name|
+ @base.add_column(@table_name, name, column_type, options)
+ end
+ end
+ end
+
+ private
+ def native
+ @base.native_database_types
+ end
+ end
+
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb
new file mode 100644
index 0000000000..9bd0401e40
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_dumper.rb
@@ -0,0 +1,46 @@
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+    # The goal of this module is to move adapter-specific column
+    # definitions to the adapter instead of having them in the schema
+    # dumper itself. This code represents the normal case.
+    # We can then redefine how certain data types are handled in the schema dumper at the
+    # adapter level by overriding this code inside the database-specific adapters.
+ module ColumnDumper
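+      # Returns the column options produced by +prepare_column_options+, with every
+      # key except +:name+ and +:type+ prefixed as a "key: value" fragment ready for
+      # the schema dumper output.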
+ def column_spec(column, types)
+ spec = prepare_column_options(column, types)
+ (spec.keys - [:name, :type]).each{ |k| spec[k].insert(0, "#{k.to_s}: ")}
+ spec
+ end
+
+      # This can be overridden at the adapter level to support other
+      # extended datatypes (for example, adding an array option in the
+      # PostgreSQLAdapter).
+ def prepare_column_options(column, types)
+ spec = {}
+ spec[:name] = column.name.inspect
+ spec[:type] = column.type.to_s
+ spec[:limit] = column.limit.inspect if column.limit != types[column.type][:limit]
+ spec[:precision] = column.precision.inspect if column.precision
+ spec[:scale] = column.scale.inspect if column.scale
+ spec[:null] = 'false' unless column.null
+ spec[:default] = schema_default(column) if column.has_default?
+ spec.delete(:default) if spec[:default].nil?
+ spec
+ end
+
+ # Lists the valid migration options
+ def migration_keys
+ [:name, :limit, :precision, :scale, :default, :null]
+ end
+
+ private
+
+ def schema_default(column)
+ default = column.type_cast_from_database(column.default)
+ unless default.nil?
+ column.type_cast_for_schema(default)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
new file mode 100644
index 0000000000..10753defc2
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
@@ -0,0 +1,979 @@
+require 'active_record/migration/join_table'
+
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ module SchemaStatements
+ include ActiveRecord::Migration::JoinTable
+
+ # Returns a hash of mappings from the abstract data types to the native
+ # database types. See TableDefinition#column for details on the recognized
+ # abstract data types.
+ def native_database_types
+ {}
+ end
+
+ # Truncates a table alias according to the limits of the current adapter.
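+    #
+    #   table_alias_for('metrics.page_views')  # => "metrics_page_views" (truncated to +table_alias_length+ first)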
+ def table_alias_for(table_name)
+ table_name[0...table_alias_length].tr('.', '_')
+ end
+
+ # Checks to see if the table +table_name+ exists on the database.
+ #
+ # table_exists?(:developers)
+ #
+ def table_exists?(table_name)
+ tables.include?(table_name.to_s)
+ end
+
+ # Returns an array of indexes for the given table.
+ # def indexes(table_name, name = nil) end
+
+ # Checks to see if an index exists on a table for a given index definition.
+ #
+ # # Check an index exists
+ # index_exists?(:suppliers, :company_id)
+ #
+ # # Check an index on multiple columns exists
+ # index_exists?(:suppliers, [:company_id, :company_type])
+ #
+ # # Check a unique index exists
+ # index_exists?(:suppliers, :company_id, unique: true)
+ #
+ # # Check an index with a custom name exists
+ # index_exists?(:suppliers, :company_id, name: "idx_company_id")
+ #
+ def index_exists?(table_name, column_name, options = {})
+ column_names = Array(column_name)
+ index_name = options.key?(:name) ? options[:name].to_s : index_name(table_name, :column => column_names)
+ if options[:unique]
+ indexes(table_name).any?{ |i| i.unique && i.name == index_name }
+ else
+ indexes(table_name).any?{ |i| i.name == index_name }
+ end
+ end
+
+ # Returns an array of Column objects for the table specified by +table_name+.
+ # See the concrete implementation for details on the expected parameter values.
+ def columns(table_name) end
+
+ # Checks to see if a column exists in a given table.
+ #
+ # # Check a column exists
+ # column_exists?(:suppliers, :name)
+ #
+ # # Check a column exists of a particular type
+ # column_exists?(:suppliers, :name, :string)
+ #
+ # # Check a column exists with a specific definition
+ # column_exists?(:suppliers, :name, :string, limit: 100)
+ # column_exists?(:suppliers, :name, :string, default: 'default')
+ # column_exists?(:suppliers, :name, :string, null: false)
+ # column_exists?(:suppliers, :tax, :decimal, precision: 8, scale: 2)
+ #
+ def column_exists?(table_name, column_name, type = nil, options = {})
+ column_name = column_name.to_s
+ columns(table_name).any?{ |c| c.name == column_name &&
+ (!type || c.type == type) &&
+ (!options.key?(:limit) || c.limit == options[:limit]) &&
+ (!options.key?(:precision) || c.precision == options[:precision]) &&
+ (!options.key?(:scale) || c.scale == options[:scale]) &&
+ (!options.key?(:default) || c.default == options[:default]) &&
+ (!options.key?(:null) || c.null == options[:null]) }
+ end
+
+ # Creates a new table with the name +table_name+. +table_name+ may either
+ # be a String or a Symbol.
+ #
+ # There are two ways to work with +create_table+. You can use the block
+ # form or the regular form, like this:
+ #
+ # === Block form
+ #
+ # # create_table() passes a TableDefinition object to the block.
+ # # This form will not only create the table, but also columns for the
+ # # table.
+ #
+ # create_table(:suppliers) do |t|
+ # t.column :name, :string, limit: 60
+ # # Other fields here
+ # end
+ #
+ # === Block form, with shorthand
+ #
+ # # You can also use the column types as method calls, rather than calling the column method.
+ # create_table(:suppliers) do |t|
+ # t.string :name, limit: 60
+ # # Other fields here
+ # end
+ #
+ # === Regular form
+ #
+ # # Creates a table called 'suppliers' with no columns.
+ # create_table(:suppliers)
+ # # Add a column to 'suppliers'.
+ # add_column(:suppliers, :name, :string, {limit: 60})
+ #
+ # The +options+ hash can include the following keys:
+ # [<tt>:id</tt>]
+ # Whether to automatically add a primary key column. Defaults to true.
+ # Join tables for +has_and_belongs_to_many+ should set it to false.
+ # [<tt>:primary_key</tt>]
+ # The name of the primary key, if one is to be added automatically.
+ # Defaults to +id+. If <tt>:id</tt> is false this option is ignored.
+ #
+ # Note that Active Record models will automatically detect their
+ # primary key. This can be avoided by using +self.primary_key=+ on the model
+ # to define the key explicitly.
+ #
+ # [<tt>:options</tt>]
+ # Any extra options you want appended to the table definition.
+ # [<tt>:temporary</tt>]
+ # Make a temporary table.
+ # [<tt>:force</tt>]
+ # Set to true to drop the table before creating it.
+ # Defaults to false.
+ # [<tt>:as</tt>]
+ # SQL to use to generate the table. When this option is used, the block is
+ # ignored, as are the <tt>:id</tt> and <tt>:primary_key</tt> options.
+ #
+ # ====== Add a backend specific option to the generated SQL (MySQL)
+ #
+ # create_table(:suppliers, options: 'ENGINE=InnoDB DEFAULT CHARSET=utf8')
+ #
+ # generates:
+ #
+ # CREATE TABLE suppliers (
+ # id int(11) DEFAULT NULL auto_increment PRIMARY KEY
+ # ) ENGINE=InnoDB DEFAULT CHARSET=utf8
+ #
+ # ====== Rename the primary key column
+ #
+ # create_table(:objects, primary_key: 'guid') do |t|
+ # t.column :name, :string, limit: 80
+ # end
+ #
+ # generates:
+ #
+ # CREATE TABLE objects (
+ # guid int(11) DEFAULT NULL auto_increment PRIMARY KEY,
+ # name varchar(80)
+ # )
+ #
+ # ====== Do not add a primary key column
+ #
+ # create_table(:categories_suppliers, id: false) do |t|
+ # t.column :category_id, :integer
+ # t.column :supplier_id, :integer
+ # end
+ #
+ # generates:
+ #
+ # CREATE TABLE categories_suppliers (
+ # category_id int,
+ # supplier_id int
+ # )
+ #
+ # ====== Create a temporary table based on a query
+ #
+ # create_table(:long_query, temporary: true,
+ # as: "SELECT * FROM orders INNER JOIN line_items ON order_id=orders.id")
+ #
+ # generates:
+ #
+ # CREATE TEMPORARY TABLE long_query AS
+ # SELECT * FROM orders INNER JOIN line_items ON order_id=orders.id
+ #
+ # See also TableDefinition#column for details on how to create columns.
+ def create_table(table_name, options = {})
+ td = create_table_definition table_name, options[:temporary], options[:options], options[:as]
+
+ if options[:id] != false && !options[:as]
+ pk = options.fetch(:primary_key) do
+ Base.get_primary_key table_name.to_s.singularize
+ end
+
+ td.primary_key pk, options.fetch(:id, :primary_key), options
+ end
+
+ yield td if block_given?
+
+ if options[:force] && table_exists?(table_name)
+ drop_table(table_name, options)
+ end
+
+ result = execute schema_creation.accept td
+ td.indexes.each_pair { |c, o| add_index(table_name, c, o) } unless supports_indexes_in_create?
+ result
+ end
+
+ # Creates a new join table with the name created using the lexical order of the first two
+ # arguments. These arguments can be a String or a Symbol.
+ #
+ # # Creates a table called 'assemblies_parts' with no id.
+ # create_join_table(:assemblies, :parts)
+ #
+      # You can pass an +options+ hash which can include the following keys:
+ # [<tt>:table_name</tt>]
+ # Sets the table name overriding the default
+ # [<tt>:column_options</tt>]
+ # Any extra options you want appended to the columns definition.
+ # [<tt>:options</tt>]
+ # Any extra options you want appended to the table definition.
+ # [<tt>:temporary</tt>]
+ # Make a temporary table.
+ # [<tt>:force</tt>]
+ # Set to true to drop the table before creating it.
+ # Defaults to false.
+ #
+ # Note that +create_join_table+ does not create any indices by default; you can use
+ # its block form to do so yourself:
+ #
+ # create_join_table :products, :categories do |t|
+ # t.index :product_id
+ # t.index :category_id
+ # end
+ #
+ # ====== Add a backend specific option to the generated SQL (MySQL)
+ #
+ # create_join_table(:assemblies, :parts, options: 'ENGINE=InnoDB DEFAULT CHARSET=utf8')
+ #
+ # generates:
+ #
+ # CREATE TABLE assemblies_parts (
+ # assembly_id int NOT NULL,
+      #       part_id int NOT NULL
+ # ) ENGINE=InnoDB DEFAULT CHARSET=utf8
+ #
+ def create_join_table(table_1, table_2, options = {})
+ join_table_name = find_join_table_name(table_1, table_2, options)
+
+ column_options = options.delete(:column_options) || {}
+ column_options.reverse_merge!(null: false)
+
+ t1_column, t2_column = [table_1, table_2].map{ |t| t.to_s.singularize.foreign_key }
+
+ create_table(join_table_name, options.merge!(id: false)) do |td|
+ td.integer t1_column, column_options
+ td.integer t2_column, column_options
+ yield td if block_given?
+ end
+ end
+
+ # Drops the join table specified by the given arguments.
+ # See +create_join_table+ for details.
+ #
+ # Although this command ignores the block if one is given, it can be helpful
+ # to provide one in a migration's +change+ method so it can be reverted.
+ # In that case, the block will be used by create_join_table.
+ def drop_join_table(table_1, table_2, options = {})
+ join_table_name = find_join_table_name(table_1, table_2, options)
+ drop_table(join_table_name)
+ end
+
+ # A block for changing columns in +table+.
+ #
+ # # change_table() yields a Table instance
+ # change_table(:suppliers) do |t|
+ # t.column :name, :string, limit: 60
+ # # Other column alterations here
+ # end
+ #
+ # The +options+ hash can include the following keys:
+ # [<tt>:bulk</tt>]
+ # Set this to true to make this a bulk alter query, such as
+ #
+ # ALTER TABLE `users` ADD COLUMN age INT(11), ADD COLUMN birthdate DATETIME ...
+ #
+ # Defaults to false.
+ #
+ # ====== Add a column
+ #
+ # change_table(:suppliers) do |t|
+ # t.column :name, :string, limit: 60
+ # end
+ #
+ # ====== Add 2 integer columns
+ #
+ # change_table(:suppliers) do |t|
+ # t.integer :width, :height, null: false, default: 0
+ # end
+ #
+ # ====== Add created_at/updated_at columns
+ #
+ # change_table(:suppliers) do |t|
+ # t.timestamps
+ # end
+ #
+ # ====== Add a foreign key column
+ #
+ # change_table(:suppliers) do |t|
+ # t.references :company
+ # end
+ #
+ # Creates a <tt>company_id(integer)</tt> column.
+ #
+ # ====== Add a polymorphic foreign key column
+ #
+ # change_table(:suppliers) do |t|
+ # t.belongs_to :company, polymorphic: true
+ # end
+ #
+ # Creates <tt>company_type(varchar)</tt> and <tt>company_id(integer)</tt> columns.
+ #
+ # ====== Remove a column
+ #
+ # change_table(:suppliers) do |t|
+ # t.remove :company
+ # end
+ #
+ # ====== Remove several columns
+ #
+ # change_table(:suppliers) do |t|
+ # t.remove :company_id
+ # t.remove :width, :height
+ # end
+ #
+ # ====== Remove an index
+ #
+ # change_table(:suppliers) do |t|
+ # t.remove_index :company_id
+ # end
+ #
+      # See also Table for details on all of the various column transformations.
+ def change_table(table_name, options = {})
+ if supports_bulk_alter? && options[:bulk]
+ recorder = ActiveRecord::Migration::CommandRecorder.new(self)
+ yield update_table_definition(table_name, recorder)
+ bulk_change_table(table_name, recorder.commands)
+ else
+ yield update_table_definition(table_name, self)
+ end
+ end
+
+ # Renames a table.
+ #
+ # rename_table('octopuses', 'octopi')
+ #
+ def rename_table(table_name, new_name)
+ raise NotImplementedError, "rename_table is not implemented"
+ end
+
+ # Drops a table from the database.
+ #
+ # Although this command ignores +options+ and the block if one is given, it can be helpful
+ # to provide these in a migration's +change+ method so it can be reverted.
+ # In that case, +options+ and the block will be used by create_table.
+ def drop_table(table_name, options = {})
+ execute "DROP TABLE #{quote_table_name(table_name)}"
+ end
+
+ # Adds a new column to the named table.
+ # See TableDefinition#column for details of the options you can use.
+ def add_column(table_name, column_name, type, options = {})
+ at = create_alter_table table_name
+ at.add_column(column_name, type, options)
+ execute schema_creation.accept at
+ end
+
+ # Removes the given columns from the table definition.
+ #
+ # remove_columns(:suppliers, :qualification, :experience)
+ #
+ def remove_columns(table_name, *column_names)
+ raise ArgumentError.new("You must specify at least one column name. Example: remove_columns(:people, :first_name)") if column_names.empty?
+ column_names.each do |column_name|
+ remove_column(table_name, column_name)
+ end
+ end
+
+ # Removes the column from the table definition.
+ #
+ # remove_column(:suppliers, :qualification)
+ #
+ # The +type+ and +options+ parameters will be ignored if present. It can be helpful
+ # to provide these in a migration's +change+ method so it can be reverted.
+ # In that case, +type+ and +options+ will be used by add_column.
+ def remove_column(table_name, column_name, type = nil, options = {})
+ execute "ALTER TABLE #{quote_table_name(table_name)} DROP #{quote_column_name(column_name)}"
+ end
+
+ # Changes the column's definition according to the new options.
+ # See TableDefinition#column for details of the options you can use.
+ #
+ # change_column(:suppliers, :name, :string, limit: 80)
+ # change_column(:accounts, :description, :text)
+ #
+ def change_column(table_name, column_name, type, options = {})
+ raise NotImplementedError, "change_column is not implemented"
+ end
+
+ # Sets a new default value for a column:
+ #
+ # change_column_default(:suppliers, :qualification, 'new')
+ # change_column_default(:accounts, :authorized, 1)
+ #
+ # Setting the default to +nil+ effectively drops the default:
+ #
+ # change_column_default(:users, :email, nil)
+ #
+ def change_column_default(table_name, column_name, default)
+ raise NotImplementedError, "change_column_default is not implemented"
+ end
+
+ # Sets or removes a +NOT NULL+ constraint on a column. The +null+ flag
+ # indicates whether the value can be +NULL+. For example
+ #
+ # change_column_null(:users, :nickname, false)
+ #
+ # says nicknames cannot be +NULL+ (adds the constraint), whereas
+ #
+ # change_column_null(:users, :nickname, true)
+ #
+ # allows them to be +NULL+ (drops the constraint).
+ #
+ # The method accepts an optional fourth argument to replace existing
+ # +NULL+s with some other value. Use that one when enabling the
+ # constraint if needed, since otherwise those rows would not be valid.
+ #
+ # Please note the fourth argument does not set a column's default.
+ def change_column_null(table_name, column_name, null, default = nil)
+ raise NotImplementedError, "change_column_null is not implemented"
+ end
+
+ # Renames a column.
+ #
+ # rename_column(:suppliers, :description, :name)
+ #
+ def rename_column(table_name, column_name, new_column_name)
+ raise NotImplementedError, "rename_column is not implemented"
+ end
+
+ # Adds a new index to the table. +column_name+ can be a single Symbol, or
+ # an Array of Symbols.
+ #
+ # The index will be named after the table and the column name(s), unless
+ # you pass <tt>:name</tt> as an option.
+ #
+ # ====== Creating a simple index
+ #
+ # add_index(:suppliers, :name)
+ #
+ # generates:
+ #
+ # CREATE INDEX suppliers_name_index ON suppliers(name)
+ #
+ # ====== Creating a unique index
+ #
+ # add_index(:accounts, [:branch_id, :party_id], unique: true)
+ #
+ # generates:
+ #
+ # CREATE UNIQUE INDEX accounts_branch_id_party_id_index ON accounts(branch_id, party_id)
+ #
+ # ====== Creating a named index
+ #
+ # add_index(:accounts, [:branch_id, :party_id], unique: true, name: 'by_branch_party')
+ #
+ # generates:
+ #
+ # CREATE UNIQUE INDEX by_branch_party ON accounts(branch_id, party_id)
+ #
+ # ====== Creating an index with specific key length
+ #
+ # add_index(:accounts, :name, name: 'by_name', length: 10)
+ #
+ # generates:
+ #
+ # CREATE INDEX by_name ON accounts(name(10))
+ #
+ # add_index(:accounts, [:name, :surname], name: 'by_name_surname', length: {name: 10, surname: 15})
+ #
+ # generates:
+ #
+ # CREATE INDEX by_name_surname ON accounts(name(10), surname(15))
+ #
+ # Note: SQLite doesn't support index length.
+ #
+ # ====== Creating an index with a sort order (desc or asc, asc is the default)
+ #
+ # add_index(:accounts, [:branch_id, :party_id, :surname], order: {branch_id: :desc, party_id: :asc})
+ #
+ # generates:
+ #
+ # CREATE INDEX by_branch_desc_party ON accounts(branch_id DESC, party_id ASC, surname)
+ #
+ # Note: MySQL doesn't yet support index order (it accepts the syntax but ignores it).
+ #
+ # ====== Creating a partial index
+ #
+ # add_index(:accounts, [:branch_id, :party_id], unique: true, where: "active")
+ #
+ # generates:
+ #
+ # CREATE UNIQUE INDEX index_accounts_on_branch_id_and_party_id ON accounts(branch_id, party_id) WHERE active
+ #
+ # ====== Creating an index with a specific method
+ #
+ # add_index(:developers, :name, using: 'btree')
+ #
+ # generates:
+ #
+ # CREATE INDEX index_developers_on_name ON developers USING btree (name) -- PostgreSQL
+ # CREATE INDEX index_developers_on_name USING btree ON developers (name) -- MySQL
+ #
+ # Note: only supported by PostgreSQL and MySQL
+ #
+ # ====== Creating an index with a specific type
+ #
+ # add_index(:developers, :name, type: :fulltext)
+ #
+ # generates:
+ #
+ # CREATE FULLTEXT INDEX index_developers_on_name ON developers (name) -- MySQL
+ #
+ # Note: only supported by MySQL. Supported: <tt>:fulltext</tt> and <tt>:spatial</tt> on MyISAM tables.
+ def add_index(table_name, column_name, options = {})
+ index_name, index_type, index_columns, index_options = add_index_options(table_name, column_name, options)
+ execute "CREATE #{index_type} INDEX #{quote_column_name(index_name)} ON #{quote_table_name(table_name)} (#{index_columns})#{index_options}"
+ end
+
+ # Removes the given index from the table.
+ #
+ # Removes the +index_accounts_on_column+ in the +accounts+ table.
+ #
+ # remove_index :accounts, :column
+ #
+ # Removes the index named +index_accounts_on_branch_id+ in the +accounts+ table.
+ #
+ # remove_index :accounts, column: :branch_id
+ #
+ # Removes the index named +index_accounts_on_branch_id_and_party_id+ in the +accounts+ table.
+ #
+ # remove_index :accounts, column: [:branch_id, :party_id]
+ #
+ # Removes the index named +by_branch_party+ in the +accounts+ table.
+ #
+ # remove_index :accounts, name: :by_branch_party
+ #
+ def remove_index(table_name, options = {})
+ remove_index!(table_name, index_name_for_remove(table_name, options))
+ end
+
+ def remove_index!(table_name, index_name) #:nodoc:
+ execute "DROP INDEX #{quote_column_name(index_name)} ON #{quote_table_name(table_name)}"
+ end
+
+ # Renames an index.
+ #
+ # Rename the +index_people_on_last_name+ index to +index_users_on_last_name+:
+ #
+ # rename_index :people, 'index_people_on_last_name', 'index_users_on_last_name'
+ #
+ def rename_index(table_name, old_name, new_name)
+ # this is a naive implementation; some DBs may support this more efficiently (Postgres, for instance)
+ old_index_def = indexes(table_name).detect { |i| i.name == old_name }
+ return unless old_index_def
+ add_index(table_name, old_index_def.columns, name: new_name, unique: old_index_def.unique)
+ remove_index(table_name, name: old_name)
+ end
+
+ def index_name(table_name, options) #:nodoc:
+ if Hash === options
+ if options[:column]
+ "index_#{table_name}_on_#{Array(options[:column]) * '_and_'}"
+ elsif options[:name]
+ options[:name]
+ else
+ raise ArgumentError, "You must specify the index name"
+ end
+ else
+ index_name(table_name, :column => options)
+ end
+ end
+
+ # Verifies the existence of an index with a given name.
+ #
+ # The default argument is returned if the underlying implementation does not define the indexes method,
+ # as there's no way to determine the correct answer in that case.
+ def index_name_exists?(table_name, index_name, default)
+ return default unless respond_to?(:indexes)
+ index_name = index_name.to_s
+ indexes(table_name).detect { |i| i.name == index_name }
+ end
+
+      # Adds a reference. Optionally adds a +type+ column, if the <tt>:polymorphic</tt> option is provided.
+      # The reference column is an +integer+ by default; the <tt>:type</tt> option can be used to specify
+      # a different type.
+      # Both <tt>add_reference</tt> and <tt>add_belongs_to</tt> are acceptable.
+ #
+ # ====== Create a user_id integer column
+ #
+ # add_reference(:products, :user)
+ #
+ # ====== Create a user_id string column
+ #
+ # add_reference(:products, :user, type: :string)
+ #
+      # ====== Create supplier_id and supplier_type columns
+ #
+ # add_belongs_to(:products, :supplier, polymorphic: true)
+ #
+      # ====== Create supplier_id and supplier_type columns and an appropriate index
+ #
+ # add_reference(:products, :supplier, polymorphic: true, index: true)
+ #
+ def add_reference(table_name, ref_name, options = {})
+ polymorphic = options.delete(:polymorphic)
+ index_options = options.delete(:index)
+ type = options.delete(:type) || :integer
+ add_column(table_name, "#{ref_name}_id", type, options)
+ add_column(table_name, "#{ref_name}_type", :string, polymorphic.is_a?(Hash) ? polymorphic : options) if polymorphic
+ add_index(table_name, polymorphic ? %w[id type].map{ |t| "#{ref_name}_#{t}" } : "#{ref_name}_id", index_options.is_a?(Hash) ? index_options : {}) if index_options
+ end
+ alias :add_belongs_to :add_reference
+
+ # Removes the reference(s). Also removes a +type+ column if one exists.
+ # <tt>remove_reference</tt>, <tt>remove_references</tt> and <tt>remove_belongs_to</tt> are acceptable.
+ #
+ # ====== Remove the reference
+ #
+ # remove_reference(:products, :user, index: true)
+ #
+ # ====== Remove polymorphic reference
+ #
+ # remove_reference(:products, :supplier, polymorphic: true)
+ #
+ def remove_reference(table_name, ref_name, options = {})
+ remove_column(table_name, "#{ref_name}_id")
+ remove_column(table_name, "#{ref_name}_type") if options[:polymorphic]
+ end
+ alias :remove_belongs_to :remove_reference
+
+ # Returns an array of foreign keys for the given table.
+ # The foreign keys are represented as +ForeignKeyDefinition+ objects.
+ def foreign_keys(table_name)
+ raise NotImplementedError, "foreign_keys is not implemented"
+ end
+
+ # Adds a new foreign key. +from_table+ is the table with the key column,
+ # +to_table+ contains the referenced primary key.
+ #
+ # The foreign key will be named after the following pattern: <tt>fk_rails_<identifier></tt>.
+      # +identifier+ is a 10-character random string. A custom name can be specified with
+ # the <tt>:name</tt> option.
+ #
+ # ====== Creating a simple foreign key
+ #
+ # add_foreign_key :articles, :authors
+ #
+ # generates:
+ #
+ # ALTER TABLE "articles" ADD CONSTRAINT articles_author_id_fk FOREIGN KEY ("author_id") REFERENCES "authors" ("id")
+ #
+ # ====== Creating a foreign key on a specific column
+ #
+ # add_foreign_key :articles, :users, column: :author_id, primary_key: "lng_id"
+ #
+ # generates:
+ #
+ # ALTER TABLE "articles" ADD CONSTRAINT fk_rails_58ca3d3a82 FOREIGN KEY ("author_id") REFERENCES "users" ("lng_id")
+ #
+ # ====== Creating a cascading foreign key
+ #
+ # add_foreign_key :articles, :authors, on_delete: :cascade
+ #
+ # generates:
+ #
+ # ALTER TABLE "articles" ADD CONSTRAINT articles_author_id_fk FOREIGN KEY ("author_id") REFERENCES "authors" ("id") ON DELETE CASCADE
+ #
+ # The +options+ hash can include the following keys:
+ # [<tt>:column</tt>]
+ # The foreign key column name on +from_table+. Defaults to <tt>to_table.singularize + "_id"</tt>
+ # [<tt>:primary_key</tt>]
+ # The primary key column name on +to_table+. Defaults to +id+.
+ # [<tt>:name</tt>]
+ # The constraint name. Defaults to <tt>fk_rails_<identifier></tt>.
+ # [<tt>:on_delete</tt>]
+      #   Action that happens <tt>ON DELETE</tt>. Valid values are +:nullify+, +:cascade+ and +:restrict+
+      # [<tt>:on_update</tt>]
+      #   Action that happens <tt>ON UPDATE</tt>. Valid values are +:nullify+, +:cascade+ and +:restrict+
+ def add_foreign_key(from_table, to_table, options = {})
+ return unless supports_foreign_keys?
+
+ options[:column] ||= foreign_key_column_for(to_table)
+
+ options = {
+ column: options[:column],
+ primary_key: options[:primary_key],
+ name: foreign_key_name(from_table, options),
+ on_delete: options[:on_delete],
+ on_update: options[:on_update]
+ }
+ at = create_alter_table from_table
+ at.add_foreign_key to_table, options
+
+ execute schema_creation.accept(at)
+ end
+
+ # Removes the given foreign key from the table.
+ #
+ # Removes the foreign key on +accounts.branch_id+.
+ #
+ # remove_foreign_key :accounts, :branches
+ #
+ # Removes the foreign key on +accounts.owner_id+.
+ #
+ # remove_foreign_key :accounts, column: :owner_id
+ #
+ # Removes the foreign key named +special_fk_name+ on the +accounts+ table.
+ #
+ # remove_foreign_key :accounts, name: :special_fk_name
+ #
+ def remove_foreign_key(from_table, options_or_to_table = {})
+ return unless supports_foreign_keys?
+
+ if options_or_to_table.is_a?(Hash)
+ options = options_or_to_table
+ else
+ options = { column: foreign_key_column_for(options_or_to_table) }
+ end
+
+ fk_name_to_delete = options.fetch(:name) do
+ fk_to_delete = foreign_keys(from_table).detect {|fk| fk.column == options[:column] }
+
+ if fk_to_delete
+ fk_to_delete.name
+ else
+ raise ArgumentError, "Table '#{from_table}' has no foreign key on column '#{options[:column]}'"
+ end
+ end
+
+ at = create_alter_table from_table
+ at.drop_foreign_key fk_name_to_delete
+
+ execute schema_creation.accept(at)
+ end
+
+ def foreign_key_column_for(table_name) # :nodoc:
+ "#{table_name.to_s.singularize}_id"
+ end
+
+ def dump_schema_information #:nodoc:
+ sm_table = ActiveRecord::Migrator.schema_migrations_table_name
+
+ ActiveRecord::SchemaMigration.order('version').map { |sm|
+ "INSERT INTO #{sm_table} (version) VALUES ('#{sm.version}');"
+ }.join "\n\n"
+ end
+
+ # Should not be called normally, but this operation is non-destructive.
+ # The migrations module handles this automatically.
+ def initialize_schema_migrations_table
+ ActiveRecord::SchemaMigration.create_table
+ end
+
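+      # Records every migration version up to and including +version+ (as found in
+      # +migrations_paths+) in the schema_migrations table, without running the
+      # migrations themselves.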
+ def assume_migrated_upto_version(version, migrations_paths = ActiveRecord::Migrator.migrations_paths)
+ migrations_paths = Array(migrations_paths)
+ version = version.to_i
+ sm_table = quote_table_name(ActiveRecord::Migrator.schema_migrations_table_name)
+
+ migrated = select_values("SELECT version FROM #{sm_table}").map { |v| v.to_i }
+ paths = migrations_paths.map {|p| "#{p}/[0-9]*_*.rb" }
+ versions = Dir[*paths].map do |filename|
+ filename.split('/').last.split('_').first.to_i
+ end
+
+ unless migrated.include?(version)
+ execute "INSERT INTO #{sm_table} (version) VALUES ('#{version}')"
+ end
+
+ inserted = Set.new
+ (versions - migrated).each do |v|
+ if inserted.include?(v)
+ raise "Duplicate migration #{v}. Please renumber your migrations to resolve the conflict."
+ elsif v < version
+ execute "INSERT INTO #{sm_table} (version) VALUES ('#{v}')"
+ inserted << v
+ end
+ end
+ end
+
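+      # Converts an abstract type (plus optional limit, precision and scale) into the
+      # adapter's native SQL type string. For example, a :string with a limit of 80
+      # might map to "varchar(80)", depending on the adapter's native_database_types.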
+ def type_to_sql(type, limit = nil, precision = nil, scale = nil) #:nodoc:
+ if native = native_database_types[type.to_sym]
+ column_type_sql = (native.is_a?(Hash) ? native[:name] : native).dup
+
+ if type == :decimal # ignore limit, use precision and scale
+ scale ||= native[:scale]
+
+ if precision ||= native[:precision]
+ if scale
+ column_type_sql << "(#{precision},#{scale})"
+ else
+ column_type_sql << "(#{precision})"
+ end
+ elsif scale
+ raise ArgumentError, "Error adding decimal column: precision cannot be empty if scale is specified"
+ end
+
+ elsif (type != :primary_key) && (limit ||= native.is_a?(Hash) && native[:limit])
+ column_type_sql << "(#{limit})"
+ end
+
+ column_type_sql
+ else
+ type.to_s
+ end
+ end
+
+ # Given a set of columns and an ORDER BY clause, returns the columns for a SELECT DISTINCT.
+      # Both PostgreSQL and Oracle override this for their custom DISTINCT syntax; they
+      # require the order columns to appear in the SELECT.
+ #
+ # columns_for_distinct("posts.id", ["posts.created_at desc"])
+ def columns_for_distinct(columns, orders) #:nodoc:
+ columns
+ end
+
+ # Adds timestamps (+created_at+ and +updated_at+) columns to the named table.
+ #
+ # add_timestamps(:suppliers)
+ #
+ def add_timestamps(table_name)
+ add_column table_name, :created_at, :datetime
+ add_column table_name, :updated_at, :datetime
+ end
+
+ # Removes the timestamp columns (+created_at+ and +updated_at+) from the table definition.
+ #
+ # remove_timestamps(:suppliers)
+ #
+ def remove_timestamps(table_name)
+ remove_column table_name, :updated_at
+ remove_column table_name, :created_at
+ end
+
+ def update_table_definition(table_name, base) #:nodoc:
+ Table.new(table_name, base)
+ end
+
+ def add_index_options(table_name, column_name, options = {}) #:nodoc:
+ column_names = Array(column_name)
+ index_name = index_name(table_name, column: column_names)
+
+ options.assert_valid_keys(:unique, :order, :name, :where, :length, :internal, :using, :algorithm, :type)
+
+ index_type = options[:unique] ? "UNIQUE" : ""
+ index_type = options[:type].to_s if options.key?(:type)
+ index_name = options[:name].to_s if options.key?(:name)
+ max_index_length = options.fetch(:internal, false) ? index_name_length : allowed_index_name_length
+
+ if options.key?(:algorithm)
+ algorithm = index_algorithms.fetch(options[:algorithm]) {
+ raise ArgumentError.new("Algorithm must be one of the following: #{index_algorithms.keys.map(&:inspect).join(', ')}")
+ }
+ end
+
+ using = "USING #{options[:using]}" if options[:using].present?
+
+ if supports_partial_index?
+ index_options = options[:where] ? " WHERE #{options[:where]}" : ""
+ end
+
+ if index_name.length > max_index_length
+ raise ArgumentError, "Index name '#{index_name}' on table '#{table_name}' is too long; the limit is #{max_index_length} characters"
+ end
+ if table_exists?(table_name) && index_name_exists?(table_name, index_name, false)
+ raise ArgumentError, "Index name '#{index_name}' on table '#{table_name}' already exists"
+ end
+ index_columns = quoted_columns_for_index(column_names, options).join(", ")
+
+ [index_name, index_type, index_columns, index_options, algorithm, using]
+ end
+
+ protected
+ def add_index_sort_order(option_strings, column_names, options = {})
+ if options.is_a?(Hash) && order = options[:order]
+ case order
+ when Hash
+ column_names.each {|name| option_strings[name] += " #{order[name].upcase}" if order.has_key?(name)}
+ when String
+ column_names.each {|name| option_strings[name] += " #{order.upcase}"}
+ end
+ end
+
+ return option_strings
+ end
+
+        # Overridden by the MySQL adapter to support index lengths.
+ def quoted_columns_for_index(column_names, options = {})
+ option_strings = Hash[column_names.map {|name| [name, '']}]
+
+ # add index sort order if supported
+ if supports_index_sort_order?
+ option_strings = add_index_sort_order(option_strings, column_names, options)
+ end
+
+ column_names.map {|name| quote_column_name(name) + option_strings[name]}
+ end
+
+ def options_include_default?(options)
+ options.include?(:default) && !(options[:null] == false && options[:default].nil?)
+ end
+
+ def index_name_for_remove(table_name, options = {})
+ index_name = index_name(table_name, options)
+
+ unless index_name_exists?(table_name, index_name, true)
+ if options.is_a?(Hash) && options.has_key?(:name)
+ options_without_column = options.dup
+ options_without_column.delete :column
+ index_name_without_column = index_name(table_name, options_without_column)
+
+ return index_name_without_column if index_name_exists?(table_name, index_name_without_column, false)
+ end
+
+ raise ArgumentError, "Index name '#{index_name}' on table '#{table_name}' does not exist"
+ end
+
+ index_name
+ end
+
+ def rename_table_indexes(table_name, new_name)
+ indexes(new_name).each do |index|
+ generated_index_name = index_name(table_name, column: index.columns)
+ if generated_index_name == index.name
+ rename_index new_name, generated_index_name, index_name(new_name, column: index.columns)
+ end
+ end
+ end
+
+ def rename_column_indexes(table_name, column_name, new_column_name)
+ column_name, new_column_name = column_name.to_s, new_column_name.to_s
+ indexes(table_name).each do |index|
+ next unless index.columns.include?(new_column_name)
+ old_columns = index.columns.dup
+ old_columns[old_columns.index(new_column_name)] = column_name
+ generated_index_name = index_name(table_name, column: old_columns)
+ if generated_index_name == index.name
+ rename_index table_name, generated_index_name, index_name(table_name, column: index.columns)
+ end
+ end
+ end
+
+ private
+ def create_table_definition(name, temporary, options, as = nil)
+ TableDefinition.new native_database_types, name, temporary, options, as
+ end
+
+ def create_alter_table(name)
+ AlterTable.new create_table_definition(name, false, {})
+ end
+
+ def foreign_key_name(table_name, options) # :nodoc:
+ options.fetch(:name) do
+ "fk_rails_#{SecureRandom.hex(5)}"
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb b/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb
new file mode 100644
index 0000000000..4a7f2aaca8
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract/transaction.rb
@@ -0,0 +1,197 @@
+module ActiveRecord
+ module ConnectionAdapters
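+    # Tracks whether a transaction has been committed, rolled back, or is still
+    # open (+nil+), so that records enlisted in the transaction can react to its
+    # outcome.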
+ class TransactionState
+ attr_reader :parent
+
+ VALID_STATES = Set.new([:committed, :rolledback, nil])
+
+ def initialize(state = nil)
+ @state = state
+ @parent = nil
+ end
+
+ def finalized?
+ @state
+ end
+
+ def committed?
+ @state == :committed
+ end
+
+ def rolledback?
+ @state == :rolledback
+ end
+
+ def set_state(state)
+ if !VALID_STATES.include?(state)
+ raise ArgumentError, "Invalid transaction state: #{state}"
+ end
+ @state = state
+ end
+ end
+
+ class NullTransaction #:nodoc:
+ def initialize; end
+ def closed?; true; end
+ def open?; false; end
+ def joinable?; false; end
+ def add_record(record); end
+ end
+
+ class Transaction #:nodoc:
+
+ attr_reader :connection, :state, :records, :savepoint_name
+ attr_writer :joinable
+
+ def initialize(connection, options)
+ @connection = connection
+ @state = TransactionState.new
+ @records = []
+ @joinable = options.fetch(:joinable, true)
+ end
+
+ def add_record(record)
+ if record.has_transactional_callbacks?
+ records << record
+ else
+ record.set_transaction_state(@state)
+ end
+ end
+
+ def rollback
+ @state.set_state(:rolledback)
+ end
+
+ def rollback_records
+ records.uniq.each do |record|
+ begin
+ record.rolledback! full_rollback?
+ rescue => e
+ record.logger.error(e) if record.respond_to?(:logger) && record.logger
+ end
+ end
+ end
+
+ def commit
+ @state.set_state(:committed)
+ end
+
+ def commit_records
+ records.uniq.each do |record|
+ begin
+ record.committed!
+ rescue => e
+ record.logger.error(e) if record.respond_to?(:logger) && record.logger
+ end
+ end
+ end
+
+ def full_rollback?; true; end
+ def joinable?; @joinable; end
+ def closed?; false; end
+ def open?; !closed?; end
+ end
+
+ class SavepointTransaction < Transaction
+
+ def initialize(connection, savepoint_name, options)
+ super(connection, options)
+ if options[:isolation]
+ raise ActiveRecord::TransactionIsolationError, "cannot set transaction isolation in a nested transaction"
+ end
+ connection.create_savepoint(@savepoint_name = savepoint_name)
+ end
+
+ def rollback
+ super
+ connection.rollback_to_savepoint(savepoint_name)
+ rollback_records
+ end
+
+ def commit
+ super
+ connection.release_savepoint(savepoint_name)
+ end
+
+ def full_rollback?; false; end
+ end
+
+ class RealTransaction < Transaction
+
+ def initialize(connection, options)
+ super
+ if options[:isolation]
+ connection.begin_isolated_db_transaction(options[:isolation])
+ else
+ connection.begin_db_transaction
+ end
+ end
+
+ def rollback
+ super
+ connection.rollback_db_transaction
+ rollback_records
+ end
+
+ def commit
+ super
+ connection.commit_db_transaction
+ commit_records
+ end
+ end
+
+ class TransactionManager #:nodoc:
+ def initialize(connection)
+ @stack = []
+ @connection = connection
+ end
+
+ def begin_transaction(options = {})
+ transaction =
+ if @stack.empty?
+ RealTransaction.new(@connection, options)
+ else
+ SavepointTransaction.new(@connection, "active_record_#{@stack.size}", options)
+ end
+ @stack.push(transaction)
+ transaction
+ end
+
+ def commit_transaction
+ @stack.pop.commit
+ end
+
+ def rollback_transaction
+ @stack.pop.rollback
+ end
+
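+      # Yields inside a newly started (real or savepoint) transaction, committing
+      # it when the block finishes and rolling it back if the block raises.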
+ def within_new_transaction(options = {})
+ transaction = begin_transaction options
+ yield
+ rescue Exception => error
+ transaction.rollback if transaction
+ raise
+ ensure
+ begin
+ transaction.commit unless error
+ rescue Exception
+ transaction.rollback
+ raise
+ ensure
+ @stack.pop if transaction
+ end
+ end
+
+ def open_transactions
+ @stack.size
+ end
+
+ def current_transaction
+ @stack.last || NULL_TRANSACTION
+ end
+
+ private
+ NULL_TRANSACTION = NullTransaction.new
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb b/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb
new file mode 100644
index 0000000000..a1b6671664
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract_adapter.rb
@@ -0,0 +1,479 @@
+require 'date'
+require 'bigdecimal'
+require 'bigdecimal/util'
+require 'active_record/type'
+require 'active_support/core_ext/benchmark'
+require 'active_record/connection_adapters/schema_cache'
+require 'active_record/connection_adapters/abstract/schema_dumper'
+require 'active_record/connection_adapters/abstract/schema_creation'
+require 'monitor'
+require 'arel/collectors/bind'
+require 'arel/collectors/sql_string'
+
+module ActiveRecord
+ module ConnectionAdapters # :nodoc:
+ extend ActiveSupport::Autoload
+
+ autoload_at 'active_record/connection_adapters/column' do
+ autoload :Column
+ autoload :NullColumn
+ end
+ autoload :ConnectionSpecification
+
+ autoload_at 'active_record/connection_adapters/abstract/schema_definitions' do
+ autoload :IndexDefinition
+ autoload :ColumnDefinition
+ autoload :ChangeColumnDefinition
+ autoload :TableDefinition
+ autoload :Table
+ autoload :AlterTable
+ end
+
+ autoload_at 'active_record/connection_adapters/abstract/connection_pool' do
+ autoload :ConnectionHandler
+ autoload :ConnectionManagement
+ end
+
+ autoload_under 'abstract' do
+ autoload :SchemaStatements
+ autoload :DatabaseStatements
+ autoload :DatabaseLimits
+ autoload :Quoting
+ autoload :ConnectionPool
+ autoload :QueryCache
+ autoload :Savepoints
+ end
+
+ autoload_at 'active_record/connection_adapters/abstract/transaction' do
+ autoload :TransactionManager
+ autoload :NullTransaction
+ autoload :RealTransaction
+ autoload :SavepointTransaction
+ autoload :TransactionState
+ end
+
+ # Active Record supports multiple database systems. AbstractAdapter and
+ # related classes form the abstraction layer which makes this possible.
+ # An AbstractAdapter represents a connection to a database, and provides an
+ # abstract interface for database-specific functionality such as establishing
+ # a connection, escaping values, building the right SQL fragments for ':offset'
+ # and ':limit' options, etc.
+ #
+ # All the concrete database adapters follow the interface laid down in this class.
+ # ActiveRecord::Base.connection returns an AbstractAdapter object, which
+ # you can use.
+ #
+    # Most of the methods in the adapter are useful during migrations. Most
+    # notably, the instance methods provided by SchemaStatements are very useful.
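+    #
+    # For example (the exact values depend on the configured adapter):
+    #
+    #   ActiveRecord::Base.connection.adapter_name          # e.g. "PostgreSQL"
+    #   ActiveRecord::Base.connection.supports_migrations?  # e.g. true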
+ class AbstractAdapter
+ include Quoting, DatabaseStatements, SchemaStatements
+ include DatabaseLimits
+ include QueryCache
+ include ActiveSupport::Callbacks
+ include MonitorMixin
+ include ColumnDumper
+
+ SIMPLE_INT = /\A\d+\z/
+
+ define_callbacks :checkout, :checkin
+
+ attr_accessor :visitor, :pool
+ attr_reader :schema_cache, :owner, :logger
+ alias :in_use? :owner
+
+ def self.type_cast_config_to_integer(config)
+ if config =~ SIMPLE_INT
+ config.to_i
+ else
+ config
+ end
+ end
+
+ def self.type_cast_config_to_boolean(config)
+ if config == "false"
+ false
+ else
+ config
+ end
+ end
+
+ attr_reader :prepared_statements
+
+ def initialize(connection, logger = nil, pool = nil) #:nodoc:
+ super()
+
+ @connection = connection
+ @owner = nil
+ @instrumenter = ActiveSupport::Notifications.instrumenter
+ @logger = logger
+ @pool = pool
+ @schema_cache = SchemaCache.new self
+ @visitor = nil
+ @prepared_statements = false
+ end
+
+ class BindCollector < Arel::Collectors::Bind
+ def compile(bvs, conn)
+ super(bvs.map { |bv| conn.quote(*bv.reverse) })
+ end
+ end
+
+ class SQLString < Arel::Collectors::SQLString
+ def compile(bvs, conn)
+ super(bvs)
+ end
+ end
+
+ def collector
+ if prepared_statements
+ SQLString.new
+ else
+ BindCollector.new
+ end
+ end
+
+ def valid_type?(type)
+ true
+ end
+
+ def schema_creation
+ SchemaCreation.new self
+ end
+
+ def lease
+ synchronize do
+ unless in_use?
+ @owner = Thread.current
+ end
+ end
+ end
+
+ def schema_cache=(cache)
+ cache.connection = self
+ @schema_cache = cache
+ end
+
+ def expire
+ @owner = nil
+ end
+
+ def unprepared_statement
+ old_prepared_statements, @prepared_statements = @prepared_statements, false
+ yield
+ ensure
+ @prepared_statements = old_prepared_statements
+ end
+
+ # Returns the human-readable name of the adapter. Use mixed case - one
+ # can always use downcase if needed.
+ def adapter_name
+ 'Abstract'
+ end
+
+ # Does this adapter support migrations?
+ def supports_migrations?
+ false
+ end
+
+ # Can this adapter determine the primary key for tables not attached
+ # to an Active Record class, such as join tables?
+ def supports_primary_key?
+ false
+ end
+
+ # Does this adapter support DDL rollbacks in transactions? That is, would
+ # CREATE TABLE or ALTER TABLE get rolled back by a transaction?
+ def supports_ddl_transactions?
+ false
+ end
+
+ def supports_bulk_alter?
+ false
+ end
+
+ # Does this adapter support savepoints?
+ def supports_savepoints?
+ false
+ end
+
+ # Should primary key values be selected from their corresponding
+ # sequence before the insert statement? If true, next_sequence_value
+ # is called before each insert to set the record's primary key.
+ def prefetch_primary_key?(table_name = nil)
+ false
+ end
+
+ # Does this adapter support index sort order?
+ def supports_index_sort_order?
+ false
+ end
+
+ # Does this adapter support partial indices?
+ def supports_partial_index?
+ false
+ end
+
+ # Does this adapter support explain?
+ def supports_explain?
+ false
+ end
+
+ # Does this adapter support setting the isolation level for a transaction?
+ def supports_transaction_isolation?
+ false
+ end
+
+ # Does this adapter support database extensions?
+ def supports_extensions?
+ false
+ end
+
+ # Does this adapter support creating indexes in the same statement as
+ # creating the table?
+ def supports_indexes_in_create?
+ false
+ end
+
+ # Does this adapter support creating foreign key constraints?
+ def supports_foreign_keys?
+ false
+ end
+
+ # This is meant to be implemented by the adapters that support extensions
+ def disable_extension(name)
+ end
+
+ # This is meant to be implemented by the adapters that support extensions
+ def enable_extension(name)
+ end
+
+ # A list of extensions, to be filled in by adapters that support them.
+ def extensions
+ []
+ end
+
+ # A list of index algorithms, to be filled by adapters that support them.
+ def index_algorithms
+ {}
+ end
+
+ # QUOTING ==================================================
+
+ # Returns a bind substitution value given a bind +column+ and +index+.
+ # NOTE: The +column+ param is currently used by the sqlserver-adapter.
+ def substitute_at(column, index)
+ Arel::Nodes::BindParam.new '?'
+ end
+
+ # REFERENTIAL INTEGRITY ====================================
+
+ # Override to turn off referential integrity while executing <tt>&block</tt>.
+ def disable_referential_integrity
+ yield
+ end
+
+ # CONNECTION MANAGEMENT ====================================
+
+ # Checks whether the connection to the database is still active. This includes
+ # checking whether the database is actually capable of responding, i.e. whether
+ # the connection isn't stale.
+ def active?
+ end
+
+ # Disconnects from the database if already connected, and establishes a
+ # new connection with the database. Implementors should call super if they
+ # override the default implementation.
+ def reconnect!
+ clear_cache!
+ reset_transaction
+ end
+
+ # Disconnects from the database if already connected. Otherwise, this
+ # method does nothing.
+ def disconnect!
+ clear_cache!
+ reset_transaction
+ end
+
+ # Reset the state of this connection, directing the DBMS to clear
+ # transactions and other connection-related server-side state. Usually a
+ # database-dependent operation.
+ #
+ # The default implementation does nothing; the implementation should be
+ # overridden by concrete adapters.
+ def reset!
+ # this should be overridden by concrete adapters
+ end
+
+ ###
+ # Clear any caching the database adapter may be doing, for example
+ # clearing the prepared statement cache. This is database specific.
+ def clear_cache!
+ # this should be overridden by concrete adapters
+ end
+
+ # Returns true if it is required to reload the connection between requests in development mode.
+ def requires_reloading?
+ false
+ end
+
+ # Checks whether the connection to the database is still active (i.e. not stale).
+ # This is done under the hood by calling <tt>active?</tt>. If the connection
+ # is no longer active, then this method will reconnect to the database.
+ def verify!(*ignored)
+ reconnect! unless active?
+ end
+
+ # Provides access to the underlying database driver for this adapter. For
+ # example, this method returns a Mysql object in case of MysqlAdapter,
+ # and a PGconn object in case of PostgreSQLAdapter.
+ #
+ # This is useful for when you need to call a proprietary method such as
+ # PostgreSQL's lo_* methods.
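+ #
+ # Illustrative only (the returned object depends on the adapter in use):
+ #
+ #   ActiveRecord::Base.connection.raw_connection # => #<Mysql2::Client:0x...>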
+ def raw_connection
+ @connection
+ end
+
+ def create_savepoint(name = nil)
+ end
+
+ def rollback_to_savepoint(name = nil)
+ end
+
+ def release_savepoint(name = nil)
+ end
+
+ def case_sensitive_modifier(node, table_attribute)
+ node
+ end
+
+ def case_sensitive_comparison(table, attribute, column, value)
+ table_attr = table[attribute]
+ value = case_sensitive_modifier(value, table_attr) unless value.nil?
+ table_attr.eq(value)
+ end
+
+ def case_insensitive_comparison(table, attribute, column, value)
+ table[attribute].lower.eq(table.lower(value))
+ end
+
+ def current_savepoint_name
+ current_transaction.savepoint_name
+ end
+
+ # Check the connection back in to the connection pool
+ def close
+ pool.checkin self
+ end
+
+ def type_map # :nodoc:
+ @type_map ||= Type::TypeMap.new.tap do |mapping|
+ initialize_type_map(mapping)
+ end
+ end
+
+ def new_column(name, default, cast_type, sql_type = nil, null = true)
+ Column.new(name, default, cast_type, sql_type, null)
+ end
+
+ def lookup_cast_type(sql_type) # :nodoc:
+ type_map.lookup(sql_type)
+ end
+
+ protected
+
+ def initialize_type_map(m) # :nodoc:
+ register_class_with_limit m, %r(boolean)i, Type::Boolean
+ register_class_with_limit m, %r(char)i, Type::String
+ register_class_with_limit m, %r(binary)i, Type::Binary
+ register_class_with_limit m, %r(text)i, Type::Text
+ register_class_with_limit m, %r(date)i, Type::Date
+ register_class_with_limit m, %r(time)i, Type::Time
+ register_class_with_limit m, %r(datetime)i, Type::DateTime
+ register_class_with_limit m, %r(float)i, Type::Float
+ register_class_with_limit m, %r(int)i, Type::Integer
+
+ m.alias_type %r(blob)i, 'binary'
+ m.alias_type %r(clob)i, 'text'
+ m.alias_type %r(timestamp)i, 'datetime'
+ m.alias_type %r(numeric)i, 'decimal'
+ m.alias_type %r(number)i, 'decimal'
+ m.alias_type %r(double)i, 'float'
+
+ m.register_type(%r(decimal)i) do |sql_type|
+ scale = extract_scale(sql_type)
+ precision = extract_precision(sql_type)
+
+ if scale == 0
+ # FIXME: Remove this class as well
+ Type::DecimalWithoutScale.new(precision: precision)
+ else
+ Type::Decimal.new(precision: precision, scale: scale)
+ end
+ end
+ end
+
+ def reload_type_map # :nodoc:
+ type_map.clear
+ initialize_type_map(type_map)
+ end
+
+ def register_class_with_limit(mapping, key, klass) # :nodoc:
+ mapping.register_type(key) do |*args|
+ limit = extract_limit(args.last)
+ klass.new(limit: limit)
+ end
+ end
+
+ def extract_scale(sql_type) # :nodoc:
+ case sql_type
+ when /\((\d+)\)/ then 0
+ when /\((\d+)(,(\d+))\)/ then $3.to_i
+ end
+ end
+
+ def extract_precision(sql_type) # :nodoc:
+ $1.to_i if sql_type =~ /\((\d+)(,\d+)?\)/
+ end
+
+ def extract_limit(sql_type) # :nodoc:
+ $1.to_i if sql_type =~ /\((.*)\)/
+ end
+
+ def translate_exception_class(e, sql)
+ message = "#{e.class.name}: #{e.message}: #{sql}"
+ @logger.error message if @logger
+ exception = translate_exception(e, message)
+ exception.set_backtrace e.backtrace
+ exception
+ end
+
+ def log(sql, name = "SQL", binds = [], statement_name = nil)
+ @instrumenter.instrument(
+ "sql.active_record",
+ :sql => sql,
+ :name => name,
+ :connection_id => object_id,
+ :statement_name => statement_name,
+ :binds => binds) { yield }
+ rescue => e
+ raise translate_exception_class(e, sql)
+ end
+
+ def translate_exception(exception, message)
+ # override in derived class
+ ActiveRecord::StatementInvalid.new(message, exception)
+ end
+
+ def without_prepared_statement?(binds)
+ !prepared_statements || binds.empty?
+ end
+
+ def column_for(table_name, column_name) # :nodoc:
+ column_name = column_name.to_s
+ columns(table_name).detect { |c| c.name == column_name } ||
+ raise(ActiveRecordError, "No such column: #{table_name}.#{column_name}")
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb b/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
new file mode 100644
index 0000000000..e5417a9556
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/abstract_mysql_adapter.rb
@@ -0,0 +1,841 @@
+require 'arel/visitors/bind_visitor'
+
+module ActiveRecord
+ module ConnectionAdapters
+ class AbstractMysqlAdapter < AbstractAdapter
+ include Savepoints
+
+ class SchemaCreation < AbstractAdapter::SchemaCreation
+ def visit_AddColumn(o)
+ add_column_position!(super, column_options(o))
+ end
+
+ private
+
+ def visit_DropForeignKey(name)
+ "DROP FOREIGN KEY #{name}"
+ end
+
+ def visit_TableDefinition(o)
+ name = o.name
+ create_sql = "CREATE#{' TEMPORARY' if o.temporary} TABLE #{quote_table_name(name)} "
+
+ statements = o.columns.map { |c| accept c }
+ statements.concat(o.indexes.map { |column_name, options| index_in_create(name, column_name, options) })
+
+ create_sql << "(#{statements.join(', ')}) " if statements.present?
+ create_sql << "#{o.options}"
+ create_sql << " AS #{@conn.to_sql(o.as)}" if o.as
+ create_sql
+ end
+
+ def visit_ChangeColumnDefinition(o)
+ column = o.column
+ options = o.options
+ sql_type = type_to_sql(o.type, options[:limit], options[:precision], options[:scale])
+ change_column_sql = "CHANGE #{quote_column_name(column.name)} #{quote_column_name(options[:name])} #{sql_type}"
+ add_column_options!(change_column_sql, options.merge(column: column))
+ add_column_position!(change_column_sql, options)
+ end
+
+ def add_column_position!(sql, options)
+ if options[:first]
+ sql << " FIRST"
+ elsif options[:after]
+ sql << " AFTER #{quote_column_name(options[:after])}"
+ end
+ sql
+ end
+
+ def index_in_create(table_name, column_name, options)
+ index_name, index_type, index_columns, index_options, index_algorithm, index_using = @conn.add_index_options(table_name, column_name, options)
+ "#{index_type} INDEX #{quote_column_name(index_name)} #{index_using} (#{index_columns})#{index_options} #{index_algorithm}"
+ end
+ end
+
+ def schema_creation
+ SchemaCreation.new self
+ end
+
+ class Column < ConnectionAdapters::Column # :nodoc:
+ attr_reader :collation, :strict, :extra
+
+ def initialize(name, default, cast_type, sql_type = nil, null = true, collation = nil, strict = false, extra = "")
+ @strict = strict
+ @collation = collation
+ @extra = extra
+ super(name, default, cast_type, sql_type, null)
+ assert_valid_default(default)
+ extract_default
+ end
+
+ def extract_default
+ if blob_or_text_column?
+ @default = null || strict ? nil : ''
+ elsif missing_default_forged_as_empty_string?(@default)
+ @default = nil
+ end
+ end
+
+ def has_default?
+ return false if blob_or_text_column? # MySQL forbids defaults on blob and text columns
+ super
+ end
+
+ def blob_or_text_column?
+ sql_type =~ /blob/i || type == :text
+ end
+
+ def case_sensitive?
+ collation && !collation.match(/_ci$/)
+ end
+
+ private
+
+ # MySQL misreports NOT NULL column default when none is given.
+ # We can't detect this for columns which may have a legitimate ''
+ # default (string) but we can for others (integer, datetime, boolean,
+ # and the rest).
+ #
+ # Test whether the column has default '', is not null, and is not
+ # a type allowing default ''.
+ def missing_default_forged_as_empty_string?(default)
+ type != :string && !null && default == ''
+ end
+
+ def assert_valid_default(default)
+ if blob_or_text_column? && default.present?
+ raise ArgumentError, "#{type} columns cannot have a default value: #{default.inspect}"
+ end
+ end
+ end
+
+ ##
+ # :singleton-method:
+ # By default, the MySQL adapters will consider all columns of type <tt>tinyint(1)</tt>
+ # as boolean. If you wish to disable this emulation (which was the default
+ # behavior in versions 0.13.1 and earlier) you can add the following line
+ # to your application.rb file (use MysqlAdapter or Mysql2Adapter, matching
+ # the adapter you use):
+ #
+ #   ActiveRecord::ConnectionAdapters::Mysql2Adapter.emulate_booleans = false
+ class_attribute :emulate_booleans
+ self.emulate_booleans = true
+
+ LOST_CONNECTION_ERROR_MESSAGES = [
+ "Server shutdown in progress",
+ "Broken pipe",
+ "Lost connection to MySQL server during query",
+ "MySQL server has gone away" ]
+
+ QUOTED_TRUE, QUOTED_FALSE = '1', '0'
+
+ NATIVE_DATABASE_TYPES = {
+ :primary_key => "int(11) auto_increment PRIMARY KEY",
+ :string => { :name => "varchar", :limit => 255 },
+ :text => { :name => "text" },
+ :integer => { :name => "int", :limit => 4 },
+ :float => { :name => "float" },
+ :decimal => { :name => "decimal" },
+ :datetime => { :name => "datetime" },
+ :time => { :name => "time" },
+ :date => { :name => "date" },
+ :binary => { :name => "blob" },
+ :boolean => { :name => "tinyint", :limit => 1 }
+ }
+
+ INDEX_TYPES = [:fulltext, :spatial]
+ INDEX_USINGS = [:btree, :hash]
+
+ # FIXME: Make the first parameter more similar for the two adapters
+ def initialize(connection, logger, connection_options, config)
+ super(connection, logger)
+ @connection_options, @config = connection_options, config
+ @quoted_column_names, @quoted_table_names = {}, {}
+
+ @visitor = Arel::Visitors::MySQL.new self
+
+ if self.class.type_cast_config_to_boolean(config.fetch(:prepared_statements) { true })
+ @prepared_statements = true
+ else
+ @prepared_statements = false
+ end
+ end
+
+ def adapter_name #:nodoc:
+ self.class::ADAPTER_NAME
+ end
+
+ # Returns true, since this connection adapter supports migrations.
+ def supports_migrations?
+ true
+ end
+
+ def supports_primary_key?
+ true
+ end
+
+ def supports_bulk_alter? #:nodoc:
+ true
+ end
+
+ # Technically MySQL allows creating indexes with a sort order syntax,
+ # but at the moment (5.5) it parses and ignores the order.
+ def supports_index_sort_order?
+ true
+ end
+
+ # MySQL 4 technically supports transaction isolation, but it is affected by a bug
+ # where the isolation level gets persisted for the whole session:
+ #
+ # http://bugs.mysql.com/bug.php?id=39170
+ def supports_transaction_isolation?
+ version[0] >= 5
+ end
+
+ def supports_indexes_in_create?
+ true
+ end
+
+ def supports_foreign_keys?
+ true
+ end
+
+ def native_database_types
+ NATIVE_DATABASE_TYPES
+ end
+
+ def index_algorithms
+ { default: 'ALGORITHM = DEFAULT', copy: 'ALGORITHM = COPY', inplace: 'ALGORITHM = INPLACE' }
+ end
+
+ # HELPER METHODS ===========================================
+
+ # The two drivers have slightly different ways of yielding hashes of results, so
+ # this method must be implemented to provide a uniform interface.
+ def each_hash(result) # :nodoc:
+ raise NotImplementedError
+ end
+
+ def new_column(field, default, cast_type, sql_type = nil, null = true, collation = "", extra = "") # :nodoc:
+ Column.new(field, default, cast_type, sql_type, null, collation, strict_mode?, extra)
+ end
+
+ # Must return the MySQL error number from the exception, if the exception has an
+ # error number.
+ def error_number(exception) # :nodoc:
+ raise NotImplementedError
+ end
+
+ # QUOTING ==================================================
+
+ def _quote(value) # :nodoc:
+ if value.is_a?(Type::Binary::Data)
+ "x'#{value.hex}'"
+ else
+ super
+ end
+ end
+
+ def quote_column_name(name) #:nodoc:
+ @quoted_column_names[name] ||= "`#{name.to_s.gsub('`', '``')}`"
+ end
+
+ def quote_table_name(name) #:nodoc:
+ @quoted_table_names[name] ||= quote_column_name(name).gsub('.', '`.`')
+ end
+
+ def quoted_true
+ QUOTED_TRUE
+ end
+
+ def unquoted_true
+ 1
+ end
+
+ def quoted_false
+ QUOTED_FALSE
+ end
+
+ def unquoted_false
+ 0
+ end
+
+ # REFERENTIAL INTEGRITY ====================================
+
+ def disable_referential_integrity #:nodoc:
+ old = select_value("SELECT @@FOREIGN_KEY_CHECKS")
+
+ begin
+ update("SET FOREIGN_KEY_CHECKS = 0")
+ yield
+ ensure
+ update("SET FOREIGN_KEY_CHECKS = #{old}")
+ end
+ end
+
+ # DATABASE STATEMENTS ======================================
+
+ def clear_cache!
+ super
+ reload_type_map
+ end
+
+ # Executes the SQL statement in the context of this connection.
+ def execute(sql, name = nil)
+ log(sql, name) { @connection.query(sql) }
+ end
+
+ # MysqlAdapter has to free a result after using it, so this method wraps
+ # statement execution so that callers don't have to care whether the result
+ # needs to be explicitly freed or not.
+ def execute_and_free(sql, name = nil) #:nodoc:
+ yield execute(sql, name)
+ end
+
+ def update_sql(sql, name = nil) #:nodoc:
+ super
+ @connection.affected_rows
+ end
+
+ def begin_db_transaction
+ execute "BEGIN"
+ end
+
+ def begin_isolated_db_transaction(isolation)
+ execute "SET TRANSACTION ISOLATION LEVEL #{transaction_isolation_levels.fetch(isolation)}"
+ begin_db_transaction
+ end
+
+ def commit_db_transaction #:nodoc:
+ execute "COMMIT"
+ end
+
+ def rollback_db_transaction #:nodoc:
+ execute "ROLLBACK"
+ end
+
+ # In the simple case, MySQL allows us to place JOINs directly into the UPDATE
+ # query. However, this does not allow for LIMIT, OFFSET and ORDER. To support
+ # these, we must use a subquery.
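+ #
+ # Illustrative SQL shapes of the two paths:
+ #
+ #   # simple case: the join is folded into the UPDATE itself
+ #   UPDATE posts INNER JOIN authors ON ... SET posts.flagged = 1 WHERE authors.banned = 1
+ #
+ #   # with LIMIT/OFFSET/ORDER: fall back to a subquery (see subquery_for below)
+ #   UPDATE posts SET flagged = 1 WHERE posts.id IN (SELECT id FROM (...) __active_record_temp)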
+ def join_to_update(update, select) #:nodoc:
+ if select.limit || select.offset || select.orders.any?
+ super
+ else
+ update.table select.source
+ update.wheres = select.constraints
+ end
+ end
+
+ def empty_insert_statement_value
+ "VALUES ()"
+ end
+
+ # SCHEMA STATEMENTS ========================================
+
+ # Drops the database specified by the +name+ argument
+ # and creates it again using the provided +options+.
+ def recreate_database(name, options = {})
+ drop_database(name)
+ sql = create_database(name, options)
+ reconnect!
+ sql
+ end
+
+ # Create a new MySQL database with optional <tt>:charset</tt> and <tt>:collation</tt>.
+ # Charset defaults to utf8.
+ #
+ # Example:
+ # create_database 'charset_test', charset: 'latin1', collation: 'latin1_bin'
+ # create_database 'matt_development'
+ # create_database 'matt_development', charset: :big5
+ def create_database(name, options = {})
+ if options[:collation]
+ execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}` COLLATE `#{options[:collation]}`"
+ else
+ execute "CREATE DATABASE `#{name}` DEFAULT CHARACTER SET `#{options[:charset] || 'utf8'}`"
+ end
+ end
+
+ # Drops a MySQL database.
+ #
+ # Example:
+ # drop_database('sebastian_development')
+ def drop_database(name) #:nodoc:
+ execute "DROP DATABASE IF EXISTS `#{name}`"
+ end
+
+ def current_database
+ select_value 'SELECT DATABASE() as db'
+ end
+
+ # Returns the database character set.
+ def charset
+ show_variable 'character_set_database'
+ end
+
+ # Returns the database collation strategy.
+ def collation
+ show_variable 'collation_database'
+ end
+
+ def tables(name = nil, database = nil, like = nil) #:nodoc:
+ sql = "SHOW TABLES "
+ sql << "IN #{quote_table_name(database)} " if database
+ sql << "LIKE #{quote(like)}" if like
+
+ execute_and_free(sql, 'SCHEMA') do |result|
+ result.collect { |field| field.first }
+ end
+ end
+
+ def table_exists?(name)
+ return false unless name.present?
+ return true if tables(nil, nil, name).any?
+
+ name = name.to_s
+ schema, table = name.split('.', 2)
+
+ unless table # A table was provided without a schema
+ table = schema
+ schema = nil
+ end
+
+ tables(nil, schema, table).any?
+ end
+
+ # Returns an array of indexes for the given table.
+ def indexes(table_name, name = nil) #:nodoc:
+ indexes = []
+ current_index = nil
+ execute_and_free("SHOW KEYS FROM #{quote_table_name(table_name)}", 'SCHEMA') do |result|
+ each_hash(result) do |row|
+ if current_index != row[:Key_name]
+ next if row[:Key_name] == 'PRIMARY' # skip the primary key
+ current_index = row[:Key_name]
+
+ mysql_index_type = row[:Index_type].downcase.to_sym
+ index_type = INDEX_TYPES.include?(mysql_index_type) ? mysql_index_type : nil
+ index_using = INDEX_USINGS.include?(mysql_index_type) ? mysql_index_type : nil
+ indexes << IndexDefinition.new(row[:Table], row[:Key_name], row[:Non_unique].to_i == 0, [], [], nil, nil, index_type, index_using)
+ end
+
+ indexes.last.columns << row[:Column_name]
+ indexes.last.lengths << row[:Sub_part]
+ end
+ end
+
+ indexes
+ end
+
+ # Returns an array of +Column+ objects for the table specified by +table_name+.
+ def columns(table_name)#:nodoc:
+ sql = "SHOW FULL FIELDS FROM #{quote_table_name(table_name)}"
+ execute_and_free(sql, 'SCHEMA') do |result|
+ each_hash(result).map do |field|
+ field_name = set_field_encoding(field[:Field])
+ sql_type = field[:Type]
+ cast_type = lookup_cast_type(sql_type)
+ new_column(field_name, field[:Default], cast_type, sql_type, field[:Null] == "YES", field[:Collation], field[:Extra])
+ end
+ end
+ end
+
+ def create_table(table_name, options = {}) #:nodoc:
+ super(table_name, options.reverse_merge(:options => "ENGINE=InnoDB"))
+ end
+
+ def bulk_change_table(table_name, operations) #:nodoc:
+ sqls = operations.flat_map do |command, args|
+ table, arguments = args.shift, args
+ method = :"#{command}_sql"
+
+ if respond_to?(method, true)
+ send(method, table, *arguments)
+ else
+ raise "Unknown method called : #{method}(#{arguments.inspect})"
+ end
+ end.join(", ")
+
+ execute("ALTER TABLE #{quote_table_name(table_name)} #{sqls}")
+ end
+
+ # Renames a table.
+ #
+ # Example:
+ # rename_table('octopuses', 'octopi')
+ def rename_table(table_name, new_name)
+ execute "RENAME TABLE #{quote_table_name(table_name)} TO #{quote_table_name(new_name)}"
+ rename_table_indexes(table_name, new_name)
+ end
+
+ def drop_table(table_name, options = {})
+ execute "DROP#{' TEMPORARY' if options[:temporary]} TABLE #{quote_table_name(table_name)}"
+ end
+
+ def rename_index(table_name, old_name, new_name)
+ if supports_rename_index?
+ execute "ALTER TABLE #{quote_table_name(table_name)} RENAME INDEX #{quote_table_name(old_name)} TO #{quote_table_name(new_name)}"
+ else
+ super
+ end
+ end
+
+ def change_column_default(table_name, column_name, default)
+ column = column_for(table_name, column_name)
+ change_column table_name, column_name, column.sql_type, :default => default
+ end
+
+ def change_column_null(table_name, column_name, null, default = nil)
+ column = column_for(table_name, column_name)
+
+ unless null || default.nil?
+ execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
+ end
+
+ change_column table_name, column_name, column.sql_type, :null => null
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ execute("ALTER TABLE #{quote_table_name(table_name)} #{change_column_sql(table_name, column_name, type, options)}")
+ end
+
+ def rename_column(table_name, column_name, new_column_name) #:nodoc:
+ execute("ALTER TABLE #{quote_table_name(table_name)} #{rename_column_sql(table_name, column_name, new_column_name)}")
+ rename_column_indexes(table_name, column_name, new_column_name)
+ end
+
+ def add_index(table_name, column_name, options = {}) #:nodoc:
+ index_name, index_type, index_columns, index_options, index_algorithm, index_using = add_index_options(table_name, column_name, options)
+ execute "CREATE #{index_type} INDEX #{quote_column_name(index_name)} #{index_using} ON #{quote_table_name(table_name)} (#{index_columns})#{index_options} #{index_algorithm}"
+ end
+
+ def foreign_keys(table_name)
+ fk_info = select_all <<-SQL.strip_heredoc
+ SELECT fk.referenced_table_name as 'to_table'
+ ,fk.referenced_column_name as 'primary_key'
+ ,fk.column_name as 'column'
+ ,fk.constraint_name as 'name'
+ FROM information_schema.key_column_usage fk
+ WHERE fk.referenced_column_name is not null
+ AND fk.table_schema = '#{@config[:database]}'
+ AND fk.table_name = '#{table_name}'
+ SQL
+
+ create_table_info = select_one("SHOW CREATE TABLE #{quote_table_name(table_name)}")["Create Table"]
+
+ fk_info.map do |row|
+ options = {
+ column: row['column'],
+ name: row['name'],
+ primary_key: row['primary_key']
+ }
+
+ options[:on_update] = extract_foreign_key_action(create_table_info, row['name'], "UPDATE")
+ options[:on_delete] = extract_foreign_key_action(create_table_info, row['name'], "DELETE")
+
+ ForeignKeyDefinition.new(table_name, row['to_table'], options)
+ end
+ end
+
+ # Maps logical Rails types to MySQL-specific data types.
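+ #
+ # Illustrative mappings, following the case analysis below:
+ #
+ #   type_to_sql(:integer)       # => "int(11)"
+ #   type_to_sql(:integer, 2)    # => "smallint"
+ #   type_to_sql(:text, 70_000)  # => "mediumtext"
+ #   type_to_sql(:binary, 2048)  # => "varbinary(2048)"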
+ def type_to_sql(type, limit = nil, precision = nil, scale = nil)
+ case type.to_s
+ when 'binary'
+ case limit
+ when 0..0xfff; "varbinary(#{limit})"
+ when nil; "blob"
+ when 0x1000..0xffffffff; "blob(#{limit})"
+ else raise(ActiveRecordError, "No binary type has character length #{limit}")
+ end
+ when 'integer'
+ case limit
+ when 1; 'tinyint'
+ when 2; 'smallint'
+ when 3; 'mediumint'
+ when nil, 4, 11; 'int(11)' # compatibility with MySQL default
+ when 5..8; 'bigint'
+ else raise(ActiveRecordError, "No integer type has byte size #{limit}")
+ end
+ when 'text'
+ case limit
+ when 0..0xff; 'tinytext'
+ when nil, 0x100..0xffff; 'text'
+ when 0x10000..0xffffff; 'mediumtext'
+ when 0x1000000..0xffffffff; 'longtext'
+ else raise(ActiveRecordError, "No text type has character length #{limit}")
+ end
+ else
+ super
+ end
+ end
+
+ def add_column_position!(sql, options)
+ if options[:first]
+ sql << " FIRST"
+ elsif options[:after]
+ sql << " AFTER #{quote_column_name(options[:after])}"
+ end
+ end
+
+ # SHOW VARIABLES LIKE 'name'
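+ #
+ # For example (the returned value depends on the server configuration):
+ #
+ #   show_variable('character_set_database') # => "utf8"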
+ def show_variable(name)
+ variables = select_all("SHOW VARIABLES LIKE '#{name}'", 'SCHEMA')
+ variables.first['Value'] unless variables.empty?
+ end
+
+ # Returns a table's primary key and the sequence it belongs to (always +nil+ for MySQL).
+ def pk_and_sequence_for(table)
+ execute_and_free("SHOW CREATE TABLE #{quote_table_name(table)}", 'SCHEMA') do |result|
+ create_table = each_hash(result).first[:"Create Table"]
+ if create_table.to_s =~ /PRIMARY KEY\s+(?:USING\s+\w+\s+)?\((.+)\)/
+ keys = $1.split(",").map { |key| key.delete('`"') }
+ keys.length == 1 ? [keys.first, nil] : nil
+ else
+ nil
+ end
+ end
+ end
+
+ # Returns just a table's primary key
+ def primary_key(table)
+ pk_and_sequence = pk_and_sequence_for(table)
+ pk_and_sequence && pk_and_sequence.first
+ end
+
+ def case_sensitive_modifier(node, table_attribute)
+ node = Arel::Nodes.build_quoted node, table_attribute
+ Arel::Nodes::Bin.new(node)
+ end
+
+ def case_sensitive_comparison(table, attribute, column, value)
+ if column.case_sensitive?
+ table[attribute].eq(value)
+ else
+ super
+ end
+ end
+
+ def case_insensitive_comparison(table, attribute, column, value)
+ if column.case_sensitive?
+ super
+ else
+ table[attribute].eq(value)
+ end
+ end
+
+ def limited_update_conditions(where_sql, quoted_table_name, quoted_primary_key)
+ where_sql
+ end
+
+ def strict_mode?
+ self.class.type_cast_config_to_boolean(@config.fetch(:strict, true))
+ end
+
+ def valid_type?(type)
+ !native_database_types[type].nil?
+ end
+
+ protected
+
+ def initialize_type_map(m) # :nodoc:
+ super
+ m.register_type(%r(enum)i) do |sql_type|
+ limit = sql_type[/^enum\((.+)\)/i, 1]
+ .split(',').map{|enum| enum.strip.length - 2}.max
+ Type::String.new(limit: limit)
+ end
+
+ m.register_type %r(tinytext)i, Type::Text.new(limit: 255)
+ m.register_type %r(tinyblob)i, Type::Binary.new(limit: 255)
+ m.register_type %r(mediumtext)i, Type::Text.new(limit: 16777215)
+ m.register_type %r(mediumblob)i, Type::Binary.new(limit: 16777215)
+ m.register_type %r(longtext)i, Type::Text.new(limit: 2147483647)
+ m.register_type %r(longblob)i, Type::Binary.new(limit: 2147483647)
+ m.register_type %r(^bigint)i, Type::Integer.new(limit: 8)
+ m.register_type %r(^int)i, Type::Integer.new(limit: 4)
+ m.register_type %r(^mediumint)i, Type::Integer.new(limit: 3)
+ m.register_type %r(^smallint)i, Type::Integer.new(limit: 2)
+ m.register_type %r(^tinyint)i, Type::Integer.new(limit: 1)
+ m.register_type %r(^float)i, Type::Float.new(limit: 24)
+ m.register_type %r(^double)i, Type::Float.new(limit: 53)
+
+ m.alias_type %r(tinyint\(1\))i, 'boolean' if emulate_booleans
+ m.alias_type %r(set)i, 'varchar'
+ m.alias_type %r(year)i, 'integer'
+ m.alias_type %r(bit)i, 'binary'
+ end
+
+ # MySQL is too stupid to create a temporary table for the subquery on its own,
+ # so we have to give it some prompting in the form of a subsubquery. Ugh!
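+ #
+ # The subquery built here is roughly of the form (illustrative):
+ #
+ #   SELECT id FROM (SELECT posts.id FROM posts WHERE ... LIMIT 10) __active_record_temp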
+ def subquery_for(key, select)
+ subsubselect = select.clone
+ subsubselect.projections = [key]
+
+ subselect = Arel::SelectManager.new(select.engine)
+ subselect.project Arel.sql(key.name)
+ subselect.from subsubselect.as('__active_record_temp')
+ end
+
+ def add_index_length(option_strings, column_names, options = {})
+ if options.is_a?(Hash) && length = options[:length]
+ case length
+ when Hash
+ column_names.each {|name| option_strings[name] += "(#{length[name]})" if length.has_key?(name) && length[name].present?}
+ when Fixnum
+ column_names.each {|name| option_strings[name] += "(#{length})"}
+ end
+ end
+
+ return option_strings
+ end
+
+ def quoted_columns_for_index(column_names, options = {})
+ option_strings = Hash[column_names.map {|name| [name, '']}]
+
+ # add index length
+ option_strings = add_index_length(option_strings, column_names, options)
+
+ # add index sort order
+ option_strings = add_index_sort_order(option_strings, column_names, options)
+
+ column_names.map {|name| quote_column_name(name) + option_strings[name]}
+ end
+
+ def translate_exception(exception, message)
+ case error_number(exception)
+ when 1062
+ RecordNotUnique.new(message, exception)
+ when 1452
+ InvalidForeignKey.new(message, exception)
+ else
+ super
+ end
+ end
+
+ def add_column_sql(table_name, column_name, type, options = {})
+ td = create_table_definition table_name, options[:temporary], options[:options]
+ cd = td.new_column_definition(column_name, type, options)
+ schema_creation.visit_AddColumn cd
+ end
+
+ def change_column_sql(table_name, column_name, type, options = {})
+ column = column_for(table_name, column_name)
+
+ unless options_include_default?(options)
+ options[:default] = column.default
+ end
+
+ unless options.has_key?(:null)
+ options[:null] = column.null
+ end
+
+ options[:name] = column.name
+ schema_creation.accept ChangeColumnDefinition.new column, type, options
+ end
+
+ def rename_column_sql(table_name, column_name, new_column_name)
+ column = column_for(table_name, column_name)
+ options = {
+ name: new_column_name,
+ default: column.default,
+ null: column.null,
+ auto_increment: column.extra == "auto_increment"
+ }
+
+ current_type = select_one("SHOW COLUMNS FROM #{quote_table_name(table_name)} LIKE '#{column_name}'", 'SCHEMA')["Type"]
+ schema_creation.accept ChangeColumnDefinition.new column, current_type, options
+ end
+
+ def remove_column_sql(table_name, column_name, type = nil, options = {})
+ "DROP #{quote_column_name(column_name)}"
+ end
+
+ def remove_columns_sql(table_name, *column_names)
+ column_names.map {|column_name| remove_column_sql(table_name, column_name) }
+ end
+
+ def add_index_sql(table_name, column_name, options = {})
+ index_name, index_type, index_columns = add_index_options(table_name, column_name, options)
+ "ADD #{index_type} INDEX #{index_name} (#{index_columns})"
+ end
+
+ def remove_index_sql(table_name, options = {})
+ index_name = index_name_for_remove(table_name, options)
+ "DROP INDEX #{index_name}"
+ end
+
+ def add_timestamps_sql(table_name)
+ [add_column_sql(table_name, :created_at, :datetime), add_column_sql(table_name, :updated_at, :datetime)]
+ end
+
+ def remove_timestamps_sql(table_name)
+ [remove_column_sql(table_name, :updated_at), remove_column_sql(table_name, :created_at)]
+ end
+
+ private
+
+ def version
+ @version ||= full_version.scan(/^(\d+)\.(\d+)\.(\d+)/).flatten.map { |v| v.to_i }
+ end
+
+ def mariadb?
+ full_version =~ /mariadb/i
+ end
+
+ def supports_views?
+ version[0] >= 5
+ end
+
+ def supports_rename_index?
+ mariadb? ? false : (version[0] == 5 && version[1] >= 7) || version[0] >= 6
+ end
+
+ def configure_connection
+ variables = @config.fetch(:variables, {}).stringify_keys
+
+ # By default, MySQL makes 'WHERE id IS NULL' select the last inserted id.
+ # Turn this off. See http://dev.rubyonrails.org/ticket/6778.
+ variables['sql_auto_is_null'] = 0
+
+ # Increase timeout so the server doesn't disconnect us.
+ wait_timeout = @config[:wait_timeout]
+ wait_timeout = 2147483 unless wait_timeout.is_a?(Fixnum)
+ variables['wait_timeout'] = self.class.type_cast_config_to_integer(wait_timeout)
+
+ # Make MySQL reject illegal values rather than truncating or blanking them, see
+ # http://dev.mysql.com/doc/refman/5.0/en/server-sql-mode.html#sqlmode_strict_all_tables
+ # If the user has provided another value for sql_mode, don't replace it.
+ unless variables.has_key?('sql_mode')
+ variables['sql_mode'] = strict_mode? ? 'STRICT_ALL_TABLES' : ''
+ end
+
+ # NAMES does not have an equals sign, see
+ # http://dev.mysql.com/doc/refman/5.0/en/set-statement.html#id944430
+ # (trailing comma because variable_assignments will always have content)
+ encoding = "NAMES #{@config[:encoding]}, " if @config[:encoding]
+
+ # Gather up all of the SET variables...
+ variable_assignments = variables.map do |k, v|
+ if v == ':default' || v == :default
+ "@@SESSION.#{k.to_s} = DEFAULT" # Sets the value to the global or compile default
+ elsif !v.nil?
+ "@@SESSION.#{k.to_s} = #{quote(v)}"
+ end
+ # or else nil; compact to clear nils out
+ end.compact.join(', ')
+
+ # ...and send them all in one query
+ @connection.query "SET #{encoding} #{variable_assignments}"
+ end
+
+ def extract_foreign_key_action(structure, name, action) # :nodoc:
+ if structure =~ /CONSTRAINT #{quote_column_name(name)} FOREIGN KEY .* REFERENCES .* ON #{action} (CASCADE|SET NULL|RESTRICT)/
+ case $1
+ when 'CASCADE'; :cascade
+ when 'SET NULL'; :nullify
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/column.rb b/activerecord/lib/active_record/connection_adapters/column.rb
new file mode 100644
index 0000000000..5f9cc6edd0
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/column.rb
@@ -0,0 +1,62 @@
+require 'set'
+
+module ActiveRecord
+ # :stopdoc:
+ module ConnectionAdapters
+ # An abstract definition of a column in a table.
+ class Column
+ TRUE_VALUES = [true, 1, '1', 't', 'T', 'true', 'TRUE', 'on', 'ON'].to_set
+ FALSE_VALUES = [false, 0, '0', 'f', 'F', 'false', 'FALSE', 'off', 'OFF'].to_set
+
+ module Format
+ ISO_DATE = /\A(\d{4})-(\d\d)-(\d\d)\z/
+ ISO_DATETIME = /\A(\d{4})-(\d\d)-(\d\d) (\d\d):(\d\d):(\d\d)(\.\d+)?\z/
+ end
+
+ attr_reader :name, :cast_type, :null, :sql_type, :default, :default_function
+
+ delegate :type, :precision, :scale, :limit, :klass, :accessor,
+ :number?, :binary?, :changed?,
+ :type_cast_from_user, :type_cast_from_database, :type_cast_for_database,
+ :type_cast_for_schema,
+ to: :cast_type
+
+ # Instantiates a new column in the table.
+ #
+ # +name+ is the column's name, such as <tt>supplier_id</tt> in <tt>supplier_id int(11)</tt>.
+ # +default+ is the type-casted default value, such as +new+ in <tt>sales_stage varchar(20) default 'new'</tt>.
+ # +cast_type+ is the object used for type casting and type information.
+ # +sql_type+ is used to extract the column's length, if necessary. For example +60+ in
+ # <tt>company_name varchar(60)</tt>.
+ # It will be mapped to one of the standard Rails SQL types in the <tt>type</tt> attribute.
+ # +null+ determines if this column allows +NULL+ values.
+ def initialize(name, default, cast_type, sql_type = nil, null = true)
+ @name = name
+ @cast_type = cast_type
+ @sql_type = sql_type
+ @null = null
+ @default = default
+ @default_function = nil
+ end
+
+ def has_default?
+ !default.nil?
+ end
+
+ # Returns the human name of the column.
+ #
+ # ===== Examples
+ # Column.new('sales_stage', ...).human_name # => 'Sales stage'
+ def human_name
+ Base.human_attribute_name(@name)
+ end
+
+ def with_type(type)
+ dup.tap do |clone|
+ clone.instance_variable_set('@cast_type', type)
+ end
+ end
+ end
+ end
+ # :startdoc:
+end
diff --git a/activerecord/lib/active_record/connection_adapters/connection_specification.rb b/activerecord/lib/active_record/connection_adapters/connection_specification.rb
new file mode 100644
index 0000000000..d28a54b8f9
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/connection_specification.rb
@@ -0,0 +1,270 @@
+require 'uri'
+
+module ActiveRecord
+ module ConnectionAdapters
+ class ConnectionSpecification #:nodoc:
+ attr_reader :config, :adapter_method
+
+ def initialize(config, adapter_method)
+ @config, @adapter_method = config, adapter_method
+ end
+
+ def initialize_dup(original)
+ @config = original.config.dup
+ end
+
+ # Expands a connection string into a hash.
+ class ConnectionUrlResolver # :nodoc:
+
+ # == Example
+ #
+ # url = "postgresql://foo:bar@localhost:9000/foo_test?pool=5&timeout=3000"
+ # ConnectionUrlResolver.new(url).to_hash
+ # # => {
+ # "adapter" => "postgresql",
+ # "host" => "localhost",
+ # "port" => 9000,
+ # "database" => "foo_test",
+ # "username" => "foo",
+ # "password" => "bar",
+ # "pool" => "5",
+ # "timeout" => "3000"
+ # }
+ def initialize(url)
+ raise "Database URL cannot be empty" if url.blank?
+ @uri = uri_parser.parse(url)
+ @adapter = @uri.scheme.gsub('-', '_')
+ @adapter = "postgresql" if @adapter == "postgres"
+
+ if @uri.opaque
+ @uri.opaque, @query = @uri.opaque.split('?', 2)
+ else
+ @query = @uri.query
+ end
+ end
+
+ # Converts the given URL to a full connection hash.
+ def to_hash
+ config = raw_config.reject { |_,value| value.blank? }
+ config.map { |key,value| config[key] = uri_parser.unescape(value) if value.is_a? String }
+ config
+ end
+
+ private
+
+ def uri
+ @uri
+ end
+
+ def uri_parser
+ @uri_parser ||= URI::Parser.new
+ end
+
+ # Converts the query parameters of the URI into a hash.
+ #
+ # "localhost?pool=5&reaping_frequency=2"
+ # # => { "pool" => "5", "reaping_frequency" => "2" }
+ #
+ # Returns an empty hash if no query is present.
+ #
+ # "localhost"
+ # # => {}
+ def query_hash
+ Hash[(@query || '').split("&").map { |pair| pair.split("=") }]
+ end
+
+ def raw_config
+ if uri.opaque
+ query_hash.merge({
+ "adapter" => @adapter,
+ "database" => uri.opaque })
+ else
+ query_hash.merge({
+ "adapter" => @adapter,
+ "username" => uri.user,
+ "password" => uri.password,
+ "port" => uri.port,
+ "database" => database_from_path,
+ "host" => uri.hostname })
+ end
+ end
+
+ # Returns the name of the database.
+ def database_from_path
+ if @adapter == 'sqlite3'
+ # 'sqlite3:/foo' is absolute, because that makes sense. The
+ # corresponding relative version, 'sqlite3:foo', is handled
+ # elsewhere, as an "opaque".
+
+ uri.path
+ else
+ # Only SQLite uses a filename as the "database" name; for
+ # anything else, a leading slash would be silly.
+
+ uri.path.sub(%r{^/}, "")
+ end
+ end
+ end
+
+ ##
+ # Builds a ConnectionSpecification from user input.
+ class Resolver # :nodoc:
+ attr_reader :configurations
+
+ # Accepts a hash two layers deep; keys on the first layer represent
+ # environments such as "production". Keys must be strings.
+ def initialize(configurations)
+ @configurations = configurations
+ end
+
+ # Returns a hash with database connection information.
+ #
+ # == Examples
+ #
+ # Full hash configuration.
+ #
+ # configurations = { "production" => { "host" => "localhost", "database" => "foo", "adapter" => "sqlite3" } }
+ # Resolver.new(configurations).resolve(:production)
+ # # => { "host" => "localhost", "database" => "foo", "adapter" => "sqlite3"}
+ #
+ # Initialized with URL configuration strings.
+ #
+ # configurations = { "production" => "postgresql://localhost/foo" }
+ # Resolver.new(configurations).resolve(:production)
+ # # => { "host" => "localhost", "database" => "foo", "adapter" => "postgresql" }
+ #
+ def resolve(config)
+ if config
+ resolve_connection config
+ elsif env = ActiveRecord::ConnectionHandling::RAILS_ENV.call
+ resolve_symbol_connection env.to_sym
+ else
+ raise AdapterNotSpecified
+ end
+ end
+
+ # Expands each key in the @configurations hash into a fully resolved hash.
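+ #
+ # For example (following the resolution rules documented above):
+ #
+ #   Resolver.new("production" => "postgresql://localhost/foo").resolve_all
+ #   # => { "production" =>
+ #   #      { "adapter" => "postgresql", "host" => "localhost", "database" => "foo" } }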
+ def resolve_all
+ config = configurations.dup
+ config.each do |key, value|
+ config[key] = resolve(value) if value
+ end
+ config
+ end
+
+ # Returns an instance of ConnectionSpecification for a given adapter.
+ # Accepts a hash one layer deep that contains all connection information.
+ #
+ # == Example
+ #
+ # config = { "production" => { "host" => "localhost", "database" => "foo", "adapter" => "sqlite3" } }
+ # spec = Resolver.new(config).spec(:production)
+ # spec.adapter_method
+ # # => "sqlite3_connection"
+ # spec.config
+ # # => { "host" => "localhost", "database" => "foo", "adapter" => "sqlite3" }
+ #
+ def spec(config)
+ spec = resolve(config).symbolize_keys
+
+ raise(AdapterNotSpecified, "database configuration does not specify adapter") unless spec.key?(:adapter)
+
+ path_to_adapter = "active_record/connection_adapters/#{spec[:adapter]}_adapter"
+ begin
+ require path_to_adapter
+ rescue Gem::LoadError => e
+ raise Gem::LoadError, "Specified '#{spec[:adapter]}' for database adapter, but the gem is not loaded. Add `gem '#{e.name}'` to your Gemfile (and ensure its version is at the minimum required by ActiveRecord)."
+ rescue LoadError => e
+ raise LoadError, "Could not load '#{path_to_adapter}'. Make sure that the adapter in config/database.yml is valid. If you use an adapter other than 'mysql', 'mysql2', 'postgresql' or 'sqlite3' add the necessary adapter gem to the Gemfile.", e.backtrace
+ end
+
+ adapter_method = "#{spec[:adapter]}_connection"
+ ConnectionSpecification.new(spec, adapter_method)
+ end
+
+ private
+
+ # Returns a fully resolved connection; accepts a hash, string, or symbol.
+ # Always returns a hash.
+ #
+ # == Examples
+ #
+ # Symbol representing current environment.
+ #
+ # Resolver.new("production" => {}).resolve_connection(:production)
+ # # => {}
+ #
+ # One layer deep hash of connection values.
+ #
+ # Resolver.new({}).resolve_connection("adapter" => "sqlite3")
+ # # => { "adapter" => "sqlite3" }
+ #
+ # Connection URL.
+ #
+ # Resolver.new({}).resolve_connection("postgresql://localhost/foo")
+ # # => { "host" => "localhost", "database" => "foo", "adapter" => "postgresql" }
+ #
+ def resolve_connection(spec)
+ case spec
+ when Symbol
+ resolve_symbol_connection spec
+ when String
+ resolve_string_connection spec
+ when Hash
+ resolve_hash_connection spec
+ end
+ end
+
+ def resolve_string_connection(spec)
+ # Rails has historically accepted a string to mean either
+ # an environment key or a URL spec. This ambiguous behaviour is
+ # deprecated, and in the future this method can be removed in
+ # favor of resolve_url_connection.
+ if configurations.key?(spec) || spec !~ /:/
+ ActiveSupport::Deprecation.warn "Passing a string to ActiveRecord::Base.establish_connection " \
+ "for a configuration lookup is deprecated, please pass a symbol (#{spec.to_sym.inspect}) instead"
+ resolve_symbol_connection(spec)
+ else
+ resolve_url_connection(spec)
+ end
+ end
+
+ # Takes an environment such as `:production` or `:development`.
+ # This requires that @configurations was initialized with a matching key.
+ #
+ # Resolver.new("production" => {}).resolve_symbol_connection(:production)
+ # # => {}
+ #
+ def resolve_symbol_connection(spec)
+ if config = configurations[spec.to_s]
+ resolve_connection(config)
+ else
+ raise(AdapterNotSpecified, "'#{spec}' database is not configured. Available: #{configurations.keys.inspect}")
+ end
+ end
+
+ # Accepts a hash. Expands the "url" key, which contains a database
+ # connection URL, into a full connection hash and merges it with the
+ # rest of the hash. Connection details inside the "url" key win any
+ # merge conflicts.
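+ #
+ # For example (illustrative):
+ #
+ #   Resolver.new({}).resolve_hash_connection(
+ #     "pool" => 5, "url" => "postgresql://localhost/foo")
+ #   # => { "adapter" => "postgresql", "host" => "localhost",
+ #   #      "database" => "foo", "pool" => 5 }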
+ def resolve_hash_connection(spec)
+ if spec["url"] && spec["url"] !~ /^jdbc:/
+ connection_hash = resolve_url_connection(spec.delete("url"))
+ spec.merge!(connection_hash)
+ end
+ spec
+ end
+
+ # Takes a connection URL.
+ #
+ # Resolver.new({}).resolve_url_connection("postgresql://localhost/foo")
+ # # => { "host" => "localhost", "database" => "foo", "adapter" => "postgresql" }
+ #
+ def resolve_url_connection(url)
+ ConnectionUrlResolver.new(url).to_hash
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb b/activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb
new file mode 100644
index 0000000000..39d52e6349
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql2_adapter.rb
@@ -0,0 +1,281 @@
+require 'active_record/connection_adapters/abstract_mysql_adapter'
+
+gem 'mysql2', '~> 0.3.13'
+require 'mysql2'
+
+module ActiveRecord
+ module ConnectionHandling # :nodoc:
+ # Establishes a connection to the database that's used by all Active Record objects.
+ def mysql2_connection(config)
+ config = config.symbolize_keys
+
+ config[:username] = 'root' if config[:username].nil?
+
+ if Mysql2::Client.const_defined? :FOUND_ROWS
+ config[:flags] = Mysql2::Client::FOUND_ROWS
+ end
+
+ client = Mysql2::Client.new(config)
+ options = [config[:host], config[:username], config[:password], config[:database], config[:port], config[:socket], 0]
+ ConnectionAdapters::Mysql2Adapter.new(client, logger, options, config)
+ rescue Mysql2::Error => error
+ if error.message.include?("Unknown database")
+ raise ActiveRecord::NoDatabaseError.new(error.message, error)
+ else
+ raise
+ end
+ end
+ end
+
+ module ConnectionAdapters
+ class Mysql2Adapter < AbstractMysqlAdapter
+ ADAPTER_NAME = 'Mysql2'
+
+ def initialize(connection, logger, connection_options, config)
+ super
+ @prepared_statements = false
+ configure_connection
+ end
+
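+ # 191 is the longest safely indexable key length for utf8mb4: InnoDB's default
+ # index key prefix limit is 767 bytes and utf8mb4 characters can take up to
+ # 4 bytes, so 767 / 4 = 191.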
+ MAX_INDEX_LENGTH_FOR_UTF8MB4 = 191
+ def initialize_schema_migrations_table
+ if @config[:encoding] == 'utf8mb4'
+ ActiveRecord::SchemaMigration.create_table(MAX_INDEX_LENGTH_FOR_UTF8MB4)
+ else
+ ActiveRecord::SchemaMigration.create_table
+ end
+ end
+
+ def supports_explain?
+ true
+ end
+
+ # HELPER METHODS ===========================================
+
+ def each_hash(result) # :nodoc:
+ if block_given?
+ result.each(:as => :hash, :symbolize_keys => true) do |row|
+ yield row
+ end
+ else
+ to_enum(:each_hash, result)
+ end
+ end
+
+ def error_number(exception)
+ exception.error_number if exception.respond_to?(:error_number)
+ end
+
+ # QUOTING ==================================================
+
+ def quote_string(string)
+ @connection.escape(string)
+ end
+
+ def quoted_date(value)
+ if value.acts_like?(:time) && value.respond_to?(:usec)
+ "#{super}.#{sprintf("%06d", value.usec)}"
+ else
+ super
+ end
+ end
+
+ # CONNECTION MANAGEMENT ====================================
+
+ def active?
+ return false unless @connection
+ @connection.ping
+ end
+
+ def reconnect!
+ super
+ disconnect!
+ connect
+ end
+ alias :reset! :reconnect!
+
+ # Disconnects from the database if already connected.
+ # Otherwise, this method does nothing.
+ def disconnect!
+ super
+ unless @connection.nil?
+ @connection.close
+ @connection = nil
+ end
+ end
+
+ # DATABASE STATEMENTS ======================================
+
+ def explain(arel, binds = [])
+ sql = "EXPLAIN #{to_sql(arel, binds.dup)}"
+ start = Time.now
+ result = exec_query(sql, 'EXPLAIN', binds)
+ elapsed = Time.now - start
+
+ ExplainPrettyPrinter.new.pp(result, elapsed)
+ end
+
+ class ExplainPrettyPrinter # :nodoc:
+ # Pretty prints the result of an EXPLAIN in a way that resembles the output of the
+ # MySQL shell:
+ #
+ # +----+-------------+-------+-------+---------------+---------+---------+-------+------+-------------+
+ # | id | select_type | table | type | possible_keys | key | key_len | ref | rows | Extra |
+ # +----+-------------+-------+-------+---------------+---------+---------+-------+------+-------------+
+ # | 1 | SIMPLE | users | const | PRIMARY | PRIMARY | 4 | const | 1 | |
+ # | 1 | SIMPLE | posts | ALL | NULL | NULL | NULL | NULL | 1 | Using where |
+ # +----+-------------+-------+-------+---------------+---------+---------+-------+------+-------------+
+ # 2 rows in set (0.00 sec)
+ #
+ # This is an exercise in Ruby hyperrealism :).
+ def pp(result, elapsed)
+ widths = compute_column_widths(result)
+ separator = build_separator(widths)
+
+ pp = []
+
+ pp << separator
+ pp << build_cells(result.columns, widths)
+ pp << separator
+
+ result.rows.each do |row|
+ pp << build_cells(row, widths)
+ end
+
+ pp << separator
+ pp << build_footer(result.rows.length, elapsed)
+
+ pp.join("\n") + "\n"
+ end
+
+ private
+
+ def compute_column_widths(result)
+ [].tap do |widths|
+ result.columns.each_with_index do |column, i|
+ cells_in_column = [column] + result.rows.map {|r| r[i].nil? ? 'NULL' : r[i].to_s}
+ widths << cells_in_column.map(&:length).max
+ end
+ end
+ end
+
+ def build_separator(widths)
+ padding = 1
+ '+' + widths.map {|w| '-' * (w + (padding*2))}.join('+') + '+'
+ end
+
+ def build_cells(items, widths)
+ cells = []
+ items.each_with_index do |item, i|
+ item = 'NULL' if item.nil?
+ justifier = item.is_a?(Numeric) ? 'rjust' : 'ljust'
+ cells << item.to_s.send(justifier, widths[i])
+ end
+ '| ' + cells.join(' | ') + ' |'
+ end
+
+ def build_footer(nrows, elapsed)
+ rows_label = nrows == 1 ? 'row' : 'rows'
+ "#{nrows} #{rows_label} in set (%.2f sec)" % elapsed
+ end
+ end
+
+ # FIXME: re-enable the following once a "better" query_cache solution is in core
+ #
+ # The overrides below perform much better than the originals in AbstractAdapter
+ # because we're able to take advantage of mysql2's lazy-loading capabilities
+ #
+ # # Returns a record hash with the column names as keys and column values
+ # # as values.
+ # def select_one(sql, name = nil)
+ # result = execute(sql, name)
+ # result.each(as: :hash) do |r|
+ # return r
+ # end
+ # end
+ #
+ # # Returns a single value from a record
+ # def select_value(sql, name = nil)
+ # result = execute(sql, name)
+ # if first = result.first
+ # first.first
+ # end
+ # end
+ #
+ # # Returns an array of the values of the first column in a select:
+ # # select_values("SELECT id FROM companies LIMIT 3") => [1,2,3]
+ # def select_values(sql, name = nil)
+ # execute(sql, name).map { |row| row.first }
+ # end
+
+ # Returns an array of arrays containing the field values.
+ # Order is the same as that returned by +columns+.
+ def select_rows(sql, name = nil, binds = [])
+ execute(sql, name).to_a
+ end
+
+ # Executes the SQL statement in the context of this connection.
+ def execute(sql, name = nil)
+ if @connection
+ # make sure we carry over any changes to ActiveRecord::Base.default_timezone that have been
+ # made since we established the connection
+ @connection.query_options[:database_timezone] = ActiveRecord::Base.default_timezone
+ end
+
+ super
+ end
+
+ def exec_query(sql, name = 'SQL', binds = [])
+ result = execute(sql, name)
+ ActiveRecord::Result.new(result.fields, result.to_a)
+ end
+
+ alias exec_without_stmt exec_query
+
+ # Returns an ActiveRecord::Result instance.
+ def select(sql, name = nil, binds = [])
+ exec_query(sql, name)
+ end
+
+ def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
+ super
+ id_value || @connection.last_id
+ end
+ alias :create :insert_sql
+
+ def exec_insert(sql, name, binds, pk = nil, sequence_name = nil)
+ execute to_sql(sql, binds), name
+ end
+
+ def exec_delete(sql, name, binds)
+ execute to_sql(sql, binds), name
+ @connection.affected_rows
+ end
+ alias :exec_update :exec_delete
+
+ def last_inserted_id(result)
+ @connection.last_id
+ end
+
+ private
+
+ def connect
+ @connection = Mysql2::Client.new(@config)
+ configure_connection
+ end
+
+ def configure_connection
+ @connection.query_options.merge!(:as => :array)
+ super
+ end
+
+ def full_version
+ @full_version ||= @connection.info[:version]
+ end
+
+ def set_field_encoding field_name
+ field_name
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/mysql_adapter.rb b/activerecord/lib/active_record/connection_adapters/mysql_adapter.rb
new file mode 100644
index 0000000000..a03bc28744
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/mysql_adapter.rb
@@ -0,0 +1,487 @@
+require 'active_record/connection_adapters/abstract_mysql_adapter'
+require 'active_record/connection_adapters/statement_pool'
+require 'active_support/core_ext/hash/keys'
+
+gem 'mysql', '~> 2.9'
+require 'mysql'
+
+class Mysql
+ class Time
+ def to_date
+ Date.new(year, month, day)
+ end
+ end
+ class Stmt; include Enumerable end
+ class Result; include Enumerable end
+end
+
+module ActiveRecord
+ module ConnectionHandling # :nodoc:
+ # Establishes a connection to the database that's used by all Active Record objects.
+ def mysql_connection(config)
+ config = config.symbolize_keys
+ host = config[:host]
+ port = config[:port]
+ socket = config[:socket]
+ username = config[:username] ? config[:username].to_s : 'root'
+ password = config[:password].to_s
+ database = config[:database]
+
+ mysql = Mysql.init
+ mysql.ssl_set(config[:sslkey], config[:sslcert], config[:sslca], config[:sslcapath], config[:sslcipher]) if config[:sslca] || config[:sslkey]
+
+ default_flags = Mysql.const_defined?(:CLIENT_MULTI_RESULTS) ? Mysql::CLIENT_MULTI_RESULTS : 0
+ default_flags |= Mysql::CLIENT_FOUND_ROWS if Mysql.const_defined?(:CLIENT_FOUND_ROWS)
+ options = [host, username, password, database, port, socket, default_flags]
+ ConnectionAdapters::MysqlAdapter.new(mysql, logger, options, config)
+ rescue Mysql::Error => error
+ if error.message.include?("Unknown database")
+ raise ActiveRecord::NoDatabaseError.new(error.message, error)
+ else
+ raise
+ end
+ end
+ end
+
+ module ConnectionAdapters
+ # The MySQL adapter will work with both Ruby/MySQL, a Ruby-based MySQL driver, and
+ # the faster C-based MySQL/Ruby driver (available both as a gem and from http://www.tmtm.org/en/mysql/ruby/).
+ #
+ # Options:
+ #
+ # * <tt>:host</tt> - Defaults to "localhost".
+ # * <tt>:port</tt> - Defaults to 3306.
+ # * <tt>:socket</tt> - Defaults to "/tmp/mysql.sock".
+ # * <tt>:username</tt> - Defaults to "root"
+ # * <tt>:password</tt> - Defaults to nothing.
+ # * <tt>:database</tt> - The name of the database. No default, must be provided.
+ # * <tt>:encoding</tt> - (Optional) Sets the client encoding by executing "SET NAMES <encoding>" after connection.
+ # * <tt>:reconnect</tt> - Defaults to false (See MySQL documentation: http://dev.mysql.com/doc/refman/5.0/en/auto-reconnect.html).
+ # * <tt>:strict</tt> - Defaults to true. Enable STRICT_ALL_TABLES. (See MySQL documentation: http://dev.mysql.com/doc/refman/5.0/en/server-sql-mode.html)
+ # * <tt>:variables</tt> - (Optional) A hash of session variables to send as `SET @@SESSION.key = value` on each database connection. Use the value `:default` to set a variable to its DEFAULT value. (See MySQL documentation: http://dev.mysql.com/doc/refman/5.0/en/set-statement.html).
+ # * <tt>:sslca</tt> - Necessary to use MySQL with an SSL connection.
+ # * <tt>:sslkey</tt> - Necessary to use MySQL with an SSL connection.
+ # * <tt>:sslcert</tt> - Necessary to use MySQL with an SSL connection.
+ # * <tt>:sslcapath</tt> - Necessary to use MySQL with an SSL connection.
+ # * <tt>:sslcipher</tt> - Necessary to use MySQL with an SSL connection.
+ #
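+ # An illustrative database.yml entry using these options (names are placeholders):
+ #
+ #   development:
+ #     adapter: mysql
+ #     host: localhost
+ #     database: myapp_development
+ #     username: root
+ #     password:
+ #     encoding: utf8
+ #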
+ class MysqlAdapter < AbstractMysqlAdapter
+ ADAPTER_NAME = 'MySQL'
+
+ class StatementPool < ConnectionAdapters::StatementPool
+ def initialize(connection, max = 1000)
+ super
+ @cache = Hash.new { |h,pid| h[pid] = {} }
+ end
+
+ def each(&block); cache.each(&block); end
+ def key?(key); cache.key?(key); end
+ def [](key); cache[key]; end
+ def length; cache.length; end
+ def delete(key); cache.delete(key); end
+
+ def []=(sql, key)
+ while @max <= cache.size
+ cache.shift.last[:stmt].close
+ end
+ cache[sql] = key
+ end
+
+ def clear
+ cache.values.each do |hash|
+ hash[:stmt].close
+ end
+ cache.clear
+ end
+
+ private
+ def cache
+ @cache[Process.pid]
+ end
+ end
+
+ def initialize(connection, logger, connection_options, config)
+ super
+ @statements = StatementPool.new(@connection,
+ self.class.type_cast_config_to_integer(config.fetch(:statement_limit) { 1000 }))
+ @client_encoding = nil
+ connect
+ end
+
+ # Returns true, since this connection adapter supports prepared statement
+ # caching.
+ def supports_statement_cache?
+ true
+ end
+
+ # HELPER METHODS ===========================================
+
+ def each_hash(result) # :nodoc:
+ if block_given?
+ result.each_hash do |row|
+ row.symbolize_keys!
+ yield row
+ end
+ else
+ to_enum(:each_hash, result)
+ end
+ end
+
+ def error_number(exception) # :nodoc:
+ exception.errno if exception.respond_to?(:errno)
+ end
+
+ # QUOTING ==================================================
+
+ def quote_string(string) #:nodoc:
+ @connection.quote(string)
+ end
+
+ # CONNECTION MANAGEMENT ====================================
+
+ def active?
+ if @connection.respond_to?(:stat)
+ @connection.stat
+ else
+ @connection.query 'select 1'
+ end
+
+ # mysql-ruby doesn't raise an exception when stat fails.
+ if @connection.respond_to?(:errno)
+ @connection.errno.zero?
+ else
+ true
+ end
+ rescue Mysql::Error
+ false
+ end
+
+ def reconnect!
+ super
+ disconnect!
+ connect
+ end
+
+ # Disconnects from the database if already connected. Otherwise, this
+ # method does nothing.
+ def disconnect!
+ super
+ @connection.close rescue nil
+ end
+
+ def reset!
+ if @connection.respond_to?(:change_user)
+ # See http://bugs.mysql.com/bug.php?id=33540 -- the workaround way to
+ # reset the connection is to change the user to the same user.
+ @connection.change_user(@config[:username], @config[:password], @config[:database])
+ configure_connection
+ end
+ end
+
+ # DATABASE STATEMENTS ======================================
+
+ def select_rows(sql, name = nil, binds = [])
+ @connection.query_with_result = true
+ rows = exec_query(sql, name, binds).rows
+ @connection.more_results && @connection.next_result # invoking stored procedures with CLIENT_MULTI_RESULTS requires this to tidy up, otherwise the connection will be dropped
+ rows
+ end
+
+ # Clears the prepared statements cache.
+ def clear_cache!
+ super
+ @statements.clear
+ end
+
+ # Taken from here:
+ # https://github.com/tmtm/ruby-mysql/blob/master/lib/mysql/charset.rb
+ # Author: TOMITA Masahiro <tommy@tmtm.org>
+ ENCODINGS = {
+ "armscii8" => nil,
+ "ascii" => Encoding::US_ASCII,
+ "big5" => Encoding::Big5,
+ "binary" => Encoding::ASCII_8BIT,
+ "cp1250" => Encoding::Windows_1250,
+ "cp1251" => Encoding::Windows_1251,
+ "cp1256" => Encoding::Windows_1256,
+ "cp1257" => Encoding::Windows_1257,
+ "cp850" => Encoding::CP850,
+ "cp852" => Encoding::CP852,
+ "cp866" => Encoding::IBM866,
+ "cp932" => Encoding::Windows_31J,
+ "dec8" => nil,
+ "eucjpms" => Encoding::EucJP_ms,
+ "euckr" => Encoding::EUC_KR,
+ "gb2312" => Encoding::EUC_CN,
+ "gbk" => Encoding::GBK,
+ "geostd8" => nil,
+ "greek" => Encoding::ISO_8859_7,
+ "hebrew" => Encoding::ISO_8859_8,
+ "hp8" => nil,
+ "keybcs2" => nil,
+ "koi8r" => Encoding::KOI8_R,
+ "koi8u" => Encoding::KOI8_U,
+ "latin1" => Encoding::ISO_8859_1,
+ "latin2" => Encoding::ISO_8859_2,
+ "latin5" => Encoding::ISO_8859_9,
+ "latin7" => Encoding::ISO_8859_13,
+ "macce" => Encoding::MacCentEuro,
+ "macroman" => Encoding::MacRoman,
+ "sjis" => Encoding::SHIFT_JIS,
+ "swe7" => nil,
+ "tis620" => Encoding::TIS_620,
+ "ucs2" => Encoding::UTF_16BE,
+ "ujis" => Encoding::EucJP_ms,
+ "utf8" => Encoding::UTF_8,
+ "utf8mb4" => Encoding::UTF_8,
+ }
+
+ # Get the client encoding for this database
+ def client_encoding
+ return @client_encoding if @client_encoding
+
+ result = exec_query(
+ "SHOW VARIABLES WHERE Variable_name = 'character_set_client'",
+ 'SCHEMA')
+ @client_encoding = ENCODINGS[result.rows.last.last]
+ end
+
+ def exec_query(sql, name = 'SQL', binds = [])
+ if without_prepared_statement?(binds)
+ result_set, affected_rows = exec_without_stmt(sql, name)
+ else
+ result_set, affected_rows = exec_stmt(sql, name, binds)
+ end
+
+ yield affected_rows if block_given?
+
+ result_set
+ end
+
+ def last_inserted_id(result)
+ @connection.insert_id
+ end
+
+ module Fields # :nodoc:
+ class DateTime < Type::DateTime # :nodoc:
+ def cast_value(value)
+ if Mysql::Time === value
+ new_time(
+ value.year,
+ value.month,
+ value.day,
+ value.hour,
+ value.minute,
+ value.second,
+ value.second_part)
+ else
+ super
+ end
+ end
+ end
+
+ class Time < Type::Time # :nodoc:
+ def cast_value(value)
+ if Mysql::Time === value
+ new_time(
+ 2000,
+ 01,
+ 01,
+ value.hour,
+ value.minute,
+ value.second,
+ value.second_part)
+ else
+ super
+ end
+ end
+ end
+
+ class << self
+ TYPES = Type::HashLookupTypeMap.new # :nodoc:
+
+ delegate :register_type, :alias_type, to: :TYPES
+
+ def find_type(field)
+ if field.type == Mysql::Field::TYPE_TINY && field.length > 1
+ TYPES.lookup(Mysql::Field::TYPE_LONG)
+ else
+ TYPES.lookup(field.type)
+ end
+ end
+ end
+
+ register_type Mysql::Field::TYPE_TINY, Type::Boolean.new
+ register_type Mysql::Field::TYPE_LONG, Type::Integer.new
+ alias_type Mysql::Field::TYPE_LONGLONG, Mysql::Field::TYPE_LONG
+ alias_type Mysql::Field::TYPE_NEWDECIMAL, Mysql::Field::TYPE_LONG
+
+ register_type Mysql::Field::TYPE_DATE, Type::Date.new
+ register_type Mysql::Field::TYPE_DATETIME, Fields::DateTime.new
+ register_type Mysql::Field::TYPE_TIME, Fields::Time.new
+ register_type Mysql::Field::TYPE_FLOAT, Type::Float.new
+ end
+
+ def initialize_type_map(m) # :nodoc:
+ super
+ m.register_type %r(datetime)i, Fields::DateTime.new
+ m.register_type %r(time)i, Fields::Time.new
+ end
+
+ def exec_without_stmt(sql, name = 'SQL') # :nodoc:
+ # Some queries, like SHOW CREATE TABLE, don't work through the prepared
+ # statement API. For those queries, we need to use this method. :'(
+ log(sql, name) do
+ result = @connection.query(sql)
+ affected_rows = @connection.affected_rows
+
+ if result
+ types = {}
+ fields = []
+ result.fetch_fields.each { |field|
+ field_name = field.name
+ fields << field_name
+
+ if field.decimals > 0
+ types[field_name] = Type::Decimal.new
+ else
+ types[field_name] = Fields.find_type field
+ end
+ }
+
+ result_set = ActiveRecord::Result.new(fields, result.to_a, types)
+ result.free
+ else
+ result_set = ActiveRecord::Result.new([], [])
+ end
+
+ [result_set, affected_rows]
+ end
+ end
+
+ def execute_and_free(sql, name = nil) # :nodoc:
+ result = execute(sql, name)
+ ret = yield result
+ result.free
+ ret
+ end
+
+ def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
+ super sql, name
+ id_value || @connection.insert_id
+ end
+ alias :create :insert_sql
+
+ def exec_delete(sql, name, binds) # :nodoc:
+ affected_rows = 0
+
+ exec_query(sql, name, binds) do |n|
+ affected_rows = n
+ end
+
+ affected_rows
+ end
+ alias :exec_update :exec_delete
+
+ def begin_db_transaction #:nodoc:
+ exec_query "BEGIN"
+ end
+
+ private
+
+ def exec_stmt(sql, name, binds)
+ cache = {}
+ type_casted_binds = binds.map { |col, val|
+ [col, type_cast(val, col)]
+ }
+
+ log(sql, name, type_casted_binds) do
+ if binds.empty?
+ stmt = @connection.prepare(sql)
+ else
+ cache = @statements[sql] ||= {
+ :stmt => @connection.prepare(sql)
+ }
+ stmt = cache[:stmt]
+ end
+
+ begin
+ stmt.execute(*type_casted_binds.map { |_, val| val })
+ rescue Mysql::Error => e
+ # Older versions of MySQL leave the prepared statement in a bad
+ # place when an error occurs. To support older MySQL versions, we
+ # need to close the statement and delete the statement from the
+ # cache.
+ stmt.close
+ @statements.delete sql
+ raise e
+ end
+
+ cols = nil
+ if metadata = stmt.result_metadata
+ cols = cache[:cols] ||= metadata.fetch_fields.map { |field|
+ field.name
+ }
+ metadata.free
+ end
+
+ result_set = ActiveRecord::Result.new(cols, stmt.to_a) if cols
+ affected_rows = stmt.affected_rows
+
+ stmt.free_result
+ stmt.close if binds.empty?
+
+ [result_set, affected_rows]
+ end
+ end
+
+ def connect
+ encoding = @config[:encoding]
+ if encoding
+ @connection.options(Mysql::SET_CHARSET_NAME, encoding) rescue nil
+ end
+
+ if @config[:sslca] || @config[:sslkey]
+ @connection.ssl_set(@config[:sslkey], @config[:sslcert], @config[:sslca], @config[:sslcapath], @config[:sslcipher])
+ end
+
+ @connection.options(Mysql::OPT_CONNECT_TIMEOUT, @config[:connect_timeout]) if @config[:connect_timeout]
+ @connection.options(Mysql::OPT_READ_TIMEOUT, @config[:read_timeout]) if @config[:read_timeout]
+ @connection.options(Mysql::OPT_WRITE_TIMEOUT, @config[:write_timeout]) if @config[:write_timeout]
+
+ @connection.real_connect(*@connection_options)
+
+ # reconnect must be set after real_connect is called, because real_connect sets it to false internally
+ @connection.reconnect = !!@config[:reconnect] if @connection.respond_to?(:reconnect=)
+
+ configure_connection
+ end
+
+ # Many Rails applications monkey-patch a replacement of the configure_connection method
+ # and don't call 'super', so leave this here even though it looks superfluous.
+ def configure_connection
+ super
+ end
+
+ def select(sql, name = nil, binds = [])
+ @connection.query_with_result = true
+ rows = exec_query(sql, name, binds)
+ @connection.more_results && @connection.next_result # invoking stored procedures with CLIENT_MULTI_RESULTS requires this to tidy up, otherwise the connection will be dropped
+ rows
+ end
+
+ # Returns the full version of the connected MySQL server.
+ def full_version
+ @full_version ||= @connection.server_info
+ end
+
+ def set_field_encoding field_name
+ field_name.force_encoding(client_encoding)
+ if internal_enc = Encoding.default_internal
+ field_name = field_name.encode!(internal_enc)
+ end
+ field_name
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/array_parser.rb b/activerecord/lib/active_record/connection_adapters/postgresql/array_parser.rb
new file mode 100644
index 0000000000..1b74c039ce
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/array_parser.rb
@@ -0,0 +1,93 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module ArrayParser # :nodoc:
+
+ DOUBLE_QUOTE = '"'
+ BACKSLASH = "\\"
+ COMMA = ','
+ BRACKET_OPEN = '{'
+ BRACKET_CLOSE = '}'
+
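+ # Parses a PostgreSQL array literal into a nested Ruby array of strings;
+ # type casting of the individual elements is left to the caller.
+ # Illustrative examples:
+ #
+ #   parse_pg_array('{1,2,3}')              # => ["1", "2", "3"]
+ #   parse_pg_array('{foo,"bar baz",NULL}') # => ["foo", "bar baz", nil]
+ #   parse_pg_array('{{1,2},{3,4}}')        # => [["1", "2"], ["3", "4"]]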
+ def parse_pg_array(string) # :nodoc:
+ local_index = 0
+ array = []
+ while(local_index < string.length)
+ case string[local_index]
+ when BRACKET_OPEN
+ local_index,array = parse_array_contents(array, string, local_index + 1)
+ when BRACKET_CLOSE
+ return array
+ end
+ local_index += 1
+ end
+
+ array
+ end
+
+ private
+
+ def parse_array_contents(array, string, index)
+ is_escaping = false
+ is_quoted = false
+ was_quoted = false
+ current_item = ''
+
+ local_index = index
+ while local_index
+ token = string[local_index]
+ if is_escaping
+ current_item << token
+ is_escaping = false
+ else
+ if is_quoted
+ case token
+ when DOUBLE_QUOTE
+ is_quoted = false
+ was_quoted = true
+ when BACKSLASH
+ is_escaping = true
+ else
+ current_item << token
+ end
+ else
+ case token
+ when BACKSLASH
+ is_escaping = true
+ when COMMA
+ add_item_to_array(array, current_item, was_quoted)
+ current_item = ''
+ was_quoted = false
+ when DOUBLE_QUOTE
+ is_quoted = true
+ when BRACKET_OPEN
+ internal_items = []
+ local_index,internal_items = parse_array_contents(internal_items, string, local_index + 1)
+ array.push(internal_items)
+ when BRACKET_CLOSE
+ add_item_to_array(array, current_item, was_quoted)
+ return local_index,array
+ else
+ current_item << token
+ end
+ end
+ end
+
+ local_index += 1
+ end
+ return local_index,array
+ end
+
+ def add_item_to_array(array, current_item, quoted)
+ return if !quoted && current_item.length == 0
+
+ if !quoted && current_item == 'NULL'
+ array.push nil
+ else
+ array.push current_item
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/column.rb b/activerecord/lib/active_record/connection_adapters/postgresql/column.rb
new file mode 100644
index 0000000000..37e5c3859c
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/column.rb
@@ -0,0 +1,20 @@
+module ActiveRecord
+ module ConnectionAdapters
+ # PostgreSQL-specific extensions to column definitions in a table.
+ class PostgreSQLColumn < Column #:nodoc:
+ attr_accessor :array
+
+ def initialize(name, default, cast_type, sql_type = nil, null = true, default_function = nil)
+ if sql_type =~ /\[\]$/
+ @array = true
+ super(name, default, cast_type, sql_type[0..sql_type.length - 3], null)
+ else
+ @array = false
+ super(name, default, cast_type, sql_type, null)
+ end
+
+ @default_function = default_function
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb b/activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb
new file mode 100644
index 0000000000..89a7257d77
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/database_statements.rb
@@ -0,0 +1,236 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module DatabaseStatements
+ def explain(arel, binds = [])
+ sql = "EXPLAIN #{to_sql(arel, binds)}"
+ ExplainPrettyPrinter.new.pp(exec_query(sql, 'EXPLAIN', binds))
+ end
+
+ class ExplainPrettyPrinter # :nodoc:
+ # Pretty prints the result of an EXPLAIN in a way that resembles the output of the
+ # PostgreSQL shell:
+ #
+ # QUERY PLAN
+ # ------------------------------------------------------------------------------
+ # Nested Loop Left Join (cost=0.00..37.24 rows=8 width=0)
+ # Join Filter: (posts.user_id = users.id)
+ # -> Index Scan using users_pkey on users (cost=0.00..8.27 rows=1 width=4)
+ # Index Cond: (id = 1)
+ # -> Seq Scan on posts (cost=0.00..28.88 rows=8 width=4)
+ # Filter: (posts.user_id = 1)
+ # (6 rows)
+ #
+ def pp(result)
+ header = result.columns.first
+ lines = result.rows.map(&:first)
+
+ # We add 2 because there's one char of padding on both sides; note
+ # the extra hyphens in the example above.
+ width = [header, *lines].map(&:length).max + 2
+
+ pp = []
+
+ pp << header.center(width).rstrip
+ pp << '-' * width
+
+ pp += lines.map {|line| " #{line}"}
+
+ nrows = result.rows.length
+ rows_label = nrows == 1 ? 'row' : 'rows'
+ pp << "(#{nrows} #{rows_label})"
+
+ pp.join("\n") + "\n"
+ end
+ end
+
+ def select_value(arel, name = nil, binds = [])
+ arel, binds = binds_from_relation arel, binds
+ sql = to_sql(arel, binds)
+ execute_and_clear(sql, name, binds) do |result|
+ result.getvalue(0, 0) if result.ntuples > 0 && result.nfields > 0
+ end
+ end
+
+ def select_values(arel, name = nil)
+ arel, binds = binds_from_relation arel, []
+ sql = to_sql(arel, binds)
+ execute_and_clear(sql, name, binds) do |result|
+ if result.nfields > 0
+ result.column_values(0)
+ else
+ []
+ end
+ end
+ end
+
+ # Executes a SELECT query and returns an array of rows. Each row is an
+ # array of field values.
+ def select_rows(sql, name = nil, binds = [])
+ execute_and_clear(sql, name, binds) do |result|
+ result.values
+ end
+ end
+
+ # Executes an INSERT query and returns the new record's ID
+ def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil)
+ unless pk
+ # Extract the table from the insert sql. Yuck.
+ table_ref = extract_table_ref_from_insert_sql(sql)
+ pk = primary_key(table_ref) if table_ref
+ end
+
+ if pk && use_insert_returning?
+ select_value("#{sql} RETURNING #{quote_column_name(pk)}")
+ elsif pk
+ super
+ last_insert_id_value(sequence_name || default_sequence_name(table_ref, pk))
+ else
+ super
+ end
+ end
+
+ def create
+ super.insert
+ end
+
+ # The internal PostgreSQL identifier of the money data type.
+ MONEY_COLUMN_TYPE_OID = 790 #:nodoc:
+ # The internal PostgreSQL identifier of the BYTEA data type.
+ BYTEA_COLUMN_TYPE_OID = 17 #:nodoc:
+
+ # create a 2D array representing the result set
+ def result_as_array(res) #:nodoc:
+ # check if we have any binary columns and if they need escaping
+ ftypes = Array.new(res.nfields) do |i|
+ [i, res.ftype(i)]
+ end
+
+ rows = res.values
+ return rows unless ftypes.any? { |_, x|
+ x == BYTEA_COLUMN_TYPE_OID || x == MONEY_COLUMN_TYPE_OID
+ }
+
+ typehash = ftypes.group_by { |_, type| type }
+ binaries = typehash[BYTEA_COLUMN_TYPE_OID] || []
+ monies = typehash[MONEY_COLUMN_TYPE_OID] || []
+
+ rows.each do |row|
+ # unescape string passed as a BYTEA field (OID == 17)
+ binaries.each do |index, _|
+ row[index] = unescape_bytea(row[index])
+ end
+
+ # If this is a money type column and there are any currency symbols,
+ # then strip them off. Indeed it would be prettier to do this in
+ # PostgreSQLColumn.string_to_decimal but would break form input
+ # fields that call value_before_type_cast.
+ monies.each do |index, _|
+ data = row[index]
+ # Because money output is formatted according to the locale, there are two
+ # cases to consider (note the decimal separators):
+ # (1) $12,345,678.12
+ # (2) $12.345.678,12
+ case data
+ when /^-?\D+[\d,]+\.\d{2}$/ # (1)
+ data.gsub!(/[^-\d.]/, '')
+ when /^-?\D+[\d.]+,\d{2}$/ # (2)
+ data.gsub!(/[^-\d,]/, '').sub!(/,/, '.')
+ end
+ end
+ end
+ end
+
+ # Queries the database and returns the results in an Array-like object
+ def query(sql, name = nil) #:nodoc:
+ log(sql, name) do
+ result_as_array @connection.async_exec(sql)
+ end
+ end
+
+ # Executes an SQL statement, returning a PGresult object on success
+ # or raising a PGError exception otherwise.
+ def execute(sql, name = nil)
+ log(sql, name) do
+ @connection.async_exec(sql)
+ end
+ end
+
+ def substitute_at(column, index)
+ Arel::Nodes::BindParam.new "$#{index + 1}"
+ end
+
+ def exec_query(sql, name = 'SQL', binds = [])
+ execute_and_clear(sql, name, binds) do |result|
+ types = {}
+ fields = result.fields
+ fields.each_with_index do |fname, i|
+ ftype = result.ftype i
+ fmod = result.fmod i
+ types[fname] = get_oid_type(ftype, fmod, fname)
+ end
+ ActiveRecord::Result.new(fields, result.values, types)
+ end
+ end
+
+ def exec_delete(sql, name = 'SQL', binds = [])
+ execute_and_clear(sql, name, binds) {|result| result.cmd_tuples }
+ end
+ alias :exec_update :exec_delete
+
+ def sql_for_insert(sql, pk, id_value, sequence_name, binds)
+ unless pk
+ # Extract the table from the insert sql. Yuck.
+ table_ref = extract_table_ref_from_insert_sql(sql)
+ pk = primary_key(table_ref) if table_ref
+ end
+
+ if pk && use_insert_returning?
+ sql = "#{sql} RETURNING #{quote_column_name(pk)}"
+ end
+
+ [sql, binds]
+ end
+
+ def exec_insert(sql, name, binds, pk = nil, sequence_name = nil)
+ val = exec_query(sql, name, binds)
+ if !use_insert_returning? && pk
+ unless sequence_name
+ table_ref = extract_table_ref_from_insert_sql(sql)
+ sequence_name = default_sequence_name(table_ref, pk)
+ return val unless sequence_name
+ end
+ last_insert_id_result(sequence_name)
+ else
+ val
+ end
+ end
+
+ # Executes an UPDATE query and returns the number of affected tuples.
+ def update_sql(sql, name = nil)
+ super.cmd_tuples
+ end
+
+ # Begins a transaction.
+ def begin_db_transaction
+ execute "BEGIN"
+ end
+
+ def begin_isolated_db_transaction(isolation)
+ begin_db_transaction
+ execute "SET TRANSACTION ISOLATION LEVEL #{transaction_isolation_levels.fetch(isolation)}"
+ end
+
+ # Commits a transaction.
+ def commit_db_transaction
+ execute "COMMIT"
+ end
+
+ # Aborts a transaction.
+ def rollback_db_transaction
+ execute "ROLLBACK"
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid.rb
new file mode 100644
index 0000000000..d28a2b4fa0
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid.rb
@@ -0,0 +1,36 @@
+require 'active_record/connection_adapters/postgresql/oid/infinity'
+
+require 'active_record/connection_adapters/postgresql/oid/array'
+require 'active_record/connection_adapters/postgresql/oid/bit'
+require 'active_record/connection_adapters/postgresql/oid/bit_varying'
+require 'active_record/connection_adapters/postgresql/oid/bytea'
+require 'active_record/connection_adapters/postgresql/oid/cidr'
+require 'active_record/connection_adapters/postgresql/oid/date'
+require 'active_record/connection_adapters/postgresql/oid/date_time'
+require 'active_record/connection_adapters/postgresql/oid/decimal'
+require 'active_record/connection_adapters/postgresql/oid/enum'
+require 'active_record/connection_adapters/postgresql/oid/float'
+require 'active_record/connection_adapters/postgresql/oid/hstore'
+require 'active_record/connection_adapters/postgresql/oid/inet'
+require 'active_record/connection_adapters/postgresql/oid/integer'
+require 'active_record/connection_adapters/postgresql/oid/json'
+require 'active_record/connection_adapters/postgresql/oid/jsonb'
+require 'active_record/connection_adapters/postgresql/oid/money'
+require 'active_record/connection_adapters/postgresql/oid/point'
+require 'active_record/connection_adapters/postgresql/oid/range'
+require 'active_record/connection_adapters/postgresql/oid/specialized_string'
+require 'active_record/connection_adapters/postgresql/oid/time'
+require 'active_record/connection_adapters/postgresql/oid/uuid'
+require 'active_record/connection_adapters/postgresql/oid/vector'
+require 'active_record/connection_adapters/postgresql/oid/xml'
+
+require 'active_record/connection_adapters/postgresql/oid/type_map_initializer'
+
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb
new file mode 100644
index 0000000000..cd5efe2bb8
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/array.rb
@@ -0,0 +1,96 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Array < Type::Value # :nodoc:
+ include Type::Mutable
+
+ # Loads pg_array_parser if available. String parsing can be
+ # performed more quickly by a native extension, which avoids creating
+ # a large number of Ruby objects that would need to be garbage
+ # collected. pg_array_parser has C and Java extensions.
+ begin
+ require 'pg_array_parser'
+ include PgArrayParser
+ rescue LoadError
+ require 'active_record/connection_adapters/postgresql/array_parser'
+ include PostgreSQL::ArrayParser
+ end
+
+ attr_reader :subtype, :delimiter
+ delegate :type, to: :subtype
+
+ def initialize(subtype, delimiter = ',')
+ @subtype = subtype
+ @delimiter = delimiter
+ end
+
+ def type_cast_from_database(value)
+ if value.is_a?(::String)
+ type_cast_array(parse_pg_array(value), :type_cast_from_database)
+ else
+ super
+ end
+ end
+
+ def type_cast_from_user(value)
+ type_cast_array(value, :type_cast_from_user)
+ end
+
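+ # For example (illustrative, assuming a plain string subtype):
+ #
+ #   type_cast_for_database(["a", "b c", nil]) # => '{a,"b c",NULL}'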
+ def type_cast_for_database(value)
+ if value.is_a?(::Array)
+ cast_value_for_database(value)
+ else
+ super
+ end
+ end
+
+ private
+
+ def type_cast_array(value, method)
+ if value.is_a?(::Array)
+ value.map { |item| type_cast_array(item, method) }
+ else
+ @subtype.public_send(method, value)
+ end
+ end
+
+ def cast_value_for_database(value)
+ if value.is_a?(::Array)
+ casted_values = value.map { |item| cast_value_for_database(item) }
+ "{#{casted_values.join(delimiter)}}"
+ else
+ quote_and_escape(subtype.type_cast_for_database(value))
+ end
+ end
+
+ ARRAY_ESCAPE = "\\" * 2 * 2 # escape the backslash twice for PG arrays
+
+ def quote_and_escape(value)
+ case value
+ when ::String
+ if string_requires_quoting?(value)
+ value = value.gsub(/\\/, ARRAY_ESCAPE)
+ value.gsub!(/"/,"\\\"")
+ %("#{value}")
+ else
+ value
+ end
+ when nil then "NULL"
+ else value
+ end
+ end
+
+ # See http://www.postgresql.org/docs/9.2/static/arrays.html#ARRAYS-IO
+ # for a list of all cases in which strings will be quoted.
+ def string_requires_quoting?(string)
+ string.empty? ||
+ string == "NULL" ||
+ string =~ /[\{\}"\\\s]/ ||
+ string.include?(delimiter)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb
new file mode 100644
index 0000000000..1dbb40ca1d
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit.rb
@@ -0,0 +1,52 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Bit < Type::Value # :nodoc:
+ def type
+ :bit
+ end
+
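+ # Illustrative casts: type_cast("0xF") # => "1111" (hex notation is
+ # converted to a bit string), while a plain bit string such as "1010"
+ # is returned unchanged.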
+ def type_cast(value)
+ if ::String === value
+ case value
+ when /^0x/i
+ value[2..-1].hex.to_s(2) # Hexadecimal notation
+ else
+ value # Bit-string notation
+ end
+ else
+ value
+ end
+ end
+
+ def type_cast_for_database(value)
+ Data.new(super) if value
+ end
+
+ class Data
+ def initialize(value)
+ @value = value
+ end
+
+ def to_s
+ value
+ end
+
+ def binary?
+ /\A[01]*\Z/ === value
+ end
+
+ def hex?
+ /\A[0-9A-F]*\Z/i === value
+ end
+
+ protected
+
+ attr_reader :value
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb
new file mode 100644
index 0000000000..4c21097d48
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bit_varying.rb
@@ -0,0 +1,13 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class BitVarying < OID::Bit # :nodoc:
+ def type
+ :bit_varying
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb
new file mode 100644
index 0000000000..997613d7be
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/bytea.rb
@@ -0,0 +1,14 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Bytea < Type::Binary # :nodoc:
+ def type_cast_from_database(value)
+ return if value.nil?
+ PGconn.unescape_bytea(super)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb
new file mode 100644
index 0000000000..a53b4ee8e2
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/cidr.rb
@@ -0,0 +1,46 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Cidr < Type::Value # :nodoc:
+ def type
+ :cidr
+ end
+
+ def type_cast_for_schema(value)
+ subnet_mask = value.instance_variable_get(:@mask_addr)
+
+ # If the subnet mask is equal to /32, don't output it
+ if subnet_mask == (2**32 - 1)
+ "\"#{value.to_s}\""
+ else
+ "\"#{value.to_s}/#{subnet_mask.to_s(2).count('1')}\""
+ end
+ end
+
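+ # For example (illustrative):
+ #
+ #   type_cast_for_database(IPAddr.new("192.168.2.0/24")) # => "192.168.2.0/24"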
+ def type_cast_for_database(value)
+ if IPAddr === value
+ "#{value.to_s}/#{value.instance_variable_get(:@mask_addr).to_s(2).count('1')}"
+ else
+ value
+ end
+ end
+
+ def cast_value(value)
+ if value.nil?
+ nil
+ elsif String === value
+ begin
+ IPAddr.new(value)
+ rescue ArgumentError
+ nil
+ end
+ else
+ value
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/date.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/date.rb
new file mode 100644
index 0000000000..1d8d264530
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/date.rb
@@ -0,0 +1,11 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Date < Type::Date # :nodoc:
+ include Infinity
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb
new file mode 100644
index 0000000000..b9e7894e5c
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/date_time.rb
@@ -0,0 +1,27 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class DateTime < Type::DateTime # :nodoc:
+ include Infinity
+
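+ # For example (illustrative), "2000-01-01 00:00:00 BC" is rewritten to
+ # "-1999-01-01 00:00:00" (the astronomical year) before being parsed.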
+ def cast_value(value)
+ if value.is_a?(::String)
+ case value
+ when 'infinity' then ::Float::INFINITY
+ when '-infinity' then -::Float::INFINITY
+ when / BC$/
+ astronomical_year = format("%04d", -value[/^\d+/].to_i + 1)
+ super(value.sub(/ BC$/, "").sub(/^\d+/, astronomical_year))
+ else
+ super
+ end
+ else
+ value
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb
new file mode 100644
index 0000000000..43d22c8daf
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/decimal.rb
@@ -0,0 +1,13 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Decimal < Type::Decimal # :nodoc:
+ def infinity(options = {})
+ BigDecimal.new("Infinity") * (options[:negative] ? -1 : 1)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb
new file mode 100644
index 0000000000..77d5038efd
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/enum.rb
@@ -0,0 +1,17 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Enum < Type::Value # :nodoc:
+ def type
+ :enum
+ end
+
+ def type_cast(value)
+ value.to_s
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/float.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/float.rb
new file mode 100644
index 0000000000..78ef94b912
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/float.rb
@@ -0,0 +1,21 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Float < Type::Float # :nodoc:
+ include Infinity
+
+ def cast_value(value)
+ case value
+ when ::Float then value
+ when 'Infinity' then ::Float::INFINITY
+ when '-Infinity' then -::Float::INFINITY
+ when 'NaN' then ::Float::NAN
+ else value.to_f
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb
new file mode 100644
index 0000000000..be4525c94f
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/hstore.rb
@@ -0,0 +1,59 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Hstore < Type::Value # :nodoc:
+ include Type::Mutable
+
+ def type
+ :hstore
+ end
+
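+ # Illustrative round trip:
+ #
+ #   type_cast_from_database('"a"=>"1", "b"=>NULL') # => { "a" => "1", "b" => nil }
+ #   type_cast_for_database("a" => "1", "b" => nil) # => '"a"=>"1", "b"=>NULL'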
+ def type_cast_from_database(value)
+ if value.is_a?(::String)
+ ::Hash[value.scan(HstorePair).map { |k, v|
+ v = v.upcase == 'NULL' ? nil : v.gsub(/\A"(.*)"\Z/m,'\1').gsub(/\\(.)/, '\1')
+ k = k.gsub(/\A"(.*)"\Z/m,'\1').gsub(/\\(.)/, '\1')
+ [k, v]
+ }]
+ else
+ value
+ end
+ end
+
+ def type_cast_for_database(value)
+ if value.is_a?(::Hash)
+ value.map { |k, v| "#{escape_hstore(k)}=>#{escape_hstore(v)}" }.join(', ')
+ else
+ value
+ end
+ end
+
+ def accessor
+ ActiveRecord::Store::StringKeyedHashAccessor
+ end
+
+ private
+
+ HstorePair = begin
+ quoted_string = /"[^"\\]*(?:\\.[^"\\]*)*"/
+ unquoted_string = /(?:\\.|[^\s,])[^\s=,\\]*(?:\\.[^\s=,\\]*|=[^,>])*/
+ /(#{quoted_string}|#{unquoted_string})\s*=>\s*(#{quoted_string}|#{unquoted_string})/
+ end
+
+ def escape_hstore(value)
+ if value.nil?
+ 'NULL'
+ else
+ if value == ""
+ '""'
+ else
+ '"%s"' % value.to_s.gsub(/(["\\])/, '\\\\\1')
+ end
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb
new file mode 100644
index 0000000000..96486fa65b
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/inet.rb
@@ -0,0 +1,13 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Inet < Cidr # :nodoc:
+ def type
+ :inet
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/infinity.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/infinity.rb
new file mode 100644
index 0000000000..e47780399a
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/infinity.rb
@@ -0,0 +1,13 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ module Infinity # :nodoc:
+ def infinity(options = {})
+ options[:negative] ? -::Float::INFINITY : ::Float::INFINITY
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/integer.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/integer.rb
new file mode 100644
index 0000000000..59abdc0009
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/integer.rb
@@ -0,0 +1,11 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Integer < Type::Integer # :nodoc:
+ include Infinity
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/json.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/json.rb
new file mode 100644
index 0000000000..e12ddd9901
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/json.rb
@@ -0,0 +1,35 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Json < Type::Value # :nodoc:
+ include Type::Mutable
+
+ def type
+ :json
+ end
+
+ def type_cast_from_database(value)
+ if value.is_a?(::String)
+ ::ActiveSupport::JSON.decode(value)
+ else
+ super
+ end
+ end
+
+ def type_cast_for_database(value)
+ if value.is_a?(::Array) || value.is_a?(::Hash)
+ ::ActiveSupport::JSON.encode(value)
+ else
+ super
+ end
+ end
+
+ def accessor
+ ActiveRecord::Store::StringKeyedHashAccessor
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb
new file mode 100644
index 0000000000..34ed32ad35
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/jsonb.rb
@@ -0,0 +1,23 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Jsonb < Json # :nodoc:
+ def type
+ :jsonb
+ end
+
+ def changed_in_place?(raw_old_value, new_value)
+ # Postgres does not preserve insignificant whitespace when
+ # roundtripping jsonb columns. This causes some false positives for
+ # the comparison here. Therefore, we need to parse and re-dump the
+ # raw value here to ensure the insignificant whitespace is
+ # consistent with our encoder's output.
+ raw_old_value = type_cast_for_database(type_cast_from_database(raw_old_value))
+ super(raw_old_value, new_value)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb
new file mode 100644
index 0000000000..df890c2ed6
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/money.rb
@@ -0,0 +1,43 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Money < Type::Decimal # :nodoc:
+ include Infinity
+
+ class_attribute :precision
+
+ def type
+ :money
+ end
+
+ def scale
+ 2
+ end
+
+ def cast_value(value)
+ return value unless ::String === value
+
+ # Because money output is formatted according to the locale, there are two
+ # cases to consider (note the decimal separators):
+ # (1) $12,345,678.12
+ # (2) $12.345.678,12
+ # Negative values are represented as follows:
+ # (3) -$2.55
+ # (4) ($2.55)
+
+ value.sub!(/^\((.+)\)$/, '-\1') # (4)
+ case value
+ when /^-?\D+[\d,]+\.\d{2}$/ # (1)
+ value.gsub!(/[^-\d.]/, '')
+ when /^-?\D+[\d.]+,\d{2}$/ # (2)
+ value.gsub!(/[^-\d,]/, '').sub!(/,/, '.')
+ end
+
+ super(value)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb
new file mode 100644
index 0000000000..bac8b01d6b
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/point.rb
@@ -0,0 +1,43 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Point < Type::Value # :nodoc:
+ include Type::Mutable
+
+ def type
+ :point
+ end
+
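+ # Illustrative examples:
+ #
+ #   type_cast("(1.0,2.5)")             # => [1.0, 2.5]
+ #   type_cast_for_database([1.0, 2.5]) # => "(1,2.5)"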
+ def type_cast(value)
+ case value
+ when ::String
+ if value[0] == '(' && value[-1] == ')'
+ value = value[1...-1]
+ end
+ type_cast(value.split(','))
+ when ::Array
+ value.map { |v| Float(v) }
+ else
+ value
+ end
+ end
+
+ def type_cast_for_database(value)
+ if value.is_a?(::Array)
+ "(#{number_for_point(value[0])},#{number_for_point(value[1])})"
+ else
+ super
+ end
+ end
+
+ private
+
+ def number_for_point(number)
+ number.to_s.gsub(/\.0$/, '')
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb
new file mode 100644
index 0000000000..ae967d5167
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/range.rb
@@ -0,0 +1,76 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Range < Type::Value # :nodoc:
+ attr_reader :subtype, :type
+
+ def initialize(subtype, type)
+ @subtype = subtype
+ @type = type
+ end
+
+ def type_cast_for_schema(value)
+ value.inspect.gsub('Infinity', '::Float::INFINITY')
+ end
+
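+ # For example (illustrative, assuming an integer subtype):
+ #
+ #   cast_value("[1,10)") # => 1...10
+ #   cast_value("empty")  # => nil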
+ def cast_value(value)
+ return if value == 'empty'
+ return value if value.is_a?(::Range)
+
+ extracted = extract_bounds(value)
+ from = type_cast_single extracted[:from]
+ to = type_cast_single extracted[:to]
+
+ if !infinity?(from) && extracted[:exclude_start]
+ if from.respond_to?(:succ)
+ from = from.succ
+ ActiveSupport::Deprecation.warn <<-MESSAGE
+Excluding the beginning of a Range is only partially supported through `#succ`.
+This is not reliable and will be removed in the future.
+ MESSAGE
+ else
+ raise ArgumentError, "The Ruby Range object does not support excluding the beginning of a Range. (unsupported value: '#{value}')"
+ end
+ end
+ ::Range.new(from, to, extracted[:exclude_end])
+ end
+
+ def type_cast_for_database(value)
+ if value.is_a?(::Range)
+ from = type_cast_single_for_database(value.begin)
+ to = type_cast_single_for_database(value.end)
+ "[#{from},#{to}#{value.exclude_end? ? ')' : ']'}"
+ else
+ super
+ end
+ end
+
+ private
+
+ def type_cast_single(value)
+ infinity?(value) ? value : @subtype.type_cast_from_database(value)
+ end
+
+ def type_cast_single_for_database(value)
+ infinity?(value) ? '' : @subtype.type_cast_for_database(value)
+ end
+
+ def extract_bounds(value)
+ from, to = value[1..-2].split(',')
+ {
+ from: (value[1] == ',' || from == '-infinity') ? @subtype.infinity(negative: true) : from,
+ to: (value[-2] == ',' || to == 'infinity') ? @subtype.infinity : to,
+ exclude_start: (value[0] == '('),
+ exclude_end: (value[-1] == ')')
+ }
+ end
+
+ def infinity?(value)
+ value.respond_to?(:infinite?) && value.infinite?
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb
new file mode 100644
index 0000000000..2d2fede4e8
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/specialized_string.rb
@@ -0,0 +1,15 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class SpecializedString < Type::String # :nodoc:
+ attr_reader :type
+
+ def initialize(type)
+ @type = type
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/time.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/time.rb
new file mode 100644
index 0000000000..8f0246eddb
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/time.rb
@@ -0,0 +1,11 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Time < Type::Time # :nodoc:
+ include Infinity
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb
new file mode 100644
index 0000000000..e396ff4a1e
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/type_map_initializer.rb
@@ -0,0 +1,85 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ # This class uses the data from the PostgreSQL pg_type table to build
+ # the OID -> Type mapping.
+ # - OID is an integer representing the type.
+ # - Type is an OID::Type object.
+ # This class has side effects on the +store+ passed during initialization.
+ class TypeMapInitializer # :nodoc:
+ def initialize(store)
+ @store = store
+ end
+
+ def run(records)
+ nodes = records.reject { |row| @store.key? row['oid'].to_i }
+ mapped, nodes = nodes.partition { |row| @store.key? row['typname'] }
+ ranges, nodes = nodes.partition { |row| row['typtype'] == 'r' }
+ enums, nodes = nodes.partition { |row| row['typtype'] == 'e' }
+ domains, nodes = nodes.partition { |row| row['typtype'] == 'd' }
+ arrays, nodes = nodes.partition { |row| row['typinput'] == 'array_in' }
+ composites, nodes = nodes.partition { |row| row['typelem'] != '0' }
+
+ mapped.each { |row| register_mapped_type(row) }
+ enums.each { |row| register_enum_type(row) }
+ domains.each { |row| register_domain_type(row) }
+ arrays.each { |row| register_array_type(row) }
+ ranges.each { |row| register_range_type(row) }
+ composites.each { |row| register_composite_type(row) }
+ end
+
+ private
+ def register_mapped_type(row)
+ alias_type row['oid'], row['typname']
+ end
+
+ def register_enum_type(row)
+ register row['oid'], OID::Enum.new
+ end
+
+ def register_array_type(row)
+ if subtype = @store.lookup(row['typelem'].to_i)
+ register row['oid'], OID::Array.new(subtype, row['typdelim'])
+ end
+ end
+
+ def register_range_type(row)
+ if subtype = @store.lookup(row['rngsubtype'].to_i)
+ register row['oid'], OID::Range.new(subtype, row['typname'].to_sym)
+ end
+ end
+
+ def register_domain_type(row)
+ if base_type = @store.lookup(row["typbasetype"].to_i)
+ register row['oid'], base_type
+ else
+ warn "unknown base type (OID: #{row["typbasetype"]}) for domain #{row["typname"]}."
+ end
+ end
+
+ def register_composite_type(row)
+ if subtype = @store.lookup(row['typelem'].to_i)
+ register row['oid'], OID::Vector.new(row['typdelim'], subtype)
+ end
+ end
+
+ def register(oid, oid_type)
+ oid = assert_valid_registration(oid, oid_type)
+ @store.register_type(oid, oid_type)
+ end
+
+ def alias_type(oid, target)
+ oid = assert_valid_registration(oid, target)
+ @store.alias_type(oid, target)
+ end
+
+ def assert_valid_registration(oid, oid_type)
+ raise ArgumentError, "can't register nil type for OID #{oid}" if oid_type.nil?
+ oid.to_i
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb
new file mode 100644
index 0000000000..dd97393eac
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/uuid.rb
@@ -0,0 +1,26 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Uuid < Type::Value # :nodoc:
+ RFC_4122 = %r{\A\{?[a-fA-F0-9]{4}-?
+ [a-fA-F0-9]{4}-?
+ [a-fA-F0-9]{4}-?
+ [1-5][a-fA-F0-9]{3}-?
+ [8-Bab][a-fA-F0-9]{3}-?
+ [a-fA-F0-9]{4}-?
+ [a-fA-F0-9]{4}-?
+ [a-fA-F0-9]{4}-?\}?\z}x
+
+ def type
+ :uuid
+ end
+
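+ # Returns the value when it looks like an RFC 4122 UUID, otherwise nil.
+ # Illustrative:
+ #
+ #   type_cast("a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11") # => "a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11"
+ #   type_cast("not-a-uuid")                           # => nil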
+ def type_cast(value)
+ value.to_s[RFC_4122, 0]
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb
new file mode 100644
index 0000000000..de4187b028
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/vector.rb
@@ -0,0 +1,26 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Vector < Type::Value # :nodoc:
+ attr_reader :delim, :subtype
+
+ # +delim+ corresponds to the `typdelim` column in the pg_type
+ # table. +subtype+ is derived from the `typelem` column in the
+ # pg_type table.
+ def initialize(delim, subtype)
+ @delim = delim
+ @subtype = subtype
+ end
+
+ # FIXME: this should probably split on +delim+ and use +subtype+
+ # to cast the values. Unfortunately, the current Rails behavior
+ # is to just return the string.
+ def type_cast(value)
+ value
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb b/activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb
new file mode 100644
index 0000000000..334af7c598
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/oid/xml.rb
@@ -0,0 +1,28 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module OID # :nodoc:
+ class Xml < Type::String # :nodoc:
+ def type
+ :xml
+ end
+
+ def type_cast_for_database(value)
+ return unless value
+ Data.new(super)
+ end
+
+ class Data # :nodoc:
+ def initialize(value)
+ @value = value
+ end
+
+ def to_s
+ @value
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb b/activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb
new file mode 100644
index 0000000000..cf5c8d288e
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/quoting.rb
@@ -0,0 +1,118 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module Quoting
+ # Escapes binary strings for bytea input to the database.
+ def escape_bytea(value)
+ @connection.escape_bytea(value) if value
+ end
+
+ # Unescapes bytea output from a database to the binary string it represents.
+ # NOTE: This is NOT an inverse of escape_bytea! This is only to be used
+ # on escaped binary output from the database driver.
+ def unescape_bytea(value)
+ @connection.unescape_bytea(value) if value
+ end
+
+ # Quotes PostgreSQL-specific data types for SQL input.
+ def quote(value, column = nil) #:nodoc:
+ return super unless column
+
+ case value
+ when Float
+ if value.infinite? || value.nan?
+ "'#{value.to_s}'"
+ else
+ super
+ end
+ else
+ super
+ end
+ end
+
+ # Quotes strings for use in SQL input.
+ def quote_string(s) #:nodoc:
+ @connection.escape(s)
+ end
+
+ # Checks the following cases:
+ #
+ # - table_name
+ # - "table.name"
+ # - schema_name.table_name
+ # - schema_name."table.name"
+ # - "schema.name".table_name
+ # - "schema.name"."table.name"
+ def quote_table_name(name)
+ Utils.extract_schema_qualified_name(name.to_s).quoted
+ end
+
+ def quote_table_name_for_assignment(table, attr)
+ quote_column_name(attr)
+ end
+
+ # Quotes column names for use in SQL queries.
+ def quote_column_name(name) #:nodoc:
+ PGconn.quote_ident(name.to_s)
+ end
+
+ # Quote date/time values for use in SQL input. Includes microseconds
+ # if the value is a Time responding to usec.
+ def quoted_date(value) #:nodoc:
+ result = super
+ if value.acts_like?(:time) && value.respond_to?(:usec)
+ result = "#{result}.#{sprintf("%06d", value.usec)}"
+ end
+
+ if value.year <= 0
+ bce_year = format("%04d", -value.year + 1)
+ result = result.sub(/^-?\d+/, bce_year) + " BC"
+ end
+ result
+ end
+
+ # Does not quote function default values for UUID columns
+ def quote_default_value(value, column) #:nodoc:
+ if column.type == :uuid && value =~ /\(\)/
+ value
+ else
+ quote(value, column)
+ end
+ end
+
+ private
+
+ def _quote(value)
+ case value
+ when Type::Binary::Data
+ "'#{escape_bytea(value.to_s)}'"
+ when OID::Xml::Data
+ "xml '#{quote_string(value.to_s)}'"
+ when OID::Bit::Data
+ if value.binary?
+ "B'#{value}'"
+ elsif value.hex?
+ "X'#{value}'"
+ end
+ else
+ super
+ end
+ end
+
+ def _type_cast(value)
+ case value
+ when Type::Binary::Data
+ # Return a bind param hash with format as binary.
+ # See http://deveiate.org/code/pg/PGconn.html#method-i-exec_prepared-doc
+ # for more information
+ { value: value.to_s, format: 1 }
+ when OID::Xml::Data, OID::Bit::Data
+ value.to_s
+ else
+ super
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb b/activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb
new file mode 100644
index 0000000000..52b307c432
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/referential_integrity.rb
@@ -0,0 +1,30 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module ReferentialIntegrity # :nodoc:
+ def supports_disable_referential_integrity? # :nodoc:
+ true
+ end
+
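+ # Illustrative usage (e.g. while bulk-loading rows that would otherwise
+ # trip foreign key triggers):
+ #
+ #   connection.disable_referential_integrity do
+ #     # insert rows in an arbitrary order
+ #   end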
+ def disable_referential_integrity # :nodoc:
+ if supports_disable_referential_integrity?
+ begin
+ execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} DISABLE TRIGGER ALL" }.join(";"))
+ rescue
+ execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} DISABLE TRIGGER USER" }.join(";"))
+ end
+ end
+ yield
+ ensure
+ if supports_disable_referential_integrity?
+ begin
+ execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} ENABLE TRIGGER ALL" }.join(";"))
+ rescue
+ execute(tables.collect { |name| "ALTER TABLE #{quote_table_name(name)} ENABLE TRIGGER USER" }.join(";"))
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb b/activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb
new file mode 100644
index 0000000000..83554bbf74
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/schema_definitions.rb
@@ -0,0 +1,154 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ module ColumnMethods
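+ # Illustrative migration usage of these PostgreSQL-specific column helpers
+ # (table and column names are placeholders):
+ #
+ #   create_table :documents do |t|
+ #     t.hstore :settings
+ #     t.inet   :remote_ip
+ #     t.jsonb  :payload
+ #     t.uuid   :external_id
+ #   end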
+ def xml(*args)
+ options = args.extract_options!
+ column(args[0], :xml, options)
+ end
+
+ def tsvector(*args)
+ options = args.extract_options!
+ column(args[0], :tsvector, options)
+ end
+
+ def int4range(name, options = {})
+ column(name, :int4range, options)
+ end
+
+ def int8range(name, options = {})
+ column(name, :int8range, options)
+ end
+
+ def tsrange(name, options = {})
+ column(name, :tsrange, options)
+ end
+
+ def tstzrange(name, options = {})
+ column(name, :tstzrange, options)
+ end
+
+ def numrange(name, options = {})
+ column(name, :numrange, options)
+ end
+
+ def daterange(name, options = {})
+ column(name, :daterange, options)
+ end
+
+ def hstore(name, options = {})
+ column(name, :hstore, options)
+ end
+
+ def ltree(name, options = {})
+ column(name, :ltree, options)
+ end
+
+ def inet(name, options = {})
+ column(name, :inet, options)
+ end
+
+ def cidr(name, options = {})
+ column(name, :cidr, options)
+ end
+
+ def macaddr(name, options = {})
+ column(name, :macaddr, options)
+ end
+
+ def uuid(name, options = {})
+ column(name, :uuid, options)
+ end
+
+ def json(name, options = {})
+ column(name, :json, options)
+ end
+
+ def jsonb(name, options = {})
+ column(name, :jsonb, options)
+ end
+
+ def citext(name, options = {})
+ column(name, :citext, options)
+ end
+
+ def point(name, options = {})
+ column(name, :point, options)
+ end
+
+ def bit(name, options)
+ column(name, :bit, options)
+ end
+
+ def bit_varying(name, options)
+ column(name, :bit_varying, options)
+ end
+
+ def money(name, options)
+ column(name, :money, options)
+ end
+ end
+
+ class ColumnDefinition < ActiveRecord::ConnectionAdapters::ColumnDefinition
+ attr_accessor :array
+ end
+
+ class TableDefinition < ActiveRecord::ConnectionAdapters::TableDefinition
+ include ColumnMethods
+
+ # Defines the primary key field.
+ # The native PostgreSQL UUID type is supported and can be used
+ # by defining your tables as such:
+ #
+ # create_table :stuffs, id: :uuid do |t|
+ # t.string :content
+ # t.timestamps
+ # end
+ #
+ # By default, this will use the +uuid_generate_v4()+ function from the
+ # +uuid-ossp+ extension, which MUST be enabled on your database. To enable
+ # the +uuid-ossp+ extension, you can use the +enable_extension+ method in your
+ # migrations. To use a UUID primary key without +uuid-ossp+ enabled, you can
+ # set the +:default+ option to +nil+:
+ #
+ # create_table :stuffs, id: false do |t|
+ # t.primary_key :id, :uuid, default: nil
+ # t.uuid :foo_id
+ # t.timestamps
+ # end
+ #
+ # You may also pass a different UUID generation function from +uuid-ossp+
+ # or another library.
+ #
+ # Note that setting the UUID primary key default value to +nil+ will
+ # require you to ensure that you always provide a UUID value before saving
+ # a record (as primary keys cannot be +nil+). This might be done via the
+ # +SecureRandom.uuid+ method and a +before_save+ callback, for instance.
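+ # For example (illustrative):
+ #
+ #   class Stuff < ActiveRecord::Base
+ #     before_save { self.id ||= SecureRandom.uuid }
+ #   end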
+ def primary_key(name, type = :primary_key, options = {})
+ return super unless type == :uuid
+ options[:default] = options.fetch(:default, 'uuid_generate_v4()')
+ options[:primary_key] = true
+ column name, type, options
+ end
+
+ def column(name, type = nil, options = {})
+ super
+ column = self[name]
+ column.array = options[:array]
+
+ self
+ end
+
+ private
+
+ def create_column_definition(name, type)
+ PostgreSQL::ColumnDefinition.new name, type
+ end
+ end
+
+ class Table < ActiveRecord::ConnectionAdapters::Table
+ include ColumnMethods
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb
new file mode 100644
index 0000000000..7042817672
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb
@@ -0,0 +1,560 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ class SchemaCreation < AbstractAdapter::SchemaCreation
+ private
+
+ def visit_AddColumn(o)
+ sql_type = type_to_sql(o.type, o.limit, o.precision, o.scale)
+ sql = "ADD COLUMN #{quote_column_name(o.name)} #{sql_type}"
+ add_column_options!(sql, column_options(o))
+ end
+
+ def visit_ColumnDefinition(o)
+ sql = super
+ if o.primary_key? && o.type != :primary_key
+ sql << " PRIMARY KEY "
+ add_column_options!(sql, column_options(o))
+ end
+ sql
+ end
+
+ def add_column_options!(sql, options)
+ if options[:array] || options[:column].try(:array)
+ sql << '[]'
+ end
+
+ column = options.fetch(:column) { return super }
+ if column.type == :uuid && options[:default] =~ /\(\)/
+ sql << " DEFAULT #{options[:default]}"
+ else
+ super
+ end
+ end
+
+ def type_for_column(column)
+ if column.array
+ @conn.lookup_cast_type("#{column.sql_type}[]")
+ else
+ super
+ end
+ end
+ end
+
+ module SchemaStatements
+ # Drops the database specified by +name+
+ # and creates it again using the provided +options+.
+ def recreate_database(name, options = {}) #:nodoc:
+ drop_database(name)
+ create_database(name, options)
+ end
+
+ # Create a new PostgreSQL database. Options include <tt>:owner</tt>, <tt>:template</tt>,
+ # <tt>:encoding</tt> (defaults to utf8), <tt>:collation</tt>, <tt>:ctype</tt>,
+ # <tt>:tablespace</tt>, and <tt>:connection_limit</tt> (note that MySQL uses
+ # <tt>:charset</tt> while PostgreSQL uses <tt>:encoding</tt>).
+ #
+ # Example:
+ # create_database config[:database], config
+ # create_database 'foo_development', encoding: 'unicode'
+ def create_database(name, options = {})
+ options = { encoding: 'utf8' }.merge!(options.symbolize_keys)
+
+ option_string = options.sum do |key, value|
+ case key
+ when :owner
+ " OWNER = \"#{value}\""
+ when :template
+ " TEMPLATE = \"#{value}\""
+ when :encoding
+ " ENCODING = '#{value}'"
+ when :collation
+ " LC_COLLATE = '#{value}'"
+ when :ctype
+ " LC_CTYPE = '#{value}'"
+ when :tablespace
+ " TABLESPACE = \"#{value}\""
+ when :connection_limit
+ " CONNECTION LIMIT = #{value}"
+ else
+ ""
+ end
+ end
+
+ execute "CREATE DATABASE #{quote_table_name(name)}#{option_string}"
+ end
+
+ # Drops a PostgreSQL database.
+ #
+ # Example:
+ # drop_database 'matt_development'
+ def drop_database(name) #:nodoc:
+ execute "DROP DATABASE IF EXISTS #{quote_table_name(name)}"
+ end
+
+ # Returns the list of all tables in the schema search path or a specified schema.
+ def tables(name = nil)
+ query(<<-SQL, 'SCHEMA').map { |row| row[0] }
+ SELECT tablename
+ FROM pg_tables
+ WHERE schemaname = ANY (current_schemas(false))
+ SQL
+ end
+
+ # Returns true if the table exists.
+ # If the schema is not specified as part of +name+, then it will only find tables within
+ # the current schema search path (regardless of permissions to access tables in other schemas).
+ def table_exists?(name)
+ name = Utils.extract_schema_qualified_name(name.to_s)
+ return false unless name.identifier
+
+ exec_query(<<-SQL, 'SCHEMA').rows.first[0].to_i > 0
+ SELECT COUNT(*)
+ FROM pg_class c
+ LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
+ WHERE c.relkind IN ('r','v','m') -- (r)elation/table, (v)iew, (m)aterialized view
+ AND c.relname = '#{name.identifier}'
+ AND n.nspname = #{name.schema ? "'#{name.schema}'" : 'ANY (current_schemas(false))'}
+ SQL
+ end
+
+ # Returns true if schema exists.
+ def schema_exists?(name)
+ exec_query(<<-SQL, 'SCHEMA').rows.first[0].to_i > 0
+ SELECT COUNT(*)
+ FROM pg_namespace
+ WHERE nspname = '#{name}'
+ SQL
+ end
+
+ def index_name_exists?(table_name, index_name, default)
+ exec_query(<<-SQL, 'SCHEMA').rows.first[0].to_i > 0
+ SELECT COUNT(*)
+ FROM pg_class t
+ INNER JOIN pg_index d ON t.oid = d.indrelid
+ INNER JOIN pg_class i ON d.indexrelid = i.oid
+ WHERE i.relkind = 'i'
+ AND i.relname = '#{index_name}'
+ AND t.relname = '#{table_name}'
+ AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname = ANY (current_schemas(false)) )
+ SQL
+ end
+
+ # Returns an array of indexes for the given table.
+ def indexes(table_name, name = nil)
+ result = query(<<-SQL, 'SCHEMA')
+ SELECT distinct i.relname, d.indisunique, d.indkey, pg_get_indexdef(d.indexrelid), t.oid
+ FROM pg_class t
+ INNER JOIN pg_index d ON t.oid = d.indrelid
+ INNER JOIN pg_class i ON d.indexrelid = i.oid
+ WHERE i.relkind = 'i'
+ AND d.indisprimary = 'f'
+ AND t.relname = '#{table_name}'
+ AND i.relnamespace IN (SELECT oid FROM pg_namespace WHERE nspname = ANY (current_schemas(false)) )
+ ORDER BY i.relname
+ SQL
+
+ result.map do |row|
+ index_name = row[0]
+ unique = row[1] == 't'
+ indkey = row[2].split(" ")
+ inddef = row[3]
+ oid = row[4]
+
+ columns = Hash[query(<<-SQL, "SCHEMA")]
+ SELECT a.attnum, a.attname
+ FROM pg_attribute a
+ WHERE a.attrelid = #{oid}
+ AND a.attnum IN (#{indkey.join(",")})
+ SQL
+
+ column_names = columns.values_at(*indkey).compact
+
+ unless column_names.empty?
+ # add info on sort order for columns (only desc order is explicitly specified, asc is the default)
+ desc_order_columns = inddef.scan(/(\w+) DESC/).flatten
+ orders = desc_order_columns.any? ? Hash[desc_order_columns.map {|order_column| [order_column, :desc]}] : {}
+ where = inddef.scan(/WHERE (.+)$/).flatten[0]
+ using = inddef.scan(/USING (.+?) /).flatten[0].to_sym
+
+ IndexDefinition.new(table_name, index_name, unique, column_names, [], orders, where, nil, using)
+ end
+ end.compact
+ end
+
+ # Returns the list of all column definitions for a table.
+ def columns(table_name)
+ # Limit, precision, and scale are all handled by the superclass.
+ column_definitions(table_name).map do |column_name, type, default, notnull, oid, fmod|
+ oid = get_oid_type(oid.to_i, fmod.to_i, column_name, type)
+ default_value = extract_value_from_default(oid, default)
+ default_function = extract_default_function(default_value, default)
+ new_column(column_name, default_value, oid, type, notnull == 'f', default_function)
+ end
+ end
+
+ def new_column(name, default, cast_type, sql_type = nil, null = true, default_function = nil) # :nodoc:
+ PostgreSQLColumn.new(name, default, cast_type, sql_type, null, default_function)
+ end
+
+ # Returns the current database name.
+ def current_database
+ query('select current_database()', 'SCHEMA')[0][0]
+ end
+
+ # Returns the current schema name.
+ def current_schema
+ query('SELECT current_schema', 'SCHEMA')[0][0]
+ end
+
+ # Returns the current database encoding format.
+ def encoding
+ query(<<-end_sql, 'SCHEMA')[0][0]
+ SELECT pg_encoding_to_char(pg_database.encoding) FROM pg_database
+ WHERE pg_database.datname LIKE '#{current_database}'
+ end_sql
+ end
+
+ # Returns the current database collation.
+ def collation
+ query(<<-end_sql, 'SCHEMA')[0][0]
+ SELECT pg_database.datcollate FROM pg_database WHERE pg_database.datname LIKE '#{current_database}'
+ end_sql
+ end
+
+ # Returns the current database ctype.
+ def ctype
+ query(<<-end_sql, 'SCHEMA')[0][0]
+ SELECT pg_database.datctype FROM pg_database WHERE pg_database.datname LIKE '#{current_database}'
+ end_sql
+ end
+
+ # Returns an array of schema names.
+ def schema_names
+ query(<<-SQL, 'SCHEMA').flatten
+ SELECT nspname
+ FROM pg_namespace
+ WHERE nspname !~ '^pg_.*'
+ AND nspname NOT IN ('information_schema')
+ ORDER by nspname;
+ SQL
+ end
+
+ # Creates a schema for the given schema name.
+ def create_schema schema_name
+ execute "CREATE SCHEMA #{schema_name}"
+ end
+
+ # Drops the schema for the given schema name.
+ def drop_schema schema_name
+ execute "DROP SCHEMA #{schema_name} CASCADE"
+ end
+
+ # Sets the schema search path to a string of comma-separated schema names.
+ # Names beginning with $ have to be quoted (e.g. $user => '$user').
+ # See: http://www.postgresql.org/docs/current/static/ddl-schemas.html
+ #
+ # This should not be called manually; set it through the connection configuration (e.g. database.yml) instead, as sketched below.
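+ #
+ # A minimal configuration sketch (illustrative values only):
+ #
+ #   ActiveRecord::Base.establish_connection(
+ #     adapter:            'postgresql',
+ #     database:           'app_development',
+ #     schema_search_path: 'accounting,public'
+ #   )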
+ def schema_search_path=(schema_csv)
+ if schema_csv
+ execute("SET search_path TO #{schema_csv}", 'SCHEMA')
+ @schema_search_path = schema_csv
+ end
+ end
+
+ # Returns the active schema search path.
+ def schema_search_path
+ @schema_search_path ||= query('SHOW search_path', 'SCHEMA')[0][0]
+ end
+
+ # Returns the current client message level.
+ def client_min_messages
+ query('SHOW client_min_messages', 'SCHEMA')[0][0]
+ end
+
+ # Set the client message level.
+ def client_min_messages=(level)
+ execute("SET client_min_messages TO '#{level}'", 'SCHEMA')
+ end
+
+ # Returns the sequence name for a table's primary key or some other specified key.
+ def default_sequence_name(table_name, pk = nil) #:nodoc:
+ result = serial_sequence(table_name, pk || 'id')
+ return nil unless result
+ Utils.extract_schema_qualified_name(result)
+ rescue ActiveRecord::StatementInvalid
+ PostgreSQL::Name.new(nil, "#{table_name}_#{pk || 'id'}_seq")
+ end
+
+ def serial_sequence(table, column)
+ result = exec_query(<<-eosql, 'SCHEMA')
+ SELECT pg_get_serial_sequence('#{table}', '#{column}')
+ eosql
+ result.rows.first.first
+ end
+
+ # Resets the sequence of a table's primary key to the maximum value.
+ def reset_pk_sequence!(table, pk = nil, sequence = nil) #:nodoc:
+ unless pk and sequence
+ default_pk, default_sequence = pk_and_sequence_for(table)
+
+ pk ||= default_pk
+ sequence ||= default_sequence
+ end
+
+ if @logger && pk && !sequence
+ @logger.warn "#{table} has primary key #{pk} with no default sequence"
+ end
+
+ if pk && sequence
+ quoted_sequence = quote_table_name(sequence)
+
+ select_value <<-end_sql, 'SCHEMA'
+ SELECT setval('#{quoted_sequence}', (SELECT COALESCE(MAX(#{quote_column_name pk})+(SELECT increment_by FROM #{quoted_sequence}), (SELECT min_value FROM #{quoted_sequence})) FROM #{quote_table_name(table)}), false)
+ end_sql
+ end
+ end
+
+ # Returns a table's primary key and its associated sequence.
+ def pk_and_sequence_for(table) #:nodoc:
+ # First try looking for a sequence with a dependency on the
+ # given table's primary key.
+ result = query(<<-end_sql, 'SCHEMA')[0]
+ SELECT attr.attname, nsp.nspname, seq.relname
+ FROM pg_class seq,
+ pg_attribute attr,
+ pg_depend dep,
+ pg_constraint cons,
+ pg_namespace nsp
+ WHERE seq.oid = dep.objid
+ AND seq.relkind = 'S'
+ AND attr.attrelid = dep.refobjid
+ AND attr.attnum = dep.refobjsubid
+ AND attr.attrelid = cons.conrelid
+ AND attr.attnum = cons.conkey[1]
+ AND seq.relnamespace = nsp.oid
+ AND cons.contype = 'p'
+ AND dep.classid = 'pg_class'::regclass
+ AND dep.refobjid = '#{quote_table_name(table)}'::regclass
+ end_sql
+
+ if result.nil? or result.empty?
+ result = query(<<-end_sql, 'SCHEMA')[0]
+ SELECT attr.attname, nsp.nspname,
+ CASE
+ WHEN pg_get_expr(def.adbin, def.adrelid) !~* 'nextval' THEN NULL
+ WHEN split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2) ~ '.' THEN
+ substr(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2),
+ strpos(split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2), '.')+1)
+ ELSE split_part(pg_get_expr(def.adbin, def.adrelid), '''', 2)
+ END
+ FROM pg_class t
+ JOIN pg_attribute attr ON (t.oid = attrelid)
+ JOIN pg_attrdef def ON (adrelid = attrelid AND adnum = attnum)
+ JOIN pg_constraint cons ON (conrelid = adrelid AND adnum = conkey[1])
+ JOIN pg_namespace nsp ON (t.relnamespace = nsp.oid)
+ WHERE t.oid = '#{quote_table_name(table)}'::regclass
+ AND cons.contype = 'p'
+ AND pg_get_expr(def.adbin, def.adrelid) ~* 'nextval|uuid_generate'
+ end_sql
+ end
+
+ pk = result.shift
+ if result.last
+ [pk, PostgreSQL::Name.new(*result)]
+ else
+ [pk, nil]
+ end
+ rescue
+ nil
+ end
+
+ # Returns just a table's primary key
+ def primary_key(table)
+ row = exec_query(<<-end_sql, 'SCHEMA').rows.first
+ SELECT attr.attname
+ FROM pg_attribute attr
+ INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = cons.conkey[1]
+ WHERE cons.contype = 'p'
+ AND cons.conrelid = '#{quote_table_name(table)}'::regclass
+ end_sql
+
+ row && row.first
+ end
+
+ # Renames a table.
+ # Also renames a table's primary key sequence if the sequence name exists and
+ # matches the Active Record default.
+ #
+ # Example:
+ # rename_table('octopuses', 'octopi')
+ def rename_table(table_name, new_name)
+ clear_cache!
+ execute "ALTER TABLE #{quote_table_name(table_name)} RENAME TO #{quote_table_name(new_name)}"
+ pk, seq = pk_and_sequence_for(new_name)
+ if seq && seq.identifier == "#{table_name}_#{pk}_seq"
+ new_seq = "#{new_name}_#{pk}_seq"
+ execute "ALTER TABLE #{quote_table_name(seq)} RENAME TO #{quote_table_name(new_seq)}"
+ end
+
+ rename_table_indexes(table_name, new_name)
+ end
+
+ # Adds a new column to the named table.
+ # See TableDefinition#column for details of the options you can use.
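+ #
+ # A hedged sketch (hypothetical table and column) showing the PostgreSQL-specific +:array+ option:
+ #
+ #   add_column :documents, :tags, :string, array: true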
+ def add_column(table_name, column_name, type, options = {})
+ clear_cache!
+ super
+ end
+
+ # Changes the column of a table.
+ def change_column(table_name, column_name, type, options = {})
+ clear_cache!
+ quoted_table_name = quote_table_name(table_name)
+ sql_type = type_to_sql(type, options[:limit], options[:precision], options[:scale])
+ sql_type << "[]" if options[:array]
+ execute "ALTER TABLE #{quoted_table_name} ALTER COLUMN #{quote_column_name(column_name)} TYPE #{sql_type}"
+
+ change_column_default(table_name, column_name, options[:default]) if options_include_default?(options)
+ change_column_null(table_name, column_name, options[:null], options[:default]) if options.key?(:null)
+ end
+
+ # Changes the default value of a table column.
+ def change_column_default(table_name, column_name, default)
+ clear_cache!
+ column = column_for(table_name, column_name)
+ return unless column
+
+ alter_column_query = "ALTER TABLE #{quote_table_name(table_name)} ALTER COLUMN #{quote_column_name(column_name)} %s"
+ if default.nil?
+ # <tt>DEFAULT NULL</tt> results in the same behavior as <tt>DROP DEFAULT</tt>. However, PostgreSQL will
+ # cast the default to the column's type, which leaves us with a default like "default NULL::character varying".
+ execute alter_column_query % "DROP DEFAULT"
+ else
+ execute alter_column_query % "SET DEFAULT #{quote_default_value(default, column)}"
+ end
+ end
+
+ def change_column_null(table_name, column_name, null, default = nil)
+ clear_cache!
+ unless null || default.nil?
+ column = column_for(table_name, column_name)
+ execute("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote_default_value(default, column)} WHERE #{quote_column_name(column_name)} IS NULL") if column
+ end
+ execute("ALTER TABLE #{quote_table_name(table_name)} ALTER #{quote_column_name(column_name)} #{null ? 'DROP' : 'SET'} NOT NULL")
+ end
+
+ # Renames a column in a table.
+ def rename_column(table_name, column_name, new_column_name)
+ clear_cache!
+ execute "ALTER TABLE #{quote_table_name(table_name)} RENAME COLUMN #{quote_column_name(column_name)} TO #{quote_column_name(new_column_name)}"
+ rename_column_indexes(table_name, column_name, new_column_name)
+ end
+
+ def add_index(table_name, column_name, options = {}) #:nodoc:
+ index_name, index_type, index_columns, index_options, index_algorithm, index_using = add_index_options(table_name, column_name, options)
+ execute "CREATE #{index_type} INDEX #{index_algorithm} #{quote_column_name(index_name)} ON #{quote_table_name(table_name)} #{index_using} (#{index_columns})#{index_options}"
+ end
+
+ def remove_index!(table_name, index_name) #:nodoc:
+ execute "DROP INDEX #{quote_table_name(index_name)}"
+ end
+
+ def rename_index(table_name, old_name, new_name)
+ execute "ALTER INDEX #{quote_column_name(old_name)} RENAME TO #{quote_table_name(new_name)}"
+ end
+
+ def foreign_keys(table_name)
+ fk_info = select_all <<-SQL.strip_heredoc
+ SELECT t2.relname AS to_table, a1.attname AS column, a2.attname AS primary_key, c.conname AS name, c.confupdtype AS on_update, c.confdeltype AS on_delete
+ FROM pg_constraint c
+ JOIN pg_class t1 ON c.conrelid = t1.oid
+ JOIN pg_class t2 ON c.confrelid = t2.oid
+ JOIN pg_attribute a1 ON a1.attnum = c.conkey[1] AND a1.attrelid = t1.oid
+ JOIN pg_attribute a2 ON a2.attnum = c.confkey[1] AND a2.attrelid = t2.oid
+ JOIN pg_namespace t3 ON c.connamespace = t3.oid
+ WHERE c.contype = 'f'
+ AND t1.relname = #{quote(table_name)}
+ AND t3.nspname = ANY (current_schemas(false))
+ ORDER BY c.conname
+ SQL
+
+ fk_info.map do |row|
+ options = {
+ column: row['column'],
+ name: row['name'],
+ primary_key: row['primary_key']
+ }
+
+ options[:on_delete] = extract_foreign_key_action(row['on_delete'])
+ options[:on_update] = extract_foreign_key_action(row['on_update'])
+ ForeignKeyDefinition.new(table_name, row['to_table'], options)
+ end
+ end
+
+ def extract_foreign_key_action(specifier) # :nodoc:
+ case specifier
+ when 'c'; :cascade
+ when 'n'; :nullify
+ when 'r'; :restrict
+ end
+ end
+
+ def index_name_length
+ 63
+ end
+
+ # Maps logical Rails types to PostgreSQL-specific data types.
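+ #
+ # For instance (illustrative calls, following the mapping below):
+ #
+ #   type_to_sql(:integer, 2)       # => "smallint"
+ #   type_to_sql(:datetime, nil, 4) # => "timestamp(4)"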
+ def type_to_sql(type, limit = nil, precision = nil, scale = nil)
+ case type.to_s
+ when 'binary'
+ # PostgreSQL doesn't support limits on binary (bytea) columns.
+ # The hard limit is 1Gb, because of a 32-bit size field, and TOAST.
+ case limit
+ when nil, 0..0x3fffffff; super(type)
+ else raise(ActiveRecordError, "No binary type has byte size #{limit}.")
+ end
+ when 'text'
+ # PostgreSQL doesn't support limits on text columns.
+ # The hard limit is 1Gb, according to section 8.3 in the manual.
+ case limit
+ when nil, 0..0x3fffffff; super(type)
+ else raise(ActiveRecordError, "The limit on text can be at most 1GB - 1byte.")
+ end
+ when 'integer'
+ return 'integer' unless limit
+
+ case limit
+ when 1, 2; 'smallint'
+ when 3, 4; 'integer'
+ when 5..8; 'bigint'
+ else raise(ActiveRecordError, "No integer type has byte size #{limit}. Use a numeric with precision 0 instead.")
+ end
+ when 'datetime'
+ return super unless precision
+
+ case precision
+ when 0..6; "timestamp(#{precision})"
+ else raise(ActiveRecordError, "No timestamp type has precision of #{precision}. The allowed range of precision is from 0 to 6")
+ end
+ else
+ super
+ end
+ end
+
+ # PostgreSQL requires the ORDER BY columns in the select list for distinct queries, and
+ # requires that the ORDER BY include the distinct column.
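+ #
+ # An illustrative call (hypothetical column names):
+ #
+ #   columns_for_distinct("posts.id", ["posts.created_at DESC"])
+ #   # => "posts.id, posts.created_at AS alias_0"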
+ def columns_for_distinct(columns, orders) #:nodoc:
+ order_columns = orders.reject(&:blank?).map{ |s|
+ # Convert Arel node to string
+ s = s.to_sql unless s.is_a?(String)
+ # Remove any ASC/DESC modifiers
+ s.gsub(/\s+(?:ASC|DESC)?\s*(?:NULLS\s+(?:FIRST|LAST)\s*)?/i, '')
+ }.reject(&:blank?).map.with_index { |column, i| "#{column} AS alias_#{i}" }
+
+ [super, *order_columns].join(', ')
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql/utils.rb b/activerecord/lib/active_record/connection_adapters/postgresql/utils.rb
new file mode 100644
index 0000000000..0290bcb48c
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql/utils.rb
@@ -0,0 +1,66 @@
+module ActiveRecord
+ module ConnectionAdapters
+ module PostgreSQL
+ # Value Object to hold a schema qualified name.
+ # This is usually the name of a PostgreSQL relation but it can also represent
+ # schema qualified type names. +schema+ and +identifier+ are unquoted to prevent
+ # double quoting.
+ class Name # :nodoc:
+ SEPARATOR = "."
+ attr_reader :schema, :identifier
+
+ def initialize(schema, identifier)
+ @schema, @identifier = unquote(schema), unquote(identifier)
+ end
+
+ def to_s
+ parts.join SEPARATOR
+ end
+
+ def quoted
+ parts.map { |p| PGconn.quote_ident(p) }.join SEPARATOR
+ end
+
+ def ==(o)
+ o.class == self.class && o.parts == parts
+ end
+ alias_method :eql?, :==
+
+ def hash
+ parts.hash
+ end
+
+ protected
+ def unquote(part)
+ return unless part
+ part.gsub(/(^"|"$)/,'')
+ end
+
+ def parts
+ @parts ||= [@schema, @identifier].compact
+ end
+ end
+
+ module Utils # :nodoc:
+ extend self
+
+ # Returns an instance of <tt>ActiveRecord::ConnectionAdapters::PostgreSQL::Name</tt>
+ # extracted from +string+.
+ # +schema+ is nil if not specified in +string+.
+ # +schema+ and +identifier+ exclude surrounding quotes (regardless of whether they were quoted in +string+).
+ # +string+ supports the range of schema/table references understood by PostgreSQL, for example:
+ #
+ # * <tt>table_name</tt>
+ # * <tt>"table.name"</tt>
+ # * <tt>schema_name.table_name</tt>
+ # * <tt>schema_name."table.name"</tt>
+ # * <tt>"schema_name".table_name</tt>
+ # * <tt>"schema.name"."table name"</tt>
+ def extract_schema_qualified_name(string)
+ table, schema = string.scan(/[^".\s]+|"[^"]*"/)[0..1].reverse
+ PostgreSQL::Name.new(schema, table)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb b/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
new file mode 100644
index 0000000000..eede374678
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/postgresql_adapter.rb
@@ -0,0 +1,743 @@
+require 'active_record/connection_adapters/abstract_adapter'
+require 'active_record/connection_adapters/statement_pool'
+
+require 'active_record/connection_adapters/postgresql/utils'
+require 'active_record/connection_adapters/postgresql/column'
+require 'active_record/connection_adapters/postgresql/oid'
+require 'active_record/connection_adapters/postgresql/quoting'
+require 'active_record/connection_adapters/postgresql/referential_integrity'
+require 'active_record/connection_adapters/postgresql/schema_definitions'
+require 'active_record/connection_adapters/postgresql/schema_statements'
+require 'active_record/connection_adapters/postgresql/database_statements'
+
+require 'arel/visitors/bind_visitor'
+
+# Make sure we're using pg high enough for PGResult#values
+gem 'pg', '~> 0.11'
+require 'pg'
+
+require 'ipaddr'
+
+module ActiveRecord
+ module ConnectionHandling # :nodoc:
+ VALID_CONN_PARAMS = [:host, :hostaddr, :port, :dbname, :user, :password, :connect_timeout,
+ :client_encoding, :options, :application_name, :fallback_application_name,
+ :keepalives, :keepalives_idle, :keepalives_interval, :keepalives_count,
+ :tty, :sslmode, :requiressl, :sslcompression, :sslcert, :sslkey,
+ :sslrootcert, :sslcrl, :requirepeer, :krbsrvname, :gsslib, :service]
+
+ # Establishes a connection to the database that's used by all Active Record objects
+ def postgresql_connection(config)
+ conn_params = config.symbolize_keys
+
+ conn_params.delete_if { |_, v| v.nil? }
+
+ # Map Active Record's parameter names to PG's.
+ conn_params[:user] = conn_params.delete(:username) if conn_params[:username]
+ conn_params[:dbname] = conn_params.delete(:database) if conn_params[:database]
+
+ # Forward only valid config params to PGconn.connect.
+ conn_params.keep_if { |k, _| VALID_CONN_PARAMS.include?(k) }
+
+ # The postgres drivers don't allow the creation of an unconnected PGconn object,
+ # so just pass a nil connection object for the time being.
+ ConnectionAdapters::PostgreSQLAdapter.new(nil, logger, conn_params, config)
+ end
+ end
+
+ module ConnectionAdapters
+ # The PostgreSQL adapter works with the native C (https://bitbucket.org/ged/ruby-pg) driver.
+ #
+ # Options:
+ #
+ # * <tt>:host</tt> - Defaults to a Unix-domain socket in /tmp. On machines without Unix-domain sockets,
+ # the default is to connect to localhost.
+ # * <tt>:port</tt> - Defaults to 5432.
+ # * <tt>:username</tt> - Defaults to be the same as the operating system name of the user running the application.
+ # * <tt>:password</tt> - Password to be used if the server demands password authentication.
+ # * <tt>:database</tt> - Defaults to be the same as the user name.
+ # * <tt>:schema_search_path</tt> - An optional schema search path for the connection given
+ # as a string of comma-separated schema names. This is backward-compatible with the <tt>:schema_order</tt> option.
+ # * <tt>:encoding</tt> - An optional client encoding that is used in a <tt>SET client_encoding TO
+ # <encoding></tt> call on the connection.
+ # * <tt>:min_messages</tt> - An optional client min messages that is used in a
+ # <tt>SET client_min_messages TO <min_messages></tt> call on the connection.
+ # * <tt>:variables</tt> - An optional hash of additional parameters that
+ # will be used in <tt>SET SESSION key = val</tt> calls on the connection.
+ # * <tt>:insert_returning</tt> - An optional boolean to control the use of <tt>RETURNING</tt> for <tt>INSERT</tt> statements;
+ # defaults to true.
+ #
+ # Any further options are used as connection parameters to libpq. See
+ # http://www.postgresql.org/docs/9.1/static/libpq-connect.html for the
+ # list of parameters.
+ #
+ # In addition, default connection parameters of libpq can be set via environment variables.
+ # See http://www.postgresql.org/docs/9.1/static/libpq-envars.html .
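+ #
+ # A configuration sketch using a few of the options above (illustrative values only):
+ #
+ #   ActiveRecord::Base.establish_connection(
+ #     adapter:          'postgresql',
+ #     host:             'localhost',
+ #     username:         'app',
+ #     database:         'app_development',
+ #     min_messages:     'warning',
+ #     insert_returning: true
+ #   )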
+ class PostgreSQLAdapter < AbstractAdapter
+ ADAPTER_NAME = 'PostgreSQL'
+
+ NATIVE_DATABASE_TYPES = {
+ primary_key: "serial primary key",
+ string: { name: "character varying" },
+ text: { name: "text" },
+ integer: { name: "integer" },
+ float: { name: "float" },
+ decimal: { name: "decimal" },
+ datetime: { name: "timestamp" },
+ time: { name: "time" },
+ date: { name: "date" },
+ daterange: { name: "daterange" },
+ numrange: { name: "numrange" },
+ tsrange: { name: "tsrange" },
+ tstzrange: { name: "tstzrange" },
+ int4range: { name: "int4range" },
+ int8range: { name: "int8range" },
+ binary: { name: "bytea" },
+ boolean: { name: "boolean" },
+ xml: { name: "xml" },
+ tsvector: { name: "tsvector" },
+ hstore: { name: "hstore" },
+ inet: { name: "inet" },
+ cidr: { name: "cidr" },
+ macaddr: { name: "macaddr" },
+ uuid: { name: "uuid" },
+ json: { name: "json" },
+ ltree: { name: "ltree" },
+ citext: { name: "citext" },
+ point: { name: "point" },
+ bit: { name: "bit" },
+ bit_varying: { name: "bit varying" },
+ money: { name: "money" },
+ }
+
+ OID = PostgreSQL::OID #:nodoc:
+
+ include PostgreSQL::Quoting
+ include PostgreSQL::ReferentialIntegrity
+ include PostgreSQL::SchemaStatements
+ include PostgreSQL::DatabaseStatements
+ include Savepoints
+
+ # Returns 'PostgreSQL' as adapter name for identification purposes.
+ def adapter_name
+ ADAPTER_NAME
+ end
+
+ def schema_creation # :nodoc:
+ PostgreSQL::SchemaCreation.new self
+ end
+
+ # Adds `:array` option to the default set provided by the
+ # AbstractAdapter
+ def prepare_column_options(column, types) # :nodoc:
+ spec = super
+ spec[:array] = 'true' if column.respond_to?(:array) && column.array
+ spec[:default] = "\"#{column.default_function}\"" if column.default_function
+ spec
+ end
+
+ # Adds `:array` as a valid migration key
+ def migration_keys
+ super + [:array]
+ end
+
+ # Returns +true+, since this connection adapter supports prepared statement
+ # caching.
+ def supports_statement_cache?
+ true
+ end
+
+ def supports_index_sort_order?
+ true
+ end
+
+ def supports_partial_index?
+ true
+ end
+
+ def supports_transaction_isolation?
+ true
+ end
+
+ def supports_foreign_keys?
+ true
+ end
+
+ def index_algorithms
+ { concurrently: 'CONCURRENTLY' }
+ end
+
+ class StatementPool < ConnectionAdapters::StatementPool
+ def initialize(connection, max)
+ super
+ @counter = 0
+ @cache = Hash.new { |h,pid| h[pid] = {} }
+ end
+
+ def each(&block); cache.each(&block); end
+ def key?(key); cache.key?(key); end
+ def [](key); cache[key]; end
+ def length; cache.length; end
+
+ def next_key
+ "a#{@counter + 1}"
+ end
+
+ def []=(sql, key)
+ while @max <= cache.size
+ dealloc(cache.shift.last)
+ end
+ @counter += 1
+ cache[sql] = key
+ end
+
+ def clear
+ cache.each_value do |stmt_key|
+ dealloc stmt_key
+ end
+ cache.clear
+ end
+
+ def delete(sql_key)
+ dealloc cache[sql_key]
+ cache.delete sql_key
+ end
+
+ private
+
+ def cache
+ @cache[Process.pid]
+ end
+
+ def dealloc(key)
+ @connection.query "DEALLOCATE #{key}" if connection_active?
+ end
+
+ def connection_active?
+ @connection.status == PGconn::CONNECTION_OK
+ rescue PGError
+ false
+ end
+ end
+
+ # Initializes and connects a PostgreSQL adapter.
+ def initialize(connection, logger, connection_parameters, config)
+ super(connection, logger)
+
+ @visitor = Arel::Visitors::PostgreSQL.new self
+ if self.class.type_cast_config_to_boolean(config.fetch(:prepared_statements) { true })
+ @prepared_statements = true
+ else
+ @prepared_statements = false
+ end
+
+ @connection_parameters, @config = connection_parameters, config
+
+ # @local_tz is initialized as nil to avoid warnings when connect tries to use it
+ @local_tz = nil
+ @table_alias_length = nil
+
+ connect
+ @statements = StatementPool.new @connection,
+ self.class.type_cast_config_to_integer(config.fetch(:statement_limit) { 1000 })
+
+ if postgresql_version < 80200
+ raise "Your version of PostgreSQL (#{postgresql_version}) is too old, please upgrade!"
+ end
+
+ @type_map = Type::HashLookupTypeMap.new
+ initialize_type_map(type_map)
+ @local_tz = execute('SHOW TIME ZONE', 'SCHEMA').first["TimeZone"]
+ @use_insert_returning = @config.key?(:insert_returning) ? self.class.type_cast_config_to_boolean(@config[:insert_returning]) : true
+ end
+
+ # Clears the prepared statements cache.
+ def clear_cache!
+ @statements.clear
+ end
+
+ # Is this connection alive and ready for queries?
+ def active?
+ @connection.query 'SELECT 1'
+ true
+ rescue PGError
+ false
+ end
+
+ # Close then reopen the connection.
+ def reconnect!
+ super
+ @connection.reset
+ configure_connection
+ end
+
+ def reset!
+ clear_cache!
+ reset_transaction
+ unless @connection.transaction_status == ::PG::PQTRANS_IDLE
+ @connection.query 'ROLLBACK'
+ end
+ @connection.query 'DISCARD ALL'
+ configure_connection
+ end
+
+ # Disconnects from the database if already connected. Otherwise, this
+ # method does nothing.
+ def disconnect!
+ super
+ @connection.close rescue nil
+ end
+
+ def native_database_types #:nodoc:
+ NATIVE_DATABASE_TYPES
+ end
+
+ # Returns true, since this connection adapter supports migrations.
+ def supports_migrations?
+ true
+ end
+
+ # Does PostgreSQL support finding primary key on non-Active Record tables?
+ def supports_primary_key? #:nodoc:
+ true
+ end
+
+ # Enable standard-conforming strings if available.
+ def set_standard_conforming_strings
+ old, self.client_min_messages = client_min_messages, 'panic'
+ execute('SET standard_conforming_strings = on', 'SCHEMA') rescue nil
+ ensure
+ self.client_min_messages = old
+ end
+
+ def supports_ddl_transactions?
+ true
+ end
+
+ def supports_explain?
+ true
+ end
+
+ # Returns true if PostgreSQL >= 9.1.
+ def supports_extensions?
+ postgresql_version >= 90100
+ end
+
+ # Range datatypes weren't introduced until PostgreSQL 9.2
+ def supports_ranges?
+ postgresql_version >= 90200
+ end
+
+ def supports_materialized_views?
+ postgresql_version >= 90300
+ end
+
+ def enable_extension(name)
+ exec_query("CREATE EXTENSION IF NOT EXISTS \"#{name}\"").tap {
+ reload_type_map
+ }
+ end
+
+ def disable_extension(name)
+ exec_query("DROP EXTENSION IF EXISTS \"#{name}\" CASCADE").tap {
+ reload_type_map
+ }
+ end
+
+ def extension_enabled?(name)
+ if supports_extensions?
+ res = exec_query "SELECT EXISTS(SELECT * FROM pg_available_extensions WHERE name = '#{name}' AND installed_version IS NOT NULL) as enabled",
+ 'SCHEMA'
+ res.cast_values.first
+ end
+ end
+
+ def extensions
+ if supports_extensions?
+ exec_query("SELECT extname from pg_extension", "SCHEMA").cast_values
+ else
+ super
+ end
+ end
+
+ # Returns the maximum identifier length supported by PostgreSQL, as configured on the server.
+ def table_alias_length
+ @table_alias_length ||= query('SHOW max_identifier_length', 'SCHEMA')[0][0].to_i
+ end
+
+ # Set the authorized user for this session
+ def session_auth=(user)
+ clear_cache!
+ exec_query "SET SESSION AUTHORIZATION #{user}"
+ end
+
+ def use_insert_returning?
+ @use_insert_returning
+ end
+
+ def valid_type?(type)
+ !native_database_types[type].nil?
+ end
+
+ def update_table_definition(table_name, base) #:nodoc:
+ PostgreSQL::Table.new(table_name, base)
+ end
+
+ def lookup_cast_type(sql_type) # :nodoc:
+ oid = execute("SELECT #{quote(sql_type)}::regtype::oid", "SCHEMA").first['oid'].to_i
+ super(oid)
+ end
+
+ protected
+
+ # Returns the version of the connected PostgreSQL server.
+ def postgresql_version
+ @connection.server_version
+ end
+
+ # See http://www.postgresql.org/docs/9.1/static/errcodes-appendix.html
+ FOREIGN_KEY_VIOLATION = "23503"
+ UNIQUE_VIOLATION = "23505"
+
+ def translate_exception(exception, message)
+ return exception unless exception.respond_to?(:result)
+
+ case exception.result.try(:error_field, PGresult::PG_DIAG_SQLSTATE)
+ when UNIQUE_VIOLATION
+ RecordNotUnique.new(message, exception)
+ when FOREIGN_KEY_VIOLATION
+ InvalidForeignKey.new(message, exception)
+ else
+ super
+ end
+ end
+
+ private
+
+ def get_oid_type(oid, fmod, column_name, sql_type = '') # :nodoc:
+ if !type_map.key?(oid)
+ load_additional_types(type_map, [oid])
+ end
+
+ type_map.fetch(oid, fmod, sql_type) {
+ warn "unknown OID #{oid}: failed to recognize type of '#{column_name}'. It will be treated as String."
+ Type::Value.new.tap do |cast_type|
+ type_map.register_type(oid, cast_type)
+ end
+ }
+ end
+
+ def initialize_type_map(m) # :nodoc:
+ register_class_with_limit m, 'int2', OID::Integer
+ m.alias_type 'int4', 'int2'
+ m.alias_type 'int8', 'int2'
+ m.alias_type 'oid', 'int2'
+ m.register_type 'float4', OID::Float.new
+ m.alias_type 'float8', 'float4'
+ m.register_type 'text', Type::Text.new
+ register_class_with_limit m, 'varchar', Type::String
+ m.alias_type 'char', 'varchar'
+ m.alias_type 'name', 'varchar'
+ m.alias_type 'bpchar', 'varchar'
+ m.register_type 'bool', Type::Boolean.new
+ register_class_with_limit m, 'bit', OID::Bit
+ register_class_with_limit m, 'varbit', OID::BitVarying
+ m.alias_type 'timestamptz', 'timestamp'
+ m.register_type 'date', OID::Date.new
+ m.register_type 'time', OID::Time.new
+
+ m.register_type 'money', OID::Money.new
+ m.register_type 'bytea', OID::Bytea.new
+ m.register_type 'point', OID::Point.new
+ m.register_type 'hstore', OID::Hstore.new
+ m.register_type 'json', OID::Json.new
+ m.register_type 'jsonb', OID::Jsonb.new
+ m.register_type 'cidr', OID::Cidr.new
+ m.register_type 'inet', OID::Inet.new
+ m.register_type 'uuid', OID::Uuid.new
+ m.register_type 'xml', OID::Xml.new
+ m.register_type 'tsvector', OID::SpecializedString.new(:tsvector)
+ m.register_type 'macaddr', OID::SpecializedString.new(:macaddr)
+ m.register_type 'citext', OID::SpecializedString.new(:citext)
+ m.register_type 'ltree', OID::SpecializedString.new(:ltree)
+
+ # FIXME: why are we keeping these types as strings?
+ m.alias_type 'interval', 'varchar'
+ m.alias_type 'path', 'varchar'
+ m.alias_type 'line', 'varchar'
+ m.alias_type 'polygon', 'varchar'
+ m.alias_type 'circle', 'varchar'
+ m.alias_type 'lseg', 'varchar'
+ m.alias_type 'box', 'varchar'
+
+ m.register_type 'timestamp' do |_, _, sql_type|
+ precision = extract_precision(sql_type)
+ OID::DateTime.new(precision: precision)
+ end
+
+ m.register_type 'numeric' do |_, fmod, sql_type|
+ precision = extract_precision(sql_type)
+ scale = extract_scale(sql_type)
+
+ # The type for the numeric depends on the width of the field,
+ # so we'll do something special here.
+ #
+ # When dealing with decimal columns:
+ #
+ # places after decimal = fmod - 4 & 0xffff
+ # places before decimal = (fmod - 4) >> 16 & 0xffff
+ if fmod && (fmod - 4 & 0xffff).zero?
+ # FIXME: Remove this class, and the second argument to
+ # lookups on PG
+ Type::DecimalWithoutScale.new(precision: precision)
+ else
+ OID::Decimal.new(precision: precision, scale: scale)
+ end
+ end
+
+ load_additional_types(m)
+ end
+
+ def extract_limit(sql_type) # :nodoc:
+ case sql_type
+ when /^bigint/i; 8
+ when /^smallint/i; 2
+ else super
+ end
+ end
+
+ # Extracts the value from a PostgreSQL column default definition.
+ def extract_value_from_default(oid, default) # :nodoc:
+ case default
+ # Quoted types
+ when /\A[\(B]?'(.*)'::/m
+ $1.gsub(/''/, "'")
+ # Boolean types
+ when 'true', 'false'
+ default
+ # Numeric types
+ when /\A\(?(-?\d+(\.\d*)?\)?(::bigint)?)\z/
+ $1
+ # Object identifier types
+ when /\A-?\d+\z/
+ $1
+ else
+ # Anything else is blank, some user type, or some function
+ # and we can't know the value of that, so return nil.
+ nil
+ end
+ end
+
+ def extract_default_function(default_value, default) # :nodoc:
+ default if has_default_function?(default_value, default)
+ end
+
+ def has_default_function?(default_value, default) # :nodoc:
+ !default_value && (%r{\w+\(.*\)} === default)
+ end
+
+ def load_additional_types(type_map, oids = nil) # :nodoc:
+ if supports_ranges?
+ query = <<-SQL
+ SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput, r.rngsubtype, t.typtype, t.typbasetype
+ FROM pg_type as t
+ LEFT JOIN pg_range as r ON oid = rngtypid
+ SQL
+ else
+ query = <<-SQL
+ SELECT t.oid, t.typname, t.typelem, t.typdelim, t.typinput, t.typtype, t.typbasetype
+ FROM pg_type as t
+ SQL
+ end
+
+ if oids
+ query += "WHERE t.oid::integer IN (%s)" % oids.join(", ")
+ end
+
+ initializer = OID::TypeMapInitializer.new(type_map)
+ records = execute(query, 'SCHEMA')
+ initializer.run(records)
+ end
+
+ FEATURE_NOT_SUPPORTED = "0A000" #:nodoc:
+
+ def execute_and_clear(sql, name, binds)
+ result = without_prepared_statement?(binds) ? exec_no_cache(sql, name, binds) :
+ exec_cache(sql, name, binds)
+ ret = yield result
+ result.clear
+ ret
+ end
+
+ def exec_no_cache(sql, name, binds)
+ log(sql, name, binds) { @connection.async_exec(sql, []) }
+ end
+
+ def exec_cache(sql, name, binds)
+ stmt_key = prepare_statement(sql)
+ type_casted_binds = binds.map { |col, val|
+ [col, type_cast(val, col)]
+ }
+
+ log(sql, name, type_casted_binds, stmt_key) do
+ @connection.send_query_prepared(stmt_key, type_casted_binds.map { |_, val| val })
+ @connection.block
+ @connection.get_last_result
+ end
+ rescue ActiveRecord::StatementInvalid => e
+ pgerror = e.original_exception
+
+ # Get the PG code for the failure. Annoyingly, the code for
+ # prepared statements whose return value may have changed is
+ # FEATURE_NOT_SUPPORTED. Check here for more details:
+ # http://git.postgresql.org/gitweb/?p=postgresql.git;a=blob;f=src/backend/utils/cache/plancache.c#l573
+ begin
+ code = pgerror.result.result_error_field(PGresult::PG_DIAG_SQLSTATE)
+ rescue
+ raise e
+ end
+ if FEATURE_NOT_SUPPORTED == code
+ @statements.delete sql_key(sql)
+ retry
+ else
+ raise e
+ end
+ end
+
+ # Returns the statement identifier for the client side cache
+ # of statements
+ def sql_key(sql)
+ "#{schema_search_path}-#{sql}"
+ end
+
+ # Prepare the statement if it hasn't been prepared, return
+ # the statement key.
+ def prepare_statement(sql)
+ sql_key = sql_key(sql)
+ unless @statements.key? sql_key
+ nextkey = @statements.next_key
+ begin
+ @connection.prepare nextkey, sql
+ rescue => e
+ raise translate_exception_class(e, sql)
+ end
+ # Clear the queue
+ @connection.get_last_result
+ @statements[sql_key] = nextkey
+ end
+ @statements[sql_key]
+ end
+
+ # Connects to a PostgreSQL server and sets up the adapter depending on the
+ # connected server's characteristics.
+ def connect
+ @connection = PGconn.connect(@connection_parameters)
+
+ # Money type has a fixed precision of 10 in PostgreSQL 8.2 and below, and as of
+ # PostgreSQL 8.3 it has a fixed precision of 19. PostgreSQLColumn.extract_precision
+ # should know about this but can't detect it there, so deal with it here.
+ OID::Money.precision = (postgresql_version >= 80300) ? 19 : 10
+
+ configure_connection
+ rescue ::PG::Error => error
+ if error.message.include?("does not exist")
+ raise ActiveRecord::NoDatabaseError.new(error.message, error)
+ else
+ raise
+ end
+ end
+
+ # Configures the encoding, verbosity, schema search path, and time zone of the connection.
+ # This is called by #connect and should not be called manually.
+ def configure_connection
+ if @config[:encoding]
+ @connection.set_client_encoding(@config[:encoding])
+ end
+ self.client_min_messages = @config[:min_messages] || 'warning'
+ self.schema_search_path = @config[:schema_search_path] || @config[:schema_order]
+
+ # Use standard-conforming strings if available so we don't have to do the E'...' dance.
+ set_standard_conforming_strings
+
+ # If using Active Record's time zone support configure the connection to return
+ # TIMESTAMP WITH ZONE types in UTC.
+ # (SET TIME ZONE does not use an equals sign like other SET variables)
+ if ActiveRecord::Base.default_timezone == :utc
+ execute("SET time zone 'UTC'", 'SCHEMA')
+ elsif @local_tz
+ execute("SET time zone '#{@local_tz}'", 'SCHEMA')
+ end
+
+ # SET statements from :variables config hash
+ # http://www.postgresql.org/docs/8.3/static/sql-set.html
+ variables = @config[:variables] || {}
+ variables.map do |k, v|
+ if v == ':default' || v == :default
+ # Sets the value to the global or compiled-in default
+ execute("SET SESSION #{k.to_s} TO DEFAULT", 'SCHEMA')
+ elsif !v.nil?
+ execute("SET SESSION #{k.to_s} TO #{quote(v)}", 'SCHEMA')
+ end
+ end
+ end
+
+ # Returns the current ID of a table's sequence.
+ def last_insert_id(sequence_name) #:nodoc:
+ Integer(last_insert_id_value(sequence_name))
+ end
+
+ def last_insert_id_value(sequence_name)
+ last_insert_id_result(sequence_name).rows.first.first
+ end
+
+ def last_insert_id_result(sequence_name) #:nodoc:
+ exec_query("SELECT currval('#{sequence_name}')", 'SQL')
+ end
+
+ # Executes a SELECT query and returns the results, performing any data type
+ # conversions that are required to be performed here instead of in PostgreSQLColumn.
+ def select(sql, name = nil, binds = [])
+ exec_query(sql, name, binds)
+ end
+
+ # Returns the list of a table's column names, data types, and default values.
+ #
+ # The underlying query is roughly:
+ # SELECT column.name, column.type, default.value
+ # FROM column LEFT JOIN default
+ # ON column.table_id = default.table_id
+ # AND column.num = default.column_num
+ # WHERE column.table_id = get_table_id('table_name')
+ # AND column.num > 0
+ # AND NOT column.is_dropped
+ # ORDER BY column.num
+ #
+ # If the table name is not prefixed with a schema, the database will
+ # take the first match from the schema search path.
+ #
+ # Query implementation notes:
+ # - format_type includes the column size constraint, e.g. varchar(50)
+ # - ::regclass is a function that gives the id for a table name
+ def column_definitions(table_name) # :nodoc:
+ exec_query(<<-end_sql, 'SCHEMA').rows
+ SELECT a.attname, format_type(a.atttypid, a.atttypmod),
+ pg_get_expr(d.adbin, d.adrelid), a.attnotnull, a.atttypid, a.atttypmod
+ FROM pg_attribute a LEFT JOIN pg_attrdef d
+ ON a.attrelid = d.adrelid AND a.attnum = d.adnum
+ WHERE a.attrelid = '#{quote_table_name(table_name)}'::regclass
+ AND a.attnum > 0 AND NOT a.attisdropped
+ ORDER BY a.attnum
+ end_sql
+ end
+
+ def extract_table_ref_from_insert_sql(sql) # :nodoc:
+ sql[/into\s+([^\(]*).*values\s*\(/im]
+ $1.strip if $1
+ end
+
+ def create_table_definition(name, temporary, options, as = nil) # :nodoc:
+ PostgreSQL::TableDefinition.new native_database_types, name, temporary, options, as
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/schema_cache.rb b/activerecord/lib/active_record/connection_adapters/schema_cache.rb
new file mode 100644
index 0000000000..a10ce330c7
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/schema_cache.rb
@@ -0,0 +1,94 @@
+module ActiveRecord
+ module ConnectionAdapters
+ class SchemaCache
+ attr_reader :version
+ attr_accessor :connection
+
+ def initialize(conn)
+ @connection = conn
+
+ @columns = {}
+ @columns_hash = {}
+ @primary_keys = {}
+ @tables = {}
+ end
+
+ def primary_keys(table_name)
+ @primary_keys[table_name] ||= table_exists?(table_name) ? connection.primary_key(table_name) : nil
+ end
+
+ # A cached lookup for table existence.
+ def table_exists?(name)
+ prepare_tables if @tables.empty?
+ return @tables[name] if @tables.key? name
+
+ @tables[name] = connection.table_exists?(name)
+ end
+
+ # Add internal cache for table with +table_name+.
+ def add(table_name)
+ if table_exists?(table_name)
+ primary_keys(table_name)
+ columns(table_name)
+ columns_hash(table_name)
+ end
+ end
+
+ def tables(name)
+ @tables[name]
+ end
+
+ # Get the columns for a table
+ def columns(table_name)
+ @columns[table_name] ||= connection.columns(table_name)
+ end
+
+ # Get the columns for a table as a hash; the key is the column name and the
+ # value is the column object.
+ def columns_hash(table_name)
+ @columns_hash[table_name] ||= Hash[columns(table_name).map { |col|
+ [col.name, col]
+ }]
+ end
+
+ # Clears out internal caches
+ def clear!
+ @columns.clear
+ @columns_hash.clear
+ @primary_keys.clear
+ @tables.clear
+ @version = nil
+ end
+
+ def size
+ [@columns, @columns_hash, @primary_keys, @tables].map { |x|
+ x.size
+ }.inject :+
+ end
+
+ # Clear out internal caches for table with +table_name+.
+ def clear_table_cache!(table_name)
+ @columns.delete table_name
+ @columns_hash.delete table_name
+ @primary_keys.delete table_name
+ @tables.delete table_name
+ end
+
+ def marshal_dump
+ # Looking up the current migration version during initialization causes a stack overflow, so it is captured lazily here when dumping.
+ @version = ActiveRecord::Migrator.current_version
+ [@version, @columns, @columns_hash, @primary_keys, @tables]
+ end
+
+ def marshal_load(array)
+ @version, @columns, @columns_hash, @primary_keys, @tables = array
+ end
+
+ private
+
+ def prepare_tables
+ connection.tables.each { |table| @tables[table] = true }
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb b/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb
new file mode 100644
index 0000000000..faf1cdc686
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/sqlite3_adapter.rb
@@ -0,0 +1,633 @@
+require 'active_record/connection_adapters/abstract_adapter'
+require 'active_record/connection_adapters/statement_pool'
+require 'arel/visitors/bind_visitor'
+
+gem 'sqlite3', '~> 1.3.6'
+require 'sqlite3'
+
+module ActiveRecord
+ module ConnectionHandling # :nodoc:
+ # sqlite3 adapter reuses sqlite_connection.
+ def sqlite3_connection(config)
+ # Require database.
+ unless config[:database]
+ raise ArgumentError, "No database file specified. Missing argument: database"
+ end
+
+ # Allow the database path to be relative to Rails.root, but only if it is
+ # not the special path that tells SQLite to build an in-memory database.
+ if ':memory:' != config[:database]
+ config[:database] = File.expand_path(config[:database], Rails.root) if defined?(Rails.root)
+ dirname = File.dirname(config[:database])
+ Dir.mkdir(dirname) unless File.directory?(dirname)
+ end
+
+ db = SQLite3::Database.new(
+ config[:database].to_s,
+ :results_as_hash => true
+ )
+
+ db.busy_timeout(ConnectionAdapters::SQLite3Adapter.type_cast_config_to_integer(config[:timeout])) if config[:timeout]
+
+ ConnectionAdapters::SQLite3Adapter.new(db, logger, nil, config)
+ rescue Errno::ENOENT => error
+ if error.message.include?("No such file or directory")
+ raise ActiveRecord::NoDatabaseError.new(error.message, error)
+ else
+ raise
+ end
+ end
+ end
+
+ module ConnectionAdapters #:nodoc:
+ class SQLite3Binary < Type::Binary # :nodoc:
+ def cast_value(value)
+ if value.encoding != Encoding::ASCII_8BIT
+ value = value.force_encoding(Encoding::ASCII_8BIT)
+ end
+ value
+ end
+ end
+
+ class SQLite3String < Type::String # :nodoc:
+ def type_cast_for_database(value)
+ if value.is_a?(::String) && value.encoding == Encoding::ASCII_8BIT
+ value.encode(Encoding::UTF_8)
+ else
+ super
+ end
+ end
+ end
+
+ # The SQLite3 adapter works with SQLite 3.6.16 or newer
+ # with the sqlite3-ruby drivers (available as gem from https://rubygems.org/gems/sqlite3).
+ #
+ # Options:
+ #
+ # * <tt>:database</tt> - Path to the database file.
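+ #
+ # A minimal configuration sketch (illustrative path):
+ #
+ #   ActiveRecord::Base.establish_connection(
+ #     adapter:  'sqlite3',
+ #     database: 'db/development.sqlite3'
+ #   )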
+ class SQLite3Adapter < AbstractAdapter
+ include Savepoints
+
+ NATIVE_DATABASE_TYPES = {
+ primary_key: 'INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL',
+ string: { name: "varchar" },
+ text: { name: "text" },
+ integer: { name: "integer" },
+ float: { name: "float" },
+ decimal: { name: "decimal" },
+ datetime: { name: "datetime" },
+ time: { name: "time" },
+ date: { name: "date" },
+ binary: { name: "blob" },
+ boolean: { name: "boolean" }
+ }
+
+ class Version
+ include Comparable
+
+ def initialize(version_string)
+ @version = version_string.split('.').map { |v| v.to_i }
+ end
+
+ def <=>(version_string)
+ @version <=> version_string.split('.').map { |v| v.to_i }
+ end
+ end
+
+ class StatementPool < ConnectionAdapters::StatementPool
+ def initialize(connection, max)
+ super
+ @cache = Hash.new { |h,pid| h[pid] = {} }
+ end
+
+ def each(&block); cache.each(&block); end
+ def key?(key); cache.key?(key); end
+ def [](key); cache[key]; end
+ def length; cache.length; end
+
+ def []=(sql, key)
+ while @max <= cache.size
+ dealloc(cache.shift.last[:stmt])
+ end
+ cache[sql] = key
+ end
+
+ def clear
+ cache.values.each do |hash|
+ dealloc hash[:stmt]
+ end
+ cache.clear
+ end
+
+ private
+ def cache
+ @cache[$$]
+ end
+
+ def dealloc(stmt)
+ stmt.close unless stmt.closed?
+ end
+ end
+
+ def initialize(connection, logger, connection_options, config)
+ super(connection, logger)
+
+ @active = nil
+ @statements = StatementPool.new(@connection,
+ self.class.type_cast_config_to_integer(config.fetch(:statement_limit) { 1000 }))
+ @config = config
+
+ @visitor = Arel::Visitors::SQLite.new self
+
+ if self.class.type_cast_config_to_boolean(config.fetch(:prepared_statements) { true })
+ @prepared_statements = true
+ else
+ @prepared_statements = false
+ end
+ end
+
+ def adapter_name #:nodoc:
+ 'SQLite'
+ end
+
+ def supports_ddl_transactions?
+ true
+ end
+
+ def supports_savepoints?
+ true
+ end
+
+ def supports_partial_index?
+ sqlite_version >= '3.8.0'
+ end
+
+ # Returns true, since this connection adapter supports prepared statement
+ # caching.
+ def supports_statement_cache?
+ true
+ end
+
+ # Returns true, since this connection adapter supports migrations.
+ def supports_migrations? #:nodoc:
+ true
+ end
+
+ def supports_primary_key? #:nodoc:
+ true
+ end
+
+ def requires_reloading?
+ true
+ end
+
+ def supports_add_column?
+ true
+ end
+
+ def active?
+ @active != false
+ end
+
+ # Disconnects from the database if already connected. Otherwise, this
+ # method does nothing.
+ def disconnect!
+ super
+ @active = false
+ @connection.close rescue nil
+ end
+
+ # Clears the prepared statements cache.
+ def clear_cache!
+ @statements.clear
+ end
+
+ def supports_index_sort_order?
+ true
+ end
+
+ # Returns 62. SQLite supports index names up to 64 characters; the rest is
+ # used internally by Rails to perform temporary rename operations.
+ def allowed_index_name_length
+ index_name_length - 2
+ end
+
+ def native_database_types #:nodoc:
+ NATIVE_DATABASE_TYPES
+ end
+
+ # Returns the current database encoding format as a string, e.g. 'UTF-8'.
+ def encoding
+ @connection.encoding.to_s
+ end
+
+ def supports_explain?
+ true
+ end
+
+ # QUOTING ==================================================
+
+ def _quote(value) # :nodoc:
+ case value
+ when Type::Binary::Data
+ "x'#{value.hex}'"
+ else
+ super
+ end
+ end
+
+ def _type_cast(value) # :nodoc:
+ case value
+ when BigDecimal
+ value.to_f
+ else
+ super
+ end
+ end
+
+ def quote_string(s) #:nodoc:
+ @connection.class.quote(s)
+ end
+
+ def quote_table_name_for_assignment(table, attr)
+ quote_column_name(attr)
+ end
+
+ def quote_column_name(name) #:nodoc:
+ %Q("#{name.to_s.gsub('"', '""')}")
+ end
+
+ # Quote date/time values for use in SQL input. Includes microseconds
+ # if the value is a Time responding to usec.
+ def quoted_date(value) #:nodoc:
+ if value.respond_to?(:usec)
+ "#{super}.#{sprintf("%06d", value.usec)}"
+ else
+ super
+ end
+ end
+
+ # DATABASE STATEMENTS ======================================
+
+ def explain(arel, binds = [])
+ sql = "EXPLAIN QUERY PLAN #{to_sql(arel, binds)}"
+ ExplainPrettyPrinter.new.pp(exec_query(sql, 'EXPLAIN', []))
+ end
+
+ class ExplainPrettyPrinter
+ # Pretty prints the result of an EXPLAIN QUERY PLAN in a way that resembles
+ # the output of the SQLite shell:
+ #
+ # 0|0|0|SEARCH TABLE users USING INTEGER PRIMARY KEY (rowid=?) (~1 rows)
+ # 0|1|1|SCAN TABLE posts (~100000 rows)
+ #
+ def pp(result) # :nodoc:
+ result.rows.map do |row|
+ row.join('|')
+ end.join("\n") + "\n"
+ end
+ end
+
+ def exec_query(sql, name = nil, binds = [])
+ type_casted_binds = binds.map { |col, val|
+ [col, type_cast(val, col)]
+ }
+
+ log(sql, name, type_casted_binds) do
+ # Don't cache statements if they are not prepared
+ if without_prepared_statement?(binds)
+ stmt = @connection.prepare(sql)
+ begin
+ cols = stmt.columns
+ records = stmt.to_a
+ ensure
+ stmt.close
+ end
+ stmt = records
+ else
+ cache = @statements[sql] ||= {
+ :stmt => @connection.prepare(sql)
+ }
+ stmt = cache[:stmt]
+ cols = cache[:cols] ||= stmt.columns
+ stmt.reset!
+ stmt.bind_params type_casted_binds.map { |_, val| val }
+ end
+
+ ActiveRecord::Result.new(cols, stmt.to_a)
+ end
+ end
+
+ def exec_delete(sql, name = 'SQL', binds = [])
+ exec_query(sql, name, binds)
+ @connection.changes
+ end
+ alias :exec_update :exec_delete
+
+ def last_inserted_id(result)
+ @connection.last_insert_row_id
+ end
+
+ def execute(sql, name = nil) #:nodoc:
+ log(sql, name) { @connection.execute(sql) }
+ end
+
+ def update_sql(sql, name = nil) #:nodoc:
+ super
+ @connection.changes
+ end
+
+ def delete_sql(sql, name = nil) #:nodoc:
+ sql += " WHERE 1=1" unless sql =~ /WHERE/i
+ super sql, name
+ end
+
+ def insert_sql(sql, name = nil, pk = nil, id_value = nil, sequence_name = nil) #:nodoc:
+ super
+ id_value || @connection.last_insert_row_id
+ end
+ alias :create :insert_sql
+
+ def select_rows(sql, name = nil, binds = [])
+ exec_query(sql, name, binds).rows
+ end
+
+ def begin_db_transaction #:nodoc:
+ log('begin transaction',nil) { @connection.transaction }
+ end
+
+ def commit_db_transaction #:nodoc:
+ log('commit transaction',nil) { @connection.commit }
+ end
+
+ def rollback_db_transaction #:nodoc:
+ log('rollback transaction',nil) { @connection.rollback }
+ end
+
+ # SCHEMA STATEMENTS ========================================
+
+ def tables(name = nil, table_name = nil) #:nodoc:
+ sql = <<-SQL
+ SELECT name
+ FROM sqlite_master
+ WHERE type = 'table' AND NOT name = 'sqlite_sequence'
+ SQL
+ sql << " AND name = #{quote_table_name(table_name)}" if table_name
+
+ exec_query(sql, 'SCHEMA').map do |row|
+ row['name']
+ end
+ end
+
+ def table_exists?(table_name)
+ table_name && tables(nil, table_name).any?
+ end
+
+ # Returns an array of +Column+ objects for the table specified by +table_name+.
+ def columns(table_name) #:nodoc:
+ table_structure(table_name).map do |field|
+ case field["dflt_value"]
+ when /^null$/i
+ field["dflt_value"] = nil
+ when /^'(.*)'$/m
+ field["dflt_value"] = $1.gsub("''", "'")
+ when /^"(.*)"$/m
+ field["dflt_value"] = $1.gsub('""', '"')
+ end
+
+ sql_type = field['type']
+ cast_type = lookup_cast_type(sql_type)
+ new_column(field['name'], field['dflt_value'], cast_type, sql_type, field['notnull'].to_i == 0)
+ end
+ end
+
+ # Returns an array of indexes for the given table.
+ def indexes(table_name, name = nil) #:nodoc:
+ exec_query("PRAGMA index_list(#{quote_table_name(table_name)})", 'SCHEMA').map do |row|
+ sql = <<-SQL
+ SELECT sql
+ FROM sqlite_master
+ WHERE name=#{quote(row['name'])} AND type='index'
+ UNION ALL
+ SELECT sql
+ FROM sqlite_temp_master
+ WHERE name=#{quote(row['name'])} AND type='index'
+ SQL
+ index_sql = exec_query(sql).first['sql']
+ match = /\sWHERE\s+(.+)$/i.match(index_sql)
+ where = match[1] if match
+ IndexDefinition.new(
+ table_name,
+ row['name'],
+ row['unique'] != 0,
+ exec_query("PRAGMA index_info('#{row['name']}')", "SCHEMA").map { |col|
+ col['name']
+ }, nil, nil, where)
+ end
+ end
+
+ def primary_key(table_name) #:nodoc:
+ column = table_structure(table_name).find { |field|
+ field['pk'] == 1
+ }
+ column && column['name']
+ end
+
+ def remove_index!(table_name, index_name) #:nodoc:
+ exec_query "DROP INDEX #{quote_column_name(index_name)}"
+ end
+
+ # Renames a table.
+ #
+ # Example:
+ # rename_table('octopuses', 'octopi')
+ def rename_table(table_name, new_name)
+ exec_query "ALTER TABLE #{quote_table_name(table_name)} RENAME TO #{quote_table_name(new_name)}"
+ rename_table_indexes(table_name, new_name)
+ end
+
+ # See: http://www.sqlite.org/lang_altertable.html
+ # SQLite has an additional restriction on the ALTER TABLE statement.
+ def valid_alter_table_options( type, options)
+ type.to_sym != :primary_key
+ end
+
+ def add_column(table_name, column_name, type, options = {}) #:nodoc:
+ if supports_add_column? && valid_alter_table_options( type, options )
+ super(table_name, column_name, type, options)
+ else
+ alter_table(table_name) do |definition|
+ definition.column(column_name, type, options)
+ end
+ end
+ end
+
+ def remove_column(table_name, column_name, type = nil, options = {}) #:nodoc:
+ alter_table(table_name) do |definition|
+ definition.remove_column column_name
+ end
+ end
+
+ def change_column_default(table_name, column_name, default) #:nodoc:
+ alter_table(table_name) do |definition|
+ definition[column_name].default = default
+ end
+ end
+
+ def change_column_null(table_name, column_name, null, default = nil)
+ unless null || default.nil?
+ exec_query("UPDATE #{quote_table_name(table_name)} SET #{quote_column_name(column_name)}=#{quote(default)} WHERE #{quote_column_name(column_name)} IS NULL")
+ end
+ alter_table(table_name) do |definition|
+ definition[column_name].null = null
+ end
+ end
+
+ def change_column(table_name, column_name, type, options = {}) #:nodoc:
+ alter_table(table_name) do |definition|
+ include_default = options_include_default?(options)
+ definition[column_name].instance_eval do
+ self.type = type
+ self.limit = options[:limit] if options.include?(:limit)
+ self.default = options[:default] if include_default
+ self.null = options[:null] if options.include?(:null)
+ self.precision = options[:precision] if options.include?(:precision)
+ self.scale = options[:scale] if options.include?(:scale)
+ end
+ end
+ end
+
+ def rename_column(table_name, column_name, new_column_name) #:nodoc:
+ column = column_for(table_name, column_name)
+ alter_table(table_name, rename: {column.name => new_column_name.to_s})
+ rename_column_indexes(table_name, column.name, new_column_name)
+ end
+
+ protected
+
+ def initialize_type_map(m)
+ super
+ m.register_type(/binary/i, SQLite3Binary.new)
+ register_class_with_limit m, %r(char)i, SQLite3String
+ end
+
+ def select(sql, name = nil, binds = []) #:nodoc:
+ exec_query(sql, name, binds)
+ end
+
+ def table_structure(table_name)
+ structure = exec_query("PRAGMA table_info(#{quote_table_name(table_name)})", 'SCHEMA').to_hash
+ raise(ActiveRecord::StatementInvalid, "Could not find table '#{table_name}'") if structure.empty?
+ structure
+ end
+
+ def alter_table(table_name, options = {}) #:nodoc:
+ altered_table_name = "a#{table_name}"
+ caller = lambda {|definition| yield definition if block_given?}
+
+ transaction do
+ move_table(table_name, altered_table_name,
+ options.merge(:temporary => true))
+ move_table(altered_table_name, table_name, &caller)
+ end
+ end
+
+ def move_table(from, to, options = {}, &block) #:nodoc:
+ copy_table(from, to, options, &block)
+ drop_table(from)
+ end
+
+ def copy_table(from, to, options = {}) #:nodoc:
+ from_primary_key = primary_key(from)
+ options[:id] = false
+ create_table(to, options) do |definition|
+ @definition = definition
+ @definition.primary_key(from_primary_key) if from_primary_key.present?
+ columns(from).each do |column|
+ column_name = options[:rename] ?
+ (options[:rename][column.name] ||
+ options[:rename][column.name.to_sym] ||
+ column.name) : column.name
+ next if column_name == from_primary_key
+
+ @definition.column(column_name, column.type,
+ :limit => column.limit, :default => column.default,
+ :precision => column.precision, :scale => column.scale,
+ :null => column.null)
+ end
+ yield @definition if block_given?
+ end
+ copy_table_indexes(from, to, options[:rename] || {})
+ copy_table_contents(from, to,
+ @definition.columns.map {|column| column.name},
+ options[:rename] || {})
+ end
+
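+        # Recreates the indexes of +from+ on +to+. While copying into the
+        # temporary "a"-prefixed table, index names get a "t" prefix (index names
+        # are global in SQLite) which is stripped again on the way back; index
+        # columns that were dropped by the copy are skipped.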
+ def copy_table_indexes(from, to, rename = {}) #:nodoc:
+ indexes(from).each do |index|
+ name = index.name
+ if to == "a#{from}"
+ name = "t#{name}"
+ elsif from == "a#{to}"
+ name = name[1..-1]
+ end
+
+ to_column_names = columns(to).map { |c| c.name }
+ columns = index.columns.map {|c| rename[c] || c }.select do |column|
+ to_column_names.include?(column)
+ end
+
+ unless columns.empty?
+              # index names are unique across the whole database, so give the copied index a new name
+ opts = { name: name.gsub(/(^|_)(#{from})_/, "\\1#{to}_"), internal: true }
+ opts[:unique] = true if index.unique
+ add_index(to, columns, opts)
+ end
+ end
+ end
+
+ def copy_table_contents(from, to, columns, rename = {}) #:nodoc:
+ column_mappings = Hash[columns.map {|name| [name, name]}]
+ rename.each { |a| column_mappings[a.last] = a.first }
+ from_columns = columns(from).collect {|col| col.name}
+ columns = columns.find_all{|col| from_columns.include?(column_mappings[col])}
+ quoted_columns = columns.map { |col| quote_column_name(col) } * ','
+
+ quoted_to = quote_table_name(to)
+
+ raw_column_mappings = Hash[columns(from).map { |c| [c.name, c] }]
+
+ exec_query("SELECT * FROM #{quote_table_name(from)}").each do |row|
+ sql = "INSERT INTO #{quoted_to} (#{quoted_columns}) VALUES ("
+
+ column_values = columns.map do |col|
+ quote(row[column_mappings[col]], raw_column_mappings[col])
+ end
+
+ sql << column_values * ', '
+ sql << ')'
+ exec_query sql
+ end
+ end
+
+ def sqlite_version
+ @sqlite_version ||= SQLite3Adapter::Version.new(select_value('select sqlite_version(*)'))
+ end
+
+ def translate_exception(exception, message)
+ case exception.message
+ # SQLite 3.8.2 returns a newly formatted error message:
+ # UNIQUE constraint failed: *table_name*.*column_name*
+ # Older versions of SQLite return:
+ # column *column_name* is not unique
+ when /column(s)? .* (is|are) not unique/, /UNIQUE constraint failed: .*/
+ RecordNotUnique.new(message, exception)
+ else
+ super
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_adapters/statement_pool.rb b/activerecord/lib/active_record/connection_adapters/statement_pool.rb
new file mode 100644
index 0000000000..c6b1bc8b5b
--- /dev/null
+++ b/activerecord/lib/active_record/connection_adapters/statement_pool.rb
@@ -0,0 +1,40 @@
+module ActiveRecord
+ module ConnectionAdapters
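+    # Abstract interface for adapter-specific caches of prepared statements,
+    # typically keyed by SQL string. Concrete adapters subclass this and
+    # implement the Hash-like methods below. A minimal, hypothetical subclass
+    # backed by a plain Hash might look like:
+    #
+    #   class SimpleStatementPool < StatementPool
+    #     def initialize(connection, max = 1000)
+    #       super
+    #       @cache = {}
+    #     end
+    #
+    #     def each(&block); @cache.each(&block); end
+    #     def key?(key);    @cache.key?(key);    end
+    #     def [](key);      @cache[key];         end
+    #     def length;       @cache.length;       end
+    #     def clear;        @cache.clear;        end
+    #     def delete(key);  @cache.delete(key);  end
+    #
+    #     def []=(sql, stmt)
+    #       delete(@cache.keys.first) if length >= @max
+    #       @cache[sql] = stmt
+    #     end
+    #   end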
+ class StatementPool
+ include Enumerable
+
+ def initialize(connection, max = 1000)
+ @connection = connection
+ @max = max
+ end
+
+ def each
+ raise NotImplementedError
+ end
+
+ def key?(key)
+ raise NotImplementedError
+ end
+
+ def [](key)
+ raise NotImplementedError
+ end
+
+ def length
+ raise NotImplementedError
+ end
+
+ def []=(sql, key)
+ raise NotImplementedError
+ end
+
+ def clear
+ raise NotImplementedError
+ end
+
+ def delete(key)
+ raise NotImplementedError
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/connection_handling.rb b/activerecord/lib/active_record/connection_handling.rb
new file mode 100644
index 0000000000..31e7390bf7
--- /dev/null
+++ b/activerecord/lib/active_record/connection_handling.rb
@@ -0,0 +1,132 @@
+module ActiveRecord
+ module ConnectionHandling
+ RAILS_ENV = -> { Rails.env if defined?(Rails) }
+ DEFAULT_ENV = -> { RAILS_ENV.call || "default_env" }
+
+ # Establishes the connection to the database. Accepts a hash as input where
+    # the <tt>:adapter</tt> key must be specified with the name of a database adapter (in lower-case).
+    # Example for regular databases (MySQL, PostgreSQL, etc.):
+ #
+ # ActiveRecord::Base.establish_connection(
+ # adapter: "mysql",
+ # host: "localhost",
+ # username: "myuser",
+ # password: "mypass",
+ # database: "somedatabase"
+ # )
+ #
+ # Example for SQLite database:
+ #
+ # ActiveRecord::Base.establish_connection(
+ # adapter: "sqlite3",
+ # database: "path/to/dbfile"
+ # )
+ #
+ # Also accepts keys as strings (for parsing from YAML for example):
+ #
+ # ActiveRecord::Base.establish_connection(
+ # "adapter" => "sqlite3",
+ # "database" => "path/to/dbfile"
+ # )
+ #
+ # Or a URL:
+ #
+ # ActiveRecord::Base.establish_connection(
+ # "postgres://myuser:mypass@localhost/somedatabase"
+ # )
+ #
+ # In case <tt>ActiveRecord::Base.configurations</tt> is set (Rails
+ # automatically loads the contents of config/database.yml into it),
+ # a symbol can also be given as argument, representing a key in the
+ # configuration hash:
+ #
+ # ActiveRecord::Base.establish_connection(:production)
+ #
+ # The exceptions AdapterNotSpecified, AdapterNotFound and ArgumentError
+    # may be raised on an error.
+ def establish_connection(spec = nil)
+ spec ||= DEFAULT_ENV.call.to_sym
+ resolver = ConnectionAdapters::ConnectionSpecification::Resolver.new configurations
+ spec = resolver.spec(spec)
+
+ unless respond_to?(spec.adapter_method)
+ raise AdapterNotFound, "database configuration specifies nonexistent #{spec.config[:adapter]} adapter"
+ end
+
+ remove_connection
+ connection_handler.establish_connection self, spec
+ end
+
+ class MergeAndResolveDefaultUrlConfig # :nodoc:
+ def initialize(raw_configurations)
+ @raw_config = raw_configurations.dup
+ @env = DEFAULT_ENV.call.to_s
+ end
+
+ # Returns fully resolved connection hashes.
+ # Merges connection information from `ENV['DATABASE_URL']` if available.
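+      #
+      # A rough, hypothetical example (assuming the current environment is
+      # "production"; the exact resolved keys depend on the adapter):
+      #
+      #   ENV['DATABASE_URL'] = "postgres://localhost/foo"
+      #   config = { "production" => { "pool" => 5 } }
+      #
+      #   MergeAndResolveDefaultUrlConfig.new(config).resolve
+      #   # => { "production" => { "adapter" => "postgresql", "database" => "foo",
+      #   #                        "host" => "localhost", "pool" => 5 } }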
+ def resolve
+ ConnectionAdapters::ConnectionSpecification::Resolver.new(config).resolve_all
+ end
+
+ private
+ def config
+ @raw_config.dup.tap do |cfg|
+ if url = ENV['DATABASE_URL']
+ cfg[@env] ||= {}
+ cfg[@env]["url"] ||= url
+ end
+ end
+ end
+ end
+
+ # Returns the connection currently associated with the class. This can
+ # also be used to "borrow" the connection to do database work unrelated
+ # to any of the specific Active Records.
+ def connection
+ retrieve_connection
+ end
+
+ def connection_id
+ ActiveRecord::RuntimeRegistry.connection_id
+ end
+
+ def connection_id=(connection_id)
+ ActiveRecord::RuntimeRegistry.connection_id = connection_id
+ end
+
+ # Returns the configuration of the associated connection as a hash:
+ #
+ # ActiveRecord::Base.connection_config
+ # # => {pool: 5, timeout: 5000, database: "db/development.sqlite3", adapter: "sqlite3"}
+ #
+ # Please use only for reading.
+ def connection_config
+ connection_pool.spec.config
+ end
+
+ def connection_pool
+ connection_handler.retrieve_connection_pool(self) or raise ConnectionNotEstablished
+ end
+
+ def retrieve_connection
+ connection_handler.retrieve_connection(self)
+ end
+
+ # Returns +true+ if Active Record is connected.
+ def connected?
+ connection_handler.connected?(self)
+ end
+
+ def remove_connection(klass = self)
+ connection_handler.remove_connection(klass)
+ end
+
+ def clear_cache! # :nodoc:
+ connection.schema_cache.clear!
+ end
+
+ delegate :clear_active_connections!, :clear_reloadable_connections!,
+ :clear_all_connections!, :to => :connection_handler
+ end
+end
diff --git a/activerecord/lib/active_record/core.rb b/activerecord/lib/active_record/core.rb
new file mode 100644
index 0000000000..d22806fbdf
--- /dev/null
+++ b/activerecord/lib/active_record/core.rb
@@ -0,0 +1,552 @@
+require 'active_support/core_ext/hash/indifferent_access'
+require 'active_support/core_ext/object/duplicable'
+require 'thread'
+
+module ActiveRecord
+ module Core
+ extend ActiveSupport::Concern
+
+ included do
+ ##
+ # :singleton-method:
+ #
+ # Accepts a logger conforming to the interface of Log4r which is then
+ # passed on to any new database connections made and which can be
+ # retrieved on both a class and instance level by calling +logger+.
+ mattr_accessor :logger, instance_writer: false
+
+ ##
+ # Contains the database configuration - as is typically stored in config/database.yml -
+ # as a Hash.
+ #
+ # For example, the following database.yml...
+ #
+ # development:
+ # adapter: sqlite3
+ # database: db/development.sqlite3
+ #
+ # production:
+ # adapter: sqlite3
+ # database: db/production.sqlite3
+ #
+      #   ...would result in ActiveRecord::Base.configurations looking like this:
+ #
+ # {
+ # 'development' => {
+ # 'adapter' => 'sqlite3',
+ # 'database' => 'db/development.sqlite3'
+ # },
+ # 'production' => {
+ # 'adapter' => 'sqlite3',
+ # 'database' => 'db/production.sqlite3'
+ # }
+ # }
+ def self.configurations=(config)
+ @@configurations = ActiveRecord::ConnectionHandling::MergeAndResolveDefaultUrlConfig.new(config).resolve
+ end
+ self.configurations = {}
+
+ # Returns fully resolved configurations hash
+ def self.configurations
+ @@configurations
+ end
+
+ ##
+ # :singleton-method:
+ # Determines whether to use Time.utc (using :utc) or Time.local (using :local) when pulling
+ # dates and times from the database. This is set to :utc by default.
+ mattr_accessor :default_timezone, instance_writer: false
+ self.default_timezone = :utc
+
+ ##
+ # :singleton-method:
+ # Specifies the format to use when dumping the database schema with Rails'
+ # Rakefile. If :sql, the schema is dumped as (potentially database-
+ # specific) SQL statements. If :ruby, the schema is dumped as an
+ # ActiveRecord::Schema file which can be loaded into any database that
+ # supports migrations. Use :ruby if you want to have different database
+ # adapters for, e.g., your development and test environments.
+ mattr_accessor :schema_format, instance_writer: false
+ self.schema_format = :ruby
+
+ ##
+ # :singleton-method:
+ # Specify whether or not to use timestamps for migration versions
+ mattr_accessor :timestamped_migrations, instance_writer: false
+ self.timestamped_migrations = true
+
+ ##
+ # :singleton-method:
+ # Specify whether schema dump should happen at the end of the
+ # db:migrate rake task. This is true by default, which is useful for the
+ # development environment. This should ideally be false in the production
+ # environment where dumping schema is rarely needed.
+ mattr_accessor :dump_schema_after_migration, instance_writer: false
+ self.dump_schema_after_migration = true
+
+ # :nodoc:
+ mattr_accessor :maintain_test_schema, instance_accessor: false
+
+ def self.disable_implicit_join_references=(value)
+        ActiveSupport::Deprecation.warn("Implicit join references were removed with Rails 4.1. " \
+ "Make sure to remove this configuration because it does nothing.")
+ end
+
+ class_attribute :default_connection_handler, instance_writer: false
+ class_attribute :find_by_statement_cache
+
+ def self.connection_handler
+ ActiveRecord::RuntimeRegistry.connection_handler || default_connection_handler
+ end
+
+ def self.connection_handler=(handler)
+ ActiveRecord::RuntimeRegistry.connection_handler = handler
+ end
+
+ self.default_connection_handler = ConnectionAdapters::ConnectionHandler.new
+ end
+
+ module ClassMethods
+ def allocate
+ define_attribute_methods
+ super
+ end
+
+ def initialize_find_by_cache
+ self.find_by_statement_cache = {}.extend(Mutex_m)
+ end
+
+ def inherited(child_class)
+ child_class.initialize_find_by_cache
+ super
+ end
+
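+      # Cached lookup by primary key. For the common case of a single, plain id
+      # on a model without default scopes or a single-table inheritance column,
+      # the query is compiled once into a StatementCache and reused; everything
+      # else falls back to the generic finder via +super+. A hypothetical example:
+      #
+      #   Person.find(1)     # first call builds and caches the prepared statement
+      #   Person.find(42)    # later calls only bind a new id
+      #   Person.find(1, 2)  # multiple ids fall back to the generic finder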
+ def find(*ids)
+ # We don't have cache keys for this stuff yet
+ return super unless ids.length == 1
+ return super if block_given? ||
+ primary_key.nil? ||
+ default_scopes.any? ||
+ columns_hash.include?(inheritance_column) ||
+ ids.first.kind_of?(Array)
+
+ id = ids.first
+ if ActiveRecord::Base === id
+ id = id.id
+          ActiveSupport::Deprecation.warn "You are passing an instance of ActiveRecord::Base to `find`. " \
+ "Please pass the id of the object by calling `.id`"
+ end
+ key = primary_key
+
+ s = find_by_statement_cache[key] || find_by_statement_cache.synchronize {
+ find_by_statement_cache[key] ||= StatementCache.create(connection) { |params|
+ where(key => params.bind).limit(1)
+ }
+ }
+ record = s.execute([id], self, connection).first
+ unless record
+ raise RecordNotFound, "Couldn't find #{name} with '#{primary_key}'=#{id}"
+ end
+ record
+ end
+
+ def find_by(*args)
+ return super if current_scope || args.length > 1 || reflect_on_all_aggregations.any?
+
+ hash = args.first
+
+ return super if hash.values.any? { |v|
+ v.nil? || Array === v || Hash === v
+ }
+
+ key = hash.keys
+
+ klass = self
+ s = find_by_statement_cache[key] || find_by_statement_cache.synchronize {
+ find_by_statement_cache[key] ||= StatementCache.create(connection) { |params|
+ wheres = key.each_with_object({}) { |param,o|
+ o[param] = params.bind
+ }
+ klass.where(wheres).limit(1)
+ }
+ }
+ begin
+ s.execute(hash.values, self, connection).first
+ rescue TypeError => e
+ raise ActiveRecord::StatementInvalid.new(e.message, e)
+ end
+ end
+
+ def initialize_generated_modules
+ super
+
+ generated_association_methods
+ end
+
+ def generated_association_methods
+ @generated_association_methods ||= begin
+ mod = const_set(:GeneratedAssociationMethods, Module.new)
+ include mod
+ mod
+ end
+ end
+
+ # Returns a string like 'Post(id:integer, title:string, body:text)'
+ def inspect
+ if self == Base
+ super
+ elsif abstract_class?
+ "#{super}(abstract)"
+ elsif !connected?
+ "#{super} (call '#{super}.connection' to establish a connection)"
+ elsif table_exists?
+ attr_list = columns.map { |c| "#{c.name}: #{c.type}" } * ', '
+ "#{super}(#{attr_list})"
+ else
+ "#{super}(Table doesn't exist)"
+ end
+ end
+
+ # Overwrite the default class equality method to provide support for association proxies.
+ def ===(object)
+ object.is_a?(self)
+ end
+
+ # Returns an instance of <tt>Arel::Table</tt> loaded with the current table name.
+ #
+ # class Post < ActiveRecord::Base
+ # scope :published_and_commented, -> { published.and(self.arel_table[:comments_count].gt(0)) }
+ # end
+ def arel_table # :nodoc:
+ @arel_table ||= Arel::Table.new(table_name, arel_engine)
+ end
+
+ # Returns the Arel engine.
+ def arel_engine # :nodoc:
+ @arel_engine ||=
+ if Base == self || connection_handler.retrieve_connection_pool(self)
+ self
+ else
+ superclass.arel_engine
+ end
+ end
+
+ private
+
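+      # Builds the base Relation for this class; single-table inheritance
+      # subclasses additionally get their type condition merged in.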
+ def relation #:nodoc:
+ relation = Relation.create(self, arel_table)
+
+ if finder_needs_type_condition?
+ relation.where(type_condition).create_with(inheritance_column.to_sym => sti_name)
+ else
+ relation
+ end
+ end
+ end
+
+ # New objects can be instantiated as either empty (pass no construction parameter) or pre-set with
+ # attributes but not yet saved (pass a hash with key names matching the associated table column names).
+ # In both instances, valid attribute keys are determined by the column names of the associated table --
+ # hence you can't have attributes that aren't part of the table columns.
+ #
+ # ==== Example:
+ # # Instantiates a single new object
+ # User.new(first_name: 'Jamie')
+ def initialize(attributes = nil, options = {})
+ @attributes = self.class.default_attributes.dup
+
+ init_internals
+ initialize_internals_callback
+
+ self.class.define_attribute_methods
+ # +options+ argument is only needed to make protected_attributes gem easier to hook.
+      # Remove it when we drop support for this gem.
+ init_attributes(attributes, options) if attributes
+
+ yield self if block_given?
+ run_callbacks :initialize unless _initialize_callbacks.empty?
+ end
+
+ # Initialize an empty model object from +coder+. +coder+ must contain
+ # the attributes necessary for initializing an empty model object. For
+ # example:
+ #
+ # class Post < ActiveRecord::Base
+ # end
+ #
+ # post = Post.allocate
+ # post.init_with('attributes' => { 'title' => 'hello world' })
+ # post.title # => 'hello world'
+ def init_with(coder)
+ @attributes = coder['attributes']
+
+ init_internals
+
+ @new_record = coder['new_record']
+
+ self.class.define_attribute_methods
+
+ run_callbacks :find
+ run_callbacks :initialize
+
+ self
+ end
+
+ ##
+ # :method: clone
+ # Identical to Ruby's clone method. This is a "shallow" copy. Be warned that your attributes are not copied.
+ # That means that modifying attributes of the clone will modify the original, since they will both point to the
+ # same attributes hash. If you need a copy of your attributes hash, please use the #dup method.
+ #
+ # user = User.first
+ # new_user = user.clone
+ # user.name # => "Bob"
+ # new_user.name = "Joe"
+ # user.name # => "Joe"
+ #
+ # user.object_id == new_user.object_id # => false
+ # user.name.object_id == new_user.name.object_id # => true
+ #
+ # user.name.object_id == user.dup.name.object_id # => false
+
+ ##
+ # :method: dup
+ # Duped objects have no id assigned and are treated as new records. Note
+ # that this is a "shallow" copy as it copies the object's attributes
+ # only, not its associations. The extent of a "deep" copy is application
+ # specific and is therefore left to the application to implement according
+ # to its need.
+ # The dup method does not preserve the timestamps (created|updated)_(at|on).
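+    #
+    # For example (model name hypothetical):
+    #
+    #   user = User.first
+    #   duplicate = user.dup
+    #   duplicate.id          # => nil
+    #   duplicate.new_record? # => true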
+
+ ##
+ def initialize_dup(other) # :nodoc:
+ @attributes = @attributes.dup
+ @attributes.reset(self.class.primary_key)
+
+ run_callbacks(:initialize) unless _initialize_callbacks.empty?
+
+ @aggregation_cache = {}
+ @association_cache = {}
+
+ @new_record = true
+ @destroyed = false
+
+ super
+ end
+
+ # Populate +coder+ with attributes about this record that should be
+ # serialized. The structure of +coder+ defined in this method is
+ # guaranteed to match the structure of +coder+ passed to the +init_with+
+ # method.
+ #
+ # Example:
+ #
+ # class Post < ActiveRecord::Base
+ # end
+ # coder = {}
+ # Post.new.encode_with(coder)
+ # coder # => {"attributes" => {"id" => nil, ... }}
+ def encode_with(coder)
+ # FIXME: Remove this when we better serialize attributes
+ coder['raw_attributes'] = attributes_before_type_cast
+ coder['attributes'] = @attributes
+ coder['new_record'] = new_record?
+ end
+
+ # Returns true if +comparison_object+ is the same exact object, or +comparison_object+
+ # is of the same type and +self+ has an ID and it is equal to +comparison_object.id+.
+ #
+ # Note that new records are different from any other record by definition, unless the
+ # other record is the receiver itself. Besides, if you fetch existing records with
+    # +select+ and leave the ID out, you're on your own; this predicate will return false.
+ #
+ # Note also that destroying a record preserves its ID in the model instance, so deleted
+ # models are still comparable.
+ def ==(comparison_object)
+ super ||
+ comparison_object.instance_of?(self.class) &&
+ !id.nil? &&
+ comparison_object.id == id
+ end
+ alias :eql? :==
+
+ # Delegates to id in order to allow two records of the same type and id to work with something like:
+ # [ Person.find(1), Person.find(2), Person.find(3) ] & [ Person.find(1), Person.find(4) ] # => [ Person.find(1) ]
+ def hash
+ if id
+ id.hash
+ else
+ super
+ end
+ end
+
+ # Clone and freeze the attributes hash such that associations are still
+ # accessible, even on destroyed records, but cloned models will not be
+ # frozen.
+ def freeze
+ @attributes = @attributes.clone.freeze
+ self
+ end
+
+ # Returns +true+ if the attributes hash has been frozen.
+ def frozen?
+ @attributes.frozen?
+ end
+
+ # Allows sort on objects
+ def <=>(other_object)
+ if other_object.is_a?(self.class)
+ self.to_key <=> other_object.to_key
+ else
+ super
+ end
+ end
+
+ # Returns +true+ if the record is read only. Records loaded through joins with piggy-back
+ # attributes will be marked as read only since they cannot be saved.
+ def readonly?
+ @readonly
+ end
+
+ # Marks this record as read only.
+ def readonly!
+ @readonly = true
+ end
+
+ def connection_handler
+ self.class.connection_handler
+ end
+
+ # Returns the contents of the record as a nicely formatted string.
+ def inspect
+      # We check defined?(@attributes) so as not to issue warnings if the object is
+ # allocated but not initialized.
+ inspection = if defined?(@attributes) && @attributes
+ self.class.column_names.collect { |name|
+ if has_attribute?(name)
+ "#{name}: #{attribute_for_inspect(name)}"
+ end
+ }.compact.join(", ")
+ else
+ "not initialized"
+ end
+ "#<#{self.class} #{inspection}>"
+ end
+
+ # Takes a PP and prettily prints this record to it, allowing you to get a nice result from `pp record`
+ # when pp is required.
+ def pretty_print(pp)
+ pp.object_address_group(self) do
+ if defined?(@attributes) && @attributes
+ column_names = self.class.column_names.select { |name| has_attribute?(name) || new_record? }
+ pp.seplist(column_names, proc { pp.text ',' }) do |column_name|
+ column_value = read_attribute(column_name)
+ pp.breakable ' '
+ pp.group(1) do
+ pp.text column_name
+ pp.text ':'
+ pp.breakable
+ pp.pp column_value
+ end
+ end
+ else
+ pp.breakable ' '
+ pp.text 'not initialized'
+ end
+ end
+ end
+
+ # Returns a hash of the given methods with their names as keys and returned values as values.
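+    #
+    # For example (model and attributes hypothetical):
+    #
+    #   topic.slice(:title, :author_name)
+    #   # => { "title" => "Budget", "author_name" => "Jason" }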
+ def slice(*methods)
+ Hash[methods.map! { |method| [method, public_send(method)] }].with_indifferent_access
+ end
+
+ def set_transaction_state(state) # :nodoc:
+ @transaction_state = state
+ end
+
+ def has_transactional_callbacks? # :nodoc:
+ !_rollback_callbacks.empty? || !_commit_callbacks.empty? || !_create_callbacks.empty?
+ end
+
+ private
+
+    # Updates the attributes on this particular ActiveRecord object so that,
+    # if it is associated with a transaction, the state of the AR object is
+    # updated to reflect the current state of the transaction.
+ #
+ # The @transaction_state variable stores the states of the associated
+    # transaction. This relies on the fact that a transaction can only be
+    # finalized by a single rollback or commit (otherwise a list of states would be required).
+ # Each AR object inside of a transaction carries that transaction's
+ # TransactionState.
+ #
+ # This method checks to see if the ActiveRecord object's state reflects
+ # the TransactionState, and rolls back or commits the ActiveRecord object
+ # as appropriate.
+ #
+ # Since ActiveRecord objects can be inside multiple transactions, this
+    # method recursively goes through the parents of the TransactionState and
+    # checks whether the ActiveRecord object reflects the state of each one.
+ def sync_with_transaction_state
+ update_attributes_from_transaction_state(@transaction_state, 0)
+ end
+
+ def update_attributes_from_transaction_state(transaction_state, depth)
+ if transaction_state && transaction_state.finalized? && !has_transactional_callbacks?
+ unless @reflects_state[depth]
+ restore_transaction_record_state if transaction_state.rolledback?
+ clear_transaction_record_state
+ @reflects_state[depth] = true
+ end
+
+ if transaction_state.parent && !@reflects_state[depth+1]
+ update_attributes_from_transaction_state(transaction_state.parent, depth+1)
+ end
+ end
+ end
+
+ # Under Ruby 1.9, Array#flatten will call #to_ary (recursively) on each of the elements
+ # of the array, and then rescues from the possible NoMethodError. If those elements are
+ # ActiveRecord::Base's, then this triggers the various method_missing's that we have,
+ # which significantly impacts upon performance.
+ #
+ # So we can avoid the method_missing hit by explicitly defining #to_ary as nil here.
+ #
+ # See also http://tenderlovemaking.com/2011/06/28/til-its-ok-to-return-nil-from-to_ary.html
+ def to_ary # :nodoc:
+ nil
+ end
+
+ def init_internals
+ @attributes.ensure_initialized(self.class.primary_key)
+
+ @aggregation_cache = {}
+ @association_cache = {}
+ @readonly = false
+ @destroyed = false
+ @marked_for_destruction = false
+ @destroyed_by_association = nil
+ @new_record = true
+ @txn = nil
+ @_start_transaction_state = {}
+ @transaction_state = nil
+ @reflects_state = [false]
+ end
+
+ def initialize_internals_callback
+ end
+
+ # This method is needed to make protected_attributes gem easier to hook.
+    # Remove it when we drop support for this gem.
+ def init_attributes(attributes, options)
+ assign_attributes(attributes)
+ end
+
+ def thaw
+ if frozen?
+ @attributes = @attributes.dup
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/counter_cache.rb b/activerecord/lib/active_record/counter_cache.rb
new file mode 100644
index 0000000000..f0b6afc4b4
--- /dev/null
+++ b/activerecord/lib/active_record/counter_cache.rb
@@ -0,0 +1,175 @@
+module ActiveRecord
+ # = Active Record Counter Cache
+ module CounterCache
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # Resets one or more counter caches to their correct value using an SQL
+ # count query. This is useful when adding new counter caches, or if the
+ # counter has been corrupted or modified directly by SQL.
+ #
+ # ==== Parameters
+ #
+ # * +id+ - The id of the object you wish to reset a counter on.
+ # * +counters+ - One or more association counters to reset. Association name or counter name can be given.
+ #
+ # ==== Examples
+ #
+      # For the Post with id of 1, reset the comments_count
+ # Post.reset_counters(1, :comments)
+ def reset_counters(id, *counters)
+ object = find(id)
+ counters.each do |counter_association|
+ has_many_association = _reflect_on_association(counter_association.to_sym)
+ unless has_many_association
+ has_many = reflect_on_all_associations(:has_many)
+ has_many_association = has_many.find { |association| association.counter_cache_column && association.counter_cache_column.to_sym == counter_association.to_sym }
+ counter_association = has_many_association.plural_name if has_many_association
+ end
+ raise ArgumentError, "'#{self.name}' has no association called '#{counter_association}'" unless has_many_association
+
+ if has_many_association.is_a? ActiveRecord::Reflection::ThroughReflection
+ has_many_association = has_many_association.through_reflection
+ end
+
+ foreign_key = has_many_association.foreign_key.to_s
+ child_class = has_many_association.klass
+ reflection = child_class._reflections.values.find { |e| e.belongs_to? && e.foreign_key.to_s == foreign_key && e.options[:counter_cache].present? }
+ counter_name = reflection.counter_cache_column
+
+ stmt = unscoped.where(arel_table[primary_key].eq(object.id)).arel.compile_update({
+ arel_table[counter_name] => object.send(counter_association).count(:all)
+ }, primary_key)
+ connection.update stmt
+ end
+ return true
+ end
+
+ # A generic "counter updater" implementation, intended primarily to be
+ # used by increment_counter and decrement_counter, but which may also
+ # be useful on its own. It simply does a direct SQL update for the record
+ # with the given ID, altering the given hash of counters by the amount
+ # given by the corresponding value:
+ #
+ # ==== Parameters
+ #
+ # * +id+ - The id of the object you wish to update a counter on or an Array of ids.
+ # * +counters+ - A Hash containing the names of the fields
+ # to update as keys and the amount to update the field by as values.
+ #
+ # ==== Examples
+ #
+ # # For the Post with id of 5, decrement the comment_count by 1, and
+ # # increment the action_count by 1
+ # Post.update_counters 5, comment_count: -1, action_count: 1
+ # # Executes the following SQL:
+ # # UPDATE posts
+ # # SET comment_count = COALESCE(comment_count, 0) - 1,
+ # # action_count = COALESCE(action_count, 0) + 1
+ # # WHERE id = 5
+ #
+ # # For the Posts with id of 10 and 15, increment the comment_count by 1
+ # Post.update_counters [10, 15], comment_count: 1
+ # # Executes the following SQL:
+ # # UPDATE posts
+ # # SET comment_count = COALESCE(comment_count, 0) + 1
+ # # WHERE id IN (10, 15)
+ def update_counters(id, counters)
+ updates = counters.map do |counter_name, value|
+ operator = value < 0 ? '-' : '+'
+ quoted_column = connection.quote_column_name(counter_name)
+ "#{quoted_column} = COALESCE(#{quoted_column}, 0) #{operator} #{value.abs}"
+ end
+
+ unscoped.where(primary_key => id).update_all updates.join(', ')
+ end
+
+ # Increment a numeric field by one, via a direct SQL update.
+ #
+ # This method is used primarily for maintaining counter_cache columns that are
+ # used to store aggregate values. For example, a DiscussionBoard may cache
+ # posts_count and comments_count to avoid running an SQL query to calculate the
+ # number of posts and comments there are, each time it is displayed.
+ #
+ # ==== Parameters
+ #
+ # * +counter_name+ - The name of the field that should be incremented.
+ # * +id+ - The id of the object that should be incremented or an Array of ids.
+ #
+ # ==== Examples
+ #
+ # # Increment the post_count column for the record with an id of 5
+ # DiscussionBoard.increment_counter(:post_count, 5)
+ def increment_counter(counter_name, id)
+ update_counters(id, counter_name => 1)
+ end
+
+ # Decrement a numeric field by one, via a direct SQL update.
+ #
+ # This works the same as increment_counter but reduces the column value by
+ # 1 instead of increasing it.
+ #
+ # ==== Parameters
+ #
+ # * +counter_name+ - The name of the field that should be decremented.
+ # * +id+ - The id of the object that should be decremented or an Array of ids.
+ #
+ # ==== Examples
+ #
+ # # Decrement the post_count column for the record with an id of 5
+ # DiscussionBoard.decrement_counter(:post_count, 5)
+ def decrement_counter(counter_name, id)
+ update_counters(id, counter_name => -1)
+ end
+ end
+
+ protected
+
+ def actually_destroyed?
+ @_actually_destroyed
+ end
+
+ def clear_destroy_state
+ @_actually_destroyed = nil
+ end
+
+ private
+
+ def _create_record(*)
+ id = super
+
+ each_counter_cached_associations do |association|
+ if send(association.reflection.name)
+ association.increment_counters
+ @_after_create_counter_called = true
+ end
+ end
+
+ id
+ end
+
+ def destroy_row
+ affected_rows = super
+
+ if affected_rows > 0
+ each_counter_cached_associations do |association|
+ foreign_key = association.reflection.foreign_key.to_sym
+ unless destroyed_by_association && destroyed_by_association.foreign_key.to_sym == foreign_key
+ if send(association.reflection.name)
+ association.decrement_counters
+ end
+ end
+ end
+ end
+
+ affected_rows
+ end
+
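+    # Yields the belongs_to association object for every reflection that
+    # maintains a counter cache column on the associated record.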
+ def each_counter_cached_associations
+ _reflections.each do |name, reflection|
+ yield association(name) if reflection.belongs_to? && reflection.counter_cache_column
+ end
+ end
+
+ end
+end
diff --git a/activerecord/lib/active_record/dynamic_matchers.rb b/activerecord/lib/active_record/dynamic_matchers.rb
new file mode 100644
index 0000000000..e94b74063e
--- /dev/null
+++ b/activerecord/lib/active_record/dynamic_matchers.rb
@@ -0,0 +1,140 @@
+module ActiveRecord
+ module DynamicMatchers #:nodoc:
+    # The code in this file seems to have a lot of indirection, but the indirection
+ # is there to provide extension points for the activerecord-deprecated_finders
+ # gem. When we stop supporting activerecord-deprecated_finders (from Rails 5),
+ # then we can remove the indirection.
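+    #
+    # As an illustration (model and columns hypothetical), the first call to
+    # +Person.find_by_name_and_email("x", "y")+ hits +method_missing+, which asks
+    # +Method.match+ for a matcher class (+FindBy+ here) and defines roughly:
+    #
+    #   def self.find_by_name_and_email(_name, _email)
+    #     find_by(:name => _name, :email => _email)
+    #   end
+    #
+    # Subsequent calls dispatch straight to the generated method.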
+
+ def respond_to?(name, include_private = false)
+ if self == Base
+ super
+ else
+ match = Method.match(self, name)
+ match && match.valid? || super
+ end
+ end
+
+ private
+
+ def method_missing(name, *arguments, &block)
+ match = Method.match(self, name)
+
+ if match && match.valid?
+ match.define
+ send(name, *arguments, &block)
+ else
+ super
+ end
+ end
+
+ class Method
+ @matchers = []
+
+ class << self
+ attr_reader :matchers
+
+ def match(model, name)
+ klass = matchers.find { |k| name =~ k.pattern }
+ klass.new(model, name) if klass
+ end
+
+ def pattern
+ @pattern ||= /\A#{prefix}_([_a-zA-Z]\w*)#{suffix}\Z/
+ end
+
+ def prefix
+ raise NotImplementedError
+ end
+
+ def suffix
+ ''
+ end
+ end
+
+ attr_reader :model, :name, :attribute_names
+
+ def initialize(model, name)
+ @model = model
+ @name = name.to_s
+ @attribute_names = @name.match(self.class.pattern)[1].split('_and_')
+ @attribute_names.map! { |n| @model.attribute_aliases[n] || n }
+ end
+
+ def valid?
+ attribute_names.all? { |name| model.columns_hash[name] || model.reflect_on_aggregation(name.to_sym) }
+ end
+
+ def define
+ model.class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def self.#{name}(#{signature})
+ #{body}
+ end
+ CODE
+ end
+
+ def body
+ raise NotImplementedError
+ end
+ end
+
+ module Finder
+ # Extended in activerecord-deprecated_finders
+ def body
+ result
+ end
+
+ # Extended in activerecord-deprecated_finders
+ def result
+ "#{finder}(#{attributes_hash})"
+ end
+
+      # The parameters in the signature may be reserved Ruby words, so in order
+      # to prevent errors, we start each param name with `_`.
+ #
+ # Extended in activerecord-deprecated_finders
+ def signature
+ attribute_names.map { |name| "_#{name}" }.join(', ')
+ end
+
+      # Given that the parameters start with `_`, the finder needs to use the
+      # same parameter names.
+ def attributes_hash
+ "{" + attribute_names.map { |name| ":#{name} => _#{name}" }.join(',') + "}"
+ end
+
+ def finder
+ raise NotImplementedError
+ end
+ end
+
+ class FindBy < Method
+ Method.matchers << self
+ include Finder
+
+ def self.prefix
+ "find_by"
+ end
+
+ def finder
+ "find_by"
+ end
+ end
+
+ class FindByBang < Method
+ Method.matchers << self
+ include Finder
+
+ def self.prefix
+ "find_by"
+ end
+
+ def self.suffix
+ "!"
+ end
+
+ def finder
+ "find_by!"
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/enum.rb b/activerecord/lib/active_record/enum.rb
new file mode 100644
index 0000000000..f0ee433d0b
--- /dev/null
+++ b/activerecord/lib/active_record/enum.rb
@@ -0,0 +1,198 @@
+require 'active_support/core_ext/object/deep_dup'
+
+module ActiveRecord
+ # Declare an enum attribute where the values map to integers in the database,
+ # but can be queried by name. Example:
+ #
+ # class Conversation < ActiveRecord::Base
+ # enum status: [ :active, :archived ]
+ # end
+ #
+ # # conversation.update! status: 0
+ # conversation.active!
+ # conversation.active? # => true
+ # conversation.status # => "active"
+ #
+ # # conversation.update! status: 1
+ # conversation.archived!
+ # conversation.archived? # => true
+ # conversation.status # => "archived"
+ #
+ # # conversation.update! status: 1
+ # conversation.status = "archived"
+ #
+ # # conversation.update! status: nil
+ # conversation.status = nil
+ # conversation.status.nil? # => true
+ # conversation.status # => nil
+ #
+ # Scopes based on the allowed values of the enum field will be provided
+ # as well. With the above example:
+ #
+ # Conversation.active
+ # Conversation.archived
+ #
+ # You can set the default value from the database declaration, like:
+ #
+ # create_table :conversations do |t|
+ # t.column :status, :integer, default: 0
+ # end
+ #
+ # Good practice is to let the first declared status be the default.
+ #
+ # Finally, it's also possible to explicitly map the relation between attribute and
+ # database integer with a +Hash+:
+ #
+ # class Conversation < ActiveRecord::Base
+ # enum status: { active: 0, archived: 1 }
+ # end
+ #
+ # Note that when an +Array+ is used, the implicit mapping from the values to database
+ # integers is derived from the order the values appear in the array. In the example,
+ # <tt>:active</tt> is mapped to +0+ as it's the first element, and <tt>:archived</tt>
+ # is mapped to +1+. In general, the +i+-th element is mapped to <tt>i-1</tt> in the
+ # database.
+ #
+ # Therefore, once a value is added to the enum array, its position in the array must
+ # be maintained, and new values should only be added to the end of the array. To
+ # remove unused values, the explicit +Hash+ syntax should be used.
+ #
+ # In rare circumstances you might need to access the mapping directly.
+ # The mappings are exposed through a class method with the pluralized attribute
+ # name:
+ #
+ # Conversation.statuses # => { "active" => 0, "archived" => 1 }
+ #
+ # Use that class method when you need to know the ordinal value of an enum:
+ #
+ # Conversation.where("status <> ?", Conversation.statuses[:archived])
+ #
+ # Where conditions on an enum attribute must use the ordinal value of an enum.
+ module Enum
+ def self.extended(base) # :nodoc:
+ base.class_attribute(:defined_enums)
+ base.defined_enums = {}
+ end
+
+ def inherited(base) # :nodoc:
+ base.defined_enums = defined_enums.deep_dup
+ super
+ end
+
+ def enum(definitions)
+ klass = self
+ definitions.each do |name, values|
+ # statuses = { }
+ enum_values = ActiveSupport::HashWithIndifferentAccess.new
+ name = name.to_sym
+
+ # def self.statuses statuses end
+ detect_enum_conflict!(name, name.to_s.pluralize, true)
+ klass.singleton_class.send(:define_method, name.to_s.pluralize) { enum_values }
+
+ _enum_methods_module.module_eval do
+ # def status=(value) self[:status] = statuses[value] end
+ klass.send(:detect_enum_conflict!, name, "#{name}=")
+ define_method("#{name}=") { |value|
+ if enum_values.has_key?(value) || value.blank?
+ self[name] = enum_values[value]
+ elsif enum_values.has_value?(value)
+              # Assigning a value directly is not an end-user feature, hence it's not documented.
+ # This is used internally to make building objects from the generated scopes work
+ # as expected, i.e. +Conversation.archived.build.archived?+ should be true.
+ self[name] = value
+ else
+ raise ArgumentError, "'#{value}' is not a valid #{name}"
+ end
+ }
+
+ # def status() statuses.key self[:status] end
+ klass.send(:detect_enum_conflict!, name, name)
+ define_method(name) { enum_values.key self[name] }
+
+ # def status_before_type_cast() statuses.key self[:status] end
+ klass.send(:detect_enum_conflict!, name, "#{name}_before_type_cast")
+ define_method("#{name}_before_type_cast") { enum_values.key self[name] }
+
+ pairs = values.respond_to?(:each_pair) ? values.each_pair : values.each_with_index
+ pairs.each do |value, i|
+ enum_values[value] = i
+
+ # def active?() status == 0 end
+ klass.send(:detect_enum_conflict!, name, "#{value}?")
+ define_method("#{value}?") { self[name] == i }
+
+ # def active!() update! status: :active end
+ klass.send(:detect_enum_conflict!, name, "#{value}!")
+ define_method("#{value}!") { update! name => value }
+
+ # scope :active, -> { where status: 0 }
+ klass.send(:detect_enum_conflict!, name, value, true)
+ klass.scope value, -> { klass.where name => i }
+ end
+ end
+ defined_enums[name.to_s] = enum_values
+ end
+ end
+
+ private
+ def _enum_methods_module
+ @_enum_methods_module ||= begin
+ mod = Module.new do
+ private
+ def save_changed_attribute(attr_name, old)
+ if (mapping = self.class.defined_enums[attr_name.to_s])
+ value = read_attribute(attr_name)
+ if attribute_changed?(attr_name)
+ if mapping[old] == value
+ changed_attributes.delete(attr_name)
+ end
+ else
+ if old != value
+ changed_attributes[attr_name] = mapping.key old
+ end
+ end
+ else
+ super
+ end
+ end
+ end
+ include mod
+ mod
+ end
+ end
+
+ ENUM_CONFLICT_MESSAGE = \
+ "You tried to define an enum named \"%{enum}\" on the model \"%{klass}\", but " \
+ "this will generate a %{type} method \"%{method}\", which is already defined " \
+ "by %{source}."
+
+ def detect_enum_conflict!(enum_name, method_name, klass_method = false)
+ if klass_method && dangerous_class_method?(method_name)
+ raise ArgumentError, ENUM_CONFLICT_MESSAGE % {
+ enum: enum_name,
+ klass: self.name,
+ type: 'class',
+ method: method_name,
+ source: 'Active Record'
+ }
+ elsif !klass_method && dangerous_attribute_method?(method_name)
+ raise ArgumentError, ENUM_CONFLICT_MESSAGE % {
+ enum: enum_name,
+ klass: self.name,
+ type: 'instance',
+ method: method_name,
+ source: 'Active Record'
+ }
+ elsif !klass_method && method_defined_within?(method_name, _enum_methods_module, Module)
+ raise ArgumentError, ENUM_CONFLICT_MESSAGE % {
+ enum: enum_name,
+ klass: self.name,
+ type: 'instance',
+ method: method_name,
+ source: 'another enum'
+ }
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/errors.rb b/activerecord/lib/active_record/errors.rb
new file mode 100644
index 0000000000..52c70977ef
--- /dev/null
+++ b/activerecord/lib/active_record/errors.rb
@@ -0,0 +1,231 @@
+module ActiveRecord
+
+ # = Active Record Errors
+ #
+ # Generic Active Record exception class.
+ class ActiveRecordError < StandardError
+ end
+
+ # Raised when the single-table inheritance mechanism fails to locate the subclass
+  # (for example due to improper usage of the column that +inheritance_column+ points to).
+ class SubclassNotFound < ActiveRecordError #:nodoc:
+ end
+
+ # Raised when an object assigned to an association has an incorrect type.
+ #
+ # class Ticket < ActiveRecord::Base
+ # has_many :patches
+ # end
+ #
+ # class Patch < ActiveRecord::Base
+ # belongs_to :ticket
+ # end
+ #
+ # # Comments are not patches, this assignment raises AssociationTypeMismatch.
+ # @ticket.patches << Comment.new(content: "Please attach tests to your patch.")
+ class AssociationTypeMismatch < ActiveRecordError
+ end
+
+  # Raised when an unserialized object's type does not match the type specified for the serializable field.
+ class SerializationTypeMismatch < ActiveRecordError
+ end
+
+  # Raised when the adapter is not specified in the connection configuration (or the
+  # configuration file +config/database.yml+ is missing the adapter field).
+ class AdapterNotSpecified < ActiveRecordError
+ end
+
+  # Raised when Active Record cannot find the database adapter specified in
+  # +config/database.yml+ or programmatically.
+ class AdapterNotFound < ActiveRecordError
+ end
+
+  # Raised when a connection to the database could not be established (for
+  # example, when +connection=+ is given a nil object).
+ class ConnectionNotEstablished < ActiveRecordError
+ end
+
+  # Raised when Active Record cannot find a record by the given id or set of ids.
+ class RecordNotFound < ActiveRecordError
+ end
+
+  # Raised by the ActiveRecord::Base.save! and ActiveRecord::Base.create! methods when a record
+  # cannot be saved because it is invalid.
+ class RecordNotSaved < ActiveRecordError
+ end
+
+ # Raised by ActiveRecord::Base.destroy! when a call to destroy would return false.
+ class RecordNotDestroyed < ActiveRecordError
+ end
+
+ # Superclass for all database execution errors.
+ #
+ # Wraps the underlying database error as +original_exception+.
+ class StatementInvalid < ActiveRecordError
+ attr_reader :original_exception
+
+ def initialize(message, original_exception = nil)
+ super(message)
+ @original_exception = original_exception
+ end
+ end
+
+ # Defunct wrapper class kept for compatibility.
+ # +StatementInvalid+ wraps the original exception now.
+ class WrappedDatabaseException < StatementInvalid
+ end
+
+ # Raised when a record cannot be inserted because it would violate a uniqueness constraint.
+ class RecordNotUnique < WrappedDatabaseException
+ end
+
+ # Raised when a record cannot be inserted or updated because it references a non-existent record.
+ class InvalidForeignKey < WrappedDatabaseException
+ end
+
+  # Raised when the number of bind variables in the statement given to the +:condition+ key
+  # (for example, when using the +find+ method) does not match the number of expected
+  # values supplied.
+ #
+ # For example, when there are two placeholders with only one value supplied:
+ #
+ # Location.where("lat = ? AND lng = ?", 53.7362)
+ class PreparedStatementInvalid < ActiveRecordError
+ end
+
+ # Raised when a given database does not exist.
+ class NoDatabaseError < StatementInvalid
+ end
+
+  # Raised on attempt to save a stale record. A record is stale when it has been updated
+  # by another query after instantiation, for example, when two users edit the same wiki
+  # page and one starts editing and saves the page before the other.
+ #
+ # Read more about optimistic locking in ActiveRecord::Locking module
+ # documentation.
+ class StaleObjectError < ActiveRecordError
+ attr_reader :record, :attempted_action
+
+ def initialize(record, attempted_action)
+ super("Attempted to #{attempted_action} a stale object: #{record.class.name}")
+ @record = record
+ @attempted_action = attempted_action
+ end
+
+ end
+
+  # Raised when an association is configured improperly, or when the user tries to use
+  # offset and limit together with +has_many+ or +has_and_belongs_to_many+
+  # associations.
+ class ConfigurationError < ActiveRecordError
+ end
+
+  # Raised on attempt to update a record that is instantiated as read only.
+ class ReadOnlyRecord < ActiveRecordError
+ end
+
+ # ActiveRecord::Transactions::ClassMethods.transaction uses this exception
+ # to distinguish a deliberate rollback from other exceptional situations.
+  # Normally, raising an exception will cause the +transaction+ method to roll back
+ # the database transaction *and* pass on the exception. But if you raise an
+ # ActiveRecord::Rollback exception, then the database transaction will be rolled back,
+ # without passing on the exception.
+ #
+  # For example, you could do this in your controller to roll back a transaction:
+ #
+ # class BooksController < ActionController::Base
+ # def create
+ # Book.transaction do
+ # book = Book.new(params[:book])
+ # book.save!
+ # if today_is_friday?
+ # # The system must fail on Friday so that our support department
+  #           # won't be out of a job. We silently roll back this transaction
+ # # without telling the user.
+ # raise ActiveRecord::Rollback, "Call tech support!"
+ # end
+ # end
+ # # ActiveRecord::Rollback is the only exception that won't be passed on
+ # # by ActiveRecord::Base.transaction, so this line will still be reached
+ # # even on Friday.
+ # redirect_to root_url
+ # end
+ # end
+ class Rollback < ActiveRecordError
+ end
+
+  # Raised when an attribute has a name reserved by Active Record (that is, when the
+  # attribute has the name of one of the Active Record instance methods).
+ class DangerousAttributeError < ActiveRecordError
+ end
+
+ # Raised when unknown attributes are supplied via mass assignment.
+ class UnknownAttributeError < NoMethodError
+
+ attr_reader :record, :attribute
+
+ def initialize(record, attribute)
+ @record = record
+ @attribute = attribute.to_s
+ super("unknown attribute: #{attribute}")
+ end
+
+ end
+
+  # Raised when an error occurs while doing a mass assignment to an attribute through the
+ # +attributes=+ method. The exception has an +attribute+ property that is the name of the
+ # offending attribute.
+ class AttributeAssignmentError < ActiveRecordError
+ attr_reader :exception, :attribute
+ def initialize(message, exception, attribute)
+ super(message)
+ @exception = exception
+ @attribute = attribute
+ end
+ end
+
+ # Raised when there are multiple errors while doing a mass assignment through the +attributes+
+ # method. The exception has an +errors+ property that contains an array of AttributeAssignmentError
+ # objects, each corresponding to the error while assigning to an attribute.
+ class MultiparameterAssignmentErrors < ActiveRecordError
+ attr_reader :errors
+ def initialize(errors)
+ @errors = errors
+ end
+ end
+
+ # Raised when a primary key is needed, but not specified in the schema or model.
+ class UnknownPrimaryKey < ActiveRecordError
+ attr_reader :model
+
+ def initialize(model)
+ super("Unknown primary key for table #{model.table_name} in model #{model}.")
+ @model = model
+ end
+
+ end
+
+ # Raised when a relation cannot be mutated because it's already loaded.
+ #
+ # class Task < ActiveRecord::Base
+ # end
+ #
+ # relation = Task.all
+ # relation.loaded? # => true
+ #
+ # # Methods which try to mutate a loaded relation fail.
+ # relation.where!(title: 'TODO') # => ActiveRecord::ImmutableRelation
+ # relation.limit!(5) # => ActiveRecord::ImmutableRelation
+ class ImmutableRelation < ActiveRecordError
+ end
+
+ # TransactionIsolationError will be raised under the following conditions:
+ #
+ # * The adapter does not support setting the isolation level
+ # * You are joining an existing open transaction
+ # * You are creating a nested (savepoint) transaction
+ #
+ # The mysql, mysql2 and postgresql adapters support setting the transaction isolation level.
+ class TransactionIsolationError < ActiveRecordError
+ end
+end
diff --git a/activerecord/lib/active_record/explain.rb b/activerecord/lib/active_record/explain.rb
new file mode 100644
index 0000000000..727a9befc1
--- /dev/null
+++ b/activerecord/lib/active_record/explain.rb
@@ -0,0 +1,38 @@
+require 'active_support/lazy_load_hooks'
+require 'active_record/explain_registry'
+
+module ActiveRecord
+ module Explain
+ # Executes the block with the collect flag enabled. Queries are collected
+ # asynchronously by the subscriber and returned.
+ def collecting_queries_for_explain # :nodoc:
+ ExplainRegistry.collect = true
+ yield
+ ExplainRegistry.queries
+ ensure
+ ExplainRegistry.reset
+ end
+
+ # Makes the adapter execute EXPLAIN for the tuples of queries and bindings.
+ # Returns a formatted string ready to be logged.
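+    #
+    # A rough usage sketch (model name hypothetical, output adapter-dependent):
+    #
+    #   queries = collecting_queries_for_explain { User.where(id: 1).to_a }
+    #   puts exec_explain(queries)
+    #   # EXPLAIN for: SELECT "users".* FROM "users" WHERE "users"."id" = ? [["id", 1]]
+    #   # ...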
+ def exec_explain(queries) # :nodoc:
+ str = queries.map do |sql, bind|
+ [].tap do |msg|
+ msg << "EXPLAIN for: #{sql}"
+ unless bind.empty?
+ bind_msg = bind.map {|col, val| [col.name, val]}.inspect
+ msg.last << " #{bind_msg}"
+ end
+ msg << connection.explain(sql, bind)
+ end.join("\n")
+ end.join("\n")
+
+ # Overriding inspect to be more human readable, especially in the console.
+ def str.inspect
+ self
+ end
+
+ str
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/explain_registry.rb b/activerecord/lib/active_record/explain_registry.rb
new file mode 100644
index 0000000000..f5cd57e075
--- /dev/null
+++ b/activerecord/lib/active_record/explain_registry.rb
@@ -0,0 +1,30 @@
+require 'active_support/per_thread_registry'
+
+module ActiveRecord
+ # This is a thread locals registry for EXPLAIN. For example
+ #
+ # ActiveRecord::ExplainRegistry.queries
+ #
+ # returns the collected queries local to the current thread.
+ #
+ # See the documentation of <tt>ActiveSupport::PerThreadRegistry</tt>
+ # for further details.
+ class ExplainRegistry # :nodoc:
+ extend ActiveSupport::PerThreadRegistry
+
+ attr_accessor :queries, :collect
+
+ def initialize
+ reset
+ end
+
+ def collect?
+ @collect
+ end
+
+ def reset
+ @collect = false
+ @queries = []
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/explain_subscriber.rb b/activerecord/lib/active_record/explain_subscriber.rb
new file mode 100644
index 0000000000..6a49936644
--- /dev/null
+++ b/activerecord/lib/active_record/explain_subscriber.rb
@@ -0,0 +1,29 @@
+require 'active_support/notifications'
+require 'active_record/explain_registry'
+
+module ActiveRecord
+ class ExplainSubscriber # :nodoc:
+ def start(name, id, payload)
+ # unused
+ end
+
+ def finish(name, id, payload)
+ if ExplainRegistry.collect? && !ignore_payload?(payload)
+ ExplainRegistry.queries << payload.values_at(:sql, :binds)
+ end
+ end
+
+    # SCHEMA queries cannot be EXPLAINed, and we also do not want to run EXPLAIN on
+    # our own EXPLAINs, no matter how loopingly beautiful that would be.
+ #
+ # On the other hand, we want to monitor the performance of our real database
+ # queries, not the performance of the access to the query cache.
+ IGNORED_PAYLOADS = %w(SCHEMA EXPLAIN CACHE)
+ EXPLAINED_SQLS = /\A\s*(select|update|delete|insert)\b/i
+ def ignore_payload?(payload)
+ payload[:exception] || IGNORED_PAYLOADS.include?(payload[:name]) || payload[:sql] !~ EXPLAINED_SQLS
+ end
+
+ ActiveSupport::Notifications.subscribe("sql.active_record", new)
+ end
+end
diff --git a/activerecord/lib/active_record/fixture_set/file.rb b/activerecord/lib/active_record/fixture_set/file.rb
new file mode 100644
index 0000000000..8132310c91
--- /dev/null
+++ b/activerecord/lib/active_record/fixture_set/file.rb
@@ -0,0 +1,56 @@
+require 'erb'
+require 'yaml'
+
+module ActiveRecord
+ class FixtureSet
+ class File # :nodoc:
+ include Enumerable
+
+ ##
+ # Open a fixture file named +file+. When called with a block, the block
+ # is called with the filehandle and the filehandle is automatically closed
+ # when the block finishes.
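+      #
+      # For example (path hypothetical):
+      #
+      #   ActiveRecord::FixtureSet::File.open("test/fixtures/web_sites.yml") do |fixture_file|
+      #     fixture_file.each { |name, row| puts name }
+      #   end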
+ def self.open(file)
+ x = new file
+ block_given? ? yield(x) : x
+ end
+
+ def initialize(file)
+ @file = file
+ @rows = nil
+ end
+
+ def each(&block)
+ rows.each(&block)
+ end
+
+
+ private
+ def rows
+ return @rows if @rows
+
+ begin
+ data = YAML.load(render(IO.read(@file)))
+ rescue ArgumentError, Psych::SyntaxError => error
+ raise Fixture::FormatError, "a YAML error occurred parsing #{@file}. Please note that YAML must be consistently indented using spaces. Tabs are not allowed. Please have a look at http://www.yaml.org/faq.html\nThe exact error was:\n #{error.class}: #{error}", error.backtrace
+ end
+ @rows = data ? validate(data).to_a : []
+ end
+
+ def render(content)
+ context = ActiveRecord::FixtureSet::RenderContext.create_subclass.new
+ ERB.new(content).result(context.get_binding)
+ end
+
+ # Validate our unmarshalled data.
+ def validate(data)
+ unless Hash === data || YAML::Omap === data
+ raise Fixture::FormatError, 'fixture is not a hash'
+ end
+
+ raise Fixture::FormatError unless data.all? { |name, row| Hash === row }
+ data
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/fixtures.rb b/activerecord/lib/active_record/fixtures.rb
new file mode 100644
index 0000000000..4306b36ae1
--- /dev/null
+++ b/activerecord/lib/active_record/fixtures.rb
@@ -0,0 +1,1030 @@
+require 'erb'
+require 'yaml'
+require 'zlib'
+require 'active_support/dependencies'
+require 'active_support/core_ext/digest/uuid'
+require 'active_record/fixture_set/file'
+require 'active_record/errors'
+
+module ActiveRecord
+ class FixtureClassNotFound < ActiveRecord::ActiveRecordError #:nodoc:
+ end
+
+ # \Fixtures are a way of organizing data that you want to test against; in short, sample data.
+ #
+ # They are stored in YAML files, one file per model, which are placed in the directory
+ # appointed by <tt>ActiveSupport::TestCase.fixture_path=(path)</tt> (this is automatically
+ # configured for Rails, so you can just put your files in <tt><your-rails-app>/test/fixtures/</tt>).
+ # The fixture file ends with the +.yml+ file extension, for example:
+  # <tt><your-rails-app>/test/fixtures/web_sites.yml</tt>.
+ #
+ # The format of a fixture file looks like this:
+ #
+ # rubyonrails:
+ # id: 1
+ # name: Ruby on Rails
+ # url: http://www.rubyonrails.org
+ #
+ # google:
+ # id: 2
+ # name: Google
+ # url: http://www.google.com
+ #
+  # This fixture file includes two fixtures. Each YAML fixture (i.e. record) is given a name and
+ # is followed by an indented list of key/value pairs in the "key: value" format. Records are
+ # separated by a blank line for your viewing pleasure.
+ #
+ # Note: Fixtures are unordered. If you want ordered fixtures, use the omap YAML type.
+ # See http://yaml.org/type/omap.html
+ # for the specification. You will need ordered fixtures when you have foreign key constraints
+ # on keys in the same table. This is commonly needed for tree structures. Example:
+ #
+ # --- !omap
+ # - parent:
+ # id: 1
+ # parent_id: NULL
+ # title: Parent
+ # - child:
+ # id: 2
+ # parent_id: 1
+ # title: Child
+ #
+ # = Using Fixtures in Test Cases
+ #
+ # Since fixtures are a testing construct, we use them in our unit and functional tests. There
+ # are two ways to use the fixtures, but first let's take a look at a sample unit test:
+ #
+ # require 'test_helper'
+ #
+ # class WebSiteTest < ActiveSupport::TestCase
+ # test "web_site_count" do
+ # assert_equal 2, WebSite.count
+ # end
+ # end
+ #
+ # By default, +test_helper.rb+ will load all of your fixtures into your test
+ # database, so this test will succeed.
+ #
+  # The testing environment will automatically load all the fixtures into the database before each
+ # test. To ensure consistent data, the environment deletes the fixtures before running the load.
+ #
+ # In addition to being available in the database, the fixture's data may also be accessed by
+ # using a special dynamic method, which has the same name as the model, and accepts the
+ # name of the fixture to instantiate:
+ #
+ # test "find" do
+ # assert_equal "Ruby on Rails", web_sites(:rubyonrails).name
+ # end
+ #
+ # Alternatively, you may enable auto-instantiation of the fixture data. For instance, take the
+ # following tests:
+ #
+ # test "find_alt_method_1" do
+ # assert_equal "Ruby on Rails", @web_sites['rubyonrails']['name']
+ # end
+ #
+ # test "find_alt_method_2" do
+ # assert_equal "Ruby on Rails", @rubyonrails.name
+ # end
+ #
+  # In order to use these methods to access fixtured data within your test cases, you must specify one of the
+ # following in your <tt>ActiveSupport::TestCase</tt>-derived class:
+ #
+ # - to fully enable instantiated fixtures (enable alternate methods #1 and #2 above)
+ # self.use_instantiated_fixtures = true
+ #
+ # - create only the hash for the fixtures, do not 'find' each instance (enable alternate method #1 only)
+ # self.use_instantiated_fixtures = :no_instances
+ #
+ # Using either of these alternate methods incurs a performance hit, as the fixtured data must be fully
+ # traversed in the database to create the fixture hash and/or instance variables. This is expensive for
+ # large sets of fixtured data.
+ #
+ # = Dynamic fixtures with ERB
+ #
+  # Sometimes you don't care about the content of the fixtures as much as you care about the volume.
+ # In these cases, you can mix ERB in with your YAML fixtures to create a bunch of fixtures for load
+ # testing, like:
+ #
+ # <% 1.upto(1000) do |i| %>
+ # fix_<%= i %>:
+ # id: <%= i %>
+  #     name: guy_<%= i %>
+ # <% end %>
+ #
+ # This will create 1000 very simple fixtures.
+ #
+ # Using ERB, you can also inject dynamic values into your fixtures with inserts like
+ # <tt><%= Date.today.strftime("%Y-%m-%d") %></tt>.
+  # This is, however, a feature to be used with some caution. The point of fixtures is that they're
+ # stable units of predictable sample data. If you feel that you need to inject dynamic values, then
+ # perhaps you should reexamine whether your application is properly testable. Hence, dynamic values
+ # in fixtures are to be considered a code smell.
+ #
+  # Helper methods defined in a fixture will not be available in other fixtures, to prevent
+ # unwanted inter-test dependencies. Methods used by multiple fixtures should be defined in a module
+ # that is included in <tt>ActiveRecord::FixtureSet.context_class</tt>.
+ #
+ # - define a helper method in `test_helper.rb`
+ # class FixtureFileHelpers
+ # def file_sha(path)
+ # Digest::SHA2.hexdigest(File.read(Rails.root.join('test/fixtures', path)))
+ # end
+ # end
+ # ActiveRecord::FixtureSet.context_class.send :include, FixtureFileHelpers
+ #
+ # - use the helper method in a fixture
+ # photo:
+ # name: kitten.png
+ # sha: <%= file_sha 'files/kitten.png' %>
+ #
+ # = Transactional Fixtures
+ #
+ # Test cases can use begin+rollback to isolate their changes to the database instead of having to
+ # delete+insert for every test case.
+ #
+ # class FooTest < ActiveSupport::TestCase
+ # self.use_transactional_fixtures = true
+ #
+ # test "godzilla" do
+ # assert !Foo.all.empty?
+ # Foo.destroy_all
+ # assert Foo.all.empty?
+ # end
+ #
+ # test "godzilla aftermath" do
+ # assert !Foo.all.empty?
+ # end
+ # end
+ #
+ # If you preload your test database with all fixture data (probably in the rake task) and use
+ # transactional fixtures, then you may omit all fixtures declarations in your test cases since
+ # all the data's already there and every case rolls back its changes.
+ #
+ # In order to use instantiated fixtures with preloaded data, set +self.pre_loaded_fixtures+ to
+ # true. This will provide access to fixture data for every table that has been loaded through
+ # fixtures (depending on the value of +use_instantiated_fixtures+).
+ #
+ # When *not* to use transactional fixtures:
+ #
+ # 1. You're testing whether a transaction works correctly. Nested transactions don't commit until
+  #    all parent transactions commit; in particular, the fixtures transaction which is begun in setup
+ # and rolled back in teardown. Thus, you won't be able to verify
+ # the results of your transaction until Active Record supports nested transactions or savepoints (in progress).
+ # 2. Your database does not support transactions. Every Active Record database supports transactions except MySQL MyISAM.
+ # Use InnoDB, MaxDB, or NDB instead.
+ #
+ # = Advanced Fixtures
+ #
+ # Fixtures that don't specify an ID get some extra features:
+ #
+ # * Stable, autogenerated IDs
+ # * Label references for associations (belongs_to, has_one, has_many)
+ # * HABTM associations as inline lists
+ # * Autofilled timestamp columns
+ # * Fixture label interpolation
+ # * Support for YAML defaults
+ #
+ # == Stable, Autogenerated IDs
+ #
+ # Here, have a monkey fixture:
+ #
+ # george:
+ # id: 1
+ # name: George the Monkey
+ #
+ # reginald:
+ # id: 2
+ # name: Reginald the Pirate
+ #
+ # Each of these fixtures has two unique identifiers: one for the database
+ # and one for the humans. Why don't we generate the primary key instead?
+ # Hashing each fixture's label yields a consistent ID:
+ #
+ # george: # generated id: 503576764
+ # name: George the Monkey
+ #
+ # reginald: # generated id: 324201669
+ # name: Reginald the Pirate
+ #
+ # Active Record looks at the fixture's model class, discovers the correct
+ # primary key, and generates it right before inserting the fixture
+ # into the database.
+ #
+ # The generated ID for a given label is constant, so we can discover
+ # any fixture's ID without loading anything, as long as we know the label.
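+  #
+  # For example, the ID generated for the george fixture above can be computed
+  # directly from its label:
+  #
+  #   ActiveRecord::FixtureSet.identify(:george) # => 503576764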
+ #
+ # == Label references for associations (belongs_to, has_one, has_many)
+ #
+ # Specifying foreign keys in fixtures can be very fragile, not to
+ # mention difficult to read. Since Active Record can figure out the ID of
+ # any fixture from its label, you can specify FK's by label instead of ID.
+ #
+ # === belongs_to
+ #
+ # Let's break out some more monkeys and pirates.
+ #
+ # ### in pirates.yml
+ #
+ # reginald:
+ # id: 1
+ # name: Reginald the Pirate
+ # monkey_id: 1
+ #
+ # ### in monkeys.yml
+ #
+ # george:
+ # id: 1
+ # name: George the Monkey
+ # pirate_id: 1
+ #
+ # Add a few more monkeys and pirates and break this into multiple files,
+ # and it gets pretty hard to keep track of what's going on. Let's
+ # use labels instead of IDs:
+ #
+ # ### in pirates.yml
+ #
+ # reginald:
+ # name: Reginald the Pirate
+ # monkey: george
+ #
+ # ### in monkeys.yml
+ #
+ # george:
+ # name: George the Monkey
+ # pirate: reginald
+ #
+ # Pow! All is made clear. Active Record reflects on the fixture's model class,
+ # finds all the +belongs_to+ associations, and allows you to specify
+ # a target *label* for the *association* (monkey: george) rather than
+ # a target *id* for the *FK* (<tt>monkey_id: 1</tt>).
+ #
+ # ==== Polymorphic belongs_to
+ #
+ # Supporting polymorphic relationships is a little bit more complicated, since
+ # Active Record needs to know what type your association is pointing at. Something
+ # like this should look familiar:
+ #
+ # ### in fruit.rb
+ #
+ # belongs_to :eater, polymorphic: true
+ #
+ # ### in fruits.yml
+ #
+ # apple:
+ # id: 1
+ # name: apple
+ # eater_id: 1
+ # eater_type: Monkey
+ #
+ # Can we do better? You bet!
+ #
+ # apple:
+ # eater: george (Monkey)
+ #
+ # Just provide the polymorphic target type and Active Record will take care of the rest.
+ #
+ # === has_and_belongs_to_many
+ #
+ # Time to give our monkey some fruit.
+ #
+ # ### in monkeys.yml
+ #
+ # george:
+ # id: 1
+ # name: George the Monkey
+ #
+ # ### in fruits.yml
+ #
+ # apple:
+ # id: 1
+ # name: apple
+ #
+ # orange:
+ # id: 2
+ # name: orange
+ #
+ # grape:
+ # id: 3
+ # name: grape
+ #
+ # ### in fruits_monkeys.yml
+ #
+ # apple_george:
+ # fruit_id: 1
+ # monkey_id: 1
+ #
+ # orange_george:
+ # fruit_id: 2
+ # monkey_id: 1
+ #
+ # grape_george:
+ # fruit_id: 3
+ # monkey_id: 1
+ #
+ # Let's make the HABTM fixture go away.
+ #
+ # ### in monkeys.yml
+ #
+ # george:
+ # id: 1
+ # name: George the Monkey
+ # fruits: apple, orange, grape
+ #
+ # ### in fruits.yml
+ #
+ # apple:
+ # name: apple
+ #
+ # orange:
+ # name: orange
+ #
+ # grape:
+ # name: grape
+ #
+ # Zap! No more fruits_monkeys.yml file. We've specified the list of fruits
+ # on George's fixture, but we could've just as easily specified a list
+ # of monkeys on each fruit. As with +belongs_to+, Active Record reflects on
+ # the fixture's model class and discovers the +has_and_belongs_to_many+
+ # associations.
+ #
+ # == Autofilled Timestamp Columns
+ #
+ # If your table/model specifies any of Active Record's
+ # standard timestamp columns (+created_at+, +created_on+, +updated_at+, +updated_on+),
+ # they will automatically be set to <tt>Time.now</tt>.
+ #
+ # If you've set specific values, they'll be left alone.
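+  #
+  # For example, in this (purely illustrative) fixture only +updated_at+ is set
+  # explicitly and will be left alone, while +created_at+ would be autofilled:
+  #
+  #   george:
+  #     name: George the Monkey
+  #     updated_at: 2010-01-01 00:00:00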
+ #
+ # == Fixture label interpolation
+ #
+ # The label of the current fixture is always available as a column value:
+ #
+ # geeksomnia:
+ # name: Geeksomnia's Account
+ # subdomain: $LABEL
+ # email: $LABEL@email.com
+ #
+ # Also, sometimes (like when porting older join table fixtures) you'll need
+ # to be able to get a hold of the identifier for a given label. ERB
+ # to the rescue:
+ #
+ # george_reginald:
+ # monkey_id: <%= ActiveRecord::FixtureSet.identify(:reginald) %>
+ # pirate_id: <%= ActiveRecord::FixtureSet.identify(:george) %>
+ #
+ # == Support for YAML defaults
+ #
+ # You can set and reuse defaults in your fixtures YAML file.
+ # This is the same technique used in the +database.yml+ file to specify
+ # defaults:
+ #
+ # DEFAULTS: &DEFAULTS
+ # created_on: <%= 3.weeks.ago.to_s(:db) %>
+ #
+ # first:
+ # name: Smurf
+ # <<: *DEFAULTS
+ #
+ # second:
+ # name: Fraggle
+ # <<: *DEFAULTS
+ #
+ # Any fixture labeled "DEFAULTS" is safely ignored.
+ class FixtureSet
+ #--
+ # An instance of FixtureSet is normally stored in a single YAML file and
+ # possibly in a folder with the same name.
+ #++
+
+ MAX_ID = 2 ** 30 - 1
+
+ @@all_cached_fixtures = Hash.new { |h,k| h[k] = {} }
+
+ def self.default_fixture_model_name(fixture_set_name, config = ActiveRecord::Base) # :nodoc:
+ config.pluralize_table_names ?
+ fixture_set_name.singularize.camelize :
+ fixture_set_name.camelize
+ end
+
+ def self.default_fixture_table_name(fixture_set_name, config = ActiveRecord::Base) # :nodoc:
+ "#{ config.table_name_prefix }"\
+ "#{ fixture_set_name.tr('/', '_') }"\
+ "#{ config.table_name_suffix }".to_sym
+ end
+
+ def self.reset_cache
+ @@all_cached_fixtures.clear
+ end
+
+ def self.cache_for_connection(connection)
+ @@all_cached_fixtures[connection]
+ end
+
+ def self.fixture_is_cached?(connection, table_name)
+ cache_for_connection(connection)[table_name]
+ end
+
+ def self.cached_fixtures(connection, keys_to_fetch = nil)
+ if keys_to_fetch
+ cache_for_connection(connection).values_at(*keys_to_fetch)
+ else
+ cache_for_connection(connection).values
+ end
+ end
+
+ def self.cache_fixtures(connection, fixtures_map)
+ cache_for_connection(connection).update(fixtures_map)
+ end
+
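+    # When +load_instances+ is true, sets an instance variable named after each
+    # fixture (for example <tt>@rubyonrails</tt>) on +object+, holding the
+    # corresponding model instance looked up via Fixture#find. Fixtures whose
+    # model class cannot be resolved are skipped.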
+ def self.instantiate_fixtures(object, fixture_set, load_instances = true)
+ if load_instances
+ fixture_set.each do |fixture_name, fixture|
+ begin
+ object.instance_variable_set "@#{fixture_name}", fixture.find
+ rescue FixtureClassNotFound
+ nil
+ end
+ end
+ end
+ end
+
+ def self.instantiate_all_loaded_fixtures(object, load_instances = true)
+ all_loaded_fixtures.each_value do |fixture_set|
+ instantiate_fixtures(object, fixture_set, load_instances)
+ end
+ end
+
+ cattr_accessor :all_loaded_fixtures
+ self.all_loaded_fixtures = {}
+
+ class ClassCache
+ def initialize(class_names, config)
+ @class_names = class_names.stringify_keys
+ @config = config
+
+ # Remove string values that aren't constants or subclasses of AR
+ @class_names.delete_if { |klass_name, klass| !insert_class(@class_names, klass_name, klass) }
+ end
+
+ def [](fs_name)
+ @class_names.fetch(fs_name) {
+ klass = default_fixture_model(fs_name, @config).safe_constantize
+ insert_class(@class_names, fs_name, klass)
+ }
+ end
+
+ private
+
+ def insert_class(class_names, name, klass)
+ # We only want to deal with AR objects.
+ if klass && klass < ActiveRecord::Base
+ class_names[name] = klass
+ else
+ class_names[name] = nil
+ end
+ end
+
+ def default_fixture_model(fs_name, config)
+ ActiveRecord::FixtureSet.default_fixture_model_name(fs_name, config)
+ end
+ end
+
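+    # Reads the given fixture sets from +fixtures_directory+, replaces any existing
+    # rows in the corresponding tables, inserts the fixture data, and caches the
+    # fixture sets per connection. A minimal usage sketch (the directory and set
+    # name are the sample ones used in the documentation above):
+    #
+    #   ActiveRecord::FixtureSet.create_fixtures('test/fixtures', ['web_sites'])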
+ def self.create_fixtures(fixtures_directory, fixture_set_names, class_names = {}, config = ActiveRecord::Base)
+ fixture_set_names = Array(fixture_set_names).map(&:to_s)
+ class_names = ClassCache.new class_names, config
+
+ # FIXME: Apparently JK uses this.
+ connection = block_given? ? yield : ActiveRecord::Base.connection
+
+ files_to_read = fixture_set_names.reject { |fs_name|
+ fixture_is_cached?(connection, fs_name)
+ }
+
+ unless files_to_read.empty?
+ connection.disable_referential_integrity do
+ fixtures_map = {}
+
+ fixture_sets = files_to_read.map do |fs_name|
+ klass = class_names[fs_name]
+ conn = klass ? klass.connection : connection
+ fixtures_map[fs_name] = new( # ActiveRecord::FixtureSet.new
+ conn,
+ fs_name,
+ klass,
+ ::File.join(fixtures_directory, fs_name))
+ end
+
+ all_loaded_fixtures.update(fixtures_map)
+
+ connection.transaction(:requires_new => true) do
+ fixture_sets.each do |fs|
+ conn = fs.model_class.respond_to?(:connection) ? fs.model_class.connection : connection
+ table_rows = fs.table_rows
+
+ table_rows.keys.each do |table|
+ conn.delete "DELETE FROM #{conn.quote_table_name(table)}", 'Fixture Delete'
+ end
+
+ table_rows.each do |fixture_set_name, rows|
+ rows.each do |row|
+ conn.insert_fixture(row, fixture_set_name)
+ end
+ end
+ end
+
+ # Cap primary key sequences to max(pk).
+ if connection.respond_to?(:reset_pk_sequence!)
+ fixture_sets.each do |fs|
+ connection.reset_pk_sequence!(fs.table_name)
+ end
+ end
+ end
+
+ cache_fixtures(connection, fixtures_map)
+ end
+ end
+ cached_fixtures(connection, fixture_set_names)
+ end
+
+ # Returns a consistent, platform-independent identifier for +label+.
+ # Integer identifiers are values less than 2^30. UUIDs are RFC 4122 version 5 SHA-1 hashes.
+ def self.identify(label, column_type = :integer)
+ if column_type == :uuid
+ Digest::UUID.uuid_v5(Digest::UUID::OID_NAMESPACE, label.to_s)
+ else
+ Zlib.crc32(label.to_s) % MAX_ID
+ end
+ end
+
+ # Superclass for the evaluation contexts used by ERB fixtures.
+ def self.context_class
+ @context_class ||= Class.new
+ end
+
+ attr_reader :table_name, :name, :fixtures, :model_class, :config
+
+ def initialize(connection, name, class_name, path, config = ActiveRecord::Base)
+ @name = name
+ @path = path
+ @config = config
+ @model_class = nil
+
+ if class_name.is_a?(Class) # TODO: Should be an AR::Base type class, or any?
+ @model_class = class_name
+ else
+ @model_class = class_name.safe_constantize if class_name
+ end
+
+ @connection = connection
+
+ @table_name = ( model_class.respond_to?(:table_name) ?
+ model_class.table_name :
+ self.class.default_fixture_table_name(name, config) )
+
+ @fixtures = read_fixture_files path, @model_class
+ end
+
+ def [](x)
+ fixtures[x]
+ end
+
+ def []=(k,v)
+ fixtures[k] = v
+ end
+
+ def each(&block)
+ fixtures.each(&block)
+ end
+
+ def size
+ fixtures.size
+ end
+
+ # Returns a hash of rows to be inserted. The key is the table, the value is
+    # a list of rows to insert into that table.
+ def table_rows
+ now = config.default_timezone == :utc ? Time.now.utc : Time.now
+ now = now.to_s(:db)
+
+ # allow a standard key to be used for doing defaults in YAML
+ fixtures.delete('DEFAULTS')
+
+ # track any join tables we need to insert later
+ rows = Hash.new { |h,table| h[table] = [] }
+
+ rows[table_name] = fixtures.map do |label, fixture|
+ row = fixture.to_hash
+
+ if model_class
+ # fill in timestamp columns if they aren't specified and the model is set to record_timestamps
+ if model_class.record_timestamps
+ timestamp_column_names.each do |c_name|
+ row[c_name] = now unless row.key?(c_name)
+ end
+ end
+
+ # interpolate the fixture label
+ row.each do |key, value|
+ row[key] = value.gsub("$LABEL", label) if value.is_a?(String)
+ end
+
+ # generate a primary key if necessary
+ if has_primary_key_column? && !row.include?(primary_key_name)
+ row[primary_key_name] = ActiveRecord::FixtureSet.identify(label, primary_key_type)
+ end
+
+ # If STI is used, find the correct subclass for association reflection
+ reflection_class =
+ if row.include?(inheritance_column_name)
+ row[inheritance_column_name].constantize rescue model_class
+ else
+ model_class
+ end
+
+ reflection_class._reflections.values.each do |association|
+ case association.macro
+ when :belongs_to
+ # Do not replace association name with association foreign key if they are named the same
+ fk_name = (association.options[:foreign_key] || "#{association.name}_id").to_s
+
+ if association.name.to_s != fk_name && value = row.delete(association.name.to_s)
+ if association.polymorphic? && value.sub!(/\s*\(([^\)]*)\)\s*$/, "")
+ # support polymorphic belongs_to as "label (Type)"
+ row[association.foreign_type] = $1
+ end
+
+ fk_type = association.active_record.columns_hash[association.foreign_key].type
+ row[fk_name] = ActiveRecord::FixtureSet.identify(value, fk_type)
+ end
+ when :has_many
+ if association.options[:through]
+ add_join_records(rows, row, HasManyThroughProxy.new(association))
+ end
+ end
+ end
+ end
+
+ row
+ end
+ rows
+ end
+
+ class ReflectionProxy # :nodoc:
+ def initialize(association)
+ @association = association
+ end
+
+ def join_table
+ @association.join_table
+ end
+
+ def name
+ @association.name
+ end
+
+ def primary_key_type
+ @association.klass.column_types[@association.klass.primary_key].type
+ end
+ end
+
+ class HasManyThroughProxy < ReflectionProxy # :nodoc:
+ def rhs_key
+ @association.foreign_key
+ end
+
+ def lhs_key
+ @association.through_reflection.foreign_key
+ end
+ end
+
+ private
+ def primary_key_name
+ @primary_key_name ||= model_class && model_class.primary_key
+ end
+
+ def primary_key_type
+ @primary_key_type ||= model_class && model_class.column_types[model_class.primary_key].type
+ end
+
+ def add_join_records(rows, row, association)
+ # This is the case when the join table has no fixtures file
+ if (targets = row.delete(association.name.to_s))
+ table_name = association.join_table
+ column_type = association.primary_key_type
+ lhs_key = association.lhs_key
+ rhs_key = association.rhs_key
+
+ targets = targets.is_a?(Array) ? targets : targets.split(/\s*,\s*/)
+ rows[table_name].concat targets.map { |target|
+ { lhs_key => row[primary_key_name],
+ rhs_key => ActiveRecord::FixtureSet.identify(target, column_type) }
+ }
+ end
+ end
+
+ def has_primary_key_column?
+ @has_primary_key_column ||= primary_key_name &&
+ model_class.columns.any? { |c| c.name == primary_key_name }
+ end
+
+ def timestamp_column_names
+ @timestamp_column_names ||=
+ %w(created_at created_on updated_at updated_on) & column_names
+ end
+
+ def inheritance_column_name
+ @inheritance_column_name ||= model_class && model_class.inheritance_column
+ end
+
+ def column_names
+ @column_names ||= @connection.columns(@table_name).collect { |c| c.name }
+ end
+
+ def read_fixture_files(path, model_class)
+ yaml_files = Dir["#{path}/{**,*}/*.yml"].select { |f|
+ ::File.file?(f)
+ } + [yaml_file_path(path)]
+
+ yaml_files.each_with_object({}) do |file, fixtures|
+ FixtureSet::File.open(file) do |fh|
+ fh.each do |fixture_name, row|
+ fixtures[fixture_name] = ActiveRecord::Fixture.new(row, model_class)
+ end
+ end
+ end
+ end
+
+ def yaml_file_path(path)
+ "#{path}.yml"
+ end
+
+ end
+
+ #--
+ # Deprecate 'Fixtures' in favor of 'FixtureSet'.
+ #++
+ # :nodoc:
+ Fixtures = ActiveSupport::Deprecation::DeprecatedConstantProxy.new('ActiveRecord::Fixtures', 'ActiveRecord::FixtureSet')
+
+ class Fixture #:nodoc:
+ include Enumerable
+
+ class FixtureError < StandardError #:nodoc:
+ end
+
+ class FormatError < FixtureError #:nodoc:
+ end
+
+ attr_reader :model_class, :fixture
+
+ def initialize(fixture, model_class)
+ @fixture = fixture
+ @model_class = model_class
+ end
+
+ def class_name
+ model_class.name if model_class
+ end
+
+ def each
+ fixture.each { |item| yield item }
+ end
+
+ def [](key)
+ fixture[key]
+ end
+
+ alias :to_hash :fixture
+
+ def find
+ if model_class
+ model_class.find(fixture[model_class.primary_key])
+ else
+ raise FixtureClassNotFound, "No class attached to find."
+ end
+ end
+ end
+end
+
+module ActiveRecord
+ module TestFixtures
+ extend ActiveSupport::Concern
+
+ def before_setup
+ setup_fixtures
+ super
+ end
+
+ def after_teardown
+ super
+ teardown_fixtures
+ end
+
+ included do
+ class_attribute :fixture_path, :instance_writer => false
+ class_attribute :fixture_table_names
+ class_attribute :fixture_class_names
+ class_attribute :use_transactional_fixtures
+ class_attribute :use_instantiated_fixtures # true, false, or :no_instances
+ class_attribute :pre_loaded_fixtures
+ class_attribute :config
+
+ self.fixture_table_names = []
+ self.use_transactional_fixtures = true
+ self.use_instantiated_fixtures = false
+ self.pre_loaded_fixtures = false
+ self.config = ActiveRecord::Base
+
+ self.fixture_class_names = Hash.new do |h, fixture_set_name|
+ h[fixture_set_name] = ActiveRecord::FixtureSet.default_fixture_model_name(fixture_set_name, self.config)
+ end
+ end
+
+ module ClassMethods
+ # Sets the model class for a fixture when the class name cannot be inferred from the fixture name.
+ #
+ # Examples:
+ #
+ # set_fixture_class some_fixture: SomeModel,
+ # 'namespaced/fixture' => Another::Model
+ #
+      # The keys must be the fixture names, which coincide with the short paths to the fixture files.
+ def set_fixture_class(class_names = {})
+ self.fixture_class_names = self.fixture_class_names.merge(class_names.stringify_keys)
+ end
+
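+      # Declares the fixture sets used by this test case and defines the per-set
+      # accessor methods (such as +web_sites+ in the examples above). Passing
+      # <tt>:all</tt> picks up every fixture file under +fixture_path+. A brief
+      # sketch:
+      #
+      #   class WebSiteTest < ActiveSupport::TestCase
+      #     fixtures :web_sites
+      #   end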
+ def fixtures(*fixture_set_names)
+ if fixture_set_names.first == :all
+ fixture_set_names = Dir["#{fixture_path}/{**,*}/*.{yml}"]
+ fixture_set_names.map! { |f| f[(fixture_path.to_s.size + 1)..-5] }
+ else
+ fixture_set_names = fixture_set_names.flatten.map { |n| n.to_s }
+ end
+
+ self.fixture_table_names |= fixture_set_names
+ require_fixture_classes(fixture_set_names, self.config)
+ setup_fixture_accessors(fixture_set_names)
+ end
+
+ def try_to_load_dependency(file_name)
+ require_dependency file_name
+ rescue LoadError => e
+ # Let's hope the developer has included it
+ # Let's warn in case this is a subdependency, otherwise
+ # subdependency error messages are totally cryptic
+ if ActiveRecord::Base.logger
+ ActiveRecord::Base.logger.warn("Unable to load #{file_name}, underlying cause #{e.message} \n\n #{e.backtrace.join("\n")}")
+ end
+ end
+
+ def require_fixture_classes(fixture_set_names = nil, config = ActiveRecord::Base)
+ if fixture_set_names
+ fixture_set_names = fixture_set_names.map { |n| n.to_s }
+ else
+ fixture_set_names = fixture_table_names
+ end
+
+ fixture_set_names.each do |file_name|
+ file_name = file_name.singularize if config.pluralize_table_names
+ try_to_load_dependency(file_name)
+ end
+ end
+
+ def setup_fixture_accessors(fixture_set_names = nil)
+ fixture_set_names = Array(fixture_set_names || fixture_table_names)
+ methods = Module.new do
+ fixture_set_names.each do |fs_name|
+ fs_name = fs_name.to_s
+ accessor_name = fs_name.tr('/', '_').to_sym
+
+ define_method(accessor_name) do |*fixture_names|
+ force_reload = fixture_names.pop if fixture_names.last == true || fixture_names.last == :reload
+
+ @fixture_cache[fs_name] ||= {}
+
+ instances = fixture_names.map do |f_name|
+ f_name = f_name.to_s
+ @fixture_cache[fs_name].delete(f_name) if force_reload
+
+ if @loaded_fixtures[fs_name][f_name]
+ @fixture_cache[fs_name][f_name] ||= @loaded_fixtures[fs_name][f_name].find
+ else
+ raise StandardError, "No fixture named '#{f_name}' found for fixture set '#{fs_name}'"
+ end
+ end
+
+ instances.size == 1 ? instances.first : instances
+ end
+ private accessor_name
+ end
+ end
+ include methods
+ end
+
+ def uses_transaction(*methods)
+ @uses_transaction = [] unless defined?(@uses_transaction)
+ @uses_transaction.concat methods.map { |m| m.to_s }
+ end
+
+ def uses_transaction?(method)
+ @uses_transaction = [] unless defined?(@uses_transaction)
+ @uses_transaction.include?(method.to_s)
+ end
+ end
+
+ def run_in_transaction?
+ use_transactional_fixtures &&
+ !self.class.uses_transaction?(method_name)
+ end
+
+ def setup_fixtures(config = ActiveRecord::Base)
+ if pre_loaded_fixtures && !use_transactional_fixtures
+ raise RuntimeError, 'pre_loaded_fixtures requires use_transactional_fixtures'
+ end
+
+ @fixture_cache = {}
+ @fixture_connections = []
+ @@already_loaded_fixtures ||= {}
+
+ # Load fixtures once and begin transaction.
+ if run_in_transaction?
+ if @@already_loaded_fixtures[self.class]
+ @loaded_fixtures = @@already_loaded_fixtures[self.class]
+ else
+ @loaded_fixtures = load_fixtures(config)
+ @@already_loaded_fixtures[self.class] = @loaded_fixtures
+ end
+ @fixture_connections = enlist_fixture_connections
+ @fixture_connections.each do |connection|
+ connection.begin_transaction joinable: false
+ end
+ # Load fixtures for every test.
+ else
+ ActiveRecord::FixtureSet.reset_cache
+ @@already_loaded_fixtures[self.class] = nil
+ @loaded_fixtures = load_fixtures(config)
+ end
+
+ # Instantiate fixtures for every test if requested.
+ instantiate_fixtures(config) if use_instantiated_fixtures
+ end
+
+ def teardown_fixtures
+ # Rollback changes if a transaction is active.
+ if run_in_transaction?
+ @fixture_connections.each do |connection|
+ connection.rollback_transaction if connection.transaction_open?
+ end
+ @fixture_connections.clear
+ else
+ ActiveRecord::FixtureSet.reset_cache
+ end
+
+ ActiveRecord::Base.clear_active_connections!
+ end
+
+ def enlist_fixture_connections
+ ActiveRecord::Base.connection_handler.connection_pool_list.map(&:connection)
+ end
+
+ private
+ def load_fixtures(config)
+ fixtures = ActiveRecord::FixtureSet.create_fixtures(fixture_path, fixture_table_names, fixture_class_names, config)
+ Hash[fixtures.map { |f| [f.name, f] }]
+ end
+
+ # for pre_loaded_fixtures, only require the classes once. huge speed improvement
+ @@required_fixture_classes = false
+
+ def instantiate_fixtures(config)
+ if pre_loaded_fixtures
+ raise RuntimeError, 'Load fixtures before instantiating them.' if ActiveRecord::FixtureSet.all_loaded_fixtures.empty?
+ unless @@required_fixture_classes
+ self.class.require_fixture_classes ActiveRecord::FixtureSet.all_loaded_fixtures.keys, config
+ @@required_fixture_classes = true
+ end
+ ActiveRecord::FixtureSet.instantiate_all_loaded_fixtures(self, load_instances?)
+ else
+ raise RuntimeError, 'Load fixtures before instantiating them.' if @loaded_fixtures.nil?
+ @loaded_fixtures.each_value do |fixture_set|
+ ActiveRecord::FixtureSet.instantiate_fixtures(self, fixture_set, load_instances?)
+ end
+ end
+ end
+
+ def load_instances?
+ use_instantiated_fixtures != :no_instances
+ end
+ end
+end
+
+class ActiveRecord::FixtureSet::RenderContext # :nodoc:
+ def self.create_subclass
+ Class.new ActiveRecord::FixtureSet.context_class do
+ def get_binding
+ binding()
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/gem_version.rb b/activerecord/lib/active_record/gem_version.rb
new file mode 100644
index 0000000000..4a7aace460
--- /dev/null
+++ b/activerecord/lib/active_record/gem_version.rb
@@ -0,0 +1,15 @@
+module ActiveRecord
+ # Returns the version of the currently loaded ActiveRecord as a <tt>Gem::Version</tt>
+ def self.gem_version
+ Gem::Version.new VERSION::STRING
+ end
+
+ module VERSION
+ MAJOR = 4
+ MINOR = 2
+ TINY = 0
+ PRE = "alpha"
+
+ STRING = [MAJOR, MINOR, TINY, PRE].compact.join(".")
+ end
+end
diff --git a/activerecord/lib/active_record/inheritance.rb b/activerecord/lib/active_record/inheritance.rb
new file mode 100644
index 0000000000..251d682a02
--- /dev/null
+++ b/activerecord/lib/active_record/inheritance.rb
@@ -0,0 +1,247 @@
+require 'active_support/core_ext/hash/indifferent_access'
+
+module ActiveRecord
+ # == Single table inheritance
+ #
+ # Active Record allows inheritance by storing the name of the class in a column that by
+ # default is named "type" (can be changed by overwriting <tt>Base.inheritance_column</tt>).
+ # This means that an inheritance looking like this:
+ #
+ # class Company < ActiveRecord::Base; end
+ # class Firm < Company; end
+ # class Client < Company; end
+ # class PriorityClient < Client; end
+ #
+ # When you do <tt>Firm.create(name: "37signals")</tt>, this record will be saved in
+ # the companies table with type = "Firm". You can then fetch this row again using
+ # <tt>Company.where(name: '37signals').first</tt> and it will return a Firm object.
+ #
+ # Be aware that because the type column is an attribute on the record every new
+ # subclass will instantly be marked as dirty and the type column will be included
+  # in the list of changed attributes on the record. This is different from
+  # non-STI classes:
+ #
+ # Company.new.changed? # => false
+ # Firm.new.changed? # => true
+ # Firm.new.changes # => {"type"=>["","Firm"]}
+ #
+ # If you don't have a type column defined in your table, single-table inheritance won't
+ # be triggered. In that case, it'll work just like normal subclasses with no special magic
+ # for differentiating between them or reloading the right type with find.
+ #
+ # Note, all the attributes for all the cases are kept in the same table. Read more:
+ # http://www.martinfowler.com/eaaCatalog/singleTableInheritance.html
+ #
+ module Inheritance
+ extend ActiveSupport::Concern
+
+ included do
+ # Determines whether to store the full constant name including namespace when using STI.
+ class_attribute :store_full_sti_class, instance_writer: false
+ self.store_full_sti_class = true
+ end
+
+ module ClassMethods
+ # Determines if one of the attributes passed in is the inheritance column,
+ # and if the inheritance column is attr accessible, it initializes an
+ # instance of the given subclass instead of the base class.
+ def new(*args, &block)
+ if abstract_class? || self == Base
+ raise NotImplementedError, "#{self} is an abstract class and cannot be instantiated."
+ end
+
+ attrs = args.first
+ if subclass_from_attributes?(attrs)
+ subclass = subclass_from_attributes(attrs)
+ end
+
+ if subclass
+ subclass.new(*args, &block)
+ else
+ super
+ end
+ end
+
+ # Returns +true+ if this does not need STI type condition. Returns
+ # +false+ if STI type condition needs to be applied.
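+      #
+      # Using the Company hierarchy above (where the companies table has a
+      # +type+ column):
+      #
+      #   Company.descends_from_active_record? # => true
+      #   Firm.descends_from_active_record?    # => false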
+ def descends_from_active_record?
+ if self == Base
+ false
+ elsif superclass.abstract_class?
+ superclass.descends_from_active_record?
+ else
+ superclass == Base || !columns_hash.include?(inheritance_column)
+ end
+ end
+
+ def finder_needs_type_condition? #:nodoc:
+ # This is like this because benchmarking justifies the strange :false stuff
+ :true == (@finder_needs_type_condition ||= descends_from_active_record? ? :false : :true)
+ end
+
+ def symbolized_base_class
+ ActiveSupport::Deprecation.warn("ActiveRecord::Base.symbolized_base_class is deprecated and will be removed without replacement.")
+ @symbolized_base_class ||= base_class.to_s.to_sym
+ end
+
+ def symbolized_sti_name
+ ActiveSupport::Deprecation.warn("ActiveRecord::Base.symbolized_sti_name is deprecated and will be removed without replacement.")
+ @symbolized_sti_name ||= sti_name.present? ? sti_name.to_sym : symbolized_base_class
+ end
+
+ # Returns the class descending directly from ActiveRecord::Base, or
+ # an abstract class, if any, in the inheritance hierarchy.
+ #
+ # If A extends AR::Base, A.base_class will return A. If B descends from A
+ # through some arbitrarily deep hierarchy, B.base_class will return A.
+ #
+ # If B < A and C < B and if A is an abstract_class then both B.base_class
+ # and C.base_class would return B as the answer since A is an abstract_class.
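+      #
+      # For example, with the Company hierarchy shown at the top of this file:
+      #
+      #   Company.base_class # => Company
+      #   Firm.base_class    # => Company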
+ def base_class
+ unless self < Base
+ raise ActiveRecordError, "#{name} doesn't belong in a hierarchy descending from ActiveRecord"
+ end
+
+ if superclass == Base || superclass.abstract_class?
+ self
+ else
+ superclass.base_class
+ end
+ end
+
+ # Set this to true if this is an abstract class (see <tt>abstract_class?</tt>).
+ # If you are using inheritance with ActiveRecord and don't want child classes
+ # to utilize the implied STI table name of the parent class, this will need to be true.
+ # For example, given the following:
+ #
+ # class SuperClass < ActiveRecord::Base
+ # self.abstract_class = true
+ # end
+ # class Child < SuperClass
+ # self.table_name = 'the_table_i_really_want'
+ # end
+      #
+      # <tt>self.abstract_class = true</tt> is required to make <tt>Child.find</tt>, <tt>Child.create</tt>,
+      # or any other Arel method use <tt>the_table_i_really_want</tt> instead of a table called <tt>super_classes</tt>.
+ #
+ attr_accessor :abstract_class
+
+ # Returns whether this class is an abstract class or not.
+ def abstract_class?
+ defined?(@abstract_class) && @abstract_class == true
+ end
+
+ def sti_name
+ store_full_sti_class ? name : name.demodulize
+ end
+
+ protected
+
+ # Returns the class type of the record using the current module as a prefix. So descendants of
+ # MyApp::Business::Account would appear as MyApp::Business::AccountSubclass.
+ def compute_type(type_name)
+ if type_name.match(/^::/)
+ # If the type is prefixed with a scope operator then we assume that
+ # the type_name is an absolute reference.
+ ActiveSupport::Dependencies.constantize(type_name)
+ else
+ # Build a list of candidates to search for
+ candidates = []
+ name.scan(/::|$/) { candidates.unshift "#{$`}::#{type_name}" }
+ candidates << type_name
+
+ candidates.each do |candidate|
+ constant = ActiveSupport::Dependencies.safe_constantize(candidate)
+ return constant if candidate == constant.to_s
+ end
+
+ raise NameError.new("uninitialized constant #{candidates.first}", candidates.first)
+ end
+ end
+
+ private
+
+ # Called by +instantiate+ to decide which class to use for a new
+ # record instance. For single-table inheritance, we check the record
+ # for a +type+ column and return the corresponding class.
+ def discriminate_class_for_record(record)
+ if using_single_table_inheritance?(record)
+ find_sti_class(record[inheritance_column])
+ else
+ super
+ end
+ end
+
+ def using_single_table_inheritance?(record)
+ record[inheritance_column].present? && columns_hash.include?(inheritance_column)
+ end
+
+ def find_sti_class(type_name)
+ if store_full_sti_class
+ ActiveSupport::Dependencies.constantize(type_name)
+ else
+ compute_type(type_name)
+ end
+ rescue NameError
+ raise SubclassNotFound,
+ "The single-table inheritance mechanism failed to locate the subclass: '#{type_name}'. " +
+ "This error is raised because the column '#{inheritance_column}' is reserved for storing the class in case of inheritance. " +
+ "Please rename this column if you didn't intend it to be used for storing the inheritance class " +
+ "or overwrite #{name}.inheritance_column to use another column for that information."
+ end
+
+ def type_condition(table = arel_table)
+ sti_column = table[inheritance_column]
+ sti_names = ([self] + descendants).map { |model| model.sti_name }
+
+ sti_column.in(sti_names)
+ end
+
+ # Detect the subclass from the inheritance column of attrs. If the inheritance column value
+      # is not self or a valid subclass, raises ActiveRecord::SubclassNotFound.
+      # If this is a StrongParameters hash, and access to inheritance_column is not permitted,
+      # this will ignore the inheritance column and return nil.
+ def subclass_from_attributes?(attrs)
+ columns_hash.include?(inheritance_column) && attrs.is_a?(Hash)
+ end
+
+ def subclass_from_attributes(attrs)
+ subclass_name = attrs.with_indifferent_access[inheritance_column]
+
+ if subclass_name.present? && subclass_name != self.name
+ subclass = subclass_name.safe_constantize
+
+ unless descendants.include?(subclass)
+ raise ActiveRecord::SubclassNotFound.new("Invalid single-table inheritance type: #{subclass_name} is not a subclass of #{name}")
+ end
+
+ subclass
+ end
+ end
+ end
+
+ def initialize_dup(other)
+ super
+ ensure_proper_type
+ end
+
+ private
+
+ def initialize_internals_callback
+ super
+ ensure_proper_type
+ end
+
+    # Sets the attribute used for single table inheritance to this class name if this class is not
+    # a direct ActiveRecord::Base descendant.
+ # Considering the hierarchy Reply < Message < ActiveRecord::Base, this makes it possible to
+ # do Reply.new without having to set <tt>Reply[Reply.inheritance_column] = "Reply"</tt> yourself.
+ # No such attribute would be set for objects of the Message class in that example.
+ def ensure_proper_type
+ klass = self.class
+ if klass.finder_needs_type_condition?
+ write_attribute(klass.inheritance_column, klass.sti_name)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/integration.rb b/activerecord/lib/active_record/integration.rb
new file mode 100644
index 0000000000..31e2518540
--- /dev/null
+++ b/activerecord/lib/active_record/integration.rb
@@ -0,0 +1,113 @@
+require 'active_support/core_ext/string/filters'
+
+module ActiveRecord
+ module Integration
+ extend ActiveSupport::Concern
+
+ included do
+ ##
+ # :singleton-method:
+ # Indicates the format used to generate the timestamp in the cache key.
+ # Accepts any of the symbols in <tt>Time::DATE_FORMATS</tt>.
+ #
+ # This is +:nsec+, by default.
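+      #
+      # A brief sketch of changing it (the chosen format is just one of the
+      # available <tt>Time::DATE_FORMATS</tt> keys):
+      #
+      #   class Product < ActiveRecord::Base
+      #     self.cache_timestamp_format = :number
+      #   end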
+ class_attribute :cache_timestamp_format, :instance_writer => false
+ self.cache_timestamp_format = :nsec
+ end
+
+    # Returns a String, which Action Pack uses for constructing a URL to this
+    # object. The default implementation returns this record's id as a String,
+    # or nil if this record is unsaved.
+ #
+ # For example, suppose that you have a User model, and that you have a
+ # <tt>resources :users</tt> route. Normally, +user_path+ will
+ # construct a path with the user object's 'id' in it:
+ #
+ # user = User.find_by(name: 'Phusion')
+ # user_path(user) # => "/users/1"
+ #
+ # You can override +to_param+ in your model to make +user_path+ construct
+ # a path using the user's name instead of the user's id:
+ #
+ # class User < ActiveRecord::Base
+ # def to_param # overridden
+ # name
+ # end
+ # end
+ #
+ # user = User.find_by(name: 'Phusion')
+ # user_path(user) # => "/users/Phusion"
+ def to_param
+ # We can't use alias_method here, because method 'id' optimizes itself on the fly.
+ id && id.to_s # Be sure to stringify the id for routes
+ end
+
+ # Returns a cache key that can be used to identify this record.
+ #
+ # Product.new.cache_key # => "products/new"
+ # Product.find(5).cache_key # => "products/5" (updated_at not available)
+ # Person.find(5).cache_key # => "people/5-20071224150000" (updated_at available)
+ #
+ # You can also pass a list of named timestamps, and the newest in the list will be
+ # used to generate the key:
+ #
+ # Person.find(5).cache_key(:updated_at, :last_reviewed_at)
+ def cache_key(*timestamp_names)
+ case
+ when new_record?
+ "#{self.class.model_name.cache_key}/new"
+ when timestamp_names.any?
+ timestamp = max_updated_column_timestamp(timestamp_names)
+ timestamp = timestamp.utc.to_s(cache_timestamp_format)
+ "#{self.class.model_name.cache_key}/#{id}-#{timestamp}"
+ when timestamp = max_updated_column_timestamp
+ timestamp = timestamp.utc.to_s(cache_timestamp_format)
+ "#{self.class.model_name.cache_key}/#{id}-#{timestamp}"
+ else
+ "#{self.class.model_name.cache_key}/#{id}"
+ end
+ end
+
+ module ClassMethods
+ # Defines your model's +to_param+ method to generate "pretty" URLs
+ # using +method_name+, which can be any attribute or method that
+ # responds to +to_s+.
+ #
+ # class User < ActiveRecord::Base
+ # to_param :name
+ # end
+ #
+ # user = User.find_by(name: 'Fancy Pants')
+ # user.id # => 123
+ # user_path(user) # => "/users/123-fancy-pants"
+ #
+ # Values longer than 20 characters will be truncated. The value
+ # is truncated word by word.
+ #
+ # user = User.find_by(name: 'David HeinemeierHansson')
+ # user.id # => 125
+ # user_path(user) # => "/users/125-david"
+ #
+ # Because the generated param begins with the record's +id+, it is
+ # suitable for passing to +find+. In a controller, for example:
+ #
+ # params[:id] # => "123-fancy-pants"
+ # User.find(params[:id]).id # => 123
+ def to_param(method_name = nil)
+ if method_name.nil?
+ super()
+ else
+ define_method :to_param do
+ if (default = super()) &&
+ (result = send(method_name).to_s).present? &&
+ (param = result.squish.truncate(20, separator: /\s/, omission: nil).parameterize).present?
+ "#{default}-#{param}"
+ else
+ default
+ end
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/locale/en.yml b/activerecord/lib/active_record/locale/en.yml
new file mode 100644
index 0000000000..b1fbd38622
--- /dev/null
+++ b/activerecord/lib/active_record/locale/en.yml
@@ -0,0 +1,47 @@
+en:
+  # Attribute names common to most models
+ #attributes:
+ #created_at: "Created at"
+ #updated_at: "Updated at"
+
+ # Default error messages
+ errors:
+ messages:
+ taken: "has already been taken"
+
+ # Active Record models configuration
+ activerecord:
+ errors:
+ messages:
+ record_invalid: "Validation failed: %{errors}"
+ restrict_dependent_destroy:
+ one: "Cannot delete record because a dependent %{record} exists"
+ many: "Cannot delete record because dependent %{record} exist"
+ # Append your own errors here or at the model/attributes scope.
+
+  # You can define your own errors for models or model attributes.
+ # The values :model, :attribute and :value are always available for interpolation.
+ #
+ # For example,
+ # models:
+ # user:
+ # blank: "This is a custom blank message for %{model}: %{attribute}"
+ # attributes:
+ # login:
+ # blank: "This is a custom blank message for User login"
+ # Will define custom blank validation message for User model and
+ # custom blank validation message for login attribute of User model.
+ #models:
+
+ # Translate model names. Used in Model.human_name().
+ #models:
+ # For example,
+ # user: "Dude"
+ # will translate User model name to "Dude"
+
+ # Translate model attribute names. Used in Model.human_attribute_name(attribute).
+ #attributes:
+ # For example,
+ # user:
+ # login: "Handle"
+ # will translate User attribute "login" as "Handle"
diff --git a/activerecord/lib/active_record/locking/optimistic.rb b/activerecord/lib/active_record/locking/optimistic.rb
new file mode 100644
index 0000000000..52eeb8ae1f
--- /dev/null
+++ b/activerecord/lib/active_record/locking/optimistic.rb
@@ -0,0 +1,204 @@
+module ActiveRecord
+ module Locking
+ # == What is Optimistic Locking
+ #
+ # Optimistic locking allows multiple users to access the same record for edits, and assumes a minimum of
+ # conflicts with the data. It does this by checking whether another process has made changes to a record since
+    # it was opened; an <tt>ActiveRecord::StaleObjectError</tt> exception is raised if that has occurred
+ # and the update is ignored.
+ #
+ # Check out <tt>ActiveRecord::Locking::Pessimistic</tt> for an alternative.
+ #
+ # == Usage
+ #
+ # Active Records support optimistic locking if the field +lock_version+ is present. Each update to the
+ # record increments the +lock_version+ column and the locking facilities ensure that records instantiated twice
+ # will let the last one saved raise a +StaleObjectError+ if the first was also updated. Example:
+ #
+ # p1 = Person.find(1)
+ # p2 = Person.find(1)
+ #
+ # p1.first_name = "Michael"
+ # p1.save
+ #
+ # p2.first_name = "should fail"
+    #   p2.save # Raises an ActiveRecord::StaleObjectError
+ #
+ # Optimistic locking will also check for stale data when objects are destroyed. Example:
+ #
+ # p1 = Person.find(1)
+ # p2 = Person.find(1)
+ #
+ # p1.first_name = "Michael"
+ # p1.save
+ #
+    #   p2.destroy # Raises an ActiveRecord::StaleObjectError
+ #
+ # You're then responsible for dealing with the conflict by rescuing the exception and either rolling back, merging,
+    # or otherwise applying the business logic needed to resolve the conflict.
+ #
+ # This locking mechanism will function inside a single Ruby process. To make it work across all
+ # web requests, the recommended approach is to add +lock_version+ as a hidden field to your form.
+ #
+ # This behavior can be turned off by setting <tt>ActiveRecord::Base.lock_optimistically = false</tt>.
+ # To override the name of the +lock_version+ column, set the <tt>locking_column</tt> class attribute:
+ #
+ # class Person < ActiveRecord::Base
+ # self.locking_column = :lock_person
+ # end
+ #
+ module Optimistic
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :lock_optimistically, instance_writer: false
+ self.lock_optimistically = true
+ end
+
+ def locking_enabled? #:nodoc:
+ self.class.locking_enabled?
+ end
+
+ private
+ def increment_lock
+ lock_col = self.class.locking_column
+ previous_lock_value = send(lock_col).to_i
+ send(lock_col + '=', previous_lock_value + 1)
+ end
+
+ def _update_record(attribute_names = self.attribute_names) #:nodoc:
+ return super unless locking_enabled?
+ return 0 if attribute_names.empty?
+
+ lock_col = self.class.locking_column
+ previous_lock_value = send(lock_col).to_i
+ increment_lock
+
+ attribute_names += [lock_col]
+ attribute_names.uniq!
+
+ begin
+ relation = self.class.unscoped
+
+ stmt = relation.where(
+ relation.table[self.class.primary_key].eq(id).and(
+ relation.table[lock_col].eq(self.class.quote_value(previous_lock_value, column_for_attribute(lock_col)))
+ )
+ ).arel.compile_update(
+ arel_attributes_with_values_for_update(attribute_names),
+ self.class.primary_key
+ )
+
+ affected_rows = self.class.connection.update stmt
+
+ unless affected_rows == 1
+ raise ActiveRecord::StaleObjectError.new(self, "update")
+ end
+
+ affected_rows
+
+ # If something went wrong, revert the version.
+ rescue Exception
+ send(lock_col + '=', previous_lock_value)
+ raise
+ end
+ end
+
+ def destroy_row
+ affected_rows = super
+
+ if locking_enabled? && affected_rows != 1
+ raise ActiveRecord::StaleObjectError.new(self, "destroy")
+ end
+
+ affected_rows
+ end
+
+ def relation_for_destroy
+ relation = super
+
+ if locking_enabled?
+ column_name = self.class.locking_column
+ column = self.class.columns_hash[column_name]
+ substitute = self.class.connection.substitute_at(column, relation.bind_values.length)
+
+ relation = relation.where(self.class.arel_table[column_name].eq(substitute))
+ relation.bind_values << [column, self[column_name].to_i]
+ end
+
+ relation
+ end
+
+ module ClassMethods
+ DEFAULT_LOCKING_COLUMN = 'lock_version'
+
+ # Returns true if the +lock_optimistically+ flag is set to true
+ # (which it is, by default) and the table includes the
+ # +locking_column+ column (defaults to +lock_version+).
+ def locking_enabled?
+ lock_optimistically && columns_hash[locking_column]
+ end
+
+ # Set the column to use for optimistic locking. Defaults to +lock_version+.
+ def locking_column=(value)
+ clear_caches_calculated_from_columns
+ @locking_column = value.to_s
+ end
+
+ # The version column used for optimistic locking. Defaults to +lock_version+.
+ def locking_column
+ reset_locking_column unless defined?(@locking_column)
+ @locking_column
+ end
+
+ # Reset the column used for optimistic locking back to the +lock_version+ default.
+ def reset_locking_column
+ self.locking_column = DEFAULT_LOCKING_COLUMN
+ end
+
+ # Make sure the lock version column gets updated when counters are
+ # updated.
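+        #
+        # For example (the counter column name here is only illustrative), this
+        # would also increment the locking column (+lock_version+ by default)
+        # by 1 when locking is enabled:
+        #
+        #   Person.update_counters(1, comments_count: 1)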
+ def update_counters(id, counters)
+ counters = counters.merge(locking_column => 1) if locking_enabled?
+ super
+ end
+
+ private
+
+ # We need to apply this decorator here, rather than on module inclusion. The closure
+ # created by the matcher would otherwise evaluate for `ActiveRecord::Base`, not the
+        # subclass being decorated. As such, changes to `lock_optimistically` or
+ # `locking_column` would not be picked up.
+ def inherited(subclass)
+ subclass.class_eval do
+ is_lock_column = ->(name, _) { lock_optimistically && name == locking_column }
+ decorate_matching_attribute_types(is_lock_column, :_optimistic_locking) do |type|
+ LockingType.new(type)
+ end
+ end
+ super
+ end
+ end
+ end
+
+ class LockingType < SimpleDelegator # :nodoc:
+ def type_cast_from_database(value)
+ # `nil` *should* be changed to 0
+ super.to_i
+ end
+
+ def changed?(old_value, *)
+ # Ensure we save if the default was `nil`
+ super || old_value == 0
+ end
+
+ def init_with(coder)
+ __setobj__(coder['subtype'])
+ end
+
+ def encode_with(coder)
+ coder['subtype'] = __getobj__
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/locking/pessimistic.rb b/activerecord/lib/active_record/locking/pessimistic.rb
new file mode 100644
index 0000000000..ff7102d35b
--- /dev/null
+++ b/activerecord/lib/active_record/locking/pessimistic.rb
@@ -0,0 +1,77 @@
+module ActiveRecord
+ module Locking
+ # Locking::Pessimistic provides support for row-level locking using
+ # SELECT ... FOR UPDATE and other lock types.
+ #
+ # Chain <tt>ActiveRecord::Base#find</tt> to <tt>ActiveRecord::QueryMethods#lock</tt> to obtain an exclusive
+ # lock on the selected rows:
+ # # select * from accounts where id=1 for update
+ # Account.lock.find(1)
+ #
+ # Call <tt>lock('some locking clause')</tt> to use a database-specific locking clause
+ # of your own such as 'LOCK IN SHARE MODE' or 'FOR UPDATE NOWAIT'. Example:
+ #
+ # Account.transaction do
+ # # select * from accounts where name = 'shugo' limit 1 for update
+ # shugo = Account.where("name = 'shugo'").lock(true).first
+ # yuko = Account.where("name = 'yuko'").lock(true).first
+ # shugo.balance -= 100
+ # shugo.save!
+ # yuko.balance += 100
+ # yuko.save!
+ # end
+ #
+    # You can also use the <tt>ActiveRecord::Base#lock!</tt> method to lock one record by id.
+ # This may be better if you don't need to lock every row. Example:
+ #
+ # Account.transaction do
+ # # select * from accounts where ...
+ # accounts = Account.where(...)
+ # account1 = accounts.detect { |account| ... }
+ # account2 = accounts.detect { |account| ... }
+ # # select * from accounts where id=? for update
+ # account1.lock!
+ # account2.lock!
+ # account1.balance -= 100
+ # account1.save!
+ # account2.balance += 100
+ # account2.save!
+ # end
+ #
+ # You can start a transaction and acquire the lock in one go by calling
+ # <tt>with_lock</tt> with a block. The block is called from within
+    # a transaction; the object is already locked. Example:
+ #
+ # account = Account.first
+ # account.with_lock do
+ # # This block is called within a transaction,
+ # # account is already locked.
+ # account.balance -= 100
+ # account.save!
+ # end
+ #
+ # Database-specific information on row locking:
+ # MySQL: http://dev.mysql.com/doc/refman/5.1/en/innodb-locking-reads.html
+ # PostgreSQL: http://www.postgresql.org/docs/current/interactive/sql-select.html#SQL-FOR-UPDATE-SHARE
+ module Pessimistic
+ # Obtain a row lock on this record. Reloads the record to obtain the requested
+      # lock. Pass an SQL locking clause to append to the end of the SELECT statement
+ # or pass true for "FOR UPDATE" (the default, an exclusive row lock). Returns
+ # the locked record.
+ def lock!(lock = true)
+ reload(:lock => lock) if persisted?
+ self
+ end
+
+ # Wraps the passed block in a transaction, locking the object
+ # before yielding. You can pass the SQL locking clause
+ # as argument (see <tt>lock!</tt>).
+ def with_lock(lock = true)
+ transaction do
+ lock!(lock)
+ yield
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/log_subscriber.rb b/activerecord/lib/active_record/log_subscriber.rb
new file mode 100644
index 0000000000..eb64d197f0
--- /dev/null
+++ b/activerecord/lib/active_record/log_subscriber.rb
@@ -0,0 +1,75 @@
+module ActiveRecord
+ class LogSubscriber < ActiveSupport::LogSubscriber
+ IGNORE_PAYLOAD_NAMES = ["SCHEMA", "EXPLAIN"]
+
+ def self.runtime=(value)
+ ActiveRecord::RuntimeRegistry.sql_runtime = value
+ end
+
+ def self.runtime
+ ActiveRecord::RuntimeRegistry.sql_runtime ||= 0
+ end
+
+ def self.reset_runtime
+ rt, self.runtime = runtime, 0
+ rt
+ end
+
+ def initialize
+ super
+ @odd = false
+ end
+
+ def render_bind(column, value)
+ if column
+ if column.binary?
+ # This specifically deals with the PG adapter that casts bytea columns into a Hash.
+ value = value[:value] if value.is_a?(Hash)
+ value = value ? "<#{value.bytesize} bytes of binary data>" : "<NULL binary data>"
+ end
+
+ [column.name, value]
+ else
+ [nil, value]
+ end
+ end
+
+ def sql(event)
+ self.class.runtime += event.duration
+ return unless logger.debug?
+
+ payload = event.payload
+
+ return if IGNORE_PAYLOAD_NAMES.include?(payload[:name])
+
+ name = "#{payload[:name]} (#{event.duration.round(1)}ms)"
+ sql = payload[:sql]
+ binds = nil
+
+ unless (payload[:binds] || []).empty?
+ binds = " " + payload[:binds].map { |col,v|
+ render_bind(col, v)
+ }.inspect
+ end
+
+ if odd?
+ name = color(name, CYAN, true)
+ sql = color(sql, nil, true)
+ else
+ name = color(name, MAGENTA, true)
+ end
+
+ debug " #{name} #{sql}#{binds}"
+ end
+
+ def odd?
+ @odd = !@odd
+ end
+
+ def logger
+ ActiveRecord::Base.logger
+ end
+ end
+end
+
+ActiveRecord::LogSubscriber.attach_to :active_record
diff --git a/activerecord/lib/active_record/migration.rb b/activerecord/lib/active_record/migration.rb
new file mode 100644
index 0000000000..a6847e28c2
--- /dev/null
+++ b/activerecord/lib/active_record/migration.rb
@@ -0,0 +1,1045 @@
+require "active_support/core_ext/module/attribute_accessors"
+require 'set'
+
+module ActiveRecord
+ class MigrationError < ActiveRecordError#:nodoc:
+ def initialize(message = nil)
+ message = "\n\n#{message}\n\n" if message
+ super
+ end
+ end
+
+ # Exception that can be raised to stop migrations from going backwards.
+ class IrreversibleMigration < MigrationError
+ end
+
+ class DuplicateMigrationVersionError < MigrationError#:nodoc:
+ def initialize(version)
+ super("Multiple migrations have the version number #{version}")
+ end
+ end
+
+ class DuplicateMigrationNameError < MigrationError#:nodoc:
+ def initialize(name)
+ super("Multiple migrations have the name #{name}")
+ end
+ end
+
+ class UnknownMigrationVersionError < MigrationError #:nodoc:
+ def initialize(version)
+ super("No migration with version number #{version}")
+ end
+ end
+
+ class IllegalMigrationNameError < MigrationError#:nodoc:
+ def initialize(name)
+ super("Illegal name for migration file: #{name}\n\t(only lower case letters, numbers, and '_' allowed)")
+ end
+ end
+
+ class PendingMigrationError < MigrationError#:nodoc:
+ def initialize
+ if defined?(Rails)
+ super("Migrations are pending. To resolve this issue, run:\n\n\tbin/rake db:migrate RAILS_ENV=#{::Rails.env}")
+ else
+ super("Migrations are pending. To resolve this issue, run:\n\n\tbin/rake db:migrate")
+ end
+ end
+ end
+
+ # = Active Record Migrations
+ #
+ # Migrations can manage the evolution of a schema used by several physical
+ # databases. It's a solution to the common problem of adding a field to make
+ # a new feature work in your local database, but being unsure of how to
+ # push that change to other developers and to the production server. With
+ # migrations, you can describe the transformations in self-contained classes
+ # that can be checked into version control systems and executed against
+ # another database that might be one, two, or five versions behind.
+ #
+ # Example of a simple migration:
+ #
+ # class AddSsl < ActiveRecord::Migration
+ # def up
+ # add_column :accounts, :ssl_enabled, :boolean, default: true
+ # end
+ #
+ # def down
+ # remove_column :accounts, :ssl_enabled
+ # end
+ # end
+ #
+ # This migration will add a boolean flag to the accounts table and remove it
+ # if you're backing out of the migration. It shows how all migrations have
+  # two methods +up+ and +down+ that describe the transformations
+  # required to implement or remove the migration. These methods can consist
+  # of migration-specific methods like +add_column+ and +remove_column+,
+ # but may also contain regular Ruby code for generating data needed for the
+ # transformations.
+ #
+ # Example of a more complex migration that also needs to initialize data:
+ #
+ # class AddSystemSettings < ActiveRecord::Migration
+ # def up
+ # create_table :system_settings do |t|
+ # t.string :name
+ # t.string :label
+ # t.text :value
+ # t.string :type
+ # t.integer :position
+ # end
+ #
+ # SystemSetting.create name: 'notice',
+ # label: 'Use notice?',
+ # value: 1
+ # end
+ #
+ # def down
+ # drop_table :system_settings
+ # end
+ # end
+ #
+ # This migration first adds the +system_settings+ table, then creates the very
+ # first row in it using the Active Record model that relies on the table. It
+ # also uses the more advanced +create_table+ syntax where you can specify a
+ # complete table schema in one block call.
+ #
+ # == Available transformations
+ #
+ # * <tt>create_table(name, options)</tt>: Creates a table called +name+ and
+ # makes the table object available to a block that can then add columns to it,
+ # following the same format as +add_column+. See example above. The options hash
+ # is for fragments like "DEFAULT CHARSET=UTF-8" that are appended to the create
+ # table definition.
+ # * <tt>drop_table(name)</tt>: Drops the table called +name+.
+  # * <tt>change_table(name, options)</tt>: Allows you to make column alterations to
+ # the table called +name+. It makes the table object available to a block that
+ # can then add/remove columns, indexes or foreign keys to it.
+ # * <tt>rename_table(old_name, new_name)</tt>: Renames the table called +old_name+
+ # to +new_name+.
+ # * <tt>add_column(table_name, column_name, type, options)</tt>: Adds a new column
+ # to the table called +table_name+
+ # named +column_name+ specified to be one of the following types:
+ # <tt>:string</tt>, <tt>:text</tt>, <tt>:integer</tt>, <tt>:float</tt>,
+ # <tt>:decimal</tt>, <tt>:datetime</tt>, <tt>:timestamp</tt>, <tt>:time</tt>,
+ # <tt>:date</tt>, <tt>:binary</tt>, <tt>:boolean</tt>. A default value can be
+ # specified by passing an +options+ hash like <tt>{ default: 11 }</tt>.
+ # Other options include <tt>:limit</tt> and <tt>:null</tt> (e.g.
+ # <tt>{ limit: 50, null: false }</tt>) -- see
+ # ActiveRecord::ConnectionAdapters::TableDefinition#column for details.
+ # * <tt>rename_column(table_name, column_name, new_column_name)</tt>: Renames
+ # a column but keeps the type and content.
+ # * <tt>change_column(table_name, column_name, type, options)</tt>: Changes
+ # the column to a different type using the same parameters as add_column.
+ # * <tt>remove_column(table_name, column_name, type, options)</tt>: Removes the column
+ # named +column_name+ from the table called +table_name+.
+ # * <tt>add_index(table_name, column_names, options)</tt>: Adds a new index
+  #   on +column_names+, named after the table and columns by default. Other options include
+ # <tt>:name</tt>, <tt>:unique</tt> (e.g.
+ # <tt>{ name: 'users_name_index', unique: true }</tt>) and <tt>:order</tt>
+ # (e.g. <tt>{ order: { name: :desc } }</tt>).
+ # * <tt>remove_index(table_name, column: column_name)</tt>: Removes the index
+ # specified by +column_name+.
+ # * <tt>remove_index(table_name, name: index_name)</tt>: Removes the index
+ # specified by +index_name+.
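+  #
+  # For instance, a migration combining a few of these transformations might
+  # look like this (the table, column, and index names are purely illustrative):
+  #
+  #   class AddUsernameIndexToAccounts < ActiveRecord::Migration
+  #     def up
+  #       rename_column :accounts, :login, :username
+  #       add_index :accounts, :username, unique: true
+  #     end
+  #
+  #     def down
+  #       remove_index :accounts, column: :username
+  #       rename_column :accounts, :username, :login
+  #     end
+  #   end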
+ #
+ # == Irreversible transformations
+ #
+ # Some transformations are destructive in a manner that cannot be reversed.
+ # Migrations of that kind should raise an <tt>ActiveRecord::IrreversibleMigration</tt>
+ # exception in their +down+ method.
+ #
+ # == Running migrations from within Rails
+ #
+ # The Rails package has several tools to help create and apply migrations.
+ #
+ # To generate a new migration, you can use
+ # rails generate migration MyNewMigration
+ #
+ # where MyNewMigration is the name of your migration. The generator will
+ # create an empty migration file <tt>timestamp_my_new_migration.rb</tt>
+ # in the <tt>db/migrate/</tt> directory where <tt>timestamp</tt> is the
+ # UTC formatted date and time that the migration was generated.
+ #
+ # You may then edit the <tt>up</tt> and <tt>down</tt> methods of
+ # MyNewMigration.
+ #
+ # There is a special syntactic shortcut to generate migrations that add fields to a table.
+ #
+ # rails generate migration add_fieldname_to_tablename fieldname:string
+ #
+  # This will generate the file <tt>timestamp_add_fieldname_to_tablename.rb</tt>, which will look like this:
+  #
+ # class AddFieldnameToTablename < ActiveRecord::Migration
+ # def up
+ # add_column :tablenames, :fieldname, :string
+ # end
+ #
+ # def down
+ # remove_column :tablenames, :fieldname
+ # end
+ # end
+ #
+ # To run migrations against the currently configured database, use
+ # <tt>rake db:migrate</tt>. This will update the database by running all of the
+ # pending migrations, creating the <tt>schema_migrations</tt> table
+ # (see "About the schema_migrations table" section below) if missing. It will also
+ # invoke the db:schema:dump task, which will update your db/schema.rb file
+ # to match the structure of your database.
+ #
+ # To roll the database back to a previous migration version, use
+ # <tt>rake db:migrate VERSION=X</tt> where <tt>X</tt> is the version to which
+ # you wish to downgrade. If any of the migrations throw an
+ # <tt>ActiveRecord::IrreversibleMigration</tt> exception, that step will fail and you'll
+ # have some manual work to do.
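+  #
+  # For example, to roll back to a particular (illustrative) version:
+  #
+  #   rake db:migrate VERSION=20080906120000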
+ #
+ # == Database support
+ #
+ # Migrations are currently supported in MySQL, PostgreSQL, SQLite,
+ # SQL Server, and Oracle (all supported databases except DB2).
+ #
+ # == More examples
+ #
+ # Not all migrations change the schema. Some just fix the data:
+ #
+ # class RemoveEmptyTags < ActiveRecord::Migration
+ # def up
+ # Tag.all.each { |tag| tag.destroy if tag.pages.empty? }
+ # end
+ #
+ # def down
+ # # not much we can do to restore deleted data
+ # raise ActiveRecord::IrreversibleMigration, "Can't recover the deleted tags"
+ # end
+ # end
+ #
+ # Others remove columns when they migrate up instead of down:
+ #
+ # class RemoveUnnecessaryItemAttributes < ActiveRecord::Migration
+ # def up
+ # remove_column :items, :incomplete_items_count
+ # remove_column :items, :completed_items_count
+ # end
+ #
+ # def down
+  #       add_column :items, :incomplete_items_count, :integer
+  #       add_column :items, :completed_items_count, :integer
+ # end
+ # end
+ #
+ # And sometimes you need to do something in SQL not abstracted directly by migrations:
+ #
+ # class MakeJoinUnique < ActiveRecord::Migration
+ # def up
+ # execute "ALTER TABLE `pages_linked_pages` ADD UNIQUE `page_id_linked_page_id` (`page_id`,`linked_page_id`)"
+ # end
+ #
+ # def down
+ # execute "ALTER TABLE `pages_linked_pages` DROP INDEX `page_id_linked_page_id`"
+ # end
+ # end
+ #
+ # == Using a model after changing its table
+ #
+ # Sometimes you'll want to add a column in a migration and populate it
+ # immediately after. In that case, you'll need to make a call to
+  # <tt>Base.reset_column_information</tt> in order to ensure that the model has the
+ # latest column data from after the new column was added. Example:
+ #
+ # class AddPeopleSalary < ActiveRecord::Migration
+ # def up
+ # add_column :people, :salary, :integer
+ # Person.reset_column_information
+ # Person.all.each do |p|
+ # p.update_attribute :salary, SalaryCalculator.compute(p)
+ # end
+ # end
+ # end
+ #
+ # == Controlling verbosity
+ #
+ # By default, migrations will describe the actions they are taking, writing
+ # them to the console as they happen, along with benchmarks describing how
+ # long each step took.
+ #
+  # You can quiet them down by setting <tt>ActiveRecord::Migration.verbose = false</tt>.
+ #
+ # You can also insert your own messages and benchmarks by using the +say_with_time+
+ # method:
+ #
+ # def up
+ # ...
+ # say_with_time "Updating salaries..." do
+ # Person.all.each do |p|
+ # p.update_attribute :salary, SalaryCalculator.compute(p)
+ # end
+ # end
+ # ...
+ # end
+ #
+ # The phrase "Updating salaries..." would then be printed, along with the
+ # benchmark for the block when the block completes.
+ #
+ # == About the schema_migrations table
+ #
+ # Rails versions 2.0 and prior used to create a table called
+ # <tt>schema_info</tt> when using migrations. This table contained the
+ # version of the schema as of the last applied migration.
+ #
+ # Starting with Rails 2.1, the <tt>schema_info</tt> table is
+ # (automatically) replaced by the <tt>schema_migrations</tt> table, which
+ # contains the version numbers of all the migrations applied.
+ #
+ # As a result, it is now possible to add migration files that are numbered
+ # lower than the current schema version: when migrating up, those
+ # never-applied "interleaved" migrations will be automatically applied, and
+ # when migrating down, never-applied "interleaved" migrations will be skipped.
+ #
+ # == Timestamped Migrations
+ #
+ # By default, Rails generates migrations that look like:
+ #
+ # 20080717013526_your_migration_name.rb
+ #
+ # The prefix is a generation timestamp (in UTC).
+ #
+ # If you'd prefer to use numeric prefixes, you can turn timestamped migrations
+ # off by setting:
+ #
+ # config.active_record.timestamped_migrations = false
+ #
+  # in <tt>config/application.rb</tt>.
+ #
+ # == Reversible Migrations
+ #
+  # Starting with Rails 3.1, you can define reversible migrations.
+ # Reversible migrations are migrations that know how to go +down+ for you.
+ # You simply supply the +up+ logic, and the Migration system will figure out
+ # how to execute the down commands for you.
+ #
+ # To define a reversible migration, define the +change+ method in your
+ # migration like this:
+ #
+ # class TenderloveMigration < ActiveRecord::Migration
+ # def change
+ # create_table(:horses) do |t|
+ # t.column :content, :text
+ # t.column :remind_at, :datetime
+ # end
+ # end
+ # end
+ #
+ # This migration will create the horses table for you on the way up, and
+ # automatically figure out how to drop the table on the way down.
+ #
+  # Some commands, like +remove_column+, cannot be reversed. If you care to
+ # define how to move up and down in these cases, you should define the +up+
+ # and +down+ methods as before.
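+  #
+  # For example, a migration that removes a column can spell out both directions
+  # explicitly (the table, column, and type below are illustrative):
+  #
+  #   class RemoveOfficeAddress < ActiveRecord::Migration
+  #     def up
+  #       remove_column :people, :office_address
+  #     end
+  #
+  #     def down
+  #       add_column :people, :office_address, :string
+  #     end
+  #   end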
+ #
+ # If a command cannot be reversed, an
+ # <tt>ActiveRecord::IrreversibleMigration</tt> exception will be raised when
+ # the migration is moving down.
+ #
+ # For a list of commands that are reversible, please see
+ # <tt>ActiveRecord::Migration::CommandRecorder</tt>.
+ #
+ # == Transactional Migrations
+ #
+ # If the database adapter supports DDL transactions, all migrations will
+ # automatically be wrapped in a transaction. There are queries that you
+ # can't execute inside a transaction though, and for these situations
+ # you can turn the automatic transactions off.
+ #
+ # class ChangeEnum < ActiveRecord::Migration
+ # disable_ddl_transaction!
+ #
+ # def up
+ # execute "ALTER TYPE model_size ADD VALUE 'new_value'"
+ # end
+ # end
+ #
+ # Remember that you can still open your own transactions, even if you
+ # are in a Migration with <tt>self.disable_ddl_transaction!</tt>.
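+  #
+  # For example, a migration along these lines (the SQL and table name are
+  # illustrative) disables the automatic DDL transaction but still wraps part
+  # of its work in an explicit one:
+  #
+  #   class BackfillPlans < ActiveRecord::Migration
+  #     disable_ddl_transaction!
+  #
+  #     def up
+  #       ActiveRecord::Base.transaction do
+  #         # these statements still run atomically
+  #         execute "UPDATE accounts SET plan = 'free' WHERE plan IS NULL"
+  #       end
+  #     end
+  #   end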
+ class Migration
+ autoload :CommandRecorder, 'active_record/migration/command_recorder'
+
+    # This class is used to verify that all migrations have been run before
+    # loading a web page if <tt>config.active_record.migration_error</tt> is set to +:page_load+.
+ class CheckPending
+ def initialize(app)
+ @app = app
+ @last_check = 0
+ end
+
+ def call(env)
+ if connection.supports_migrations?
+ mtime = ActiveRecord::Migrator.last_migration.mtime.to_i
+ if @last_check < mtime
+ ActiveRecord::Migration.check_pending!(connection)
+ @last_check = mtime
+ end
+ end
+ @app.call(env)
+ end
+
+ private
+
+ def connection
+ ActiveRecord::Base.connection
+ end
+ end
+
+ class << self
+ attr_accessor :delegate # :nodoc:
+ attr_accessor :disable_ddl_transaction # :nodoc:
+
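+      # Raises <tt>ActiveRecord::PendingMigrationError</tt> if any migrations are pending.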
+ def check_pending!(connection = Base.connection)
+ raise ActiveRecord::PendingMigrationError if ActiveRecord::Migrator.needs_migration?(connection)
+ end
+
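+      # Loads the current schema if there are pending migrations, then verifies
+      # that nothing is still pending.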
+ def load_schema_if_pending!
+ if ActiveRecord::Migrator.needs_migration?
+ ActiveRecord::Tasks::DatabaseTasks.load_schema_current
+ check_pending!
+ end
+ end
+
+ def maintain_test_schema! # :nodoc:
+ if ActiveRecord::Base.maintain_test_schema
+ suppress_messages { load_schema_if_pending! }
+ end
+ end
+
+ def method_missing(name, *args, &block) # :nodoc:
+ (delegate || superclass.delegate).send(name, *args, &block)
+ end
+
+ def migrate(direction)
+ new.migrate direction
+ end
+
+ # Disable DDL transactions for this migration.
+ def disable_ddl_transaction!
+ @disable_ddl_transaction = true
+ end
+ end
+
+ def disable_ddl_transaction # :nodoc:
+ self.class.disable_ddl_transaction
+ end
+
+ cattr_accessor :verbose
+ attr_accessor :name, :version
+
+ def initialize(name = self.class.name, version = nil)
+ @name = name
+ @version = version
+ @connection = nil
+ end
+
+ self.verbose = true
+ # instantiate the delegate object after initialize is defined
+ self.delegate = new
+
+ # Reverses the migration commands for the given block and
+ # the given migrations.
+ #
+ # The following migration will remove the table 'horses'
+ # and create the table 'apples' on the way up, and the reverse
+ # on the way down.
+ #
+ # class FixTLMigration < ActiveRecord::Migration
+ # def change
+ # revert do
+ # create_table(:horses) do |t|
+ # t.text :content
+ # t.datetime :remind_at
+ # end
+ # end
+ # create_table(:apples) do |t|
+ # t.string :variety
+ # end
+ # end
+ # end
+ #
+ # Or equivalently, if +TenderloveMigration+ is defined as in the
+ # documentation for Migration:
+ #
+ # require_relative '2012121212_tenderlove_migration'
+ #
+ # class FixupTLMigration < ActiveRecord::Migration
+ # def change
+ # revert TenderloveMigration
+ #
+ # create_table(:apples) do |t|
+ # t.string :variety
+ # end
+ # end
+ # end
+ #
+ # This command can be nested.
+ def revert(*migration_classes)
+ run(*migration_classes.reverse, revert: true) unless migration_classes.empty?
+ if block_given?
+ if @connection.respond_to? :revert
+ @connection.revert { yield }
+ else
+ recorder = CommandRecorder.new(@connection)
+ @connection = recorder
+ suppress_messages do
+ @connection.revert { yield }
+ end
+ @connection = recorder.delegate
+ recorder.commands.each do |cmd, args, block|
+ send(cmd, *args, &block)
+ end
+ end
+ end
+ end
+
+ def reverting?
+ @connection.respond_to?(:reverting) && @connection.reverting
+ end
+
+ class ReversibleBlockHelper < Struct.new(:reverting) # :nodoc:
+ def up
+ yield unless reverting
+ end
+
+ def down
+ yield if reverting
+ end
+ end
+
+ # Used to specify an operation that can be run in one direction or another.
+ # Call the methods +up+ and +down+ of the yielded object to run a block
+ # only in one given direction.
+ # The whole block will be called in the right order within the migration.
+ #
+ # In the following example, the looping on users will always be done
+ # when the three columns 'first_name', 'last_name' and 'full_name' exist,
+ # even when migrating down:
+ #
+ # class SplitNameMigration < ActiveRecord::Migration
+ # def change
+ # add_column :users, :first_name, :string
+ # add_column :users, :last_name, :string
+ #
+ # reversible do |dir|
+ # User.reset_column_information
+ # User.all.each do |u|
+ # dir.up { u.first_name, u.last_name = u.full_name.split(' ') }
+ # dir.down { u.full_name = "#{u.first_name} #{u.last_name}" }
+ # u.save
+ # end
+ # end
+ #
+ # revert { add_column :users, :full_name, :string }
+ # end
+ # end
+ def reversible
+ helper = ReversibleBlockHelper.new(reverting?)
+ execute_block{ yield helper }
+ end
+
+ # Runs the given migration classes.
+ # Last argument can specify options:
+ # - :direction (default is :up)
+ # - :revert (default is false)
+ def run(*migration_classes)
+ opts = migration_classes.extract_options!
+ dir = opts[:direction] || :up
+ dir = (dir == :down ? :up : :down) if opts[:revert]
+ if reverting?
+        # If in a revert and going :up, say, we want to execute :down without
+        # inverting the individual commands again, so we wrap the run in another revert.
+ revert { run(*migration_classes, direction: dir, revert: true) }
+ else
+ migration_classes.each do |migration_class|
+ migration_class.new.exec_migration(@connection, dir)
+ end
+ end
+ end
+
+ def up
+ self.class.delegate = self
+ return unless self.class.respond_to?(:up)
+ self.class.up
+ end
+
+ def down
+ self.class.delegate = self
+ return unless self.class.respond_to?(:down)
+ self.class.down
+ end
+
+ # Execute this migration in the named direction
+ def migrate(direction)
+ return unless respond_to?(direction)
+
+ case direction
+ when :up then announce "migrating"
+ when :down then announce "reverting"
+ end
+
+ time = nil
+ ActiveRecord::Base.connection_pool.with_connection do |conn|
+ time = Benchmark.measure do
+ exec_migration(conn, direction)
+ end
+ end
+
+ case direction
+ when :up then announce "migrated (%.4fs)" % time.real; write
+ when :down then announce "reverted (%.4fs)" % time.real; write
+ end
+ end
+
+ def exec_migration(conn, direction)
+ @connection = conn
+ if respond_to?(:change)
+ if direction == :down
+ revert { change }
+ else
+ change
+ end
+ else
+ send(direction)
+ end
+ ensure
+ @connection = nil
+ end
+
+ def write(text="")
+ puts(text) if verbose
+ end
+
+ def announce(message)
+ text = "#{version} #{name}: #{message}"
+ length = [0, 75 - text.length].max
+ write "== %s %s" % [text, "=" * length]
+ end
+
+ def say(message, subitem=false)
+ write "#{subitem ? " ->" : "--"} #{message}"
+ end
+
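+    # Outputs +message+, runs the given block, and then reports how long it took.
+    # If the block returns an Integer, it is also reported as a row count.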
+ def say_with_time(message)
+ say(message)
+ result = nil
+ time = Benchmark.measure { result = yield }
+ say "%.4fs" % time.real, :subitem
+ say("#{result} rows", :subitem) if result.is_a?(Integer)
+ result
+ end
+
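+    # Runs the given block with +verbose+ disabled, restoring the previous setting afterwards.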
+ def suppress_messages
+ save, self.verbose = verbose, false
+ yield
+ ensure
+ self.verbose = save
+ end
+
+ def connection
+ @connection || ActiveRecord::Base.connection
+ end
+
+ def method_missing(method, *arguments, &block)
+      arg_list = arguments.map(&:inspect).join(', ')
+
+ say_with_time "#{method}(#{arg_list})" do
+ unless @connection.respond_to? :revert
+ unless arguments.empty? || [:execute, :enable_extension, :disable_extension].include?(method)
+ arguments[0] = proper_table_name(arguments.first, table_name_options)
+ if [:rename_table, :add_foreign_key].include?(method)
+ arguments[1] = proper_table_name(arguments.second, table_name_options)
+ end
+ end
+ end
+ return super unless connection.respond_to?(method)
+ connection.send(method, *arguments, &block)
+ end
+ end
+
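+    # Copies the migrations found in each of +sources+ (a hash mapping a scope
+    # to a migrations path) into +destination+, annotating each copy with its
+    # origin and giving it a fresh version number. Migrations whose names already
+    # exist in +destination+ are skipped. Returns the copied migrations.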
+ def copy(destination, sources, options = {})
+ copied = []
+
+ FileUtils.mkdir_p(destination) unless File.exist?(destination)
+
+ destination_migrations = ActiveRecord::Migrator.migrations(destination)
+ last = destination_migrations.last
+ sources.each do |scope, path|
+ source_migrations = ActiveRecord::Migrator.migrations(path)
+
+ source_migrations.each do |migration|
+ source = File.binread(migration.filename)
+ inserted_comment = "# This migration comes from #{scope} (originally #{migration.version})\n"
+ if /\A#.*\b(?:en)?coding:\s*\S+/ =~ source
+ # If we have a magic comment in the original migration,
+            # insert our comment after the first newline (the end of the magic comment line)
+            # so the magic comment keeps working.
+            # Note that magic comments must be on the first line (except for a shebang line).
+ source[/\n/] = "\n#{inserted_comment}"
+ else
+ source = "#{inserted_comment}#{source}"
+ end
+
+ if duplicate = destination_migrations.detect { |m| m.name == migration.name }
+ if options[:on_skip] && duplicate.scope != scope.to_s
+ options[:on_skip].call(scope, migration)
+ end
+ next
+ end
+
+ migration.version = next_migration_number(last ? last.version + 1 : 0).to_i
+ new_path = File.join(destination, "#{migration.version}_#{migration.name.underscore}.#{scope}.rb")
+ old_path, migration.filename = migration.filename, new_path
+ last = migration
+
+ File.binwrite(migration.filename, source)
+ copied << migration
+ options[:on_copy].call(scope, migration, old_path) if options[:on_copy]
+ destination_migrations << migration
+ end
+ end
+
+ copied
+ end
+
+ # Finds the correct table name given an Active Record object.
+    # Uses the Active Record object's own +table_name+, or the prefix and suffix
+    # from the +options+ passed in when the name does not respond to +table_name+.
+ def proper_table_name(name, options = {})
+ if name.respond_to? :table_name
+ name.table_name
+ else
+ "#{options[:table_name_prefix]}#{name}#{options[:table_name_suffix]}"
+ end
+ end
+
+ # Determines the version number of the next migration.
+ def next_migration_number(number)
+ if ActiveRecord::Base.timestamped_migrations
+ [Time.now.utc.strftime("%Y%m%d%H%M%S"), "%.14d" % number].max
+ else
+ SchemaMigration.normalize_migration_number(number)
+ end
+ end
+
+ def table_name_options(config = ActiveRecord::Base)
+ {
+ table_name_prefix: config.table_name_prefix,
+ table_name_suffix: config.table_name_suffix
+ }
+ end
+
+ private
+ def execute_block
+ if connection.respond_to? :execute_block
+ super # use normal delegation to record the block
+ else
+ yield
+ end
+ end
+ end
+
+ # MigrationProxy is used to defer loading of the actual migration classes
+ # until they are needed
+ class MigrationProxy < Struct.new(:name, :version, :filename, :scope)
+
+ def initialize(name, version, filename, scope)
+ super
+ @migration = nil
+ end
+
+ def basename
+ File.basename(filename)
+ end
+
+ def mtime
+ File.mtime filename
+ end
+
+ delegate :migrate, :announce, :write, :disable_ddl_transaction, to: :migration
+
+ private
+
+ def migration
+ @migration ||= load_migration
+ end
+
+ def load_migration
+ require(File.expand_path(filename))
+ name.constantize.new(name, version)
+ end
+
+ end
+
+ class NullMigration < MigrationProxy #:nodoc:
+ def initialize
+ super(nil, 0, nil, nil)
+ end
+
+ def mtime
+ 0
+ end
+ end
+
+  class Migrator #:nodoc:
+ class << self
+ attr_writer :migrations_paths
+ alias :migrations_path= :migrations_paths=
+
+ def migrate(migrations_paths, target_version = nil, &block)
+ case
+ when target_version.nil?
+ up(migrations_paths, target_version, &block)
+ when current_version == 0 && target_version == 0
+ []
+ when current_version > target_version
+ down(migrations_paths, target_version, &block)
+ else
+ up(migrations_paths, target_version, &block)
+ end
+ end
+
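+      # Rolls back +steps+ migrations from the current version (one by default).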
+ def rollback(migrations_paths, steps=1)
+ move(:down, migrations_paths, steps)
+ end
+
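+      # Migrates forward +steps+ migrations from the current version (one by default).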
+ def forward(migrations_paths, steps=1)
+ move(:up, migrations_paths, steps)
+ end
+
+ def up(migrations_paths, target_version = nil)
+ migrations = migrations(migrations_paths)
+ migrations.select! { |m| yield m } if block_given?
+
+ new(:up, migrations, target_version).migrate
+ end
+
+ def down(migrations_paths, target_version = nil, &block)
+ migrations = migrations(migrations_paths)
+ migrations.select! { |m| yield m } if block_given?
+
+ new(:down, migrations, target_version).migrate
+ end
+
+ def run(direction, migrations_paths, target_version)
+ new(direction, migrations(migrations_paths), target_version).run
+ end
+
+ def open(migrations_paths)
+ new(:up, migrations(migrations_paths), nil)
+ end
+
+ def schema_migrations_table_name
+ SchemaMigration.table_name
+ end
+
+ def get_all_versions
+ SchemaMigration.all.map { |x| x.version.to_i }.sort
+ end
+
+ def current_version(connection = Base.connection)
+ sm_table = schema_migrations_table_name
+ if connection.table_exists?(sm_table)
+ get_all_versions.max || 0
+ else
+ 0
+ end
+ end
+
+ def needs_migration?(connection = Base.connection)
+ current_version(connection) < last_version
+ end
+
+ def last_version
+ last_migration.version
+ end
+
+ def last_migration #:nodoc:
+ migrations(migrations_paths).last || NullMigration.new
+ end
+
+ def migrations_paths
+ @migrations_paths ||= ['db/migrate']
+        # just to not break things if someone uses: migrations_path = some_string
+ Array(@migrations_paths)
+ end
+
+ def migrations_path
+ migrations_paths.first
+ end
+
+ def migrations(paths)
+ paths = Array(paths)
+
+ files = Dir[*paths.map { |p| "#{p}/**/[0-9]*_*.rb" }]
+
+ migrations = files.map do |file|
+ version, name, scope = file.scan(/([0-9]+)_([_a-z0-9]*)\.?([_a-z0-9]*)?\.rb\z/).first
+
+ raise IllegalMigrationNameError.new(file) unless version
+ version = version.to_i
+ name = name.camelize
+
+ MigrationProxy.new(name, version, file, scope)
+ end
+
+ migrations.sort_by(&:version)
+ end
+
+ private
+
+ def move(direction, migrations_paths, steps)
+ migrator = new(direction, migrations(migrations_paths))
+ start_index = migrator.migrations.index(migrator.current_migration)
+
+ if start_index
+ finish = migrator.migrations[start_index + steps]
+ version = finish ? finish.version : 0
+ send(direction, migrations_paths, version)
+ end
+ end
+ end
+
+ def initialize(direction, migrations, target_version = nil)
+ raise StandardError.new("This database does not yet support migrations") unless Base.connection.supports_migrations?
+
+ @direction = direction
+ @target_version = target_version
+ @migrated_versions = nil
+ @migrations = migrations
+
+ validate(@migrations)
+
+ Base.connection.initialize_schema_migrations_table
+ end
+
+ def current_version
+ migrated.max || 0
+ end
+
+ def current_migration
+ migrations.detect { |m| m.version == current_version }
+ end
+ alias :current :current_migration
+
+ def run
+ migration = migrations.detect { |m| m.version == @target_version }
+ raise UnknownMigrationVersionError.new(@target_version) if migration.nil?
+ unless (up? && migrated.include?(migration.version.to_i)) || (down? && !migrated.include?(migration.version.to_i))
+ begin
+ execute_migration_in_transaction(migration, @direction)
+ rescue => e
+ canceled_msg = use_transaction?(migration) ? ", this migration was canceled" : ""
+ raise StandardError, "An error has occurred#{canceled_msg}:\n\n#{e}", e.backtrace
+ end
+ end
+ end
+
+ def migrate
+ if !target && @target_version && @target_version > 0
+ raise UnknownMigrationVersionError.new(@target_version)
+ end
+
+ runnable.each do |migration|
+ Base.logger.info "Migrating to #{migration.name} (#{migration.version})" if Base.logger
+
+ begin
+ execute_migration_in_transaction(migration, @direction)
+ rescue => e
+ canceled_msg = use_transaction?(migration) ? "this and " : ""
+ raise StandardError, "An error has occurred, #{canceled_msg}all later migrations canceled:\n\n#{e}", e.backtrace
+ end
+ end
+ end
+
+ def runnable
+ runnable = migrations[start..finish]
+ if up?
+ runnable.reject { |m| ran?(m) }
+ else
+ # skip the last migration if we're headed down, but not ALL the way down
+ runnable.pop if target
+ runnable.find_all { |m| ran?(m) }
+ end
+ end
+
+ def migrations
+ down? ? @migrations.reverse : @migrations.sort_by(&:version)
+ end
+
+ def pending_migrations
+ already_migrated = migrated
+ migrations.reject { |m| already_migrated.include?(m.version) }
+ end
+
+ def migrated
+ @migrated_versions ||= Set.new(self.class.get_all_versions)
+ end
+
+ private
+ def ran?(migration)
+ migrated.include?(migration.version.to_i)
+ end
+
+ def execute_migration_in_transaction(migration, direction)
+ ddl_transaction(migration) do
+ migration.migrate(direction)
+ record_version_state_after_migrating(migration.version)
+ end
+ end
+
+ def target
+ migrations.detect { |m| m.version == @target_version }
+ end
+
+ def finish
+ migrations.index(target) || migrations.size - 1
+ end
+
+ def start
+ up? ? 0 : (migrations.index(current) || 0)
+ end
+
+ def validate(migrations)
+        name, = migrations.group_by(&:name).find { |_,v| v.length > 1 }
+ raise DuplicateMigrationNameError.new(name) if name
+
+        version, = migrations.group_by(&:version).find { |_,v| v.length > 1 }
+ raise DuplicateMigrationVersionError.new(version) if version
+ end
+
+ def record_version_state_after_migrating(version)
+ if down?
+ migrated.delete(version)
+ ActiveRecord::SchemaMigration.where(:version => version.to_s).delete_all
+ else
+ migrated << version
+ ActiveRecord::SchemaMigration.create!(:version => version.to_s)
+ end
+ end
+
+ def up?
+ @direction == :up
+ end
+
+ def down?
+ @direction == :down
+ end
+
+ # Wrap the migration in a transaction only if supported by the adapter.
+ def ddl_transaction(migration)
+ if use_transaction?(migration)
+ Base.transaction { yield }
+ else
+ yield
+ end
+ end
+
+ def use_transaction?(migration)
+ !migration.disable_ddl_transaction && Base.connection.supports_ddl_transactions?
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/migration/command_recorder.rb b/activerecord/lib/active_record/migration/command_recorder.rb
new file mode 100644
index 0000000000..36256415df
--- /dev/null
+++ b/activerecord/lib/active_record/migration/command_recorder.rb
@@ -0,0 +1,197 @@
+module ActiveRecord
+ class Migration
+ # <tt>ActiveRecord::Migration::CommandRecorder</tt> records commands done during
+ # a migration and knows how to reverse those commands. The CommandRecorder
+ # knows how to invert the following commands:
+ #
+ # * add_column
+ # * add_index
+ # * add_timestamps
+ # * create_table
+ # * create_join_table
+ # * remove_timestamps
+ # * rename_column
+ # * rename_index
+ # * rename_table
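+    #
+    # A rough usage sketch (the table name is illustrative):
+    #
+    #   recorder = ActiveRecord::Migration::CommandRecorder.new
+    #   recorder.record(:create_table, [:horses])
+    #   recorder.inverse_of(:create_table, [:horses])
+    #   # => [:drop_table, [:horses], nil]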
+ class CommandRecorder
+ include JoinTable
+
+ attr_accessor :commands, :delegate, :reverting
+
+ def initialize(delegate = nil)
+ @commands = []
+ @delegate = delegate
+ @reverting = false
+ end
+
+      # While executing the given block, the recorder will be in reverting mode.
+      # All commands recorded will end up being inverted and stored
+      # in reverse order.
+ # For example:
+ #
+ # recorder.revert{ recorder.record(:rename_table, [:old, :new]) }
+ # # same effect as recorder.record(:rename_table, [:new, :old])
+ def revert
+ @reverting = !@reverting
+ previous = @commands
+ @commands = []
+ yield
+ ensure
+ @commands = previous.concat(@commands.reverse)
+ @reverting = !@reverting
+ end
+
+      # Records +command+. +command+ should be a method name and arguments.
+ # For example:
+ #
+ # recorder.record(:method_name, [:arg1, :arg2])
+ def record(*command, &block)
+ if @reverting
+ @commands << inverse_of(*command, &block)
+ else
+ @commands << (command << block)
+ end
+ end
+
+ # Returns the inverse of the given command. For example:
+ #
+ # recorder.inverse_of(:rename_table, [:old, :new])
+ # # => [:rename_table, [:new, :old]]
+ #
+ # This method will raise an +IrreversibleMigration+ exception if it cannot
+ # invert the +command+.
+ def inverse_of(command, args, &block)
+ method = :"invert_#{command}"
+ raise IrreversibleMigration unless respond_to?(method, true)
+ send(method, args, &block)
+ end
+
+ def respond_to?(*args) # :nodoc:
+ super || delegate.respond_to?(*args)
+ end
+
+ [:create_table, :create_join_table, :rename_table, :add_column, :remove_column,
+ :rename_index, :rename_column, :add_index, :remove_index, :add_timestamps, :remove_timestamps,
+ :change_column_default, :add_reference, :remove_reference, :transaction,
+ :drop_join_table, :drop_table, :execute_block, :enable_extension,
+ :change_column, :execute, :remove_columns, :change_column_null,
+ :add_foreign_key, :remove_foreign_key
+ # irreversible methods need to be here too
+ ].each do |method|
+ class_eval <<-EOV, __FILE__, __LINE__ + 1
+ def #{method}(*args, &block) # def create_table(*args, &block)
+ record(:"#{method}", args, &block) # record(:create_table, args, &block)
+ end # end
+ EOV
+ end
+ alias :add_belongs_to :add_reference
+ alias :remove_belongs_to :remove_reference
+
+ def change_table(table_name, options = {}) # :nodoc:
+ yield delegate.update_table_definition(table_name, self)
+ end
+
+ private
+
+ module StraightReversions
+ private
+ { transaction: :transaction,
+ execute_block: :execute_block,
+ create_table: :drop_table,
+ create_join_table: :drop_join_table,
+ add_column: :remove_column,
+ add_timestamps: :remove_timestamps,
+ add_reference: :remove_reference,
+ enable_extension: :disable_extension
+ }.each do |cmd, inv|
+ [[inv, cmd], [cmd, inv]].uniq.each do |method, inverse|
+ class_eval <<-EOV, __FILE__, __LINE__ + 1
+ def invert_#{method}(args, &block) # def invert_create_table(args, &block)
+ [:#{inverse}, args, block] # [:drop_table, args, block]
+ end # end
+ EOV
+ end
+ end
+ end
+
+ include StraightReversions
+
+ def invert_drop_table(args, &block)
+        if args.size == 1 && block.nil?
+ raise ActiveRecord::IrreversibleMigration, "To avoid mistakes, drop_table is only reversible if given options or a block (can be empty)."
+ end
+ super
+ end
+
+ def invert_rename_table(args)
+ [:rename_table, args.reverse]
+ end
+
+ def invert_remove_column(args)
+ raise ActiveRecord::IrreversibleMigration, "remove_column is only reversible if given a type." if args.size <= 2
+ super
+ end
+
+ def invert_rename_index(args)
+ [:rename_index, [args.first] + args.last(2).reverse]
+ end
+
+ def invert_rename_column(args)
+ [:rename_column, [args.first] + args.last(2).reverse]
+ end
+
+ def invert_add_index(args)
+ table, columns, options = *args
+ options ||= {}
+
+ index_name = options[:name]
+ options_hash = index_name ? { name: index_name } : { column: columns }
+
+ [:remove_index, [table, options_hash]]
+ end
+
+ def invert_remove_index(args)
+ table, options = *args
+
+ unless options && options.is_a?(Hash) && options[:column]
+ raise ActiveRecord::IrreversibleMigration, "remove_index is only reversible if given a :column option."
+ end
+
+ options = options.dup
+ [:add_index, [table, options.delete(:column), options]]
+ end
+
+ alias :invert_add_belongs_to :invert_add_reference
+ alias :invert_remove_belongs_to :invert_remove_reference
+
+ def invert_change_column_null(args)
+ args[2] = !args[2]
+ [:change_column_null, args]
+ end
+
+ def invert_add_foreign_key(args)
+ from_table, to_table, add_options = args
+ add_options ||= {}
+
+ if add_options[:name]
+ options = { name: add_options[:name] }
+ elsif add_options[:column]
+ options = { column: add_options[:column] }
+ else
+ options = to_table
+ end
+
+ [:remove_foreign_key, [from_table, options]]
+ end
+
+      # Forwards any missing method call to the delegate.
+ def method_missing(method, *args, &block)
+ if @delegate.respond_to?(method)
+ @delegate.send(method, *args, &block)
+ else
+ super
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/migration/join_table.rb b/activerecord/lib/active_record/migration/join_table.rb
new file mode 100644
index 0000000000..05569fadbd
--- /dev/null
+++ b/activerecord/lib/active_record/migration/join_table.rb
@@ -0,0 +1,15 @@
+module ActiveRecord
+ class Migration
+ module JoinTable #:nodoc:
+ private
+
+ def find_join_table_name(table_1, table_2, options = {})
+ options.delete(:table_name) || join_table_name(table_1, table_2)
+ end
+
+ def join_table_name(table_1, table_2)
+ ModelSchema.derive_join_table_name(table_1, table_2).to_sym
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/model_schema.rb b/activerecord/lib/active_record/model_schema.rb
new file mode 100644
index 0000000000..850220babd
--- /dev/null
+++ b/activerecord/lib/active_record/model_schema.rb
@@ -0,0 +1,339 @@
+module ActiveRecord
+ module ModelSchema
+ extend ActiveSupport::Concern
+
+ included do
+ ##
+ # :singleton-method:
+ # Accessor for the prefix type that will be prepended to every primary key column name.
+ # The options are :table_name and :table_name_with_underscore. If the first is specified,
+ # the Product class will look for "productid" instead of "id" as the primary column. If the
+ # latter is specified, the Product class will look for "product_id" instead of "id". Remember
+ # that this is a global setting for all Active Records.
+ mattr_accessor :primary_key_prefix_type, instance_writer: false
+
+ ##
+ # :singleton-method:
+ # Accessor for the name of the prefix string to prepend to every table name. So if set
+ # to "basecamp_", all table names will be named like "basecamp_projects", "basecamp_people",
+ # etc. This is a convenient way of creating a namespace for tables in a shared database.
+ # By default, the prefix is the empty string.
+ #
+      # If you are organising your models within modules, you can add a prefix to the models within
+ # a namespace by defining a singleton method in the parent module called table_name_prefix which
+ # returns your chosen prefix.
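+      #
+      # For example (module and prefix are illustrative):
+      #
+      #   module Music
+      #     def self.table_name_prefix
+      #       'music_'
+      #     end
+      #   end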
+ class_attribute :table_name_prefix, instance_writer: false
+ self.table_name_prefix = ""
+
+ ##
+ # :singleton-method:
+ # Works like +table_name_prefix+, but appends instead of prepends (set to "_basecamp" gives "projects_basecamp",
+ # "people_basecamp"). By default, the suffix is the empty string.
+ #
+ # If you are organising your models within modules, you can add a suffix to the models within
+ # a namespace by defining a singleton method in the parent module called table_name_suffix which
+ # returns your chosen suffix.
+ class_attribute :table_name_suffix, instance_writer: false
+ self.table_name_suffix = ""
+
+ ##
+ # :singleton-method:
+ # Accessor for the name of the schema migrations table. By default, the value is "schema_migrations"
+ class_attribute :schema_migrations_table_name, instance_accessor: false
+ self.schema_migrations_table_name = "schema_migrations"
+
+ ##
+ # :singleton-method:
+ # Indicates whether table names should be the pluralized versions of the corresponding class names.
+ # If true, the default table name for a Product class will be +products+. If false, it would just be +product+.
+      # See table_name for the full rules on table/class naming. This is true by default.
+ class_attribute :pluralize_table_names, instance_writer: false
+ self.pluralize_table_names = true
+
+ self.inheritance_column = 'type'
+
+ delegate :type_for_attribute, to: :class
+ end
+
+ # Derives the join table name for +first_table+ and +second_table+. The
+ # table names appear in alphabetical order. A common prefix is removed
+ # (useful for namespaced models like Music::Artist and Music::Record):
+ #
+ # artists, records => artists_records
+ # records, artists => artists_records
+ # music_artists, music_records => music_artists_records
+ def self.derive_join_table_name(first_table, second_table) # :nodoc:
+ [first_table.to_s, second_table.to_s].sort.join("\0").gsub(/^(.*_)(.+)\0\1(.+)/, '\1\2_\3').gsub("\0", "_")
+ end
+
+ module ClassMethods
+ # Guesses the table name (in forced lower-case) based on the name of the class in the
+ # inheritance hierarchy descending directly from ActiveRecord::Base. So if the hierarchy
+ # looks like: Reply < Message < ActiveRecord::Base, then Message is used
+ # to guess the table name even when called on Reply. The rules used to do the guess
+ # are handled by the Inflector class in Active Support, which knows almost all common
+ # English inflections. You can add new inflections in config/initializers/inflections.rb.
+ #
+ # Nested classes are given table names prefixed by the singular form of
+ # the parent's table name. Enclosing modules are not considered.
+ #
+ # ==== Examples
+ #
+ # class Invoice < ActiveRecord::Base
+ # end
+ #
+ # file class table_name
+ # invoice.rb Invoice invoices
+ #
+ # class Invoice < ActiveRecord::Base
+ # class Lineitem < ActiveRecord::Base
+ # end
+ # end
+ #
+ # file class table_name
+ # invoice.rb Invoice::Lineitem invoice_lineitems
+ #
+ # module Invoice
+ # class Lineitem < ActiveRecord::Base
+ # end
+ # end
+ #
+ # file class table_name
+ # invoice/lineitem.rb Invoice::Lineitem lineitems
+ #
+ # Additionally, the class-level +table_name_prefix+ is prepended and the
+ # +table_name_suffix+ is appended. So if you have "myapp_" as a prefix,
+ # the table name guess for an Invoice class becomes "myapp_invoices".
+ # Invoice::Lineitem becomes "myapp_invoice_lineitems".
+ #
+ # You can also set your own table name explicitly:
+ #
+ # class Mouse < ActiveRecord::Base
+ # self.table_name = "mice"
+ # end
+ #
+ # Alternatively, you can override the table_name method to define your
+ # own computation. (Possibly using <tt>super</tt> to manipulate the default
+ # table name.) Example:
+ #
+ # class Post < ActiveRecord::Base
+ # def self.table_name
+ # "special_" + super
+ # end
+ # end
+ # Post.table_name # => "special_posts"
+ def table_name
+ reset_table_name unless defined?(@table_name)
+ @table_name
+ end
+
+ # Sets the table name explicitly. Example:
+ #
+ # class Project < ActiveRecord::Base
+ # self.table_name = "project"
+ # end
+ #
+ # You can also just define your own <tt>self.table_name</tt> method; see
+ # the documentation for ActiveRecord::Base#table_name.
+ def table_name=(value)
+ value = value && value.to_s
+
+ if defined?(@table_name)
+ return if value == @table_name
+ reset_column_information if connected?
+ end
+
+ @table_name = value
+ @quoted_table_name = nil
+ @arel_table = nil
+ @sequence_name = nil unless defined?(@explicit_sequence_name) && @explicit_sequence_name
+ @relation = Relation.create(self, arel_table)
+ end
+
+ # Returns a quoted version of the table name, used to construct SQL statements.
+ def quoted_table_name
+ @quoted_table_name ||= connection.quote_table_name(table_name)
+ end
+
+ # Computes the table name, (re)sets it internally, and returns it.
+ def reset_table_name #:nodoc:
+ self.table_name = if abstract_class?
+ superclass == Base ? nil : superclass.table_name
+ elsif superclass.abstract_class?
+ superclass.table_name || compute_table_name
+ else
+ compute_table_name
+ end
+ end
+
+ def full_table_name_prefix #:nodoc:
+ (parents.detect{ |p| p.respond_to?(:table_name_prefix) } || self).table_name_prefix
+ end
+
+ def full_table_name_suffix #:nodoc:
+ (parents.detect {|p| p.respond_to?(:table_name_suffix) } || self).table_name_suffix
+ end
+
+      # Defines the name of the table column which will store the class name in single-table
+ # inheritance situations.
+ #
+ # The default inheritance column name is +type+, which means it's a
+ # reserved word inside Active Record. To be able to use single-table
+ # inheritance with another column name, or to use the column +type+ in
+ # your own model for something else, you can set +inheritance_column+:
+ #
+ # self.inheritance_column = 'zoink'
+ def inheritance_column
+ (@inheritance_column ||= nil) || superclass.inheritance_column
+ end
+
+ # Sets the value of inheritance_column
+ def inheritance_column=(value)
+ @inheritance_column = value.to_s
+ @explicit_inheritance_column = true
+ end
+
+ def sequence_name
+ if base_class == self
+ @sequence_name ||= reset_sequence_name
+ else
+ (@sequence_name ||= nil) || base_class.sequence_name
+ end
+ end
+
+ def reset_sequence_name #:nodoc:
+ @explicit_sequence_name = false
+ @sequence_name = connection.default_sequence_name(table_name, primary_key)
+ end
+
+ # Sets the name of the sequence to use when generating ids to the given
+ # value, or (if the value is nil or false) to the value returned by the
+ # given block. This is required for Oracle and is useful for any
+ # database which relies on sequences for primary key generation.
+ #
+ # If a sequence name is not explicitly set when using Oracle,
+ # it will default to the commonly used pattern of: #{table_name}_seq
+ #
+ # If a sequence name is not explicitly set when using PostgreSQL, it
+ # will discover the sequence corresponding to your primary key for you.
+ #
+ # class Project < ActiveRecord::Base
+ # self.sequence_name = "projectseq" # default would have been "project_seq"
+ # end
+ def sequence_name=(value)
+ @sequence_name = value.to_s
+ @explicit_sequence_name = true
+ end
+
+ # Indicates whether the table associated with this class exists
+ def table_exists?
+ connection.schema_cache.table_exists?(table_name)
+ end
+
+ def attributes_builder # :nodoc:
+ @attributes_builder ||= AttributeSet::Builder.new(column_types)
+ end
+
+ def column_types # :nodoc:
+ @column_types ||= columns_hash.transform_values(&:cast_type).tap do |h|
+ h.default = Type::Value.new
+ end
+ end
+
+ def type_for_attribute(attr_name) # :nodoc:
+ column_types[attr_name]
+ end
+
+ # Returns a hash where the keys are column names and the values are
+ # default values when instantiating the AR object for this table.
+ def column_defaults
+ default_attributes.to_hash
+ end
+
+ def default_attributes # :nodoc:
+ @default_attributes ||= attributes_builder.build_from_database(
+ columns_hash.transform_values(&:default))
+ end
+
+ # Returns an array of column names as strings.
+ def column_names
+ @column_names ||= columns.map { |column| column.name }
+ end
+
+ # Returns an array of column objects where the primary id, all columns ending in "_id" or "_count",
+ # and columns used for single table inheritance have been removed.
+ def content_columns
+ @content_columns ||= columns.reject { |c| c.name == primary_key || c.name =~ /(_id|_count)$/ || c.name == inheritance_column }
+ end
+
+ # Resets all the cached information about columns, which will cause them
+ # to be reloaded on the next request.
+ #
+ # The most common usage pattern for this method is probably in a migration,
+ # when just after creating a table you want to populate it with some default
+      # values, e.g.:
+ #
+ # class CreateJobLevels < ActiveRecord::Migration
+ # def up
+ # create_table :job_levels do |t|
+ # t.integer :id
+ # t.string :name
+ #
+ # t.timestamps
+ # end
+ #
+ # JobLevel.reset_column_information
+ # %w{assistant executive manager director}.each do |type|
+ # JobLevel.create(name: type)
+ # end
+ # end
+ #
+ # def down
+ # drop_table :job_levels
+ # end
+ # end
+ def reset_column_information
+ connection.clear_cache!
+ undefine_attribute_methods
+ connection.schema_cache.clear_table_cache!(table_name) if table_exists?
+
+ @arel_engine = nil
+ @column_names = nil
+ @column_types = nil
+ @content_columns = nil
+ @default_attributes = nil
+ @dynamic_methods_hash = nil
+ @inheritance_column = nil unless defined?(@explicit_inheritance_column) && @explicit_inheritance_column
+ @relation = nil
+ @time_zone_column_names = nil
+ @cached_time_zone = nil
+ end
+
+ private
+
+ # Guesses the table name, but does not decorate it with prefix and suffix information.
+ def undecorated_table_name(class_name = base_class.name)
+ table_name = class_name.to_s.demodulize.underscore
+ pluralize_table_names ? table_name.pluralize : table_name
+ end
+
+ # Computes and returns a table name according to default conventions.
+ def compute_table_name
+ base = base_class
+ if self == base
+ # Nested classes are prefixed with singular parent table name.
+ if parent < Base && !parent.abstract_class?
+ contained = parent.table_name
+ contained = contained.singularize if parent.pluralize_table_names
+ contained += '_'
+ end
+
+ "#{full_table_name_prefix}#{contained}#{undecorated_table_name(name)}#{full_table_name_suffix}"
+ else
+ # STI subclasses always use their superclass' table.
+ base.table_name
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/nested_attributes.rb b/activerecord/lib/active_record/nested_attributes.rb
new file mode 100644
index 0000000000..8a2a06f2ca
--- /dev/null
+++ b/activerecord/lib/active_record/nested_attributes.rb
@@ -0,0 +1,548 @@
+require 'active_support/core_ext/hash/except'
+require 'active_support/core_ext/object/try'
+require 'active_support/core_ext/hash/indifferent_access'
+
+module ActiveRecord
+ module NestedAttributes #:nodoc:
+ class TooManyRecords < ActiveRecordError
+ end
+
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :nested_attributes_options, instance_writer: false
+ self.nested_attributes_options = {}
+ end
+
+ # = Active Record Nested Attributes
+ #
+ # Nested attributes allow you to save attributes on associated records
+ # through the parent. By default nested attribute updating is turned off
+ # and you can enable it using the accepts_nested_attributes_for class
+ # method. When you enable nested attributes an attribute writer is
+ # defined on the model.
+ #
+ # The attribute writer is named after the association, which means that
+ # in the following example, two new methods are added to your model:
+ #
+ # <tt>author_attributes=(attributes)</tt> and
+ # <tt>pages_attributes=(attributes)</tt>.
+ #
+ # class Book < ActiveRecord::Base
+ # has_one :author
+ # has_many :pages
+ #
+ # accepts_nested_attributes_for :author, :pages
+ # end
+ #
+ # Note that the <tt>:autosave</tt> option is automatically enabled on every
+ # association that accepts_nested_attributes_for is used for.
+ #
+ # === One-to-one
+ #
+ # Consider a Member model that has one Avatar:
+ #
+ # class Member < ActiveRecord::Base
+ # has_one :avatar
+ # accepts_nested_attributes_for :avatar
+ # end
+ #
+ # Enabling nested attributes on a one-to-one association allows you to
+ # create the member and avatar in one go:
+ #
+ # params = { member: { name: 'Jack', avatar_attributes: { icon: 'smiling' } } }
+ # member = Member.create(params[:member])
+ # member.avatar.id # => 2
+ # member.avatar.icon # => 'smiling'
+ #
+ # It also allows you to update the avatar through the member:
+ #
+ # params = { member: { avatar_attributes: { id: '2', icon: 'sad' } } }
+ # member.update params[:member]
+ # member.avatar.icon # => 'sad'
+ #
+ # By default you will only be able to set and update attributes on the
+ # associated model. If you want to destroy the associated model through the
+ # attributes hash, you have to enable it first using the
+ # <tt>:allow_destroy</tt> option.
+ #
+ # class Member < ActiveRecord::Base
+ # has_one :avatar
+ # accepts_nested_attributes_for :avatar, allow_destroy: true
+ # end
+ #
+ # Now, when you add the <tt>_destroy</tt> key to the attributes hash, with a
+ # value that evaluates to +true+, you will destroy the associated model:
+ #
+ # member.avatar_attributes = { id: '2', _destroy: '1' }
+ # member.avatar.marked_for_destruction? # => true
+ # member.save
+ # member.reload.avatar # => nil
+ #
+ # Note that the model will _not_ be destroyed until the parent is saved.
+ #
+ # === One-to-many
+ #
+ # Consider a member that has a number of posts:
+ #
+ # class Member < ActiveRecord::Base
+ # has_many :posts
+ # accepts_nested_attributes_for :posts
+ # end
+ #
+ # You can now set or update attributes on the associated posts through
+ # an attribute hash for a member: include the key +:posts_attributes+
+ # with an array of hashes of post attributes as a value.
+ #
+ # For each hash that does _not_ have an <tt>id</tt> key a new record will
+ # be instantiated, unless the hash also contains a <tt>_destroy</tt> key
+ # that evaluates to +true+.
+ #
+ # params = { member: {
+ # name: 'joe', posts_attributes: [
+ # { title: 'Kari, the awesome Ruby documentation browser!' },
+ # { title: 'The egalitarian assumption of the modern citizen' },
+ # { title: '', _destroy: '1' } # this will be ignored
+ # ]
+ # }}
+ #
+ # member = Member.create(params[:member])
+ # member.posts.length # => 2
+ # member.posts.first.title # => 'Kari, the awesome Ruby documentation browser!'
+ # member.posts.second.title # => 'The egalitarian assumption of the modern citizen'
+ #
+ # You may also set a :reject_if proc to silently ignore any new record
+ # hashes if they fail to pass your criteria. For example, the previous
+ # example could be rewritten as:
+ #
+ # class Member < ActiveRecord::Base
+ # has_many :posts
+ # accepts_nested_attributes_for :posts, reject_if: proc { |attributes| attributes['title'].blank? }
+ # end
+ #
+ # params = { member: {
+ # name: 'joe', posts_attributes: [
+ # { title: 'Kari, the awesome Ruby documentation browser!' },
+ # { title: 'The egalitarian assumption of the modern citizen' },
+ # { title: '' } # this will be ignored because of the :reject_if proc
+ # ]
+ # }}
+ #
+ # member = Member.create(params[:member])
+ # member.posts.length # => 2
+ # member.posts.first.title # => 'Kari, the awesome Ruby documentation browser!'
+ # member.posts.second.title # => 'The egalitarian assumption of the modern citizen'
+ #
+ # Alternatively, :reject_if also accepts a symbol for using methods:
+ #
+ # class Member < ActiveRecord::Base
+ # has_many :posts
+ # accepts_nested_attributes_for :posts, reject_if: :new_record?
+ # end
+ #
+ # class Member < ActiveRecord::Base
+ # has_many :posts
+ # accepts_nested_attributes_for :posts, reject_if: :reject_posts
+ #
+  #     def reject_posts(attributes)
+  #       attributes['title'].blank?
+ # end
+ # end
+ #
+ # If the hash contains an <tt>id</tt> key that matches an already
+ # associated record, the matching record will be modified:
+ #
+ # member.attributes = {
+ # name: 'Joe',
+ # posts_attributes: [
+ # { id: 1, title: '[UPDATED] An, as of yet, undisclosed awesome Ruby documentation browser!' },
+ # { id: 2, title: '[UPDATED] other post' }
+ # ]
+ # }
+ #
+ # member.posts.first.title # => '[UPDATED] An, as of yet, undisclosed awesome Ruby documentation browser!'
+ # member.posts.second.title # => '[UPDATED] other post'
+ #
+ # By default the associated records are protected from being destroyed. If
+ # you want to destroy any of the associated records through the attributes
+ # hash, you have to enable it first using the <tt>:allow_destroy</tt>
+ # option. This will allow you to also use the <tt>_destroy</tt> key to
+ # destroy existing records:
+ #
+ # class Member < ActiveRecord::Base
+ # has_many :posts
+ # accepts_nested_attributes_for :posts, allow_destroy: true
+ # end
+ #
+ # params = { member: {
+ # posts_attributes: [{ id: '2', _destroy: '1' }]
+ # }}
+ #
+ # member.attributes = params[:member]
+ # member.posts.detect { |p| p.id == 2 }.marked_for_destruction? # => true
+ # member.posts.length # => 2
+ # member.save
+ # member.reload.posts.length # => 1
+ #
+ # Nested attributes for an associated collection can also be passed in
+ # the form of a hash of hashes instead of an array of hashes:
+ #
+ # Member.create(name: 'joe',
+ # posts_attributes: { first: { title: 'Foo' },
+ # second: { title: 'Bar' } })
+ #
+ # has the same effect as
+ #
+ # Member.create(name: 'joe',
+ # posts_attributes: [ { title: 'Foo' },
+ # { title: 'Bar' } ])
+ #
+ # The keys of the hash which is the value for +:posts_attributes+ are
+ # ignored in this case.
+  # However, you cannot use +'id'+ or +:id+ as one of
+  # these keys; otherwise the hash will be wrapped in an array and
+  # interpreted as an attribute hash for a single post.
+ #
+ # Passing attributes for an associated collection in the form of a hash
+ # of hashes can be used with hashes generated from HTTP/HTML parameters,
+  # where there may be no natural way to submit an array of hashes.
+ #
+ # === Saving
+ #
+ # All changes to models, including the destruction of those marked for
+ # destruction, are saved and destroyed automatically and atomically when
+ # the parent model is saved. This happens inside the transaction initiated
+  # by the parent's save method. See ActiveRecord::AutosaveAssociation.
+ #
+ # === Validating the presence of a parent model
+ #
+ # If you want to validate that a child record is associated with a parent
+ # record, you can use <tt>validates_presence_of</tt> and
+ # <tt>inverse_of</tt> as this example illustrates:
+ #
+ # class Member < ActiveRecord::Base
+ # has_many :posts, inverse_of: :member
+ # accepts_nested_attributes_for :posts
+ # end
+ #
+ # class Post < ActiveRecord::Base
+ # belongs_to :member, inverse_of: :posts
+ # validates_presence_of :member
+ # end
+ #
+ # Note that if you do not specify the <tt>inverse_of</tt> option, then
+ # Active Record will try to automatically guess the inverse association
+ # based on heuristics.
+ #
+ # For one-to-one nested associations, if you build the new (in-memory)
+ # child object yourself before assignment, then this module will not
+ # overwrite it, e.g.:
+ #
+ # class Member < ActiveRecord::Base
+ # has_one :avatar
+ # accepts_nested_attributes_for :avatar
+ #
+ # def avatar
+ # super || build_avatar(width: 200)
+ # end
+ # end
+ #
+ # member = Member.new
+ # member.avatar_attributes = {icon: 'sad'}
+ # member.avatar.width # => 200
+ module ClassMethods
+ REJECT_ALL_BLANK_PROC = proc { |attributes| attributes.all? { |key, value| key == '_destroy' || value.blank? } }
+
+ # Defines an attributes writer for the specified association(s).
+ #
+ # Supported options:
+ # [:allow_destroy]
+ # If true, destroys any members from the attributes hash with a
+ # <tt>_destroy</tt> key and a value that evaluates to +true+
+      #   (e.g. 1, '1', true, or 'true'). This option is off by default.
+ # [:reject_if]
+ # Allows you to specify a Proc or a Symbol pointing to a method
+ # that checks whether a record should be built for a certain attribute
+ # hash. The hash is passed to the supplied Proc or the method
+ # and it should return either +true+ or +false+. When no :reject_if
+ # is specified, a record will be built for all attribute hashes that
+ # do not have a <tt>_destroy</tt> value that evaluates to true.
+ # Passing <tt>:all_blank</tt> instead of a Proc will create a proc
+ # that will reject a record where all the attributes are blank excluding
+ # any value for _destroy.
+ # [:limit]
+ # Allows you to specify the maximum number of the associated records that
+ # can be processed with the nested attributes. Limit also can be specified as a
+      #   Proc or a Symbol pointing to a method that should return a number. If the size of the
+      #   nested attributes array exceeds the specified limit, a NestedAttributes::TooManyRecords
+      #   exception is raised. If omitted, any number of associated records can be processed.
+ # Note that the :limit option is only applicable to one-to-many associations.
+ # [:update_only]
+ # For a one-to-one association, this option allows you to specify how
+ # nested attributes are to be used when an associated record already
+ # exists. In general, an existing record may either be updated with the
+ # new set of attribute values or be replaced by a wholly new record
+ # containing those values. By default the :update_only option is +false+
+ # and the nested attributes are used to update the existing record only
+ # if they include the record's <tt>:id</tt> value. Otherwise a new
+ # record will be instantiated and used to replace the existing one.
+ # However if the :update_only option is +true+, the nested attributes
+ # are used to update the record's attributes always, regardless of
+ # whether the <tt>:id</tt> is present. The option is ignored for collection
+ # associations.
+ #
+ # Examples:
+ # # creates avatar_attributes=
+ # accepts_nested_attributes_for :avatar, reject_if: proc { |attributes| attributes['name'].blank? }
+ # # creates avatar_attributes=
+ # accepts_nested_attributes_for :avatar, reject_if: :all_blank
+ # # creates avatar_attributes= and posts_attributes=
+ # accepts_nested_attributes_for :avatar, :posts, allow_destroy: true
+ def accepts_nested_attributes_for(*attr_names)
+ options = { :allow_destroy => false, :update_only => false }
+ options.update(attr_names.extract_options!)
+ options.assert_valid_keys(:allow_destroy, :reject_if, :limit, :update_only)
+ options[:reject_if] = REJECT_ALL_BLANK_PROC if options[:reject_if] == :all_blank
+
+ attr_names.each do |association_name|
+ if reflection = _reflect_on_association(association_name)
+ reflection.autosave = true
+ add_autosave_association_callbacks(reflection)
+
+ nested_attributes_options = self.nested_attributes_options.dup
+ nested_attributes_options[association_name.to_sym] = options
+ self.nested_attributes_options = nested_attributes_options
+
+ type = (reflection.collection? ? :collection : :one_to_one)
+ generate_association_writer(association_name, type)
+ else
+ raise ArgumentError, "No association found for name `#{association_name}'. Has it been defined yet?"
+ end
+ end
+ end
+
+ private
+
+ # Generates a writer method for this association. Serves as a point for
+ # accessing the objects in the association. For example, this method
+ # could generate the following:
+ #
+ # def pirate_attributes=(attributes)
+ # assign_nested_attributes_for_one_to_one_association(:pirate, attributes)
+ # end
+ #
+ # This redirects the attempts to write objects in an association through
+ # the helper methods defined below. Makes it seem like the nested
+ # associations are just regular associations.
+ def generate_association_writer(association_name, type)
+ generated_association_methods.module_eval <<-eoruby, __FILE__, __LINE__ + 1
+ if method_defined?(:#{association_name}_attributes=)
+ remove_method(:#{association_name}_attributes=)
+ end
+ def #{association_name}_attributes=(attributes)
+ assign_nested_attributes_for_#{type}_association(:#{association_name}, attributes)
+ end
+ eoruby
+ end
+ end
+
+ # Returns ActiveRecord::AutosaveAssociation::marked_for_destruction? It's
+ # used in conjunction with fields_for to build a form element for the
+ # destruction of this association.
+ #
+ # See ActionView::Helpers::FormHelper::fields_for for more info.
+ def _destroy
+ marked_for_destruction?
+ end
+
+ private
+
+ # Attribute hash keys that should not be assigned as normal attributes.
+ # These hash keys are nested attributes implementation details.
+ UNASSIGNABLE_KEYS = %w( id _destroy )
+
+ # Assigns the given attributes to the association.
+ #
+ # If an associated record does not yet exist, one will be instantiated. If
+ # an associated record already exists, the method's behavior depends on
+ # the value of the update_only option. If update_only is +false+ and the
+ # given attributes include an <tt>:id</tt> that matches the existing record's
+ # id, then the existing record will be modified. If no <tt>:id</tt> is provided
+ # it will be replaced with a new record. If update_only is +true+ the existing
+ # record will be modified regardless of whether an <tt>:id</tt> is provided.
+ #
+ # If the given attributes include a matching <tt>:id</tt> attribute (or
+ # update_only is true) and a <tt>:_destroy</tt> key set to a truthy value,
+ # then the existing record will be marked for destruction.
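+ #
+ # An illustrative sketch of the accepted one-to-one attribute hashes:
+ #
+ #   { id: '1', name: 'changed' }  # updates the existing record with id 1
+ #   { name: 'brand new' }         # builds a replacement record
+ #   { id: '1', _destroy: '1' }    # marks the record for destruction
+ #                                 # (only when allow_destroy is enabled)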
+ def assign_nested_attributes_for_one_to_one_association(association_name, attributes)
+ options = self.nested_attributes_options[association_name]
+ attributes = attributes.with_indifferent_access
+ existing_record = send(association_name)
+
+ if (options[:update_only] || !attributes['id'].blank?) && existing_record &&
+ (options[:update_only] || existing_record.id.to_s == attributes['id'].to_s)
+ assign_to_or_mark_for_destruction(existing_record, attributes, options[:allow_destroy]) unless call_reject_if(association_name, attributes)
+
+ elsif attributes['id'].present?
+ raise_nested_attributes_record_not_found!(association_name, attributes['id'])
+
+ elsif !reject_new_record?(association_name, attributes)
+ assignable_attributes = attributes.except(*UNASSIGNABLE_KEYS)
+
+ if existing_record && existing_record.new_record?
+ existing_record.assign_attributes(assignable_attributes)
+ association(association_name).initialize_attributes(existing_record)
+ else
+ method = "build_#{association_name}"
+ if respond_to?(method)
+ send(method, assignable_attributes)
+ else
+ raise ArgumentError, "Cannot build association `#{association_name}'. Are you trying to build a polymorphic one-to-one association?"
+ end
+ end
+ end
+ end
+
+ # Assigns the given attributes to the collection association.
+ #
+ # Hashes with an <tt>:id</tt> value matching an existing associated record
+ # will update that record. Hashes without an <tt>:id</tt> value will build
+ # a new record for the association. Hashes with a matching <tt>:id</tt>
+ # value and a <tt>:_destroy</tt> key set to a truthy value will mark the
+ # matched record for destruction.
+ #
+ # For example:
+ #
+ # assign_nested_attributes_for_collection_association(:people, {
+ # '1' => { id: '1', name: 'Peter' },
+ # '2' => { name: 'John' },
+ # '3' => { id: '2', _destroy: true }
+ # })
+ #
+ # Will update the name of the Person with ID 1, build a new associated
+ # person with the name 'John', and mark the associated Person with ID 2
+ # for destruction.
+ #
+ # Also accepts an Array of attribute hashes:
+ #
+ # assign_nested_attributes_for_collection_association(:people, [
+ # { id: '1', name: 'Peter' },
+ # { name: 'John' },
+ # { id: '2', _destroy: true }
+ # ])
+ def assign_nested_attributes_for_collection_association(association_name, attributes_collection)
+ options = self.nested_attributes_options[association_name]
+
+ unless attributes_collection.is_a?(Hash) || attributes_collection.is_a?(Array)
+ raise ArgumentError, "Hash or Array expected, got #{attributes_collection.class.name} (#{attributes_collection.inspect})"
+ end
+
+ check_record_limit!(options[:limit], attributes_collection)
+
+ if attributes_collection.is_a? Hash
+ keys = attributes_collection.keys
+ attributes_collection = if keys.include?('id') || keys.include?(:id)
+ [attributes_collection]
+ else
+ attributes_collection.values
+ end
+ end
+
+ association = association(association_name)
+
+ existing_records = if association.loaded?
+ association.target
+ else
+ attribute_ids = attributes_collection.map {|a| a['id'] || a[:id] }.compact
+ attribute_ids.empty? ? [] : association.scope.where(association.klass.primary_key => attribute_ids)
+ end
+
+ attributes_collection.each do |attributes|
+ attributes = attributes.with_indifferent_access
+
+ if attributes['id'].blank?
+ unless reject_new_record?(association_name, attributes)
+ association.build(attributes.except(*UNASSIGNABLE_KEYS))
+ end
+ elsif existing_record = existing_records.detect { |record| record.id.to_s == attributes['id'].to_s }
+ unless call_reject_if(association_name, attributes)
+ # Make sure we are operating on the actual object which is in the association's
+ # proxy_target array (either by finding it, or adding it if not found)
+ # Take into account that the proxy_target may have changed due to callbacks
+ target_record = association.target.detect { |record| record.id.to_s == attributes['id'].to_s }
+ if target_record
+ existing_record = target_record
+ else
+ association.add_to_target(existing_record, :skip_callbacks)
+ end
+
+ assign_to_or_mark_for_destruction(existing_record, attributes, options[:allow_destroy])
+ end
+ else
+ raise_nested_attributes_record_not_found!(association_name, attributes['id'])
+ end
+ end
+ end
+
+ # Takes in a limit and checks if the attributes_collection has too many
+ # records. It accepts the limit in the form of a Symbol, Proc, or
+ # number-like object (anything that can be compared with an integer).
+ #
+ # Raises TooManyRecords error if the attributes_collection is
+ # larger than the limit.
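+ #
+ # Illustrative limit declarations (the :parrots association is hypothetical):
+ #
+ #   accepts_nested_attributes_for :parrots, limit: 5
+ #   accepts_nested_attributes_for :parrots, limit: :max_parrots
+ #   accepts_nested_attributes_for :parrots, limit: proc { 5 }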
+ def check_record_limit!(limit, attributes_collection)
+ if limit
+ limit = case limit
+ when Symbol
+ send(limit)
+ when Proc
+ limit.call
+ else
+ limit
+ end
+
+ if limit && attributes_collection.size > limit
+ raise TooManyRecords, "Maximum #{limit} records are allowed. Got #{attributes_collection.size} records instead."
+ end
+ end
+ end
+
+ # Updates a record with the +attributes+ or marks it for destruction if
+ # +allow_destroy+ is +true+ and has_destroy_flag? returns +true+.
+ def assign_to_or_mark_for_destruction(record, attributes, allow_destroy)
+ record.assign_attributes(attributes.except(*UNASSIGNABLE_KEYS))
+ record.mark_for_destruction if has_destroy_flag?(attributes) && allow_destroy
+ end
+
+ # Determines if a hash contains a truthy _destroy key.
+ def has_destroy_flag?(hash)
+ Type::Boolean.new.type_cast_from_user(hash['_destroy'])
+ end
+
+ # Determines if a new record should be rejected by checking
+ # has_destroy_flag? or if a <tt>:reject_if</tt> proc exists for this
+ # association and evaluates to +true+.
+ def reject_new_record?(association_name, attributes)
+ has_destroy_flag?(attributes) || call_reject_if(association_name, attributes)
+ end
+
+ # Determines if a record with the particular +attributes+ should be
+ # rejected by calling the reject_if Symbol or Proc (if defined).
+ # The reject_if option is defined by +accepts_nested_attributes_for+.
+ #
+ # Returns false if there is a +destroy_flag+ on the attributes.
+ def call_reject_if(association_name, attributes)
+ return false if has_destroy_flag?(attributes)
+ case callback = self.nested_attributes_options[association_name][:reject_if]
+ when Symbol
+ method(callback).arity == 0 ? send(callback) : send(callback, attributes)
+ when Proc
+ callback.call(attributes)
+ end
+ end
+
+ def raise_nested_attributes_record_not_found!(association_name, record_id)
+ raise RecordNotFound, "Couldn't find #{self.class._reflect_on_association(association_name).klass.name} with ID=#{record_id} for #{self.class.name} with ID=#{id}"
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/no_touching.rb b/activerecord/lib/active_record/no_touching.rb
new file mode 100644
index 0000000000..dbf4564ae5
--- /dev/null
+++ b/activerecord/lib/active_record/no_touching.rb
@@ -0,0 +1,52 @@
+module ActiveRecord
+ # = Active Record No Touching
+ module NoTouching
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # Lets you selectively disable calls to `touch` for the
+ # duration of a block.
+ #
+ # ==== Examples
+ # ActiveRecord::Base.no_touching do
+ # Project.first.touch # does nothing
+ # Message.first.touch # does nothing
+ # end
+ #
+ # Project.no_touching do
+ # Project.first.touch # does nothing
+ # Message.first.touch # works, but does not touch the associated project
+ # end
+ #
+ def no_touching(&block)
+ NoTouching.apply_to(self, &block)
+ end
+ end
+
+ class << self
+ def apply_to(klass) #:nodoc:
+ klasses.push(klass)
+ yield
+ ensure
+ klasses.pop
+ end
+
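+ # A +no_touching+ block opened on a superclass (e.g. ActiveRecord::Base)
+ # also silences touch on its subclasses, hence the Module#>= ancestor check.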
+ def applied_to?(klass) #:nodoc:
+ klasses.any? { |k| k >= klass }
+ end
+
+ private
+ def klasses
+ Thread.current[:no_touching_classes] ||= []
+ end
+ end
+
+ def no_touching?
+ NoTouching.applied_to?(self.class)
+ end
+
+ def touch(*)
+ super unless no_touching?
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/null_relation.rb b/activerecord/lib/active_record/null_relation.rb
new file mode 100644
index 0000000000..807c301596
--- /dev/null
+++ b/activerecord/lib/active_record/null_relation.rb
@@ -0,0 +1,81 @@
+# -*- coding: utf-8 -*-
+
+module ActiveRecord
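+ # Mixed into relations that should never hit the database, as used by +none+.
+ # Readers return empty results and writers are no-ops. For example, with a
+ # hypothetical Post model:
+ #
+ #   Post.none.to_a  # => [] without running a query
+ #   Post.none.count # => 0 without running a query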
+ module NullRelation # :nodoc:
+ def exec_queries
+ @records = []
+ end
+
+ def pluck(*column_names)
+ []
+ end
+
+ def delete_all(_conditions = nil)
+ 0
+ end
+
+ def update_all(_updates, _conditions = nil, _options = {})
+ 0
+ end
+
+ def delete(_id_or_array)
+ 0
+ end
+
+ def size
+ calculate :size, nil
+ end
+
+ def empty?
+ true
+ end
+
+ def any?
+ false
+ end
+
+ def many?
+ false
+ end
+
+ def to_sql
+ ""
+ end
+
+ def count(*)
+ calculate :count, nil
+ end
+
+ def sum(*)
+ calculate :sum, nil
+ end
+
+ def average(*)
+ calculate :average, nil
+ end
+
+ def minimum(*)
+ calculate :minimum, nil
+ end
+
+ def maximum(*)
+ calculate :maximum, nil
+ end
+
+ def calculate(operation, _column_name, _options = {})
+ # TODO: Remove the _options argument as soon as we remove support for
+ # activerecord-deprecated_finders.
+ if [:count, :sum, :size].include? operation
+ group_values.any? ? Hash.new : 0
+ elsif [:average, :minimum, :maximum].include?(operation) && group_values.any?
+ Hash.new
+ else
+ nil
+ end
+ end
+
+ def exists?(_id = false)
+ false
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/persistence.rb b/activerecord/lib/active_record/persistence.rb
new file mode 100644
index 0000000000..51b1931ed5
--- /dev/null
+++ b/activerecord/lib/active_record/persistence.rb
@@ -0,0 +1,532 @@
+module ActiveRecord
+ # = Active Record Persistence
+ module Persistence
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # Creates an object (or multiple objects) and saves it to the database, if validations pass.
+ # The resulting object is returned whether the object was saved successfully to the database or not.
+ #
+ # The +attributes+ parameter can be either a Hash or an Array of Hashes. These Hashes describe the
+ # attributes on the objects that are to be created.
+ #
+ # ==== Examples
+ # # Create a single new object
+ # User.create(first_name: 'Jamie')
+ #
+ # # Create an Array of new objects
+ # User.create([{ first_name: 'Jamie' }, { first_name: 'Jeremy' }])
+ #
+ # # Create a single object and pass it into a block to set other attributes.
+ # User.create(first_name: 'Jamie') do |u|
+ # u.is_admin = false
+ # end
+ #
+ # # Creating an Array of new objects using a block, where the block is executed for each object:
+ # User.create([{ first_name: 'Jamie' }, { first_name: 'Jeremy' }]) do |u|
+ # u.is_admin = false
+ # end
+ def create(attributes = nil, &block)
+ if attributes.is_a?(Array)
+ attributes.collect { |attr| create(attr, &block) }
+ else
+ object = new(attributes, &block)
+ object.save
+ object
+ end
+ end
+
+ # Creates an object (or multiple objects) and saves it to the database,
+ # if validations pass. Raises a RecordInvalid error if validations fail,
+ # unlike Base#create.
+ #
+ # The +attributes+ parameter can be either a Hash or an Array of Hashes.
+ # These describe the attributes to be set on the created object, or on
+ # multiple objects when given an Array of Hashes.
+ def create!(attributes = nil, &block)
+ if attributes.is_a?(Array)
+ attributes.collect { |attr| create!(attr, &block) }
+ else
+ object = new(attributes, &block)
+ object.save!
+ object
+ end
+ end
+
+ # Given an attributes hash, +instantiate+ returns a new instance of
+ # the appropriate class. Accepts only keys as strings.
+ #
+ # For example, +Post.all+ may return Comments, Messages, and Emails
+ # by storing the record's subclass in a +type+ attribute. By calling
+ # +instantiate+ instead of +new+, finder methods ensure they get new
+ # instances of the appropriate class for each record.
+ #
+ # See +ActiveRecord::Inheritance#discriminate_class_for_record+ to see
+ # how this "single-table" inheritance mapping is implemented.
+ def instantiate(attributes, column_types = {})
+ klass = discriminate_class_for_record(attributes)
+ attributes = klass.attributes_builder.build_from_database(attributes, column_types)
+ klass.allocate.init_with('attributes' => attributes, 'new_record' => false)
+ end
+
+ private
+ # Called by +instantiate+ to decide which class to use for a new
+ # record instance.
+ #
+ # See +ActiveRecord::Inheritance#discriminate_class_for_record+ for
+ # the single-table inheritance discriminator.
+ def discriminate_class_for_record(record)
+ self
+ end
+ end
+
+ # Returns true if this object hasn't been saved yet -- that is, a record
+ # for the object doesn't exist in the database yet; otherwise, returns false.
+ def new_record?
+ sync_with_transaction_state
+ @new_record
+ end
+
+ # Returns true if this object has been destroyed, otherwise returns false.
+ def destroyed?
+ sync_with_transaction_state
+ @destroyed
+ end
+
+ # Returns true if the record is persisted, i.e. it's not a new record and it was
+ # not destroyed, otherwise returns false.
+ def persisted?
+ !(new_record? || destroyed?)
+ end
+
+ # Saves the model.
+ #
+ # If the model is new a record gets created in the database, otherwise
+ # the existing record gets updated.
+ #
+ # By default, save always runs validations. If any of them fail, the action
+ # is cancelled and +save+ returns +false+. However, if you supply
+ # validate: false, validations are bypassed altogether. See
+ # ActiveRecord::Validations for more information.
+ #
+ # There's a series of callbacks associated with +save+. If any of the
+ # <tt>before_*</tt> callbacks return +false+ the action is cancelled and
+ # +save+ returns +false+. See ActiveRecord::Callbacks for further
+ # details.
+ #
+ # Attributes marked as readonly are silently ignored if the record is
+ # being updated.
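+ #
+ # A brief sketch (+user+ is a hypothetical record):
+ #
+ #   user.save                   # runs validations, returns true or false
+ #   user.save(validate: false)  # bypasses validations altogether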
+ def save(*)
+ create_or_update
+ rescue ActiveRecord::RecordInvalid
+ false
+ end
+
+ # Saves the model.
+ #
+ # If the model is new a record gets created in the database, otherwise
+ # the existing record gets updated.
+ #
+ # With <tt>save!</tt> validations always run. If any of them fail
+ # ActiveRecord::RecordInvalid gets raised. See ActiveRecord::Validations
+ # for more information.
+ #
+ # There's a series of callbacks associated with <tt>save!</tt>. If any of
+ # the <tt>before_*</tt> callbacks return +false+ the action is cancelled
+ # and <tt>save!</tt> raises ActiveRecord::RecordNotSaved. See
+ # ActiveRecord::Callbacks for further details.
+ #
+ # Attributes marked as readonly are silently ignored if the record is
+ # being updated.
+ def save!(*)
+ create_or_update || raise(RecordNotSaved)
+ end
+
+ # Deletes the record in the database and freezes this instance to
+ # reflect that no changes should be made (since they can't be
+ # persisted). Returns the frozen instance.
+ #
+ # The row is simply removed with an SQL +DELETE+ statement on the
+ # record's primary key, and no callbacks are executed.
+ #
+ # To enforce the object's +before_destroy+ and +after_destroy+
+ # callbacks or any <tt>:dependent</tt> association
+ # options, use <tt>#destroy</tt>.
+ def delete
+ self.class.delete(id) if persisted?
+ @destroyed = true
+ freeze
+ end
+
+ # Deletes the record in the database and freezes this instance to reflect
+ # that no changes should be made (since they can't be persisted).
+ #
+ # There's a series of callbacks associated with <tt>destroy</tt>. If
+ # the <tt>before_destroy</tt> callback returns +false+, the action is cancelled
+ # and <tt>destroy</tt> returns +false+. See
+ # ActiveRecord::Callbacks for further details.
+ def destroy
+ raise ReadOnlyRecord if readonly?
+ destroy_associations
+ destroy_row if persisted?
+ @destroyed = true
+ freeze
+ end
+
+ # Deletes the record in the database and freezes this instance to reflect
+ # that no changes should be made (since they can't be persisted).
+ #
+ # There's a series of callbacks associated with <tt>destroy!</tt>. If
+ # the <tt>before_destroy</tt> callback returns +false+, the action is cancelled
+ # and <tt>destroy!</tt> raises ActiveRecord::RecordNotDestroyed. See
+ # ActiveRecord::Callbacks for further details.
+ def destroy!
+ destroy || raise(ActiveRecord::RecordNotDestroyed)
+ end
+
+ # Returns an instance of the specified +klass+ with the attributes of the
+ # current record. This is mostly useful in relation to single-table
+ # inheritance structures where you want a subclass to appear as the
+ # superclass. This can be used along with record identification in
+ # Action Pack to allow, say, <tt>Client < Company</tt> to do something
+ # like render <tt>partial: @client.becomes(Company)</tt> to render that
+ # instance using the companies/company partial instead of clients/client.
+ #
+ # Note: The new instance will share a link to the same attributes as the original record,
+ # so any change to the attributes of either instance will affect the other.
+ def becomes(klass)
+ became = klass.new
+ became.instance_variable_set("@attributes", @attributes)
+ became.instance_variable_set("@changed_attributes", @changed_attributes) if defined?(@changed_attributes)
+ became.instance_variable_set("@new_record", new_record?)
+ became.instance_variable_set("@destroyed", destroyed?)
+ became.instance_variable_set("@errors", errors)
+ became
+ end
+
+ # Wrapper around +becomes+ that also changes the instance's sti column value.
+ # This is especially useful if you want to persist the changed class in your
+ # database.
+ #
+ # Note: The old instance's sti column value will be changed too, as both objects
+ # share the same set of attributes.
+ def becomes!(klass)
+ became = becomes(klass)
+ sti_type = nil
+ if !klass.descends_from_active_record?
+ sti_type = klass.sti_name
+ end
+ became.public_send("#{klass.inheritance_column}=", sti_type)
+ became
+ end
+
+ # Updates a single attribute and saves the record.
+ # This is especially useful for boolean flags on existing records. Also note that
+ #
+ # * Validation is skipped.
+ # * Callbacks are invoked.
+ # * updated_at/updated_on column is updated if that column is available.
+ # * Updates all the attributes that are dirty in this object.
+ #
+ # This method raises an +ActiveRecord::ActiveRecordError+ if the
+ # attribute is marked as readonly.
+ #
+ # See also +update_column+.
+ def update_attribute(name, value)
+ name = name.to_s
+ verify_readonly_attribute(name)
+ send("#{name}=", value)
+ save(validate: false)
+ end
+
+ # Updates the attributes of the model from the passed-in hash and saves the
+ # record, all wrapped in a transaction. If the object is invalid, the saving
+ # will fail and false will be returned.
+ def update(attributes)
+ # The following transaction covers any possible database side-effects of the
+ # attributes assignment. For example, setting the IDs of a child collection.
+ with_transaction_returning_status do
+ assign_attributes(attributes)
+ save
+ end
+ end
+
+ alias update_attributes update
+
+ # Updates its receiver just like +update+ but calls <tt>save!</tt> instead
+ # of +save+, so an exception is raised if the record is invalid.
+ def update!(attributes)
+ # The following transaction covers any possible database side-effects of the
+ # attributes assignment. For example, setting the IDs of a child collection.
+ with_transaction_returning_status do
+ assign_attributes(attributes)
+ save!
+ end
+ end
+
+ alias update_attributes! update!
+
+ # Equivalent to <code>update_columns(name => value)</code>.
+ def update_column(name, value)
+ update_columns(name => value)
+ end
+
+ # Updates the attributes directly in the database issuing an UPDATE SQL
+ # statement and sets them in the receiver:
+ #
+ # user.update_columns(last_request_at: Time.current)
+ #
+ # This is the fastest way to update attributes because it goes straight to
+ # the database, but be aware that, as a consequence, the regular update
+ # procedures are completely bypassed. In particular:
+ #
+ # * Validations are skipped.
+ # * Callbacks are skipped.
+ # * +updated_at+/+updated_on+ are not updated.
+ #
+ # This method raises an +ActiveRecord::ActiveRecordError+ when called on new
+ # objects, or when at least one of the attributes is marked as readonly.
+ def update_columns(attributes)
+ raise ActiveRecordError, "cannot update a new record" if new_record?
+ raise ActiveRecordError, "cannot update a destroyed record" if destroyed?
+
+ attributes.each_key do |key|
+ verify_readonly_attribute(key.to_s)
+ end
+
+ updated_count = self.class.unscoped.where(self.class.primary_key => id).update_all(attributes)
+
+ attributes.each do |k, v|
+ raw_write_attribute(k, v)
+ end
+
+ updated_count == 1
+ end
+
+ # Initializes +attribute+ to zero if +nil+ and adds the value passed as +by+ (default is 1).
+ # The increment is performed directly on the underlying attribute, no setter is invoked.
+ # Only makes sense for number-based attributes. Returns +self+.
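+ #
+ # For example (sketch, +user+ being hypothetical and +visits+ starting at nil):
+ #
+ #   user.increment(:visits)     # visits becomes 1
+ #   user.increment(:visits, 5)  # visits becomes 6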
+ def increment(attribute, by = 1)
+ self[attribute] ||= 0
+ self[attribute] += by
+ self
+ end
+
+ # Wrapper around +increment+ that saves the record. This method differs from
+ # its non-bang version in that it passes through the attribute setter.
+ # Saving is not subjected to validation checks. Returns +true+ if the
+ # record could be saved.
+ def increment!(attribute, by = 1)
+ increment(attribute, by).update_attribute(attribute, self[attribute])
+ end
+
+ # Initializes +attribute+ to zero if +nil+ and subtracts the value passed as +by+ (default is 1).
+ # The decrement is performed directly on the underlying attribute, no setter is invoked.
+ # Only makes sense for number-based attributes. Returns +self+.
+ def decrement(attribute, by = 1)
+ self[attribute] ||= 0
+ self[attribute] -= by
+ self
+ end
+
+ # Wrapper around +decrement+ that saves the record. This method differs from
+ # its non-bang version in that it passes through the attribute setter.
+ # Saving is not subjected to validation checks. Returns +true+ if the
+ # record could be saved.
+ def decrement!(attribute, by = 1)
+ decrement(attribute, by).update_attribute(attribute, self[attribute])
+ end
+
+ # Assigns to +attribute+ the boolean opposite of <tt>attribute?</tt>. So
+ # if the predicate returns +true+ the attribute will become +false+. This
+ # method toggles the underlying value directly without calling any setter.
+ # Returns +self+.
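+ #
+ # For example (sketch, +user+ being hypothetical):
+ #
+ #   user.banned?          # => true
+ #   user.toggle(:banned)
+ #   user[:banned]         # => false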
+ def toggle(attribute)
+ self[attribute] = !send("#{attribute}?")
+ self
+ end
+
+ # Wrapper around +toggle+ that saves the record. This method differs from
+ # its non-bang version in that it passes through the attribute setter.
+ # Saving is not subjected to validation checks. Returns +true+ if the
+ # record could be saved.
+ def toggle!(attribute)
+ toggle(attribute).update_attribute(attribute, self[attribute])
+ end
+
+ # Reloads the record from the database.
+ #
+ # This method finds the record by its primary key (which may have been assigned
+ # manually) and modifies the receiver in place:
+ #
+ # account = Account.new
+ # # => #<Account id: nil, email: nil>
+ # account.id = 1
+ # account.reload
+ # # Account Load (1.2ms) SELECT "accounts".* FROM "accounts" WHERE "accounts"."id" = $1 LIMIT 1 [["id", 1]]
+ # # => #<Account id: 1, email: 'account@example.com'>
+ #
+ # Attributes are reloaded from the database, and caches busted, in
+ # particular the associations cache.
+ #
+ # If the record no longer exists in the database <tt>ActiveRecord::RecordNotFound</tt>
+ # is raised. Otherwise, in addition to the in-place modification the method
+ # returns +self+ for convenience.
+ #
+ # The optional <tt>:lock</tt> flag option allows you to lock the reloaded record:
+ #
+ # reload(lock: true) # reload with pessimistic locking
+ #
+ # Reloading is commonly used in test suites to verify that something is
+ # actually written to the database, or when some action modifies the
+ # corresponding row in the database but not the object in memory:
+ #
+ # assert account.deposit!(25)
+ # assert_equal 25, account.credit # check it is updated in memory
+ # assert_equal 25, account.reload.credit # check it is also persisted
+ #
+ # Another common use case is optimistic locking handling:
+ #
+ # def with_optimistic_retry
+ # begin
+ # yield
+ # rescue ActiveRecord::StaleObjectError
+ # begin
+ # # Reload lock_version in particular.
+ # reload
+ # rescue ActiveRecord::RecordNotFound
+ # # If the record is gone there is nothing to do.
+ # else
+ # retry
+ # end
+ # end
+ # end
+ #
+ def reload(options = nil)
+ clear_aggregation_cache
+ clear_association_cache
+
+ fresh_object =
+ if options && options[:lock]
+ self.class.unscoped { self.class.lock(options[:lock]).find(id) }
+ else
+ self.class.unscoped { self.class.find(id) }
+ end
+
+ @attributes = fresh_object.instance_variable_get('@attributes')
+ @new_record = false
+ self
+ end
+
+ # Saves the record with the updated_at/on attributes set to the current time.
+ # Please note that no validation is performed and only the +after_touch+,
+ # +after_commit+ and +after_rollback+ callbacks are executed.
+ #
+ # If attribute names are passed, they are updated along with updated_at/on
+ # attributes.
+ #
+ # product.touch # updates updated_at/on
+ # product.touch(:designed_at) # updates the designed_at attribute and updated_at/on
+ # product.touch(:started_at, :ended_at) # updates started_at, ended_at and updated_at/on attributes
+ #
+ # If used along with +belongs_to+ then +touch+ will invoke the +touch+ method
+ # on the associated object.
+ #
+ # class Brake < ActiveRecord::Base
+ # belongs_to :car, touch: true
+ # end
+ #
+ # class Car < ActiveRecord::Base
+ # belongs_to :corporation, touch: true
+ # end
+ #
+ # # triggers @brake.car.touch and @brake.car.corporation.touch
+ # @brake.touch
+ #
+ # Note that +touch+ must be used on a persisted object, or else an
+ # ActiveRecordError will be raised. For example:
+ #
+ # ball = Ball.new
+ # ball.touch(:updated_at) # => raises ActiveRecordError
+ #
+ def touch(*names)
+ raise ActiveRecordError, "cannot touch on a new record object" unless persisted?
+
+ attributes = timestamp_attributes_for_update_in_model
+ attributes.concat(names)
+
+ unless attributes.empty?
+ current_time = current_time_from_proper_timezone
+ changes = {}
+
+ attributes.each do |column|
+ column = column.to_s
+ changes[column] = write_attribute(column, current_time)
+ end
+
+ changes[self.class.locking_column] = increment_lock if locking_enabled?
+
+ changed_attributes.except!(*changes.keys)
+ primary_key = self.class.primary_key
+ self.class.unscoped.where(primary_key => self[primary_key]).update_all(changes) == 1
+ else
+ true
+ end
+ end
+
+ private
+
+ # A hook to be overridden by association modules.
+ def destroy_associations
+ end
+
+ def destroy_row
+ relation_for_destroy.delete_all
+ end
+
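+ # Builds an unscoped relation restricted to this record's primary key, using
+ # a bind substitute so the DELETE runs with a bound id value.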
+ def relation_for_destroy
+ pk = self.class.primary_key
+ column = self.class.columns_hash[pk]
+ substitute = self.class.connection.substitute_at(column, 0)
+
+ relation = self.class.unscoped.where(
+ self.class.arel_table[pk].eq(substitute))
+
+ relation.bind_values = [[column, id]]
+ relation
+ end
+
+ def create_or_update
+ raise ReadOnlyRecord if readonly?
+ result = new_record? ? _create_record : _update_record
+ result != false
+ end
+
+ # Updates the associated record with values matching those of the instance attributes.
+ # Returns the number of affected rows.
+ def _update_record(attribute_names = self.attribute_names)
+ attributes_values = arel_attributes_with_values_for_update(attribute_names)
+ if attributes_values.empty?
+ 0
+ else
+ self.class.unscoped._update_record attributes_values, id, id_was
+ end
+ end
+
+ # Creates a record with values matching those of the instance attributes
+ # and returns its id.
+ def _create_record(attribute_names = self.attribute_names)
+ attributes_values = arel_attributes_with_values_for_create(attribute_names)
+
+ new_id = self.class.unscoped.insert attributes_values
+ self.id ||= new_id if self.class.primary_key
+
+ @new_record = false
+ id
+ end
+
+ def verify_readonly_attribute(name)
+ raise ActiveRecordError, "#{name} is marked as readonly" if self.class.readonly_attributes.include?(name)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/query_cache.rb b/activerecord/lib/active_record/query_cache.rb
new file mode 100644
index 0000000000..dcb2bd3d84
--- /dev/null
+++ b/activerecord/lib/active_record/query_cache.rb
@@ -0,0 +1,56 @@
+module ActiveRecord
+ # = Active Record Query Cache
+ class QueryCache
+ module ClassMethods
+ # Enable the query cache within the block if Active Record is connected.
+ # If it's not, the given block is simply executed without caching.
+ def cache(&block)
+ if ActiveRecord::Base.connected?
+ connection.cache(&block)
+ else
+ yield
+ end
+ end
+
+ # Disable the query cache within the block if Active Record is connected.
+ # If it's not, the given block is simply executed.
+ def uncached(&block)
+ if ActiveRecord::Base.connected?
+ connection.uncached(&block)
+ else
+ yield
+ end
+ end
+ end
+
+ def initialize(app)
+ @app = app
+ end
+
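+ # Rack entry point: enables the query cache for the request and restores the
+ # previous setting only once the response body is closed (which may be after
+ # streaming finishes), via Rack::BodyProxy.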
+ def call(env)
+ connection = ActiveRecord::Base.connection
+ enabled = connection.query_cache_enabled
+ connection_id = ActiveRecord::Base.connection_id
+ connection.enable_query_cache!
+
+ response = @app.call(env)
+ response[2] = Rack::BodyProxy.new(response[2]) do
+ restore_query_cache_settings(connection_id, enabled)
+ end
+
+ response
+ rescue Exception => e
+ restore_query_cache_settings(connection_id, enabled)
+ raise e
+ end
+
+ private
+
+ def restore_query_cache_settings(connection_id, enabled)
+ ActiveRecord::Base.connection_id = connection_id
+ ActiveRecord::Base.connection.clear_query_cache
+ ActiveRecord::Base.connection.disable_query_cache! unless enabled
+ end
+
+ end
+end
diff --git a/activerecord/lib/active_record/querying.rb b/activerecord/lib/active_record/querying.rb
new file mode 100644
index 0000000000..a9ddd9141f
--- /dev/null
+++ b/activerecord/lib/active_record/querying.rb
@@ -0,0 +1,58 @@
+module ActiveRecord
+ module Querying
+ delegate :find, :take, :take!, :first, :first!, :last, :last!, :exists?, :any?, :many?, to: :all
+ delegate :second, :second!, :third, :third!, :fourth, :fourth!, :fifth, :fifth!, :forty_two, :forty_two!, to: :all
+ delegate :first_or_create, :first_or_create!, :first_or_initialize, to: :all
+ delegate :find_or_create_by, :find_or_create_by!, :find_or_initialize_by, to: :all
+ delegate :find_by, :find_by!, to: :all
+ delegate :destroy, :destroy_all, :delete, :delete_all, :update, :update_all, to: :all
+ delegate :find_each, :find_in_batches, to: :all
+ delegate :select, :group, :order, :except, :reorder, :limit, :offset, :joins,
+ :where, :rewhere, :preload, :eager_load, :includes, :from, :lock, :readonly,
+ :having, :create_with, :uniq, :distinct, :references, :none, :unscope, to: :all
+ delegate :count, :average, :minimum, :maximum, :sum, :calculate, to: :all
+ delegate :pluck, :ids, to: :all
+
+ # Executes a custom SQL query against your database and returns all the results. The results will
+ # be returned as an array with columns requested encapsulated as attributes of the model you call
+ # this method from. If you call <tt>Product.find_by_sql</tt> then the results will be returned as
+ # +Product+ objects with the attributes you specified in the SQL query.
+ #
+ # If you call a complicated SQL query which spans multiple tables the columns specified by the
+ # SELECT will be attributes of the model, whether or not they are columns of the corresponding
+ # table.
+ #
+ # The +sql+ parameter is a full SQL query as a string. It will be called as is; no
+ # database-agnostic conversions will be performed. This should be a last resort because using, for example,
+ # MySQL specific terms will lock you to using that particular database engine or require you to
+ # change your call if you switch engines.
+ #
+ # # A simple SQL query spanning multiple tables
+ # Post.find_by_sql "SELECT p.title, c.author FROM posts p, comments c WHERE p.id = c.post_id"
+ # # => [#<Post:0x36bff9c @attributes={"title"=>"Ruby Meetup", "first_name"=>"Quentin"}>, ...]
+ #
+ # You can use the same string replacement techniques as you can with <tt>ActiveRecord::QueryMethods#where</tt>:
+ #
+ # Post.find_by_sql ["SELECT title FROM posts WHERE author = ? AND created > ?", author_id, start_date]
+ # Post.find_by_sql ["SELECT body FROM comments WHERE author = :user_id OR approved_by = :user_id", { :user_id => user_id }]
+ def find_by_sql(sql, binds = [])
+ result_set = connection.select_all(sanitize_sql(sql), "#{name} Load", binds)
+ column_types = result_set.column_types.except(*columns_hash.keys)
+ result_set.map { |record| instantiate(record, column_types) }
+ end
+
+ # Returns the result of an SQL statement that should only include a COUNT(*) in the SELECT part.
+ # The use of this method should be restricted to complicated SQL queries that can't be executed
+ # using the ActiveRecord::Calculations class methods. Look into those before using this.
+ #
+ # ==== Parameters
+ #
+ # * +sql+ - An SQL statement that returns a count from the database; see the example below.
+ #
+ # Product.count_by_sql "SELECT COUNT(*) FROM sales s, customers c WHERE s.customer_id = c.id"
+ def count_by_sql(sql)
+ sql = sanitize_conditions(sql)
+ connection.select_value(sql, "#{name} Count").to_i
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/railtie.rb b/activerecord/lib/active_record/railtie.rb
new file mode 100644
index 0000000000..a4ceacbf44
--- /dev/null
+++ b/activerecord/lib/active_record/railtie.rb
@@ -0,0 +1,164 @@
+require "active_record"
+require "rails"
+require "active_model/railtie"
+
+# For now, action_controller must always be present with
+# rails, so let's make sure that it gets required before
+# here. This is needed for correctly setting up the middleware.
+# In the future, this might become an optional require.
+require "action_controller/railtie"
+
+module ActiveRecord
+ # = Active Record Railtie
+ class Railtie < Rails::Railtie # :nodoc:
+ config.active_record = ActiveSupport::OrderedOptions.new
+
+ config.app_generators.orm :active_record, :migration => true,
+ :timestamps => true
+
+ config.app_middleware.insert_after "::ActionDispatch::Callbacks",
+ "ActiveRecord::QueryCache"
+
+ config.app_middleware.insert_after "::ActionDispatch::Callbacks",
+ "ActiveRecord::ConnectionAdapters::ConnectionManagement"
+
+ config.action_dispatch.rescue_responses.merge!(
+ 'ActiveRecord::RecordNotFound' => :not_found,
+ 'ActiveRecord::StaleObjectError' => :conflict,
+ 'ActiveRecord::RecordInvalid' => :unprocessable_entity,
+ 'ActiveRecord::RecordNotSaved' => :unprocessable_entity
+ )
+
+
+ config.active_record.use_schema_cache_dump = true
+ config.active_record.maintain_test_schema = true
+
+ config.eager_load_namespaces << ActiveRecord
+
+ rake_tasks do
+ require "active_record/base"
+
+ namespace :db do
+ task :load_config do
+ ActiveRecord::Tasks::DatabaseTasks.database_configuration = Rails.application.config.database_configuration
+
+ if defined?(ENGINE_PATH) && engine = Rails::Engine.find(ENGINE_PATH)
+ if engine.paths['db/migrate'].existent
+ ActiveRecord::Tasks::DatabaseTasks.migrations_paths += engine.paths['db/migrate'].to_a
+ end
+ end
+ end
+ end
+
+ load "active_record/railties/databases.rake"
+ end
+
+ # When loading console, force ActiveRecord::Base to be loaded
+ # to avoid cross references when loading a constant for the
+ # first time. Also, make it output to STDERR.
+ console do |app|
+ require "active_record/railties/console_sandbox" if app.sandbox?
+ require "active_record/base"
+ console = ActiveSupport::Logger.new(STDERR)
+ Rails.logger.extend ActiveSupport::Logger.broadcast console
+ end
+
+ runner do
+ require "active_record/base"
+ end
+
+ initializer "active_record.initialize_timezone" do
+ ActiveSupport.on_load(:active_record) do
+ self.time_zone_aware_attributes = true
+ self.default_timezone = :utc
+ end
+ end
+
+ initializer "active_record.logger" do
+ ActiveSupport.on_load(:active_record) { self.logger ||= ::Rails.logger }
+ end
+
+ initializer "active_record.migration_error" do
+ if config.active_record.delete(:migration_error) == :page_load
+ config.app_middleware.insert_after "::ActionDispatch::Callbacks",
+ "ActiveRecord::Migration::CheckPending"
+ end
+ end
+
+ initializer "active_record.check_schema_cache_dump" do
+ if config.active_record.delete(:use_schema_cache_dump)
+ config.after_initialize do |app|
+ ActiveSupport.on_load(:active_record) do
+ filename = File.join(app.config.paths["db"].first, "schema_cache.dump")
+
+ if File.file?(filename)
+ cache = Marshal.load File.binread filename
+ if cache.version == ActiveRecord::Migrator.current_version
+ self.connection.schema_cache = cache
+ else
+ warn "Ignoring db/schema_cache.dump because it has expired. The current schema version is #{ActiveRecord::Migrator.current_version}, but the one in the cache is #{cache.version}."
+ end
+ end
+ end
+ end
+ end
+ end
+
+ initializer "active_record.set_configs" do |app|
+ ActiveSupport.on_load(:active_record) do
+ app.config.active_record.each do |k,v|
+ send "#{k}=", v
+ end
+ end
+ end
+
+ # This sets the database configuration from Configuration#database_configuration
+ # and then establishes the connection.
+ initializer "active_record.initialize_database" do |app|
+ ActiveSupport.on_load(:active_record) do
+ self.configurations = Rails.application.config.database_configuration
+
+ begin
+ establish_connection
+ rescue ActiveRecord::NoDatabaseError
+ warn <<-end_warning
+Oops - You have a database configured, but it doesn't exist yet!
+
+Here's how to get started:
+
+ 1. Configure your database in config/database.yml.
+ 2. Run `bin/rake db:create` to create the database.
+ 3. Run `bin/rake db:setup` to load your database schema.
+end_warning
+ raise
+ end
+ end
+ end
+
+ # Expose database runtime to controller for logging.
+ initializer "active_record.log_runtime" do
+ require "active_record/railties/controller_runtime"
+ ActiveSupport.on_load(:action_controller) do
+ include ActiveRecord::Railties::ControllerRuntime
+ end
+ end
+
+ initializer "active_record.set_reloader_hooks" do |app|
+ hook = app.config.reload_classes_only_on_change ? :to_prepare : :to_cleanup
+
+ ActiveSupport.on_load(:active_record) do
+ ActionDispatch::Reloader.send(hook) do
+ if ActiveRecord::Base.connected?
+ ActiveRecord::Base.clear_reloadable_connections!
+ ActiveRecord::Base.clear_cache!
+ end
+ end
+ end
+ end
+
+ initializer "active_record.add_watchable_files" do |app|
+ path = app.paths["db"].first
+ config.watchable_files.concat ["#{path}/schema.rb", "#{path}/structure.sql"]
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/railties/console_sandbox.rb b/activerecord/lib/active_record/railties/console_sandbox.rb
new file mode 100644
index 0000000000..604a220303
--- /dev/null
+++ b/activerecord/lib/active_record/railties/console_sandbox.rb
@@ -0,0 +1,5 @@
+ActiveRecord::Base.connection.begin_transaction(joinable: false)
+
+at_exit do
+ ActiveRecord::Base.connection.rollback_transaction
+end
diff --git a/activerecord/lib/active_record/railties/controller_runtime.rb b/activerecord/lib/active_record/railties/controller_runtime.rb
new file mode 100644
index 0000000000..af4840476c
--- /dev/null
+++ b/activerecord/lib/active_record/railties/controller_runtime.rb
@@ -0,0 +1,50 @@
+require 'active_support/core_ext/module/attr_internal'
+require 'active_record/log_subscriber'
+
+module ActiveRecord
+ module Railties # :nodoc:
+ module ControllerRuntime #:nodoc:
+ extend ActiveSupport::Concern
+
+ protected
+
+ attr_internal :db_runtime
+
+ def process_action(action, *args)
+ # We also need to reset the runtime before each action because of queries
+ # in middleware, or in cases where we are streaming, since it won't be
+ # cleaned up by the method below.
+ ActiveRecord::LogSubscriber.reset_runtime
+ super
+ end
+
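+ # Splits the database time accumulated while rendering out of the reported
+ # view runtime, so view time and DB time show up separately in the logs.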
+ def cleanup_view_runtime
+ if ActiveRecord::Base.connected?
+ db_rt_before_render = ActiveRecord::LogSubscriber.reset_runtime
+ self.db_runtime = (db_runtime || 0) + db_rt_before_render
+ runtime = super
+ db_rt_after_render = ActiveRecord::LogSubscriber.reset_runtime
+ self.db_runtime += db_rt_after_render
+ runtime - db_rt_after_render
+ else
+ super
+ end
+ end
+
+ def append_info_to_payload(payload)
+ super
+ if ActiveRecord::Base.connected?
+ payload[:db_runtime] = (db_runtime || 0) + ActiveRecord::LogSubscriber.reset_runtime
+ end
+ end
+
+ module ClassMethods # :nodoc:
+ def log_process_action(payload)
+ messages, db_runtime = super, payload[:db_runtime]
+ messages << ("ActiveRecord: %.1fms" % db_runtime.to_f) if db_runtime
+ messages
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/railties/databases.rake b/activerecord/lib/active_record/railties/databases.rake
new file mode 100644
index 0000000000..458862a538
--- /dev/null
+++ b/activerecord/lib/active_record/railties/databases.rake
@@ -0,0 +1,390 @@
+require 'active_record'
+
+db_namespace = namespace :db do
+ task :load_config do
+ ActiveRecord::Base.configurations = ActiveRecord::Tasks::DatabaseTasks.database_configuration || {}
+ ActiveRecord::Migrator.migrations_paths = ActiveRecord::Tasks::DatabaseTasks.migrations_paths
+ end
+
+ namespace :create do
+ task :all => :load_config do
+ ActiveRecord::Tasks::DatabaseTasks.create_all
+ end
+ end
+
+ desc 'Creates the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:create:all to create all databases in the config). Without RAILS_ENV it defaults to creating the development and test databases.'
+ task :create => [:load_config] do
+ ActiveRecord::Tasks::DatabaseTasks.create_current
+ end
+
+ namespace :drop do
+ task :all => :load_config do
+ ActiveRecord::Tasks::DatabaseTasks.drop_all
+ end
+ end
+
+ desc 'Drops the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:drop:all to drop all databases in the config). Without RAILS_ENV it defaults to dropping the development and test databases.'
+ task :drop => [:load_config] do
+ ActiveRecord::Tasks::DatabaseTasks.drop_current
+ end
+
+ namespace :purge do
+ task :all => :load_config do
+ ActiveRecord::Tasks::DatabaseTasks.purge_all
+ end
+ end
+
+ # desc "Empty the database from DATABASE_URL or config/database.yml for the current RAILS_ENV (use db:drop:all to drop all databases in the config). Without RAILS_ENV it defaults to purging the development and test databases."
+ task :purge => [:load_config] do
+ ActiveRecord::Tasks::DatabaseTasks.purge_current
+ end
+
+ desc "Migrate the database (options: VERSION=x, VERBOSE=false, SCOPE=blog)."
+ task :migrate => [:environment, :load_config] do
+ ActiveRecord::Tasks::DatabaseTasks.migrate
+ db_namespace['_dump'].invoke if ActiveRecord::Base.dump_schema_after_migration
+ end
+
+ task :_dump do
+ case ActiveRecord::Base.schema_format
+ when :ruby then db_namespace["schema:dump"].invoke
+ when :sql then db_namespace["structure:dump"].invoke
+ else
+ raise "unknown schema format #{ActiveRecord::Base.schema_format}"
+ end
+ # Allow this task to be called as many times as required. An example is the
+ # migrate:redo task, which internally calls two other tasks that depend on this one.
+ db_namespace['_dump'].reenable
+ end
+
+ namespace :migrate do
+ # desc 'Rolls the database back one migration and re-runs the migration up (options: STEP=x, VERSION=x).'
+ task :redo => [:environment, :load_config] do
+ if ENV['VERSION']
+ db_namespace['migrate:down'].invoke
+ db_namespace['migrate:up'].invoke
+ else
+ db_namespace['rollback'].invoke
+ db_namespace['migrate'].invoke
+ end
+ end
+
+ # desc 'Resets your database using your migrations for the current environment'
+ task :reset => ['db:drop', 'db:create', 'db:migrate']
+
+ # desc 'Runs the "up" for a given migration VERSION.'
+ task :up => [:environment, :load_config] do
+ version = ENV['VERSION'] ? ENV['VERSION'].to_i : nil
+ raise 'VERSION is required' unless version
+ ActiveRecord::Migrator.run(:up, ActiveRecord::Migrator.migrations_paths, version)
+ db_namespace['_dump'].invoke
+ end
+
+ # desc 'Runs the "down" for a given migration VERSION.'
+ task :down => [:environment, :load_config] do
+ version = ENV['VERSION'] ? ENV['VERSION'].to_i : nil
+ raise 'VERSION is required - To go down one migration, run db:rollback' unless version
+ ActiveRecord::Migrator.run(:down, ActiveRecord::Migrator.migrations_paths, version)
+ db_namespace['_dump'].invoke
+ end
+
+ desc 'Display status of migrations'
+ task :status => [:environment, :load_config] do
+ unless ActiveRecord::SchemaMigration.table_exists?
+ abort 'Schema migrations table does not exist yet.'
+ end
+ db_list = ActiveRecord::SchemaMigration.normalized_versions
+
+ file_list =
+ ActiveRecord::Migrator.migrations_paths.flat_map do |path|
+ # match "20091231235959_some_name.rb" and "001_some_name.rb" pattern
+ Dir.foreach(path).grep(/^(\d{3,})_(.+)\.rb$/) do
+ version = ActiveRecord::SchemaMigration.normalize_migration_number($1)
+ status = db_list.delete(version) ? 'up' : 'down'
+ [status, version, $2.humanize]
+ end
+ end
+
+ db_list.map! do |version|
+ ['up', version, '********** NO FILE **********']
+ end
+ # output
+ puts "\ndatabase: #{ActiveRecord::Base.connection_config[:database]}\n\n"
+ puts "#{'Status'.center(8)} #{'Migration ID'.ljust(14)} Migration Name"
+ puts "-" * 50
+ (db_list + file_list).sort_by { |_, version, _| version }.each do |status, version, name|
+ puts "#{status.center(8)} #{version.ljust(14)} #{name}"
+ end
+ puts
+ end
+ end
+
+ desc 'Rolls the schema back to the previous version (specify steps w/ STEP=n).'
+ task :rollback => [:environment, :load_config] do
+ step = ENV['STEP'] ? ENV['STEP'].to_i : 1
+ ActiveRecord::Migrator.rollback(ActiveRecord::Migrator.migrations_paths, step)
+ db_namespace['_dump'].invoke
+ end
+
+ # desc 'Pushes the schema to the next version (specify steps w/ STEP=n).'
+ task :forward => [:environment, :load_config] do
+ step = ENV['STEP'] ? ENV['STEP'].to_i : 1
+ ActiveRecord::Migrator.forward(ActiveRecord::Migrator.migrations_paths, step)
+ db_namespace['_dump'].invoke
+ end
+
+ # desc 'Drops and recreates the database from db/schema.rb for the current environment and loads the seeds.'
+ task :reset => [:environment, :load_config] do
+ db_namespace["drop"].invoke
+ db_namespace["setup"].invoke
+ end
+
+ # desc "Retrieves the charset for the current environment's database"
+ task :charset => [:environment, :load_config] do
+ puts ActiveRecord::Tasks::DatabaseTasks.charset_current
+ end
+
+ # desc "Retrieves the collation for the current environment's database"
+ task :collation => [:environment, :load_config] do
+ begin
+ puts ActiveRecord::Tasks::DatabaseTasks.collation_current
+ rescue NoMethodError
+ $stderr.puts 'Sorry, your database adapter is not supported yet. Feel free to submit a patch.'
+ end
+ end
+
+ desc 'Retrieves the current schema version number'
+ task :version => [:environment, :load_config] do
+ puts "Current version: #{ActiveRecord::Migrator.current_version}"
+ end
+
+ # desc "Raises an error if there are pending migrations"
+ task :abort_if_pending_migrations => :environment do
+ pending_migrations = ActiveRecord::Migrator.open(ActiveRecord::Migrator.migrations_paths).pending_migrations
+
+ if pending_migrations.any?
+ puts "You have #{pending_migrations.size} pending #{pending_migrations.size > 1 ? 'migrations:' : 'migration:'}"
+ pending_migrations.each do |pending_migration|
+ puts ' %4d %s' % [pending_migration.version, pending_migration.name]
+ end
+ abort %{Run `rake db:migrate` to update your database then try again.}
+ end
+ end
+
+ desc 'Create the database, load the schema, and initialize with the seed data (use db:reset to also drop the database first)'
+ task :setup => ['db:schema:load_if_ruby', 'db:structure:load_if_sql', :seed]
+
+ desc 'Load the seed data from db/seeds.rb'
+ task :seed do
+ db_namespace['abort_if_pending_migrations'].invoke
+ ActiveRecord::Tasks::DatabaseTasks.load_seed
+ end
+
+ namespace :fixtures do
+ desc "Load fixtures into the current environment's database. Load specific fixtures using FIXTURES=x,y. Load from subdirectory in test/fixtures using FIXTURES_DIR=z. Specify an alternative path (eg. spec/fixtures) using FIXTURES_PATH=spec/fixtures."
+ task :load => [:environment, :load_config] do
+ require 'active_record/fixtures'
+
+ base_dir = ActiveRecord::Tasks::DatabaseTasks.fixtures_path
+
+ fixtures_dir = if ENV['FIXTURES_DIR']
+ File.join base_dir, ENV['FIXTURES_DIR']
+ else
+ base_dir
+ end
+
+ fixture_files = if ENV['FIXTURES']
+ ENV['FIXTURES'].split(',')
+ else
+ Pathname.glob("#{fixtures_dir}/**/*.yml").map {|f| f.basename.sub_ext('').to_s }
+ end
+
+ ActiveRecord::FixtureSet.create_fixtures(fixtures_dir, fixture_files)
+ end
+
+ # desc "Search for a fixture given a LABEL or ID. Specify an alternative path (eg. spec/fixtures) using FIXTURES_PATH=spec/fixtures."
+ task :identify => [:environment, :load_config] do
+ require 'active_record/fixtures'
+
+ label, id = ENV['LABEL'], ENV['ID']
+ raise 'LABEL or ID required' if label.blank? && id.blank?
+
+ puts %Q(The fixture ID for "#{label}" is #{ActiveRecord::FixtureSet.identify(label)}.) if label
+
+ base_dir = ActiveRecord::Tasks::DatabaseTasks.fixtures_path
+
+ Dir["#{base_dir}/**/*.yml"].each do |file|
+ if data = YAML::load(ERB.new(IO.read(file)).result)
+ data.keys.each do |key|
+ key_id = ActiveRecord::FixtureSet.identify(key)
+
+ if key == label || key_id == id.to_i
+ puts "#{file}: #{key} (#{key_id})"
+ end
+ end
+ end
+ end
+ end
+ end
+
+ namespace :schema do
+ desc 'Create a db/schema.rb file that is portable against any DB supported by AR'
+ task :dump => [:environment, :load_config] do
+ require 'active_record/schema_dumper'
+ filename = ENV['SCHEMA'] || File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, 'schema.rb')
+ File.open(filename, "w:utf-8") do |file|
+ ActiveRecord::SchemaDumper.dump(ActiveRecord::Base.connection, file)
+ end
+ db_namespace['schema:dump'].reenable
+ end
+
+ desc 'Load a schema.rb file into the database'
+ task :load => [:environment, :load_config] do
+ ActiveRecord::Tasks::DatabaseTasks.load_schema_current(:ruby, ENV['SCHEMA'])
+ end
+
+ task :load_if_ruby => ['db:create', :environment] do
+ db_namespace["schema:load"].invoke if ActiveRecord::Base.schema_format == :ruby
+ end
+
+ namespace :cache do
+ desc 'Create a db/schema_cache.dump file.'
+ task :dump => [:environment, :load_config] do
+ con = ActiveRecord::Base.connection
+ filename = File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "schema_cache.dump")
+
+ con.schema_cache.clear!
+ con.tables.each { |table| con.schema_cache.add(table) }
+ open(filename, 'wb') { |f| f.write(Marshal.dump(con.schema_cache)) }
+ end
+
+ desc 'Clear a db/schema_cache.dump file.'
+ task :clear => [:environment, :load_config] do
+ filename = File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "schema_cache.dump")
+ FileUtils.rm(filename) if File.exist?(filename)
+ end
+ end
+
+ end
+
+ namespace :structure do
+ desc 'Dump the database structure to db/structure.sql. Specify another file with DB_STRUCTURE=db/my_structure.sql'
+ task :dump => [:environment, :load_config] do
+ filename = ENV['DB_STRUCTURE'] || File.join(ActiveRecord::Tasks::DatabaseTasks.db_dir, "structure.sql")
+ current_config = ActiveRecord::Tasks::DatabaseTasks.current_config
+ ActiveRecord::Tasks::DatabaseTasks.structure_dump(current_config, filename)
+
+ if ActiveRecord::Base.connection.supports_migrations? &&
+ ActiveRecord::SchemaMigration.table_exists?
+ File.open(filename, "a") do |f|
+ f.puts ActiveRecord::Base.connection.dump_schema_information
+ f.print "\n"
+ end
+ end
+ db_namespace['structure:dump'].reenable
+ end
+
+ desc "Recreate the databases from the structure.sql file"
+ task :load => [:environment, :load_config] do
+ ActiveRecord::Tasks::DatabaseTasks.load_schema_current(:sql, ENV['DB_STRUCTURE'])
+ end
+
+ task :load_if_sql => ['db:create', :environment] do
+ db_namespace["structure:load"].invoke if ActiveRecord::Base.schema_format == :sql
+ end
+ end
+
+ namespace :test do
+
+ task :deprecated do
+ Rake.application.top_level_tasks.grep(/^db:test:/).each do |task|
+ $stderr.puts "WARNING: #{task} is deprecated. The Rails test helper now maintains " \
+ "your test schema automatically, see the release notes for details."
+ end
+ end
+
+ # desc "Recreate the test database from the current schema"
+ task :load => %w(db:test:deprecated db:test:purge) do
+ case ActiveRecord::Base.schema_format
+ when :ruby
+ db_namespace["test:load_schema"].invoke
+ when :sql
+ db_namespace["test:load_structure"].invoke
+ end
+ end
+
+ # desc "Recreate the test database from an existent schema.rb file"
+ task :load_schema => %w(db:test:deprecated db:test:purge) do
+ begin
+ should_reconnect = ActiveRecord::Base.connection_pool.active_connection?
+ ActiveRecord::Schema.verbose = false
+ ActiveRecord::Tasks::DatabaseTasks.load_schema_for ActiveRecord::Base.configurations['test'], :ruby, ENV['SCHEMA']
+ ensure
+ if should_reconnect
+ ActiveRecord::Base.establish_connection(ActiveRecord::Base.configurations[ActiveRecord::Tasks::DatabaseTasks.env])
+ end
+ end
+ end
+
+ # desc "Recreate the test database from an existent structure.sql file"
+ task :load_structure => %w(db:test:deprecated db:test:purge) do
+ ActiveRecord::Tasks::DatabaseTasks.load_schema_for ActiveRecord::Base.configurations['test'], :sql, ENV['SCHEMA']
+ end
+
+ # desc "Recreate the test database from a fresh schema"
+ task :clone => %w(db:test:deprecated environment) do
+ case ActiveRecord::Base.schema_format
+ when :ruby
+ db_namespace["test:clone_schema"].invoke
+ when :sql
+ db_namespace["test:clone_structure"].invoke
+ end
+ end
+
+ # desc "Recreate the test database from a fresh schema.rb file"
+ task :clone_schema => %w(db:test:deprecated db:schema:dump db:test:load_schema)
+
+ # desc "Recreate the test database from a fresh structure.sql file"
+ task :clone_structure => %w(db:test:deprecated db:structure:dump db:test:load_structure)
+
+ # desc "Empty the test database"
+ task :purge => %w(db:test:deprecated environment load_config) do
+ ActiveRecord::Tasks::DatabaseTasks.purge ActiveRecord::Base.configurations['test']
+ end
+
+ # desc 'Check for pending migrations and load the test schema'
+ task :prepare => %w(db:test:deprecated environment load_config) do
+ unless ActiveRecord::Base.configurations.blank?
+ db_namespace['test:load'].invoke
+ end
+ end
+ end
+end
+
+namespace :railties do
+ namespace :install do
+ # desc "Copies missing migrations from Railties (e.g. engines). You can specify Railties to use with FROM=railtie1,railtie2"
+ task :migrations => :'db:load_config' do
+ to_load = ENV['FROM'].blank? ? :all : ENV['FROM'].split(",").map {|n| n.strip }
+ railties = {}
+ Rails.application.migration_railties.each do |railtie|
+ next unless to_load == :all || to_load.include?(railtie.railtie_name)
+
+ if railtie.respond_to?(:paths) && (path = railtie.paths['db/migrate'].first)
+ railties[railtie.railtie_name] = path
+ end
+ end
+
+ on_skip = Proc.new do |name, migration|
+        puts "NOTE: Migration #{migration.basename} from #{name} has been skipped. A migration with the same name already exists."
+ end
+
+ on_copy = Proc.new do |name, migration|
+ puts "Copied migration #{migration.basename} from #{name}"
+ end
+
+ ActiveRecord::Migration.copy(ActiveRecord::Migrator.migrations_paths.first, railties,
+ :on_skip => on_skip, :on_copy => on_copy)
+ end
+ end
+end
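The task above builds a hash mapping each railtie name to its db/migrate path and hands it to ActiveRecord::Migration.copy together with two reporting callbacks. A minimal sketch of driving the same helper directly, assuming a hypothetical engine checked out under engines/blog:

    # Hypothetical engine path; the task derives this from railtie.paths['db/migrate'].
    railties = { "blog" => "engines/blog/db/migrate" }

    on_skip = Proc.new do |name, migration|
      puts "NOTE: Migration #{migration.basename} from #{name} has been skipped."
    end

    on_copy = Proc.new do |name, migration|
      puts "Copied migration #{migration.basename} from #{name}"
    end

    # Copies any migrations missing from the application's own migrations directory,
    # invoking on_copy or on_skip for each candidate file.
    ActiveRecord::Migration.copy("db/migrate", railties,
                                 :on_skip => on_skip, :on_copy => on_copy)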
diff --git a/activerecord/lib/active_record/railties/jdbcmysql_error.rb b/activerecord/lib/active_record/railties/jdbcmysql_error.rb
new file mode 100644
index 0000000000..6a38211bff
--- /dev/null
+++ b/activerecord/lib/active_record/railties/jdbcmysql_error.rb
@@ -0,0 +1,16 @@
+# FIXME: Remove once ArJdbcMySQL provides this.
+module ArJdbcMySQL #:nodoc:
+ class Error < StandardError #:nodoc:
+ attr_accessor :error_number, :sql_state
+
+ def initialize msg
+ super
+ @error_number = nil
+ @sql_state = nil
+ end
+
+ # Mysql gem compatibility
+ alias_method :errno, :error_number
+ alias_method :error, :message
+ end
+end
diff --git a/activerecord/lib/active_record/readonly_attributes.rb b/activerecord/lib/active_record/readonly_attributes.rb
new file mode 100644
index 0000000000..85bbac43e4
--- /dev/null
+++ b/activerecord/lib/active_record/readonly_attributes.rb
@@ -0,0 +1,23 @@
+module ActiveRecord
+ module ReadonlyAttributes
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :_attr_readonly, instance_accessor: false
+ self._attr_readonly = []
+ end
+
+ module ClassMethods
+ # Attributes listed as readonly will be used to create a new record but update operations will
+ # ignore these fields.
+ def attr_readonly(*attributes)
+ self._attr_readonly = Set.new(attributes.map { |a| a.to_s }) + (self._attr_readonly || [])
+ end
+
+ # Returns an array of all the attributes that have been specified as readonly.
+ def readonly_attributes
+ self._attr_readonly
+ end
+ end
+ end
+end
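The attr_readonly macro above collects column names that are written when a record is created but silently dropped from subsequent UPDATE statements. A short usage sketch, assuming a hypothetical Post model with a guid column:

    class Post < ActiveRecord::Base
      attr_readonly :guid
    end

    post = Post.create!(guid: "abc-123")  # guid is included in the INSERT
    post.update(guid: "changed")          # guid is omitted from the UPDATE
    post.reload.guid                      # => "abc-123"

    Post.readonly_attributes              # => #<Set: {"guid"}>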
diff --git a/activerecord/lib/active_record/reflection.rb b/activerecord/lib/active_record/reflection.rb
new file mode 100644
index 0000000000..1547c8e3f4
--- /dev/null
+++ b/activerecord/lib/active_record/reflection.rb
@@ -0,0 +1,867 @@
+require 'thread'
+
+module ActiveRecord
+ # = Active Record Reflection
+ module Reflection # :nodoc:
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :_reflections
+ class_attribute :aggregate_reflections
+ self._reflections = {}
+ self.aggregate_reflections = {}
+ end
+
+ def self.create(macro, name, scope, options, ar)
+ klass = case macro
+ when :composed_of
+ AggregateReflection
+ when :has_many
+ HasManyReflection
+ when :has_one
+ HasOneReflection
+ when :belongs_to
+ BelongsToReflection
+ else
+ raise "Unsupported Macro: #{macro}"
+ end
+
+ reflection = klass.new(name, scope, options, ar)
+ options[:through] ? ThroughReflection.new(reflection) : reflection
+ end
+
+ def self.add_reflection(ar, name, reflection)
+ ar._reflections = ar._reflections.merge(name.to_s => reflection)
+ end
+
+ def self.add_aggregate_reflection(ar, name, reflection)
+ ar.aggregate_reflections = ar.aggregate_reflections.merge(name.to_s => reflection)
+ end
+
+ # \Reflection enables interrogating Active Record classes and objects
+ # about their associations and aggregations. This information can,
+ # for example, be used in a form builder that takes an Active Record object
+ # and creates input fields for all of the attributes depending on their type
+ # and displays the associations to other objects.
+ #
+    # The MacroReflection class holds the information shared by the AggregateReflection
+    # and AssociationReflection classes.
+ module ClassMethods
+ # Returns an array of AggregateReflection objects for all the aggregations in the class.
+ def reflect_on_all_aggregations
+ aggregate_reflections.values
+ end
+
+ # Returns the AggregateReflection object for the named +aggregation+ (use the symbol).
+ #
+ # Account.reflect_on_aggregation(:balance) # => the balance AggregateReflection
+ #
+ def reflect_on_aggregation(aggregation)
+ aggregate_reflections[aggregation.to_s]
+ end
+
+      # Returns a Hash with the name of the reflection as the key and an AssociationReflection as the value.
+ #
+ # Account.reflections # => {balance: AggregateReflection}
+ #
+ # @api public
+ def reflections
+ ref = {}
+ _reflections.each do |name, reflection|
+ parent_name, parent_reflection = reflection.parent_reflection
+ if parent_name
+ ref[parent_name] = parent_reflection
+ else
+ ref[name] = reflection
+ end
+ end
+ ref
+ end
+
+ # Returns an array of AssociationReflection objects for all the
+ # associations in the class. If you only want to reflect on a certain
+ # association type, pass in the symbol (<tt>:has_many</tt>, <tt>:has_one</tt>,
+ # <tt>:belongs_to</tt>) as the first parameter.
+ #
+ # Example:
+ #
+ # Account.reflect_on_all_associations # returns an array of all associations
+ # Account.reflect_on_all_associations(:has_many) # returns an array of all has_many associations
+ #
+ # @api public
+ def reflect_on_all_associations(macro = nil)
+ association_reflections = reflections.values
+ macro ? association_reflections.select { |reflection| reflection.macro == macro } : association_reflections
+ end
+
+ # Returns the AssociationReflection object for the +association+ (use the symbol).
+ #
+ # Account.reflect_on_association(:owner) # returns the owner AssociationReflection
+ # Invoice.reflect_on_association(:line_items).macro # returns :has_many
+ #
+ # @api public
+ def reflect_on_association(association)
+ reflections[association.to_s]
+ end
+
+ # @api private
+ def _reflect_on_association(association) #:nodoc:
+ _reflections[association.to_s]
+ end
+
+ # Returns an array of AssociationReflection objects for all associations which have <tt>:autosave</tt> enabled.
+ #
+ # @api public
+ def reflect_on_all_autosave_associations
+ reflections.values.select { |reflection| reflection.options[:autosave] }
+ end
+ end
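Taken together, these class methods let application code introspect a model's associations and aggregations at runtime. A short sketch, assuming hypothetical Author and Book models:

    class Author < ActiveRecord::Base
      has_many :books
    end

    class Book < ActiveRecord::Base
      belongs_to :author
    end

    reflection = Author.reflect_on_association(:books)
    reflection.macro        # => :has_many
    reflection.klass        # => Book
    reflection.foreign_key  # => "author_id"

    Author.reflect_on_all_associations(:has_many).map(&:name)  # => [:books]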
+
+ # Holds all the methods that are shared between MacroReflection, AssociationReflection
+ # and ThroughReflection
+ class AbstractReflection # :nodoc:
+ def table_name
+ klass.table_name
+ end
+
+ # Returns a new, unsaved instance of the associated class. +attributes+ will
+ # be passed to the class's constructor.
+ def build_association(attributes, &block)
+ klass.new(attributes, &block)
+ end
+
+ def quoted_table_name
+ klass.quoted_table_name
+ end
+
+ def primary_key_type
+ klass.type_for_attribute(klass.primary_key)
+ end
+
+ # Returns the class name for the macro.
+ #
+ # <tt>composed_of :balance, class_name: 'Money'</tt> returns <tt>'Money'</tt>
+ # <tt>has_many :clients</tt> returns <tt>'Client'</tt>
+ def class_name
+ @class_name ||= (options[:class_name] || derive_class_name).to_s
+ end
+
+ JoinKeys = Struct.new(:key, :foreign_key) # :nodoc:
+
+ def join_keys(assoc_klass)
+ JoinKeys.new(foreign_key, active_record_primary_key)
+ end
+
+ def source_macro
+ ActiveSupport::Deprecation.warn("ActiveRecord::Base.source_macro is deprecated and " \
+ "will be removed without replacement.")
+ macro
+ end
+ end
+ # Base class for AggregateReflection and AssociationReflection. Objects of
+ # AggregateReflection and AssociationReflection are returned by the Reflection::ClassMethods.
+ #
+ # MacroReflection
+ # AssociationReflection
+ # AggregateReflection
+ # HasManyReflection
+ # HasOneReflection
+ # BelongsToReflection
+ # ThroughReflection
+ class MacroReflection < AbstractReflection
+ # Returns the name of the macro.
+ #
+ # <tt>composed_of :balance, class_name: 'Money'</tt> returns <tt>:balance</tt>
+ # <tt>has_many :clients</tt> returns <tt>:clients</tt>
+ attr_reader :name
+
+ attr_reader :scope
+
+ # Returns the hash of options used for the macro.
+ #
+ # <tt>composed_of :balance, class_name: 'Money'</tt> returns <tt>{ class_name: "Money" }</tt>
+ # <tt>has_many :clients</tt> returns <tt>{}</tt>
+ attr_reader :options
+
+ attr_reader :active_record
+
+ attr_reader :plural_name # :nodoc:
+
+ def initialize(name, scope, options, active_record)
+ @name = name
+ @scope = scope
+ @options = options
+ @active_record = active_record
+ @klass = options[:class]
+ @plural_name = active_record.pluralize_table_names ?
+ name.to_s.pluralize : name.to_s
+ end
+
+ def autosave=(autosave)
+ @automatic_inverse_of = false
+ @options[:autosave] = autosave
+ _, parent_reflection = self.parent_reflection
+ if parent_reflection
+ parent_reflection.autosave = autosave
+ end
+ end
+
+ # Returns the class for the macro.
+ #
+ # <tt>composed_of :balance, class_name: 'Money'</tt> returns the Money class
+ # <tt>has_many :clients</tt> returns the Client class
+ def klass
+ @klass ||= compute_class(class_name)
+ end
+
+ def compute_class(name)
+ name.constantize
+ end
+
+      # Returns +true+ if +self+ and +other_aggregation+ have the same +name+ and +active_record+
+      # attributes, and +other_aggregation+ has an options hash assigned to it.
+ def ==(other_aggregation)
+ super ||
+ other_aggregation.kind_of?(self.class) &&
+ name == other_aggregation.name &&
+ !other_aggregation.options.nil? &&
+ active_record == other_aggregation.active_record
+ end
+
+ private
+ def derive_class_name
+ name.to_s.camelize
+ end
+ end
+
+
+ # Holds all the meta-data about an aggregation as it was specified in the
+ # Active Record class.
+ class AggregateReflection < MacroReflection #:nodoc:
+ def mapping
+ mapping = options[:mapping] || [name, name]
+ mapping.first.is_a?(Array) ? mapping : [mapping]
+ end
+ end
+
+ # Holds all the meta-data about an association as it was specified in the
+ # Active Record class.
+ class AssociationReflection < MacroReflection #:nodoc:
+ # Returns the target association's class.
+ #
+ # class Author < ActiveRecord::Base
+ # has_many :books
+ # end
+ #
+ # Author.reflect_on_association(:books).klass
+ # # => Book
+ #
+ # <b>Note:</b> Do not call +klass.new+ or +klass.create+ to instantiate
+ # a new association object. Use +build_association+ or +create_association+
+ # instead. This allows plugins to hook into association object creation.
+ def klass
+ @klass ||= compute_class(class_name)
+ end
+
+ def compute_class(name)
+ active_record.send(:compute_type, name)
+ end
+
+ attr_reader :type, :foreign_type
+ attr_accessor :parent_reflection # [:name, Reflection]
+
+ def initialize(name, scope, options, active_record)
+ super
+ @automatic_inverse_of = nil
+ @type = options[:as] && "#{options[:as]}_type"
+ @foreign_type = options[:foreign_type] || "#{name}_type"
+ @constructable = calculate_constructable(macro, options)
+ @association_scope_cache = {}
+ @scope_lock = Mutex.new
+ end
+
+ def association_scope_cache(conn, owner)
+ key = conn.prepared_statements
+ if polymorphic?
+ key = [key, owner.read_attribute(@foreign_type)]
+ end
+ @association_scope_cache[key] ||= @scope_lock.synchronize {
+ @association_scope_cache[key] ||= yield
+ }
+ end
+
+ def constructable? # :nodoc:
+ @constructable
+ end
+
+ def join_table
+ @join_table ||= options[:join_table] || derive_join_table
+ end
+
+ def foreign_key
+ @foreign_key ||= options[:foreign_key] || derive_foreign_key
+ end
+
+ def association_foreign_key
+ @association_foreign_key ||= options[:association_foreign_key] || class_name.foreign_key
+ end
+
+ # klass option is necessary to support loading polymorphic associations
+ def association_primary_key(klass = nil)
+ options[:primary_key] || primary_key(klass || self.klass)
+ end
+
+ def active_record_primary_key
+ @active_record_primary_key ||= options[:primary_key] || primary_key(active_record)
+ end
+
+ def counter_cache_column
+ if options[:counter_cache] == true
+ "#{active_record.name.demodulize.underscore.pluralize}_count"
+ elsif options[:counter_cache]
+ options[:counter_cache].to_s
+ end
+ end
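The naming rule above derives the counter column from the declaring model's name when counter_cache: true is given, and uses the supplied value verbatim otherwise. A sketch with a hypothetical Book model:

    class Book < ActiveRecord::Base
      belongs_to :author, counter_cache: true           # column "books_count" on authors
      belongs_to :series, counter_cache: :volume_count  # column "volume_count" on series
    end

    Book.reflect_on_association(:author).counter_cache_column  # => "books_count"
    Book.reflect_on_association(:series).counter_cache_column  # => "volume_count"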
+
+ def check_validity!
+ check_validity_of_inverse!
+ end
+
+ def check_validity_of_inverse!
+ unless polymorphic?
+ if has_inverse? && inverse_of.nil?
+ raise InverseOfAssociationNotFoundError.new(self)
+ end
+ end
+ end
+
+ def check_preloadable!
+ return unless scope
+
+ if scope.arity > 0
+ ActiveSupport::Deprecation.warn <<-WARNING
+The association scope '#{name}' is instance dependent (the scope block takes an argument).
+Preloading happens before the individual instances are created. This means that there is no instance
+being passed to the association scope. This will most likely result in broken or incorrect behavior.
+Joining, preloading, and eager loading of these associations are deprecated and will be removed in the future.
+ WARNING
+ end
+ end
+ alias :check_eager_loadable! :check_preloadable!
+
+ def join_id_for(owner) # :nodoc:
+ owner[active_record_primary_key]
+ end
+
+ def through_reflection
+ nil
+ end
+
+ def source_reflection
+ self
+ end
+
+ # A chain of reflections from this one back to the owner. For more see the explanation in
+ # ThroughReflection.
+ def chain
+ [self]
+ end
+
+ def nested?
+ false
+ end
+
+ # An array of arrays of scopes. Each item in the outside array corresponds to a reflection
+ # in the #chain.
+ def scope_chain
+ scope ? [[scope]] : [[]]
+ end
+
+ def has_inverse?
+ inverse_name
+ end
+
+ def inverse_of
+ return unless inverse_name
+
+ @inverse_of ||= klass._reflect_on_association inverse_name
+ end
+
+ def polymorphic_inverse_of(associated_class)
+ if has_inverse?
+ if inverse_relationship = associated_class._reflect_on_association(options[:inverse_of])
+ inverse_relationship
+ else
+ raise InverseOfAssociationNotFoundError.new(self, associated_class)
+ end
+ end
+ end
+
+ # Returns the macro type.
+ #
+ # <tt>has_many :clients</tt> returns <tt>:has_many</tt>
+ def macro; raise NotImplementedError; end
+
+ # Returns whether or not this association reflection is for a collection
+ # association. Returns +true+ if the +macro+ is either +has_many+ or
+ # +has_and_belongs_to_many+, +false+ otherwise.
+ def collection?
+ false
+ end
+
+ # Returns whether or not the association should be validated as part of
+ # the parent's validation.
+ #
+ # Unless you explicitly disable validation with
+ # <tt>validate: false</tt>, validation will take place when:
+ #
+ # * you explicitly enable validation; <tt>validate: true</tt>
+ # * you use autosave; <tt>autosave: true</tt>
+ # * the association is a +has_many+ association
+ def validate?
+ !options[:validate].nil? ? options[:validate] : (options[:autosave] == true || collection?)
+ end
+
+ # Returns +true+ if +self+ is a +belongs_to+ reflection.
+ def belongs_to?; false; end
+
+ # Returns +true+ if +self+ is a +has_one+ reflection.
+ def has_one?; false; end
+
+ def association_class
+ case macro
+ when :belongs_to
+ if polymorphic?
+ Associations::BelongsToPolymorphicAssociation
+ else
+ Associations::BelongsToAssociation
+ end
+ when :has_many
+ if options[:through]
+ Associations::HasManyThroughAssociation
+ else
+ Associations::HasManyAssociation
+ end
+ when :has_one
+ if options[:through]
+ Associations::HasOneThroughAssociation
+ else
+ Associations::HasOneAssociation
+ end
+ end
+ end
+
+ def polymorphic?
+ options[:polymorphic]
+ end
+
+ VALID_AUTOMATIC_INVERSE_MACROS = [:has_many, :has_one, :belongs_to]
+ INVALID_AUTOMATIC_INVERSE_OPTIONS = [:conditions, :through, :polymorphic, :foreign_key]
+
+ protected
+
+ def actual_source_reflection # FIXME: this is a horrible name
+ self
+ end
+
+ private
+
+ def calculate_constructable(macro, options)
+ case macro
+ when :belongs_to
+ !polymorphic?
+ when :has_one
+ !options[:through]
+ else
+ true
+ end
+ end
+
+ # Attempts to find the inverse association name automatically.
+ # If it cannot find a suitable inverse association name, it returns
+ # nil.
+ def inverse_name
+ options.fetch(:inverse_of) do
+ if @automatic_inverse_of == false
+ nil
+ else
+ @automatic_inverse_of ||= automatic_inverse_of
+ end
+ end
+ end
+
+      # Returns the inverse association name if one can be found automatically, or +false+ otherwise.
+ def automatic_inverse_of
+ if can_find_inverse_of_automatically?(self)
+ inverse_name = ActiveSupport::Inflector.underscore(options[:as] || active_record.name).to_sym
+
+ begin
+ reflection = klass._reflect_on_association(inverse_name)
+ rescue NameError
+ # Give up: we couldn't compute the klass type so we won't be able
+ # to find any associations either.
+ reflection = false
+ end
+
+ if valid_inverse_reflection?(reflection)
+ return inverse_name
+ end
+ end
+
+ false
+ end
+
+ # Checks if the inverse reflection that is returned from the
+ # +automatic_inverse_of+ method is a valid reflection. We must
+ # make sure that the reflection's active_record name matches up
+ # with the current reflection's klass name.
+ #
+ # Note: klass will always be valid because when there's a NameError
+ # from calling +klass+, +reflection+ will already be set to false.
+ def valid_inverse_reflection?(reflection)
+ reflection &&
+ klass.name == reflection.active_record.name &&
+ can_find_inverse_of_automatically?(reflection)
+ end
+
+ # Checks to see if the reflection doesn't have any options that prevent
+ # us from being able to guess the inverse automatically. First, the
+      # <tt>inverse_of</tt> option cannot be set to false. Second, the association
+      # must be a <tt>has_many</tt>, <tt>has_one</tt>, or <tt>belongs_to</tt> association.
+ # Third, we must not have options such as <tt>:polymorphic</tt> or
+ # <tt>:foreign_key</tt> which prevent us from correctly guessing the
+ # inverse association.
+ #
+ # Anything with a scope can additionally ruin our attempt at finding an
+ # inverse, so we exclude reflections with scopes.
+ def can_find_inverse_of_automatically?(reflection)
+ reflection.options[:inverse_of] != false &&
+ VALID_AUTOMATIC_INVERSE_MACROS.include?(reflection.macro) &&
+ !INVALID_AUTOMATIC_INVERSE_OPTIONS.any? { |opt| reflection.options[opt] } &&
+ !reflection.scope
+ end
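These three predicates drive the automatic inverse_of lookup: a plain association between two models has its inverse guessed from the owning class name, while anything scoped, polymorphic, or :through requires an explicit inverse_of. A sketch with hypothetical models:

    class Author < ActiveRecord::Base
      has_many :books                                     # inverse guessed as Book#author
    end

    class Book < ActiveRecord::Base
      belongs_to :author
      belongs_to :publisher, -> { where(active: true) }   # scoped, so no automatic inverse
    end

    Author.reflect_on_association(:books).inverse_of.name  # => :author
    Book.reflect_on_association(:publisher).has_inverse?   # => false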
+
+ def derive_class_name
+ class_name = name.to_s
+ class_name = class_name.singularize if collection?
+ class_name.camelize
+ end
+
+ def derive_foreign_key
+ if belongs_to?
+ "#{name}_id"
+ elsif options[:as]
+ "#{options[:as]}_id"
+ else
+ active_record.name.foreign_key
+ end
+ end
+
+ def derive_join_table
+ ModelSchema.derive_join_table_name active_record.table_name, klass.table_name
+ end
+
+ def primary_key(klass)
+ klass.primary_key || raise(UnknownPrimaryKey.new(klass))
+ end
+ end
+
+ class HasManyReflection < AssociationReflection # :nodoc:
+ def initialize(name, scope, options, active_record)
+ super(name, scope, options, active_record)
+ end
+
+ def macro; :has_many; end
+
+ def collection?; true; end
+ end
+
+ class HasOneReflection < AssociationReflection # :nodoc:
+ def initialize(name, scope, options, active_record)
+ super(name, scope, options, active_record)
+ end
+
+ def macro; :has_one; end
+
+ def has_one?; true; end
+ end
+
+ class BelongsToReflection < AssociationReflection # :nodoc:
+ def initialize(name, scope, options, active_record)
+ super(name, scope, options, active_record)
+ end
+
+ def macro; :belongs_to; end
+
+ def belongs_to?; true; end
+
+ def join_keys(assoc_klass)
+ key = polymorphic? ? association_primary_key(assoc_klass) : association_primary_key
+ JoinKeys.new(key, foreign_key)
+ end
+
+ def join_id_for(owner) # :nodoc:
+ owner[foreign_key]
+ end
+ end
+
+ class HasAndBelongsToManyReflection < AssociationReflection # :nodoc:
+ def initialize(name, scope, options, active_record)
+ super
+ end
+
+ def macro; :has_and_belongs_to_many; end
+
+ def collection?
+ true
+ end
+ end
+
+ # Holds all the meta-data about a :through association as it was specified
+ # in the Active Record class.
+ class ThroughReflection < AbstractReflection #:nodoc:
+ attr_reader :delegate_reflection
+ delegate :foreign_key, :foreign_type, :association_foreign_key,
+ :active_record_primary_key, :type, :to => :source_reflection
+
+ def initialize(delegate_reflection)
+ @delegate_reflection = delegate_reflection
+ @klass = delegate_reflection.options[:class]
+ @source_reflection_name = delegate_reflection.options[:source]
+ end
+
+ def klass
+ @klass ||= delegate_reflection.compute_class(class_name)
+ end
+
+ # Returns the source of the through reflection. It checks both a singularized
+ # and pluralized form for <tt>:belongs_to</tt> or <tt>:has_many</tt>.
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :taggings
+ # has_many :tags, through: :taggings
+ # end
+ #
+ # class Tagging < ActiveRecord::Base
+ # belongs_to :post
+ # belongs_to :tag
+ # end
+ #
+ # tags_reflection = Post.reflect_on_association(:tags)
+ # tags_reflection.source_reflection
+ # # => <ActiveRecord::Reflection::BelongsToReflection: @name=:tag, @active_record=Tagging, @plural_name="tags">
+ #
+ def source_reflection
+ through_reflection.klass._reflect_on_association(source_reflection_name)
+ end
+
+ # Returns the AssociationReflection object specified in the <tt>:through</tt> option
+ # of a HasManyThrough or HasOneThrough association.
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :taggings
+ # has_many :tags, through: :taggings
+ # end
+ #
+ # tags_reflection = Post.reflect_on_association(:tags)
+ # tags_reflection.through_reflection
+ # # => <ActiveRecord::Reflection::HasManyReflection: @name=:taggings, @active_record=Post, @plural_name="taggings">
+ #
+ def through_reflection
+ active_record._reflect_on_association(options[:through])
+ end
+
+ # Returns an array of reflections which are involved in this association. Each item in the
+ # array corresponds to a table which will be part of the query for this association.
+ #
+ # The chain is built by recursively calling #chain on the source reflection and the through
+ # reflection. The base case for the recursion is a normal association, which just returns
+ # [self] as its #chain.
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :taggings
+ # has_many :tags, through: :taggings
+ # end
+ #
+ # tags_reflection = Post.reflect_on_association(:tags)
+ # tags_reflection.chain
+ # # => [<ActiveRecord::Reflection::ThroughReflection: @delegate_reflection=#<ActiveRecord::Reflection::HasManyReflection: @name=:tags...>,
+ # <ActiveRecord::Reflection::HasManyReflection: @name=:taggings, @options={}, @active_record=Post>]
+ #
+ def chain
+ @chain ||= begin
+ a = source_reflection.chain
+ b = through_reflection.chain
+ chain = a + b
+ chain[0] = self # Use self so we don't lose the information from :source_type
+ chain
+ end
+ end
+
+ # Consider the following example:
+ #
+ # class Person
+ # has_many :articles
+ # has_many :comment_tags, through: :articles
+ # end
+ #
+ # class Article
+ # has_many :comments
+ # has_many :comment_tags, through: :comments, source: :tags
+ # end
+ #
+ # class Comment
+ # has_many :tags
+ # end
+ #
+ # There may be scopes on Person.comment_tags, Article.comment_tags and/or Comment.tags,
+ # but only Comment.tags will be represented in the #chain. So this method creates an array
+ # of scopes corresponding to the chain.
+ def scope_chain
+ @scope_chain ||= begin
+ scope_chain = source_reflection.scope_chain.map(&:dup)
+
+ # Add to it the scope from this reflection (if any)
+ scope_chain.first << scope if scope
+
+ through_scope_chain = through_reflection.scope_chain.map(&:dup)
+
+ if options[:source_type]
+ through_scope_chain.first <<
+ through_reflection.klass.where(foreign_type => options[:source_type])
+ end
+
+ # Recursively fill out the rest of the array from the through reflection
+ scope_chain + through_scope_chain
+ end
+ end
+
+ def join_keys(assoc_klass)
+ source_reflection.join_keys(assoc_klass)
+ end
+
+ # The macro used by the source association
+ def source_macro
+ ActiveSupport::Deprecation.warn("ActiveRecord::Base.source_macro is deprecated and " \
+ "will be removed without replacement.")
+ source_reflection.source_macro
+ end
+
+ # A through association is nested if there would be more than one join table
+ def nested?
+ chain.length > 2
+ end
+
+ # We want to use the klass from this reflection, rather than just delegate straight to
+ # the source_reflection, because the source_reflection may be polymorphic. We still
+ # need to respect the source_reflection's :primary_key option, though.
+ def association_primary_key(klass = nil)
+ # Get the "actual" source reflection if the immediate source reflection has a
+ # source reflection itself
+ actual_source_reflection.options[:primary_key] || primary_key(klass || self.klass)
+ end
+
+ # Gets an array of possible <tt>:through</tt> source reflection names in both singular and plural form.
+ #
+ # class Post < ActiveRecord::Base
+ # has_many :taggings
+ # has_many :tags, through: :taggings
+ # end
+ #
+ # tags_reflection = Post.reflect_on_association(:tags)
+ # tags_reflection.source_reflection_names
+ # # => [:tag, :tags]
+ #
+ def source_reflection_names
+ options[:source] ? [options[:source]] : [name.to_s.singularize, name].uniq
+ end
+
+ def source_reflection_name # :nodoc:
+ return @source_reflection_name if @source_reflection_name
+
+ names = [name.to_s.singularize, name].collect { |n| n.to_sym }.uniq
+ names = names.find_all { |n|
+ through_reflection.klass._reflect_on_association(n)
+ }
+
+ if names.length > 1
+ example_options = options.dup
+ example_options[:source] = source_reflection_names.first
+ ActiveSupport::Deprecation.warn <<-eowarn
+Ambiguous source reflection for through association. Please specify a :source
+directive on your declaration like:
+
+ class #{active_record.name} < ActiveRecord::Base
+ #{macro} :#{name}, #{example_options}
+ end
+
+ eowarn
+ end
+
+ @source_reflection_name = names.first
+ end
+
+ def source_options
+ source_reflection.options
+ end
+
+ def through_options
+ through_reflection.options
+ end
+
+ def join_id_for(owner) # :nodoc:
+ source_reflection.join_id_for(owner)
+ end
+
+ def check_validity!
+ if through_reflection.nil?
+ raise HasManyThroughAssociationNotFoundError.new(active_record.name, self)
+ end
+
+ if through_reflection.polymorphic?
+ raise HasManyThroughAssociationPolymorphicThroughError.new(active_record.name, self)
+ end
+
+ if source_reflection.nil?
+ raise HasManyThroughSourceAssociationNotFoundError.new(self)
+ end
+
+ if options[:source_type] && !source_reflection.polymorphic?
+ raise HasManyThroughAssociationPointlessSourceTypeError.new(active_record.name, self, source_reflection)
+ end
+
+ if source_reflection.polymorphic? && options[:source_type].nil?
+ raise HasManyThroughAssociationPolymorphicSourceError.new(active_record.name, self, source_reflection)
+ end
+
+ if has_one? && through_reflection.collection?
+ raise HasOneThroughCantAssociateThroughCollection.new(active_record.name, self, through_reflection)
+ end
+
+ check_validity_of_inverse!
+ end
+
+ protected
+
+ def actual_source_reflection # FIXME: this is a horrible name
+ source_reflection.send(:actual_source_reflection)
+ end
+
+ def primary_key(klass)
+ klass.primary_key || raise(UnknownPrimaryKey.new(klass))
+ end
+
+ private
+ def derive_class_name
+ # get the class_name of the belongs_to association of the through reflection
+ options[:source_type] || source_reflection.class_name
+ end
+
+ delegate_methods = AssociationReflection.public_instance_methods -
+ public_instance_methods
+
+ delegate(*delegate_methods, to: :delegate_reflection)
+
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation.rb b/activerecord/lib/active_record/relation.rb
new file mode 100644
index 0000000000..ad54d84665
--- /dev/null
+++ b/activerecord/lib/active_record/relation.rb
@@ -0,0 +1,674 @@
+# -*- coding: utf-8 -*-
+require 'arel/collectors/bind'
+
+module ActiveRecord
+ # = Active Record Relation
+ class Relation
+ JoinOperation = Struct.new(:relation, :join_class, :on)
+
+ MULTI_VALUE_METHODS = [:includes, :eager_load, :preload, :select, :group,
+ :order, :joins, :where, :having, :bind, :references,
+ :extending, :unscope]
+
+ SINGLE_VALUE_METHODS = [:limit, :offset, :lock, :readonly, :from, :reordering,
+ :reverse_order, :distinct, :create_with, :uniq]
+ INVALID_METHODS_FOR_DELETE_ALL = [:limit, :distinct, :offset, :group, :having]
+
+ VALUE_METHODS = MULTI_VALUE_METHODS + SINGLE_VALUE_METHODS
+
+ include FinderMethods, Calculations, SpawnMethods, QueryMethods, Batches, Explain, Delegation
+
+ attr_reader :table, :klass, :loaded
+ alias :model :klass
+ alias :loaded? :loaded
+
+ def initialize(klass, table, values = {})
+ @klass = klass
+ @table = table
+ @values = values
+ @offsets = {}
+ @loaded = false
+ end
+
+ def initialize_copy(other)
+ # This method is a hot spot, so for now, use Hash[] to dup the hash.
+ # https://bugs.ruby-lang.org/issues/7166
+ @values = Hash[@values]
+ @values[:bind] = @values[:bind].dup if @values.key? :bind
+ reset
+ end
+
+ def insert(values) # :nodoc:
+ primary_key_value = nil
+
+ if primary_key && Hash === values
+ primary_key_value = values[values.keys.find { |k|
+ k.name == primary_key
+ }]
+
+ if !primary_key_value && connection.prefetch_primary_key?(klass.table_name)
+ primary_key_value = connection.next_sequence_value(klass.sequence_name)
+ values[klass.arel_table[klass.primary_key]] = primary_key_value
+ end
+ end
+
+ im = arel.create_insert
+ im.into @table
+
+ substitutes, binds = substitute_values values
+
+ if values.empty? # empty insert
+ im.values = Arel.sql(connection.empty_insert_statement_value)
+ else
+ im.insert substitutes
+ end
+
+ @klass.connection.insert(
+ im,
+ 'SQL',
+ primary_key,
+ primary_key_value,
+ nil,
+ binds)
+ end
+
+ def _update_record(values, id, id_was) # :nodoc:
+ substitutes, binds = substitute_values values
+
+ scope = @klass.unscoped
+
+ if @klass.finder_needs_type_condition?
+ scope.unscope!(where: @klass.inheritance_column)
+ end
+
+ um = scope.where(@klass.arel_table[@klass.primary_key].eq(id_was || id)).arel.compile_update(substitutes, @klass.primary_key)
+
+ @klass.connection.update(
+ um,
+ 'SQL',
+ binds)
+ end
+
+ def substitute_values(values) # :nodoc:
+ substitutes = values.sort_by { |arel_attr,_| arel_attr.name }
+ binds = substitutes.map do |arel_attr, value|
+ [@klass.columns_hash[arel_attr.name], value]
+ end
+
+ substitutes.each_with_index do |tuple, i|
+ tuple[1] = @klass.connection.substitute_at(binds[i][0], i)
+ end
+
+ [substitutes, binds]
+ end
+
+    # Initializes a new record from the relation while maintaining the
+    # current scope.
+ #
+ # Expects arguments in the same format as +Base.new+.
+ #
+ # users = User.where(name: 'DHH')
+ # user = users.new # => #<User id: nil, name: "DHH", created_at: nil, updated_at: nil>
+ #
+ # You can also pass a block to new with the new record as argument:
+ #
+ # user = users.new { |user| user.name = 'Oscar' }
+ # user.name # => Oscar
+ def new(*args, &block)
+ scoping { @klass.new(*args, &block) }
+ end
+
+ alias build new
+
+ # Tries to create a new record with the same scoped attributes
+ # defined in the relation. Returns the initialized object if validation fails.
+ #
+ # Expects arguments in the same format as +Base.create+.
+ #
+ # ==== Examples
+ # users = User.where(name: 'Oscar')
+ # users.create # #<User id: 3, name: "oscar", ...>
+ #
+ # users.create(name: 'fxn')
+ # users.create # #<User id: 4, name: "fxn", ...>
+ #
+ # users.create { |user| user.name = 'tenderlove' }
+ # # #<User id: 5, name: "tenderlove", ...>
+ #
+ # users.create(name: nil) # validation on name
+ # # #<User id: nil, name: nil, ...>
+ def create(*args, &block)
+ scoping { @klass.create(*args, &block) }
+ end
+
+ # Similar to #create, but calls +create!+ on the base class. Raises
+ # an exception if a validation error occurs.
+ #
+ # Expects arguments in the same format as <tt>Base.create!</tt>.
+ def create!(*args, &block)
+ scoping { @klass.create!(*args, &block) }
+ end
+
+ def first_or_create(attributes = nil, &block) # :nodoc:
+ first || create(attributes, &block)
+ end
+
+ def first_or_create!(attributes = nil, &block) # :nodoc:
+ first || create!(attributes, &block)
+ end
+
+ def first_or_initialize(attributes = nil, &block) # :nodoc:
+ first || new(attributes, &block)
+ end
+
+ # Finds the first record with the given attributes, or creates a record
+ # with the attributes if one is not found:
+ #
+ # # Find the first user named "Penélope" or create a new one.
+ # User.find_or_create_by(first_name: 'Penélope')
+ # # => #<User id: 1, first_name: "Penélope", last_name: nil>
+ #
+ # # Find the first user named "Penélope" or create a new one.
+ # # We already have one so the existing record will be returned.
+ # User.find_or_create_by(first_name: 'Penélope')
+ # # => #<User id: 1, first_name: "Penélope", last_name: nil>
+ #
+ # # Find the first user named "Scarlett" or create a new one with
+ # # a particular last name.
+ # User.create_with(last_name: 'Johansson').find_or_create_by(first_name: 'Scarlett')
+ # # => #<User id: 2, first_name: "Scarlett", last_name: "Johansson">
+ #
+ # This method accepts a block, which is passed down to +create+. The last example
+ # above can be alternatively written this way:
+ #
+ # # Find the first user named "Scarlett" or create a new one with a
+ # # different last name.
+ # User.find_or_create_by(first_name: 'Scarlett') do |user|
+ # user.last_name = 'Johansson'
+ # end
+ # # => #<User id: 2, first_name: "Scarlett", last_name: "Johansson">
+ #
+    # This method always returns a record. If creation was attempted and
+    # failed due to validation errors, the record won't be persisted and you
+    # get what +create+ returns in such a situation.
+    #
+    # Please note *this method is not atomic*: it first runs a SELECT, and if
+    # there are no results an INSERT is attempted. If there are other threads
+    # or processes there is a race condition between both calls and you could
+    # end up with two similar records.
+    #
+    # Whether that is a problem or not depends on the logic of the
+    # application, but in the particular case in which rows have a UNIQUE
+    # constraint an exception may be raised; just retry:
+ #
+ # begin
+ # CreditAccount.find_or_create_by(user_id: user.id)
+ # rescue ActiveRecord::RecordNotUnique
+ # retry
+ # end
+ #
+ def find_or_create_by(attributes, &block)
+ find_by(attributes) || create(attributes, &block)
+ end
+
+ # Like <tt>find_or_create_by</tt>, but calls <tt>create!</tt> so an exception
+ # is raised if the created record is invalid.
+ def find_or_create_by!(attributes, &block)
+ find_by(attributes) || create!(attributes, &block)
+ end
+
+ # Like <tt>find_or_create_by</tt>, but calls <tt>new</tt> instead of <tt>create</tt>.
+ def find_or_initialize_by(attributes, &block)
+ find_by(attributes) || new(attributes, &block)
+ end
+
+ # Runs EXPLAIN on the query or queries triggered by this relation and
+    # returns the result as a string. The string is formatted to imitate the
+    # output printed by the database shell.
+ #
+ # Note that this method actually runs the queries, since the results of some
+ # are needed by the next ones when eager loading is going on.
+ #
+ # Please see further details in the
+ # {Active Record Query Interface guide}[http://guides.rubyonrails.org/active_record_querying.html#running-explain].
+ def explain
+ #TODO: Fix for binds.
+ exec_explain(collecting_queries_for_explain { exec_queries })
+ end
+
+    # Converts the relation to an Array of records.
+ def to_a
+ load
+ @records
+ end
+
+    # Serializes the relation's records as an Array.
+ def encode_with(coder)
+ coder.represent_seq(nil, to_a)
+ end
+
+ def as_json(options = nil) #:nodoc:
+ to_a.as_json(options)
+ end
+
+    # Returns the number of records.
+ def size
+ loaded? ? @records.length : count(:all)
+ end
+
+ # Returns true if there are no records.
+ def empty?
+ return @records.empty? if loaded?
+
+ if limit_value == 0
+ true
+ else
+ c = count(:all)
+ c.respond_to?(:zero?) ? c.zero? : c.empty?
+ end
+ end
+
+ # Returns true if there are any records.
+ def any?
+ if block_given?
+ to_a.any? { |*block_args| yield(*block_args) }
+ else
+ !empty?
+ end
+ end
+
+ # Returns true if there is more than one record.
+ def many?
+ if block_given?
+ to_a.many? { |*block_args| yield(*block_args) }
+ else
+ limit_value ? to_a.many? : size > 1
+ end
+ end
+
+ # Scope all queries to the current scope.
+ #
+ # Comment.where(post_id: 1).scoping do
+ # Comment.first
+ # end
+ # # => SELECT "comments".* FROM "comments" WHERE "comments"."post_id" = 1 ORDER BY "comments"."id" ASC LIMIT 1
+ #
+ # Please check unscoped if you want to remove all previous scopes (including
+ # the default_scope) during the execution of a block.
+ def scoping
+ previous, klass.current_scope = klass.current_scope, self
+ yield
+ ensure
+ klass.current_scope = previous
+ end
+
+    # Updates all records with the details given if they match the set of conditions supplied; limits and
+    # order can also be supplied. This method constructs a single SQL UPDATE statement and sends it straight
+    # to the database. It does not instantiate the involved models and it does not trigger Active Record
+    # callbacks or validations.
+ #
+ # ==== Parameters
+ #
+ # * +updates+ - A string, array, or hash representing the SET part of an SQL statement.
+ #
+ # ==== Examples
+ #
+ # # Update all customers with the given attributes
+ # Customer.update_all wants_email: true
+ #
+ # # Update all books with 'Rails' in their title
+ # Book.where('title LIKE ?', '%Rails%').update_all(author: 'David')
+ #
+ # # Update all books that match conditions, but limit it to 5 ordered by date
+ # Book.where('title LIKE ?', '%Rails%').order(:created_at).limit(5).update_all(author: 'David')
+ def update_all(updates)
+ raise ArgumentError, "Empty list of attributes to change" if updates.blank?
+
+ stmt = Arel::UpdateManager.new(arel.engine)
+
+ stmt.set Arel.sql(@klass.send(:sanitize_sql_for_assignment, updates))
+ stmt.table(table)
+ stmt.key = table[primary_key]
+
+ if joins_values.any?
+ @klass.connection.join_to_update(stmt, arel)
+ else
+ stmt.take(arel.limit)
+ stmt.order(*arel.orders)
+ stmt.wheres = arel.constraints
+ end
+
+ bvs = bind_values + arel.bind_values
+ @klass.connection.update stmt, 'SQL', bvs
+ end
+
+ # Updates an object (or multiple objects) and saves it to the database, if validations pass.
+ # The resulting object is returned whether the object was saved successfully to the database or not.
+ #
+ # ==== Parameters
+ #
+ # * +id+ - This should be the id or an array of ids to be updated.
+ # * +attributes+ - This should be a hash of attributes or an array of hashes.
+ #
+ # ==== Examples
+ #
+ # # Updates one record
+ # Person.update(15, user_name: 'Samuel', group: 'expert')
+ #
+ # # Updates multiple records
+ # people = { 1 => { "first_name" => "David" }, 2 => { "first_name" => "Jeremy" } }
+ # Person.update(people.keys, people.values)
+ def update(id, attributes)
+ if id.is_a?(Array)
+ id.map.with_index { |one_id, idx| update(one_id, attributes[idx]) }
+ else
+ object = find(id)
+ object.update(attributes)
+ object
+ end
+ end
+
+ # Destroys the records matching +conditions+ by instantiating each
+ # record and calling its +destroy+ method. Each object's callbacks are
+ # executed (including <tt>:dependent</tt> association options). Returns the
+ # collection of objects that were destroyed; each will be frozen, to
+ # reflect that no changes should be made (since they can't be persisted).
+ #
+ # Note: Instantiation, callback execution, and deletion of each
+ # record can be time consuming when you're removing many records at
+ # once. It generates at least one SQL +DELETE+ query per record (or
+ # possibly more, to enforce your callbacks). If you want to delete many
+ # rows quickly, without concern for their associations or callbacks, use
+ # +delete_all+ instead.
+ #
+ # ==== Parameters
+ #
+ # * +conditions+ - A string, array, or hash that specifies which records
+ # to destroy. If omitted, all records are destroyed. See the
+ # Conditions section in the introduction to ActiveRecord::Base for
+ # more information.
+ #
+ # ==== Examples
+ #
+ # Person.destroy_all("last_login < '2004-04-04'")
+ # Person.destroy_all(status: "inactive")
+ # Person.where(age: 0..18).destroy_all
+ def destroy_all(conditions = nil)
+ if conditions
+ where(conditions).destroy_all
+ else
+ to_a.each {|object| object.destroy }.tap { reset }
+ end
+ end
+
+ # Destroy an object (or multiple objects) that has the given id. The object is instantiated first,
+ # therefore all callbacks and filters are fired off before the object is deleted. This method is
+ # less efficient than ActiveRecord#delete but allows cleanup methods and other actions to be run.
+ #
+ # This essentially finds the object (or multiple objects) with the given id, creates a new object
+ # from the attributes, and then calls destroy on it.
+ #
+ # ==== Parameters
+ #
+ # * +id+ - Can be either an Integer or an Array of Integers.
+ #
+ # ==== Examples
+ #
+ # # Destroy a single object
+ # Todo.destroy(1)
+ #
+ # # Destroy multiple objects
+ # todos = [1,2,3]
+ # Todo.destroy(todos)
+ def destroy(id)
+ if id.is_a?(Array)
+ id.map { |one_id| destroy(one_id) }
+ else
+ find(id).destroy
+ end
+ end
+
+ # Deletes the records matching +conditions+ without instantiating the records
+ # first, and hence not calling the +destroy+ method nor invoking callbacks. This
+ # is a single SQL DELETE statement that goes straight to the database, much more
+ # efficient than +destroy_all+. Be careful with relations though, in particular
+ # <tt>:dependent</tt> rules defined on associations are not honored. Returns the
+ # number of rows affected.
+ #
+ # Post.delete_all("person_id = 5 AND (category = 'Something' OR category = 'Else')")
+ # Post.delete_all(["person_id = ? AND (category = ? OR category = ?)", 5, 'Something', 'Else'])
+ # Post.where(person_id: 5).where(category: ['Something', 'Else']).delete_all
+ #
+ # Both calls delete the affected posts all at once with a single DELETE statement.
+ # If you need to destroy dependent associations or call your <tt>before_*</tt> or
+ # +after_destroy+ callbacks, use the +destroy_all+ method instead.
+ #
+ # If an invalid method is supplied, +delete_all+ raises an ActiveRecord error:
+ #
+ # Post.limit(100).delete_all
+ # # => ActiveRecord::ActiveRecordError: delete_all doesn't support limit
+ def delete_all(conditions = nil)
+ invalid_methods = INVALID_METHODS_FOR_DELETE_ALL.select { |method|
+ if MULTI_VALUE_METHODS.include?(method)
+ send("#{method}_values").any?
+ else
+ send("#{method}_value")
+ end
+ }
+ if invalid_methods.any?
+ raise ActiveRecordError.new("delete_all doesn't support #{invalid_methods.join(', ')}")
+ end
+
+ if conditions
+ where(conditions).delete_all
+ else
+ stmt = Arel::DeleteManager.new(arel.engine)
+ stmt.from(table)
+
+ if joins_values.any?
+ @klass.connection.join_to_delete(stmt, arel, table[primary_key])
+ else
+ stmt.wheres = arel.constraints
+ end
+
+ affected = @klass.connection.delete(stmt, 'SQL', bind_values)
+
+ reset
+ affected
+ end
+ end
+
+ # Deletes the row with a primary key matching the +id+ argument, using a
+ # SQL +DELETE+ statement, and returns the number of rows deleted. Active
+ # Record objects are not instantiated, so the object's callbacks are not
+ # executed, including any <tt>:dependent</tt> association options.
+ #
+ # You can delete multiple rows at once by passing an Array of <tt>id</tt>s.
+ #
+ # Note: Although it is often much faster than the alternative,
+ # <tt>#destroy</tt>, skipping callbacks might bypass business logic in
+ # your application that ensures referential integrity or performs other
+ # essential jobs.
+ #
+ # ==== Examples
+ #
+ # # Delete a single row
+ # Todo.delete(1)
+ #
+ # # Delete multiple rows
+ # Todo.delete([2,3,4])
+ def delete(id_or_array)
+ where(primary_key => id_or_array).delete_all
+ end
+
+ # Causes the records to be loaded from the database if they have not
+ # been loaded already. You can use this if for some reason you need
+ # to explicitly load some records before actually using them. The
+ # return value is the relation itself, not the records.
+ #
+ # Post.where(published: true).load # => #<ActiveRecord::Relation>
+ def load
+ exec_queries unless loaded?
+
+ self
+ end
+
+ # Forces reloading of relation.
+ def reload
+ reset
+ load
+ end
+
+ def reset
+ @last = @to_sql = @order_clause = @scope_for_create = @arel = @loaded = nil
+ @should_eager_load = @join_dependency = nil
+ @records = []
+ @offsets = {}
+ self
+ end
+
+    # Returns the SQL statement for the relation.
+ #
+ # User.where(name: 'Oscar').to_sql
+ # # => SELECT "users".* FROM "users" WHERE "users"."name" = 'Oscar'
+ def to_sql
+ @to_sql ||= begin
+ relation = self
+ connection = klass.connection
+ visitor = connection.visitor
+
+ if eager_loading?
+ find_with_associations { |rel| relation = rel }
+ end
+
+ arel = relation.arel
+ binds = (arel.bind_values + relation.bind_values).dup
+ binds.map! { |bv| connection.quote(*bv.reverse) }
+ collect = visitor.accept(arel.ast, Arel::Collectors::Bind.new)
+ collect.substitute_binds(binds).join
+ end
+ end
+
+ # Returns a hash of where conditions.
+ #
+ # User.where(name: 'Oscar').where_values_hash
+ # # => {name: "Oscar"}
+ def where_values_hash(relation_table_name = table_name)
+ equalities = where_values.grep(Arel::Nodes::Equality).find_all { |node|
+ node.left.relation.name == relation_table_name
+ }
+
+ binds = Hash[bind_values.find_all(&:first).map { |column, v| [column.name, v] }]
+
+ Hash[equalities.map { |where|
+ name = where.left.name
+ [name, binds.fetch(name.to_s) {
+ case where.right
+ when Array then where.right.map(&:val)
+ else
+ where.right.val
+ end
+ }]
+ }]
+ end
+
+ def scope_for_create
+ @scope_for_create ||= where_values_hash.merge(create_with_value)
+ end
+
+    # Returns true if the relation needs eager loading.
+ def eager_loading?
+ @should_eager_load ||=
+ eager_load_values.any? ||
+ includes_values.any? && (joined_includes_values.any? || references_eager_loaded_tables?)
+ end
+
+    # Joins that are also marked for preloading; in that case we should just eager load them.
+ # Note that this is a naive implementation because we could have strings and symbols which
+ # represent the same association, but that aren't matched by this. Also, we could have
+ # nested hashes which partially match, e.g. { a: :b } & { a: [:b, :c] }
+ def joined_includes_values
+ includes_values & joins_values
+ end
+
+ # +uniq+ and +uniq!+ are silently deprecated. +uniq_value+ delegates to +distinct_value+
+ # to maintain backwards compatibility. Use +distinct_value+ instead.
+ def uniq_value
+ distinct_value
+ end
+
+ # Compares two relations for equality.
+ def ==(other)
+ case other
+ when Associations::CollectionProxy, AssociationRelation
+ self == other.to_a
+ when Relation
+ other.to_sql == to_sql
+ when Array
+ to_a == other
+ end
+ end
+
+ def pretty_print(q)
+ q.pp(self.to_a)
+ end
+
+    # Returns true if the relation is blank.
+ def blank?
+ to_a.blank?
+ end
+
+ def values
+ Hash[@values]
+ end
+
+ def inspect
+ entries = to_a.take([limit_value, 11].compact.min).map!(&:inspect)
+ entries[10] = '...' if entries.size == 11
+
+ "#<#{self.class.name} [#{entries.join(', ')}]>"
+ end
+
+ private
+
+ def exec_queries
+ @records = eager_loading? ? find_with_associations : @klass.find_by_sql(arel, arel.bind_values + bind_values)
+
+ preload = preload_values
+ preload += includes_values unless eager_loading?
+ preloader = ActiveRecord::Associations::Preloader.new
+ preload.each do |associations|
+ preloader.preload @records, associations
+ end
+
+ @records.each { |record| record.readonly! } if readonly_value
+
+ @loaded = true
+ @records
+ end
+
+ def references_eager_loaded_tables?
+ joined_tables = arel.join_sources.map do |join|
+ if join.is_a?(Arel::Nodes::StringJoin)
+ tables_in_string(join.left)
+ else
+ [join.left.table_name, join.left.table_alias]
+ end
+ end
+
+ joined_tables += [table.name, table.table_alias]
+
+ # always convert table names to downcase as in Oracle quoted table names are in uppercase
+ joined_tables = joined_tables.flatten.compact.map { |t| t.downcase }.uniq
+
+ (references_values - joined_tables).any?
+ end
+
+ def tables_in_string(string)
+ return [] if string.blank?
+ # always convert table names to downcase as in Oracle quoted table names are in uppercase
+ # ignore raw_sql_ that is used by Oracle adapter as alias for limit/offset subqueries
+ string.scan(/([a-zA-Z_][.\w]+).?\./).flatten.map{ |s| s.downcase }.uniq - ['raw_sql_']
+ end
+ end
+end
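The loading-related methods above (load, loaded?, size, reload, reset) are what make a relation lazy: nothing hits the database until records are actually needed. A usage sketch with a hypothetical User model:

    users = User.where(active: true)  # builds the relation, no query yet
    users.loaded?                     # => false
    users.size                        # not loaded, so this issues SELECT COUNT(*) ...
    users.load                        # issues SELECT "users".* ... and memoizes the records
    users.loaded?                     # => true
    users.size                        # now just the length of the cached array
    users.reload                      # reset + load: discards the cache and queries again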
diff --git a/activerecord/lib/active_record/relation/batches.rb b/activerecord/lib/active_record/relation/batches.rb
new file mode 100644
index 0000000000..b069cdce7c
--- /dev/null
+++ b/activerecord/lib/active_record/relation/batches.rb
@@ -0,0 +1,138 @@
+module ActiveRecord
+ module Batches
+ # Looping through a collection of records from the database
+ # (using the +all+ method, for example) is very inefficient
+ # since it will try to instantiate all the objects at once.
+ #
+ # In that case, batch processing methods allow you to work
+ # with the records in batches, thereby greatly reducing memory consumption.
+ #
+ # The #find_each method uses #find_in_batches with a batch size of 1000 (or as
+ # specified by the +:batch_size+ option).
+ #
+ # Person.find_each do |person|
+ # person.do_awesome_stuff
+ # end
+ #
+ # Person.where("age > 21").find_each do |person|
+ # person.party_all_night!
+ # end
+ #
+ # If you do not provide a block to #find_each, it will return an Enumerator
+ # for chaining with other methods:
+ #
+ # Person.find_each.with_index do |person, index|
+ # person.award_trophy(index + 1)
+ # end
+ #
+ # ==== Options
+    # * <tt>:batch_size</tt> - Specifies the size of the batch. Defaults to 1000.
+ # * <tt>:start</tt> - Specifies the starting point for the batch processing.
+ # This is especially useful if you want multiple workers dealing with
+ # the same processing queue. You can make worker 1 handle all the records
+ # between id 0 and 10,000 and worker 2 handle from 10,000 and beyond
+ # (by setting the +:start+ option on that worker).
+ #
+ # # Let's process for a batch of 2000 records, skipping the first 2000 rows
+ # Person.find_each(start: 2000, batch_size: 2000) do |person|
+ # person.party_all_night!
+ # end
+ #
+ # NOTE: It's not possible to set the order. That is automatically set to
+ # ascending on the primary key ("id ASC") to make the batch ordering
+ # work. This also means that this method only works with integer-based
+ # primary keys.
+ #
+    # NOTE: You can't set the limit either; that's used to control
+    # the batch sizes.
+ def find_each(options = {})
+ if block_given?
+ find_in_batches(options) do |records|
+ records.each { |record| yield record }
+ end
+ else
+ enum_for :find_each, options do
+ options[:start] ? where(table[primary_key].gteq(options[:start])).size : size
+ end
+ end
+ end
+
+    # Yields each batch of records that was found by the given +options+ as
+    # an array.
+ #
+ # Person.where("age > 21").find_in_batches do |group|
+ # sleep(50) # Make sure it doesn't get too crowded in there!
+ # group.each { |person| person.party_all_night! }
+ # end
+ #
+ # If you do not provide a block to #find_in_batches, it will return an Enumerator
+ # for chaining with other methods:
+ #
+ # Person.find_in_batches.with_index do |group, batch|
+ # puts "Processing group ##{batch}"
+ # group.each(&:recover_from_last_night!)
+ # end
+ #
+    # To have each record yielded one by one, use #find_each instead.
+ #
+ # ==== Options
+    # * <tt>:batch_size</tt> - Specifies the size of the batch. Defaults to 1000.
+ # * <tt>:start</tt> - Specifies the starting point for the batch processing.
+ # This is especially useful if you want multiple workers dealing with
+ # the same processing queue. You can make worker 1 handle all the records
+ # between id 0 and 10,000 and worker 2 handle from 10,000 and beyond
+ # (by setting the +:start+ option on that worker).
+ #
+ # # Let's process the next 2000 records
+ # Person.find_in_batches(start: 2000, batch_size: 2000) do |group|
+ # group.each { |person| person.party_all_night! }
+ # end
+ #
+ # NOTE: It's not possible to set the order. That is automatically set to
+ # ascending on the primary key ("id ASC") to make the batch ordering
+ # work. This also means that this method only works with integer-based
+ # primary keys.
+ #
+    # NOTE: You can't set the limit either; that's used to control
+    # the batch sizes.
+ def find_in_batches(options = {})
+ options.assert_valid_keys(:start, :batch_size)
+
+ relation = self
+ start = options[:start]
+ batch_size = options[:batch_size] || 1000
+
+ unless block_given?
+ return to_enum(:find_in_batches, options) do
+ total = start ? where(table[primary_key].gteq(start)).size : size
+ (total - 1).div(batch_size) + 1
+ end
+ end
+
+ if logger && (arel.orders.present? || arel.taken.present?)
+ logger.warn("Scoped order and limit are ignored, it's forced to be batch order and batch size")
+ end
+
+ relation = relation.reorder(batch_order).limit(batch_size)
+ records = start ? relation.where(table[primary_key].gteq(start)).to_a : relation.to_a
+
+ while records.any?
+ records_size = records.size
+ primary_key_offset = records.last.id
+ raise "Primary key not included in the custom select clause" unless primary_key_offset
+
+ yield records
+
+ break if records_size < batch_size
+
+ records = relation.where(table[primary_key].gt(primary_key_offset)).to_a
+ end
+ end
+
+ private
+
+ def batch_order
+ "#{quoted_table_name}.#{quoted_primary_key} ASC"
+ end
+ end
+end
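find_in_batches pages through the table by primary key rather than by OFFSET: each batch is fetched with a LIMIT, and the next batch starts strictly after the highest id already seen. A sketch of the queries this produces, assuming a hypothetical people table and a batch size of 2:

    Person.find_in_batches(batch_size: 2) do |group|
      # 1st batch: SELECT "people".* FROM "people" ORDER BY "people"."id" ASC LIMIT 2
      # next batch: SELECT "people".* FROM "people" WHERE "people"."id" > <last id seen>
      #             ORDER BY "people"."id" ASC LIMIT 2
      # ...stopping once a batch comes back with fewer than 2 records.
      group.each(&:touch)
    end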
diff --git a/activerecord/lib/active_record/relation/calculations.rb b/activerecord/lib/active_record/relation/calculations.rb
new file mode 100644
index 0000000000..90e99957f6
--- /dev/null
+++ b/activerecord/lib/active_record/relation/calculations.rb
@@ -0,0 +1,402 @@
+module ActiveRecord
+ module Calculations
+ # Count the records.
+ #
+ # Person.count
+ # # => the total count of all people
+ #
+ # Person.count(:age)
+    # # => returns the total count of all people whose age is present in the database
+ #
+ # Person.count(:all)
+ # # => performs a COUNT(*) (:all is an alias for '*')
+ #
+ # Person.distinct.count(:age)
+ # # => counts the number of different age values
+ #
+ # If +count+ is used with +group+, it returns a Hash whose keys represent the aggregated column,
+ # and the values are the respective amounts:
+ #
+ # Person.group(:city).count
+ # # => { 'Rome' => 5, 'Paris' => 3 }
+ #
+    # If +count+ is used with +group+ for multiple columns, it returns a Hash whose
+    # keys are arrays containing the individual values of each column and whose
+    # values are the respective counts.
+ #
+ # Article.group(:status, :category).count
+ # # => {["draft", "business"]=>10, ["draft", "technology"]=>4,
+ # ["published", "business"]=>0, ["published", "technology"]=>2}
+ #
+ # If +count+ is used with +select+, it will count the selected columns:
+ #
+ # Person.select(:age).count
+ # # => counts the number of different age values
+ #
+ # Note: not all valid +select+ expressions are valid +count+ expressions. The specifics differ
+ # between databases. In invalid cases, an error from the database is thrown.
+ def count(column_name = nil, options = {})
+      # TODO: Remove options argument as soon as we remove support for
+      # activerecord-deprecated_finders.
+ column_name, options = nil, column_name if column_name.is_a?(Hash)
+ calculate(:count, column_name, options)
+ end
+
+ # Calculates the average value on a given column. Returns +nil+ if there's
+ # no row. See +calculate+ for examples with options.
+ #
+ # Person.average(:age) # => 35.8
+ def average(column_name, options = {})
+      # TODO: Remove options argument as soon as we remove support for
+      # activerecord-deprecated_finders.
+ calculate(:average, column_name, options)
+ end
+
+ # Calculates the minimum value on a given column. The value is returned
+ # with the same data type of the column, or +nil+ if there's no row. See
+ # +calculate+ for examples with options.
+ #
+ # Person.minimum(:age) # => 7
+ def minimum(column_name, options = {})
+      # TODO: Remove options argument as soon as we remove support for
+      # activerecord-deprecated_finders.
+ calculate(:minimum, column_name, options)
+ end
+
+ # Calculates the maximum value on a given column. The value is returned
+ # with the same data type of the column, or +nil+ if there's no row. See
+ # +calculate+ for examples with options.
+ #
+ # Person.maximum(:age) # => 93
+ def maximum(column_name, options = {})
+      # TODO: Remove options argument as soon as we remove support for
+      # activerecord-deprecated_finders.
+ calculate(:maximum, column_name, options)
+ end
+
+ # Calculates the sum of values on a given column. The value is returned
+ # with the same data type of the column, 0 if there's no row. See
+ # +calculate+ for examples with options.
+ #
+ # Person.sum(:age) # => 4562
+ def sum(*args)
+ calculate(:sum, *args)
+ end
+
+ # This calculates aggregate values in the given column. Methods for count, sum, average,
+ # minimum, and maximum have been added as shortcuts.
+ #
+ # There are two basic forms of output:
+ #
+ # * Single aggregate value: The single value is type cast to Fixnum for COUNT, Float
+ # for AVG, and the given column's type for everything else.
+ #
+    # * Grouped values: This returns an ordered hash of the values, grouped by the given
+    #   expression. It takes either a column name, or the name of a belongs_to association.
+ #
+ # values = Person.group('last_name').maximum(:age)
+ # puts values["Drake"]
+ # # => 43
+ #
+ # drake = Family.find_by(last_name: 'Drake')
+ # values = Person.group(:family).maximum(:age) # Person belongs_to :family
+ # puts values[drake]
+ # # => 43
+ #
+ # values.each do |family, max_age|
+ # ...
+ # end
+ #
+ # Person.calculate(:count, :all) # The same as Person.count
+ # Person.average(:age) # SELECT AVG(age) FROM people...
+ #
+ # # Selects the minimum age for any family without any minors
+ # Person.group(:last_name).having("min(age) > 17").minimum(:age)
+ #
+ # Person.sum("2 * age")
+ def calculate(operation, column_name, options = {})
+      # TODO: Remove options argument as soon as we remove support for
+ # activerecord-deprecated_finders.
+ if column_name.is_a?(Symbol) && attribute_alias?(column_name)
+ column_name = attribute_alias(column_name)
+ end
+
+ if has_include?(column_name)
+ construct_relation_for_association_calculations.calculate(operation, column_name, options)
+ else
+ perform_calculation(operation, column_name, options)
+ end
+ end
+
+ # Use <tt>pluck</tt> as a shortcut to select one or more attributes without
+ # loading a bunch of records just to grab the attributes you want.
+ #
+ # Person.pluck(:name)
+ #
+ # instead of
+ #
+ # Person.all.map(&:name)
+ #
+ # Pluck returns an <tt>Array</tt> of attribute values type-casted to match
+ # the plucked column names, if they can be deduced. Plucking an SQL fragment
+ # returns String values by default.
+ #
+ # Person.pluck(:id)
+ # # SELECT people.id FROM people
+ # # => [1, 2, 3]
+ #
+ # Person.pluck(:id, :name)
+ # # SELECT people.id, people.name FROM people
+ # # => [[1, 'David'], [2, 'Jeremy'], [3, 'Jose']]
+ #
+ # Person.pluck('DISTINCT role')
+ # # SELECT DISTINCT role FROM people
+ # # => ['admin', 'member', 'guest']
+ #
+ # Person.where(age: 21).limit(5).pluck(:id)
+ # # SELECT people.id FROM people WHERE people.age = 21 LIMIT 5
+ # # => [2, 3]
+ #
+ # Person.pluck('DATEDIFF(updated_at, created_at)')
+ # # SELECT DATEDIFF(updated_at, created_at) FROM people
+ # # => ['0', '27761', '173']
+ #
+ def pluck(*column_names)
+ column_names.map! do |column_name|
+ if column_name.is_a?(Symbol) && attribute_alias?(column_name)
+ attribute_alias(column_name)
+ else
+ column_name.to_s
+ end
+ end
+
+ if has_include?(column_names.first)
+ construct_relation_for_association_calculations.pluck(*column_names)
+ else
+ relation = spawn
+ relation.select_values = column_names.map { |cn|
+ columns_hash.key?(cn) ? arel_table[cn] : cn
+ }
+ result = klass.connection.select_all(relation.arel, nil, bind_values)
+ result.cast_values(klass.column_types)
+ end
+ end
+
+    # Pluck all the IDs for the relation using the table's primary key
+ #
+ # Person.ids # SELECT people.id FROM people
+ # Person.joins(:companies).ids # SELECT people.id FROM people INNER JOIN companies ON companies.person_id = people.id
+ def ids
+ pluck primary_key
+ end
+
+ private
+
+ def has_include?(column_name)
+ eager_loading? || (includes_values.present? && ((column_name && column_name != :all) || references_eager_loaded_tables?))
+ end
+
+ def perform_calculation(operation, column_name, options = {})
+      # TODO: Remove options argument as soon as we remove support for
+ # activerecord-deprecated_finders.
+ operation = operation.to_s.downcase
+
+ # If #count is used with #distinct / #uniq it is considered distinct. (eg. relation.distinct.count)
+ distinct = self.distinct_value
+
+ if operation == "count"
+ column_name ||= select_for_count
+
+ unless arel.ast.grep(Arel::Nodes::OuterJoin).empty?
+ distinct = true
+ end
+
+ column_name = primary_key if column_name == :all && distinct
+ distinct = nil if column_name =~ /\s*DISTINCT[\s(]+/i
+ end
+
+ if group_values.any?
+ execute_grouped_calculation(operation, column_name, distinct)
+ else
+ execute_simple_calculation(operation, column_name, distinct)
+ end
+ end
+
+ def aggregate_column(column_name)
+ if @klass.column_names.include?(column_name.to_s)
+ Arel::Attribute.new(@klass.unscoped.table, column_name)
+ else
+ Arel.sql(column_name == :all ? "*" : column_name.to_s)
+ end
+ end
+
+ def operation_over_aggregate_column(column, operation, distinct)
+ operation == 'count' ? column.count(distinct) : column.send(operation)
+ end
+
+ def execute_simple_calculation(operation, column_name, distinct) #:nodoc:
+      # PostgreSQL doesn't like ORDER BY when there is no GROUP BY
+ relation = unscope(:order)
+
+ column_alias = column_name
+
+ bind_values = nil
+
+ if operation == "count" && (relation.limit_value || relation.offset_value)
+ # Shortcut when limit is zero.
+ return 0 if relation.limit_value == 0
+
+ query_builder = build_count_subquery(relation, column_name, distinct)
+ bind_values = query_builder.bind_values + relation.bind_values
+ else
+ column = aggregate_column(column_name)
+
+ select_value = operation_over_aggregate_column(column, operation, distinct)
+
+ column_alias = select_value.alias
+ relation.select_values = [select_value]
+
+ query_builder = relation.arel
+ bind_values = query_builder.bind_values + relation.bind_values
+ end
+
+ result = @klass.connection.select_all(query_builder, nil, bind_values)
+ row = result.first
+ value = row && row.values.first
+ column = result.column_types.fetch(column_alias) do
+ type_for(column_name)
+ end
+
+ type_cast_calculated_value(value, column, operation)
+ end
+
+ def execute_grouped_calculation(operation, column_name, distinct) #:nodoc:
+ group_attrs = group_values
+
+ if group_attrs.first.respond_to?(:to_sym)
+ association = @klass._reflect_on_association(group_attrs.first.to_sym)
+ associated = group_attrs.size == 1 && association && association.belongs_to? # only count belongs_to associations
+ group_fields = Array(associated ? association.foreign_key : group_attrs)
+ else
+ group_fields = group_attrs
+ end
+
+ group_aliases = group_fields.map { |field|
+ column_alias_for(field)
+ }
+ group_columns = group_aliases.zip(group_fields).map { |aliaz,field|
+ [aliaz, field]
+ }
+
+ group = group_fields
+
+ if operation == 'count' && column_name == :all
+ aggregate_alias = 'count_all'
+ else
+ aggregate_alias = column_alias_for([operation, column_name].join(' '))
+ end
+
+ select_values = [
+ operation_over_aggregate_column(
+ aggregate_column(column_name),
+ operation,
+ distinct).as(aggregate_alias)
+ ]
+      select_values += self.select_values unless having_values.empty?
+
+ select_values.concat group_fields.zip(group_aliases).map { |field,aliaz|
+ if field.respond_to?(:as)
+ field.as(aliaz)
+ else
+ "#{field} AS #{aliaz}"
+ end
+ }
+
+ relation = except(:group)
+ relation.group_values = group
+ relation.select_values = select_values
+
+ calculated_data = @klass.connection.select_all(relation, nil, bind_values)
+
+ if association
+ key_ids = calculated_data.collect { |row| row[group_aliases.first] }
+ key_records = association.klass.base_class.find(key_ids)
+ key_records = Hash[key_records.map { |r| [r.id, r] }]
+ end
+
+ Hash[calculated_data.map do |row|
+ key = group_columns.map { |aliaz, col_name|
+ column = calculated_data.column_types.fetch(aliaz) do
+ type_for(col_name)
+ end
+ type_cast_calculated_value(row[aliaz], column)
+ }
+ key = key.first if key.size == 1
+ key = key_records[key] if associated
+
+ column_type = calculated_data.column_types.fetch(aggregate_alias) { type_for(column_name) }
+ [key, type_cast_calculated_value(row[aggregate_alias], column_type, operation)]
+ end]
+ end
+
+ # Converts the given keys to the value that the database adapter returns as
+ # a usable column name:
+ #
+ # column_alias_for("users.id") # => "users_id"
+ # column_alias_for("sum(id)") # => "sum_id"
+ # column_alias_for("count(distinct users.id)") # => "count_distinct_users_id"
+ # column_alias_for("count(*)") # => "count_all"
+    #   column_alias_for("count(id)")                 # => "count_id"
+ def column_alias_for(keys)
+ if keys.respond_to? :name
+ keys = "#{keys.relation.name}.#{keys.name}"
+ end
+
+ table_name = keys.to_s.downcase
+ table_name.gsub!(/\*/, 'all')
+ table_name.gsub!(/\W+/, ' ')
+ table_name.strip!
+ table_name.gsub!(/ +/, '_')
+
+ @klass.connection.table_alias_for(table_name)
+ end
+
+ def type_for(field)
+ field_name = field.respond_to?(:name) ? field.name.to_s : field.to_s.split('.').last
+ @klass.type_for_attribute(field_name)
+ end
+
+ def type_cast_calculated_value(value, type, operation = nil)
+ case operation
+ when 'count' then value.to_i
+ when 'sum' then type.type_cast_from_database(value || 0)
+ when 'average' then value.respond_to?(:to_d) ? value.to_d : value
+ else type.type_cast_from_database(value)
+ end
+ end
+
+ # TODO: refactor to allow non-string `select_values` (eg. Arel nodes).
+ def select_for_count
+ if select_values.present?
+ select_values.join(", ")
+ else
+ :all
+ end
+ end
+
+ def build_count_subquery(relation, column_name, distinct)
+ column_alias = Arel.sql('count_column')
+ subquery_alias = Arel.sql('subquery_for_count')
+
+ aliased_column = aggregate_column(column_name == :all ? 1 : column_name).as(column_alias)
+ relation.select_values = [aliased_column]
+ arel = relation.arel
+ subquery = arel.as(subquery_alias)
+
+ sm = Arel::SelectManager.new relation.engine
+ sm.bind_values = arel.bind_values
+ select_value = operation_over_aggregate_column(column_alias, 'count', distinct)
+ sm.project(select_value).from(subquery)
+ end
+ end
+end
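
For illustration, a minimal usage sketch of the grouped calculations defined above; the Person and Family models, their columns, and the result values are hypothetical.

    # Person belongs_to :family; Family has a last_name column (hypothetical schema).
    Person.group(:family).count
    # Groups on the belongs_to foreign key, then loads the Family records to use
    # as hash keys (see execute_grouped_calculation above).
    # => { #<Family id: 1> => 3, #<Family id: 2> => 5 }

    Person.group(:family_id).average(:age)
    # The aggregate is aliased via column_alias_for("average age") => "average_age",
    # and averages come back as BigDecimal.
    # => { 1 => 24.5, 2 => 31.0 }
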
diff --git a/activerecord/lib/active_record/relation/delegation.rb b/activerecord/lib/active_record/relation/delegation.rb
new file mode 100644
index 0000000000..50f4d5c7ab
--- /dev/null
+++ b/activerecord/lib/active_record/relation/delegation.rb
@@ -0,0 +1,140 @@
+require 'set'
+require 'active_support/concern'
+require 'active_support/deprecation'
+
+module ActiveRecord
+ module Delegation # :nodoc:
+ module DelegateCache
+ def relation_delegate_class(klass) # :nodoc:
+ @relation_delegate_cache[klass]
+ end
+
+ def initialize_relation_delegate_cache # :nodoc:
+ @relation_delegate_cache = cache = {}
+ [
+ ActiveRecord::Relation,
+ ActiveRecord::Associations::CollectionProxy,
+ ActiveRecord::AssociationRelation
+ ].each do |klass|
+ delegate = Class.new(klass) {
+ include ClassSpecificRelation
+ }
+ const_set klass.name.gsub('::', '_'), delegate
+ cache[klass] = delegate
+ end
+ end
+
+ def inherited(child_class)
+ child_class.initialize_relation_delegate_cache
+ super
+ end
+ end
+
+ extend ActiveSupport::Concern
+
+ # This module creates compiled delegation methods dynamically at runtime, which makes
+ # subsequent calls to that method faster by avoiding method_missing. The delegations
+ # may vary depending on the klass of a relation, so we create a subclass of Relation
+ # for each different klass, and the delegations are compiled into that subclass only.
+
+ BLACKLISTED_ARRAY_METHODS = [
+ :compact!, :flatten!, :reject!, :reverse!, :rotate!, :map!,
+ :shuffle!, :slice!, :sort!, :sort_by!, :delete_if,
+ :keep_if, :pop, :shift, :delete_at, :compact, :select!
+ ].to_set # :nodoc:
+
+ delegate :to_xml, :to_yaml, :length, :collect, :map, :each, :all?, :include?, :to_ary, :join, to: :to_a
+
+ delegate :table_name, :quoted_table_name, :primary_key, :quoted_primary_key,
+ :connection, :columns_hash, :to => :klass
+
+ module ClassSpecificRelation # :nodoc:
+ extend ActiveSupport::Concern
+
+ included do
+ @delegation_mutex = Mutex.new
+ end
+
+ module ClassMethods # :nodoc:
+ def name
+ superclass.name
+ end
+
+ def delegate_to_scoped_klass(method)
+ @delegation_mutex.synchronize do
+ return if method_defined?(method)
+
+ if method.to_s =~ /\A[a-zA-Z_]\w*[!?]?\z/
+ module_eval <<-RUBY, __FILE__, __LINE__ + 1
+ def #{method}(*args, &block)
+ scoping { @klass.#{method}(*args, &block) }
+ end
+ RUBY
+ else
+ define_method method do |*args, &block|
+ scoping { @klass.public_send(method, *args, &block) }
+ end
+ end
+ end
+ end
+
+ def delegate(method, opts = {})
+ @delegation_mutex.synchronize do
+ return if method_defined?(method)
+ super
+ end
+ end
+ end
+
+ protected
+
+ def method_missing(method, *args, &block)
+ if @klass.respond_to?(method)
+ self.class.delegate_to_scoped_klass(method)
+ scoping { @klass.public_send(method, *args, &block) }
+ elsif arel.respond_to?(method)
+ self.class.delegate method, :to => :arel
+ arel.public_send(method, *args, &block)
+ else
+ super
+ end
+ end
+ end
+
+ module ClassMethods # :nodoc:
+ def create(klass, *args)
+ relation_class_for(klass).new(klass, *args)
+ end
+
+ private
+
+ def relation_class_for(klass)
+ klass.relation_delegate_class(self)
+ end
+ end
+
+ def respond_to?(method, include_private = false)
+ super || @klass.respond_to?(method, include_private) ||
+ array_delegable?(method) ||
+ arel.respond_to?(method, include_private)
+ end
+
+ protected
+
+ def array_delegable?(method)
+ Array.method_defined?(method) && BLACKLISTED_ARRAY_METHODS.exclude?(method)
+ end
+
+ def method_missing(method, *args, &block)
+ if @klass.respond_to?(method)
+ scoping { @klass.public_send(method, *args, &block) }
+ elsif array_delegable?(method)
+ to_a.public_send(method, *args, &block)
+ elsif arel.respond_to?(method)
+ arel.public_send(method, *args, &block)
+ else
+ super
+ end
+ end
+ end
+end
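
A short sketch of the class-specific delegation above, assuming a hypothetical Post model and scope:

    # DelegateCache#inherited builds one Relation subclass per model, so compiled
    # delegations never leak between classes:
    Post.all.class          # => Post::ActiveRecord_Relation (a generated subclass)
    Post.all.class.name     # => "ActiveRecord::Relation" (ClassSpecificRelation overrides #name)

    # The first call to a class-level method goes through method_missing, which
    # compiles a real delegating method on that subclass and scopes the call:
    Post.all.recently_published   # hypothetical scope defined on Post
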
diff --git a/activerecord/lib/active_record/relation/finder_methods.rb b/activerecord/lib/active_record/relation/finder_methods.rb
new file mode 100644
index 0000000000..0c9c761f97
--- /dev/null
+++ b/activerecord/lib/active_record/relation/finder_methods.rb
@@ -0,0 +1,515 @@
+require 'active_support/deprecation'
+
+module ActiveRecord
+ module FinderMethods
+ ONE_AS_ONE = '1 AS one'
+
+ # Find by id - This can either be a specific id (1), a list of ids (1, 5, 6), or an array of ids ([5, 6, 10]).
+    # If one or more records cannot be found for the requested ids, then RecordNotFound will be raised. If the primary key
+ # is an integer, find by id coerces its arguments using +to_i+.
+ #
+ # Person.find(1) # returns the object for ID = 1
+ # Person.find("1") # returns the object for ID = 1
+ # Person.find("31-sarah") # returns the object for ID = 31
+ # Person.find(1, 2, 6) # returns an array for objects with IDs in (1, 2, 6)
+ # Person.find([7, 17]) # returns an array for objects with IDs in (7, 17)
+ # Person.find([1]) # returns an array for the object with ID = 1
+ # Person.where("administrator = 1").order("created_on DESC").find(1)
+ #
+ # <tt>ActiveRecord::RecordNotFound</tt> will be raised if one or more ids are not found.
+ #
+ # NOTE: The returned records may not be in the same order as the ids you
+ # provide since database rows are unordered. You'd need to provide an explicit <tt>order</tt>
+    # option if you want the results to be sorted.
+ #
+ # ==== Find with lock
+ #
+ # Example for find with a lock: Imagine two concurrent transactions:
+ # each will read <tt>person.visits == 2</tt>, add 1 to it, and save, resulting
+ # in two saves of <tt>person.visits = 3</tt>. By locking the row, the second
+ # transaction has to wait until the first is finished; we get the
+ # expected <tt>person.visits == 4</tt>.
+ #
+ # Person.transaction do
+ # person = Person.lock(true).find(1)
+ # person.visits += 1
+ # person.save!
+ # end
+ #
+ # ==== Variations of +find+
+ #
+ # Person.where(name: 'Spartacus', rating: 4)
+ # # returns a chainable list (which can be empty).
+ #
+ # Person.find_by(name: 'Spartacus', rating: 4)
+ # # returns the first item or nil.
+ #
+ # Person.where(name: 'Spartacus', rating: 4).first_or_initialize
+ # # returns the first item or returns a new instance (requires you call .save to persist against the database).
+ #
+ # Person.where(name: 'Spartacus', rating: 4).first_or_create
+ # # returns the first item or creates it and returns it, available since Rails 3.2.1.
+ #
+ # ==== Alternatives for +find+
+ #
+ # Person.where(name: 'Spartacus', rating: 4).exists?(conditions = :none)
+ # # returns a boolean indicating if any record with the given conditions exist.
+ #
+ # Person.where(name: 'Spartacus', rating: 4).select("field1, field2, field3")
+ # # returns a chainable list of instances with only the mentioned fields.
+ #
+ # Person.where(name: 'Spartacus', rating: 4).ids
+ # # returns an Array of ids, available since Rails 3.2.1.
+ #
+ # Person.where(name: 'Spartacus', rating: 4).pluck(:field1, :field2)
+ # # returns an Array of the required fields, available since Rails 3.1.
+ def find(*args)
+ if block_given?
+ to_a.find(*args) { |*block_args| yield(*block_args) }
+ else
+ find_with_ids(*args)
+ end
+ end
+
+ # Finds the first record matching the specified conditions. There
+ # is no implied ordering so if order matters, you should specify it
+ # yourself.
+ #
+ # If no record is found, returns <tt>nil</tt>.
+ #
+ # Post.find_by name: 'Spartacus', rating: 4
+ # Post.find_by "published_at < ?", 2.weeks.ago
+ def find_by(*args)
+ where(*args).take
+ end
+
+ # Like <tt>find_by</tt>, except that if no record is found, raises
+ # an <tt>ActiveRecord::RecordNotFound</tt> error.
+ def find_by!(*args)
+ where(*args).take!
+ end
+
+ # Gives a record (or N records if a parameter is supplied) without any implied
+ # order. The order will depend on the database implementation.
+ # If an order is supplied it will be respected.
+ #
+ # Person.take # returns an object fetched by SELECT * FROM people LIMIT 1
+ # Person.take(5) # returns 5 objects fetched by SELECT * FROM people LIMIT 5
+ # Person.where(["name LIKE '%?'", name]).take
+ def take(limit = nil)
+ limit ? limit(limit).to_a : find_take
+ end
+
+ # Same as +take+ but raises <tt>ActiveRecord::RecordNotFound</tt> if no record
+ # is found. Note that <tt>take!</tt> accepts no arguments.
+ def take!
+ take or raise RecordNotFound
+ end
+
+ # Find the first record (or first N records if a parameter is supplied).
+ # If no order is defined it will order by primary key.
+ #
+ # Person.first # returns the first object fetched by SELECT * FROM people
+ # Person.where(["user_name = ?", user_name]).first
+ # Person.where(["user_name = :u", { u: user_name }]).first
+ # Person.order("created_on DESC").offset(5).first
+ # Person.first(3) # returns the first three objects fetched by SELECT * FROM people LIMIT 3
+ #
+ # ==== Rails 3
+ #
+ # Person.first # SELECT "people".* FROM "people" LIMIT 1
+ #
+ # NOTE: Rails 3 may not order this query by the primary key and the order
+ # will depend on the database implementation. In order to ensure that behavior,
+ # use <tt>User.order(:id).first</tt> instead.
+ #
+ # ==== Rails 4
+ #
+ # Person.first # SELECT "people".* FROM "people" ORDER BY "people"."id" ASC LIMIT 1
+ #
+ def first(limit = nil)
+ if limit
+ find_nth_with_limit(offset_index, limit)
+ else
+ find_nth(0, offset_index)
+ end
+ end
+
+ # Same as +first+ but raises <tt>ActiveRecord::RecordNotFound</tt> if no record
+ # is found. Note that <tt>first!</tt> accepts no arguments.
+ def first!
+ first or raise RecordNotFound
+ end
+
+ # Find the last record (or last N records if a parameter is supplied).
+ # If no order is defined it will order by primary key.
+ #
+ # Person.last # returns the last object fetched by SELECT * FROM people
+ # Person.where(["user_name = ?", user_name]).last
+ # Person.order("created_on DESC").offset(5).last
+ # Person.last(3) # returns the last three objects fetched by SELECT * FROM people.
+ #
+ # Take note that in that last case, the results are sorted in ascending order:
+ #
+ # [#<Person id:2>, #<Person id:3>, #<Person id:4>]
+ #
+ # and not:
+ #
+ # [#<Person id:4>, #<Person id:3>, #<Person id:2>]
+ def last(limit = nil)
+ if limit
+ if order_values.empty? && primary_key
+ order(arel_table[primary_key].desc).limit(limit).reverse
+ else
+ to_a.last(limit)
+ end
+ else
+ find_last
+ end
+ end
+
+ # Same as +last+ but raises <tt>ActiveRecord::RecordNotFound</tt> if no record
+ # is found. Note that <tt>last!</tt> accepts no arguments.
+ def last!
+ last or raise RecordNotFound
+ end
+
+ # Find the second record.
+ # If no order is defined it will order by primary key.
+ #
+ # Person.second # returns the second object fetched by SELECT * FROM people
+ # Person.offset(3).second # returns the second object from OFFSET 3 (which is OFFSET 4)
+ # Person.where(["user_name = :u", { u: user_name }]).second
+ def second
+ find_nth(1, offset_index)
+ end
+
+ # Same as +second+ but raises <tt>ActiveRecord::RecordNotFound</tt> if no record
+ # is found.
+ def second!
+ second or raise RecordNotFound
+ end
+
+ # Find the third record.
+ # If no order is defined it will order by primary key.
+ #
+ # Person.third # returns the third object fetched by SELECT * FROM people
+ # Person.offset(3).third # returns the third object from OFFSET 3 (which is OFFSET 5)
+ # Person.where(["user_name = :u", { u: user_name }]).third
+ def third
+ find_nth(2, offset_index)
+ end
+
+ # Same as +third+ but raises <tt>ActiveRecord::RecordNotFound</tt> if no record
+ # is found.
+ def third!
+ third or raise RecordNotFound
+ end
+
+ # Find the fourth record.
+ # If no order is defined it will order by primary key.
+ #
+ # Person.fourth # returns the fourth object fetched by SELECT * FROM people
+ # Person.offset(3).fourth # returns the fourth object from OFFSET 3 (which is OFFSET 6)
+ # Person.where(["user_name = :u", { u: user_name }]).fourth
+ def fourth
+ find_nth(3, offset_index)
+ end
+
+ # Same as +fourth+ but raises <tt>ActiveRecord::RecordNotFound</tt> if no record
+ # is found.
+ def fourth!
+ fourth or raise RecordNotFound
+ end
+
+ # Find the fifth record.
+ # If no order is defined it will order by primary key.
+ #
+ # Person.fifth # returns the fifth object fetched by SELECT * FROM people
+ # Person.offset(3).fifth # returns the fifth object from OFFSET 3 (which is OFFSET 7)
+ # Person.where(["user_name = :u", { u: user_name }]).fifth
+ def fifth
+ find_nth(4, offset_index)
+ end
+
+ # Same as +fifth+ but raises <tt>ActiveRecord::RecordNotFound</tt> if no record
+ # is found.
+ def fifth!
+ fifth or raise RecordNotFound
+ end
+
+ # Find the forty-second record. Also known as accessing "the reddit".
+ # If no order is defined it will order by primary key.
+ #
+ # Person.forty_two # returns the forty-second object fetched by SELECT * FROM people
+ # Person.offset(3).forty_two # returns the forty-second object from OFFSET 3 (which is OFFSET 44)
+ # Person.where(["user_name = :u", { u: user_name }]).forty_two
+ def forty_two
+ find_nth(41, offset_index)
+ end
+
+ # Same as +forty_two+ but raises <tt>ActiveRecord::RecordNotFound</tt> if no record
+ # is found.
+ def forty_two!
+ forty_two or raise RecordNotFound
+ end
+
+ # Returns +true+ if a record exists in the table that matches the +id+ or
+ # conditions given, or +false+ otherwise. The argument can take six forms:
+ #
+ # * Integer - Finds the record with this primary key.
+ # * String - Finds the record with a primary key corresponding to this
+ # string (such as <tt>'5'</tt>).
+ # * Array - Finds the record that matches these +find+-style conditions
+ # (such as <tt>['name LIKE ?', "%#{query}%"]</tt>).
+ # * Hash - Finds the record that matches these +find+-style conditions
+ # (such as <tt>{name: 'David'}</tt>).
+    # * +false+ - Always returns +false+.
+ # * No args - Returns +false+ if the table is empty, +true+ otherwise.
+ #
+ # For more information about specifying conditions as a hash or array,
+ # see the Conditions section in the introduction to <tt>ActiveRecord::Base</tt>.
+ #
+ # Note: You can't pass in a condition as a string (like <tt>name =
+ # 'Jamie'</tt>), since it would be sanitized and then queried against
+ # the primary key column, like <tt>id = 'name = \'Jamie\''</tt>.
+ #
+ # Person.exists?(5)
+ # Person.exists?('5')
+ # Person.exists?(['name LIKE ?', "%#{query}%"])
+ # Person.exists?(id: [1, 4, 8])
+ # Person.exists?(name: 'David')
+ # Person.exists?(false)
+ # Person.exists?
+ def exists?(conditions = :none)
+ if Base === conditions
+ conditions = conditions.id
+ ActiveSupport::Deprecation.warn "You are passing an instance of ActiveRecord::Base to `exists?`." \
+ "Please pass the id of the object by calling `.id`"
+ end
+
+ return false if !conditions
+
+ relation = apply_join_dependency(self, construct_join_dependency)
+ return false if ActiveRecord::NullRelation === relation
+
+ relation = relation.except(:select, :order).select(ONE_AS_ONE).limit(1)
+
+ case conditions
+ when Array, Hash
+ relation = relation.where(conditions)
+ else
+ unless conditions == :none
+ relation = where(primary_key => conditions)
+ end
+ end
+
+ connection.select_value(relation, "#{name} Exists", relation.bind_values) ? true : false
+ end
+
+ # This method is called whenever no records are found with either a single
+    # id or multiple ids and raises an +ActiveRecord::RecordNotFound+ exception.
+ #
+ # The error message is different depending on whether a single id or
+ # multiple ids are provided. If multiple ids are provided, then the number
+ # of results obtained should be provided in the +result_size+ argument and
+ # the expected number of results should be provided in the +expected_size+
+ # argument.
+ def raise_record_not_found_exception!(ids, result_size, expected_size) #:nodoc:
+ conditions = arel.where_sql
+ conditions = " [#{conditions}]" if conditions
+
+ if Array(ids).size == 1
+ error = "Couldn't find #{@klass.name} with '#{primary_key}'=#{ids}#{conditions}"
+ else
+ error = "Couldn't find all #{@klass.name.pluralize} with '#{primary_key}': "
+ error << "(#{ids.join(", ")})#{conditions} (found #{result_size} results, but was looking for #{expected_size})"
+ end
+
+ raise RecordNotFound, error
+ end
+
+ private
+
+ def offset_index
+ offset_value || 0
+ end
+
+ def find_with_associations
+ # NOTE: the JoinDependency constructed here needs to know about
+ # any joins already present in `self`, so pass them in
+ #
+ # failing to do so means that in cases like activerecord/test/cases/associations/inner_join_association_test.rb:136
+ # incorrect SQL is generated. In that case, the join dependency for
+ # SpecialCategorizations is constructed without knowledge of the
+ # preexisting join in joins_values to categorizations (by way of
+ # the `has_many :through` for categories).
+ #
+ join_dependency = construct_join_dependency(joins_values)
+
+ aliases = join_dependency.aliases
+ relation = select aliases.columns
+ relation = apply_join_dependency(relation, join_dependency)
+
+ if block_given?
+ yield relation
+ else
+ if ActiveRecord::NullRelation === relation
+ []
+ else
+ arel = relation.arel
+ rows = connection.select_all(arel, 'SQL', arel.bind_values + relation.bind_values)
+ join_dependency.instantiate(rows, aliases)
+ end
+ end
+ end
+
+ def construct_join_dependency(joins = [])
+ including = eager_load_values + includes_values
+ ActiveRecord::Associations::JoinDependency.new(@klass, including, joins)
+ end
+
+ def construct_relation_for_association_calculations
+ from = arel.froms.first
+ if Arel::Table === from
+ apply_join_dependency(self, construct_join_dependency)
+ else
+ # FIXME: as far as I can tell, `from` will always be an Arel::Table.
+ # There are no tests that test this branch, but presumably it's
+ # possible for `from` to be a list?
+ apply_join_dependency(self, construct_join_dependency(from))
+ end
+ end
+
+ def apply_join_dependency(relation, join_dependency)
+ relation = relation.except(:includes, :eager_load, :preload)
+ relation = relation.joins join_dependency
+
+ if using_limitable_reflections?(join_dependency.reflections)
+ relation
+ else
+ if relation.limit_value
+ limited_ids = limited_ids_for(relation)
+ limited_ids.empty? ? relation.none! : relation.where!(table[primary_key].in(limited_ids))
+ end
+ relation.except(:limit, :offset)
+ end
+ end
+
+ def limited_ids_for(relation)
+ values = @klass.connection.columns_for_distinct(
+ "#{quoted_table_name}.#{quoted_primary_key}", relation.order_values)
+
+ relation = relation.except(:select).select(values).distinct!
+
+ id_rows = @klass.connection.select_all(relation.arel, 'SQL', relation.bind_values)
+ id_rows.map {|row| row[primary_key]}
+ end
+
+ def using_limitable_reflections?(reflections)
+ reflections.none? { |r| r.collection? }
+ end
+
+ protected
+
+ def find_with_ids(*ids)
+ raise UnknownPrimaryKey.new(@klass) if primary_key.nil?
+
+ expects_array = ids.first.kind_of?(Array)
+ return ids.first if expects_array && ids.first.empty?
+
+ ids = ids.flatten.compact.uniq
+
+ case ids.size
+ when 0
+ raise RecordNotFound, "Couldn't find #{@klass.name} without an ID"
+ when 1
+ result = find_one(ids.first)
+ expects_array ? [ result ] : result
+ else
+ find_some(ids)
+ end
+ end
+
+ def find_one(id)
+ if ActiveRecord::Base === id
+ id = id.id
+ ActiveSupport::Deprecation.warn "You are passing an instance of ActiveRecord::Base to `find`." \
+ "Please pass the id of the object by calling `.id`"
+ end
+
+ column = columns_hash[primary_key]
+ substitute = connection.substitute_at(column, bind_values.length)
+ relation = where(table[primary_key].eq(substitute))
+ relation.bind_values += [[column, id]]
+ record = relation.take
+
+ raise_record_not_found_exception!(id, 0, 1) unless record
+
+ record
+ end
+
+ def find_some(ids)
+ result = where(table[primary_key].in(ids)).to_a
+
+ expected_size =
+ if limit_value && ids.size > limit_value
+ limit_value
+ else
+ ids.size
+ end
+
+ # 11 ids with limit 3, offset 9 should give 2 results.
+ if offset_value && (ids.size - offset_value < expected_size)
+ expected_size = ids.size - offset_value
+ end
+
+ if result.size == expected_size
+ result
+ else
+ raise_record_not_found_exception!(ids, result.size, expected_size)
+ end
+ end
+
+ def find_take
+ if loaded?
+ @records.first
+ else
+ @take ||= limit(1).to_a.first
+ end
+ end
+
+ def find_nth(index, offset)
+ if loaded?
+ @records[index]
+ else
+ offset += index
+ @offsets[offset] ||= find_nth_with_limit(offset, 1).first
+ end
+ end
+
+ def find_nth_with_limit(offset, limit)
+ relation = if order_values.empty? && primary_key
+ order(arel_table[primary_key].asc)
+ else
+ self
+ end
+
+ relation = relation.offset(offset) unless offset.zero?
+ relation.limit(limit).to_a
+ end
+
+ def find_last
+ if loaded?
+ @records.last
+ else
+ @last ||=
+ if limit_value
+ to_a.last
+ else
+ reverse_order.limit(1).to_a.first
+ end
+ end
+ end
+ end
+end
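
A small sketch of how the ordinal finders above combine an explicit offset with find_nth; Person is hypothetical and the SQL shown is approximate:

    Person.second
    # find_nth(1, offset_index) with no offset set:
    # SELECT "people".* FROM "people" ORDER BY "people"."id" ASC LIMIT 1 OFFSET 1

    Person.offset(3).third
    # offset_index is 3 and find_nth adds the index (2):
    # SELECT "people".* FROM "people" ORDER BY "people"."id" ASC LIMIT 1 OFFSET 5
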
diff --git a/activerecord/lib/active_record/relation/merger.rb b/activerecord/lib/active_record/relation/merger.rb
new file mode 100644
index 0000000000..ac41d0aa80
--- /dev/null
+++ b/activerecord/lib/active_record/relation/merger.rb
@@ -0,0 +1,182 @@
+require 'active_support/core_ext/hash/keys'
+require "set"
+
+module ActiveRecord
+ class Relation
+ class HashMerger # :nodoc:
+ attr_reader :relation, :hash
+
+ def initialize(relation, hash)
+ hash.assert_valid_keys(*Relation::VALUE_METHODS)
+
+ @relation = relation
+ @hash = hash
+ end
+
+ def merge
+ Merger.new(relation, other).merge
+ end
+
+ # Applying values to a relation has some side effects. E.g.
+ # interpolation might take place for where values. So we should
+ # build a relation to merge in rather than directly merging
+ # the values.
+ def other
+ other = Relation.create(relation.klass, relation.table)
+ hash.each { |k, v|
+ if k == :joins
+ if Hash === v
+ other.joins!(v)
+ else
+ other.joins!(*v)
+ end
+ elsif k == :select
+ other._select!(v)
+ else
+ other.send("#{k}!", v)
+ end
+ }
+ other
+ end
+ end
+
+ class Merger # :nodoc:
+ attr_reader :relation, :values, :other
+
+ def initialize(relation, other)
+ @relation = relation
+ @values = other.values
+ @other = other
+ end
+
+ NORMAL_VALUES = Relation::SINGLE_VALUE_METHODS +
+ Relation::MULTI_VALUE_METHODS -
+ [:joins, :where, :order, :bind, :reverse_order, :lock, :create_with, :reordering, :from] # :nodoc:
+
+ def normal_values
+ NORMAL_VALUES
+ end
+
+ def merge
+ normal_values.each do |name|
+ value = values[name]
+          # The unless clause is here mostly for performance reasons (since the `send` call
+          # might be moderately expensive): most of the time the value is going to be `nil`
+          # or `.blank?`. The only catch is that `false.blank?` returns `true`, so an extra
+          # check is needed so that explicit `false` values don't fall through the cracks.
+ unless value.nil? || (value.blank? && false != value)
+ if name == :select
+ relation._select!(*value)
+ else
+ relation.send("#{name}!", *value)
+ end
+ end
+ end
+
+ merge_multi_values
+ merge_single_values
+ merge_joins
+
+ relation
+ end
+
+ private
+
+ def merge_joins
+ return if values[:joins].blank?
+
+ if other.klass == relation.klass
+ relation.joins!(*values[:joins])
+ else
+ joins_dependency, rest = values[:joins].partition do |join|
+ case join
+ when Hash, Symbol, Array
+ true
+ else
+ false
+ end
+ end
+
+ join_dependency = ActiveRecord::Associations::JoinDependency.new(other.klass,
+ joins_dependency,
+ [])
+ relation.joins! rest
+
+ @relation = relation.joins join_dependency
+ end
+ end
+
+ def merge_multi_values
+ lhs_wheres = relation.where_values
+ rhs_wheres = values[:where] || []
+
+ lhs_binds = relation.bind_values
+ rhs_binds = values[:bind] || []
+
+ removed, kept = partition_overwrites(lhs_wheres, rhs_wheres)
+
+ where_values = kept + rhs_wheres
+ bind_values = filter_binds(lhs_binds, removed) + rhs_binds
+
+ conn = relation.klass.connection
+ bv_index = 0
+ where_values.map! do |node|
+ if Arel::Nodes::Equality === node && Arel::Nodes::BindParam === node.right
+ substitute = conn.substitute_at(bind_values[bv_index].first, bv_index)
+ bv_index += 1
+ Arel::Nodes::Equality.new(node.left, substitute)
+ else
+ node
+ end
+ end
+
+ relation.where_values = where_values
+ relation.bind_values = bind_values
+
+ if values[:reordering]
+ # override any order specified in the original relation
+ relation.reorder! values[:order]
+ elsif values[:order]
+ # merge in order_values from relation
+ relation.order! values[:order]
+ end
+
+ relation.extend(*values[:extending]) unless values[:extending].blank?
+ end
+
+ def merge_single_values
+ relation.from_value = values[:from] unless relation.from_value
+ relation.lock_value = values[:lock] unless relation.lock_value
+
+ unless values[:create_with].blank?
+ relation.create_with_value = (relation.create_with_value || {}).merge(values[:create_with])
+ end
+ end
+
+ def filter_binds(lhs_binds, removed_wheres)
+ return lhs_binds if removed_wheres.empty?
+
+ set = Set.new removed_wheres.map { |x| x.left.name.to_s }
+ lhs_binds.dup.delete_if { |col,_| set.include? col.name }
+ end
+
+ # Remove equalities from the existing relation with a LHS which is
+ # present in the relation being merged in.
+ # returns [things_to_remove, things_to_keep]
+ def partition_overwrites(lhs_wheres, rhs_wheres)
+ if lhs_wheres.empty? || rhs_wheres.empty?
+ return [[], lhs_wheres]
+ end
+
+ nodes = rhs_wheres.find_all do |w|
+ w.respond_to?(:operator) && w.operator == :==
+ end
+ seen = Set.new(nodes) { |node| node.left }
+
+ lhs_wheres.partition do |w|
+ w.respond_to?(:operator) && w.operator == :== && seen.include?(w.left)
+ end
+ end
+ end
+ end
+end
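
An illustrative sketch of the overwrite behaviour in Merger#merge_multi_values and partition_overwrites above; Post is hypothetical and the SQL is abbreviated:

    # An equality on the same column in the merged-in relation replaces the
    # existing one; non-equality predicates are kept and ANDed:
    Post.where(id: 1).merge(Post.where(id: 2)).to_sql
    # => ... WHERE "posts"."id" = 2

    Post.where("id < 10").merge(Post.where(id: 2)).to_sql
    # => ... WHERE (id < 10) AND "posts"."id" = 2
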
diff --git a/activerecord/lib/active_record/relation/predicate_builder.rb b/activerecord/lib/active_record/relation/predicate_builder.rb
new file mode 100644
index 0000000000..eff5c8f09c
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder.rb
@@ -0,0 +1,126 @@
+module ActiveRecord
+ class PredicateBuilder # :nodoc:
+ @handlers = []
+
+ autoload :RelationHandler, 'active_record/relation/predicate_builder/relation_handler'
+ autoload :ArrayHandler, 'active_record/relation/predicate_builder/array_handler'
+
+ def self.resolve_column_aliases(klass, hash)
+ hash = hash.dup
+ hash.keys.grep(Symbol) do |key|
+ if klass.attribute_alias? key
+ hash[klass.attribute_alias(key)] = hash.delete key
+ end
+ end
+ hash
+ end
+
+ def self.build_from_hash(klass, attributes, default_table)
+ queries = []
+
+ attributes.each do |column, value|
+ table = default_table
+
+ if value.is_a?(Hash)
+ if value.empty?
+ queries << '1=0'
+ else
+ table = Arel::Table.new(column, default_table.engine)
+ association = klass._reflect_on_association(column.to_sym)
+
+ value.each do |k, v|
+ queries.concat expand(association && association.klass, table, k, v)
+ end
+ end
+ else
+ column = column.to_s
+
+ if column.include?('.')
+ table_name, column = column.split('.', 2)
+ table = Arel::Table.new(table_name, default_table.engine)
+ end
+
+ queries.concat expand(klass, table, column, value)
+ end
+ end
+
+ queries
+ end
+
+ def self.expand(klass, table, column, value)
+ queries = []
+
+ # Find the foreign key when using queries such as:
+ # Post.where(author: author)
+ #
+ # For polymorphic relationships, find the foreign key and type:
+ # PriceEstimate.where(estimate_of: treasure)
+ if klass && reflection = klass._reflect_on_association(column.to_sym)
+ if reflection.polymorphic? && base_class = polymorphic_base_class_from_value(value)
+ queries << build(table[reflection.foreign_type], base_class)
+ end
+
+ column = reflection.foreign_key
+ end
+
+ queries << build(table[column], value)
+ queries
+ end
+
+ def self.polymorphic_base_class_from_value(value)
+ case value
+ when Relation
+ value.klass.base_class
+ when Array
+ val = value.compact.first
+ val.class.base_class if val.is_a?(Base)
+ when Base
+ value.class.base_class
+ end
+ end
+
+ def self.references(attributes)
+ attributes.map do |key, value|
+ if value.is_a?(Hash)
+ key
+ else
+ key = key.to_s
+ key.split('.').first if key.include?('.')
+ end
+ end.compact
+ end
+
+ # Define how a class is converted to Arel nodes when passed to +where+.
+ # The handler can be any object that responds to +call+, and will be used
+    # for any value matched by the given class via +===+. For example:
+ #
+ # MyCustomDateRange = Struct.new(:start, :end)
+ # handler = proc do |column, range|
+ # Arel::Nodes::Between.new(column,
+ # Arel::Nodes::And.new([range.start, range.end])
+ # )
+ # end
+ # ActiveRecord::PredicateBuilder.register_handler(MyCustomDateRange, handler)
+ def self.register_handler(klass, handler)
+ @handlers.unshift([klass, handler])
+ end
+
+ register_handler(BasicObject, ->(attribute, value) { attribute.eq(value) })
+ # FIXME: I think we need to deprecate this behavior
+ register_handler(Class, ->(attribute, value) { attribute.eq(value.name) })
+ register_handler(Base, ->(attribute, value) { attribute.eq(value.id) })
+ register_handler(Range, ->(attribute, value) { attribute.in(value) })
+ register_handler(Relation, RelationHandler.new)
+ register_handler(Array, ArrayHandler.new)
+
+ def self.build(attribute, value)
+ handler_for(value).call(attribute, value)
+ end
+ private_class_method :build
+
+ def self.handler_for(object)
+ @handlers.detect { |klass, _| klass === object }.last
+ end
+ private_class_method :handler_for
+ end
+end
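
A sketch wiring up register_handler, following the example in the doc comment above; MyCustomDateRange and the Post model are hypothetical:

    MyCustomDateRange = Struct.new(:start, :end)

    ActiveRecord::PredicateBuilder.register_handler(MyCustomDateRange, ->(attribute, range) {
      Arel::Nodes::Between.new(attribute, Arel::Nodes::And.new([range.start, range.end]))
    })

    # Handlers are unshifted, so the most recently registered one wins over the
    # default BasicObject handler when the value matches:
    Post.where(created_at: MyCustomDateRange.new(1.week.ago, Time.current))
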
diff --git a/activerecord/lib/active_record/relation/predicate_builder/array_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/array_handler.rb
new file mode 100644
index 0000000000..78dba8be06
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder/array_handler.rb
@@ -0,0 +1,34 @@
+module ActiveRecord
+ class PredicateBuilder
+ class ArrayHandler # :nodoc:
+ def call(attribute, value)
+ return attribute.in([]) if value.empty?
+
+ values = value.map { |x| x.is_a?(Base) ? x.id : x }
+ ranges, values = values.partition { |v| v.is_a?(Range) }
+ nils, values = values.partition(&:nil?)
+
+ values_predicate =
+ case values.length
+ when 0 then NullPredicate
+ when 1 then attribute.eq(values.first)
+ else attribute.in(values)
+ end
+
+ unless nils.empty?
+ values_predicate = values_predicate.or(attribute.eq(nil))
+ end
+
+ array_predicates = ranges.map { |range| attribute.in(range) }
+ array_predicates << values_predicate
+ array_predicates.inject { |composite, predicate| composite.or(predicate) }
+ end
+
+ module NullPredicate
+ def self.or(other)
+ other
+ end
+ end
+ end
+ end
+end
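
A sketch of how ArrayHandler partitions mixed arrays; User is hypothetical and the SQL is approximate:

    User.where(id: [1, 2, nil, 10..12]).to_sql
    # Ranges, nils and plain values are split apart and OR-ed back together:
    # => ... WHERE ("users"."id" BETWEEN 10 AND 12
    #        OR ("users"."id" IN (1, 2) OR "users"."id" IS NULL))
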
diff --git a/activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb b/activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb
new file mode 100644
index 0000000000..618fa3cdd9
--- /dev/null
+++ b/activerecord/lib/active_record/relation/predicate_builder/relation_handler.rb
@@ -0,0 +1,13 @@
+module ActiveRecord
+ class PredicateBuilder
+ class RelationHandler # :nodoc:
+ def call(attribute, value)
+ if value.select_values.empty?
+ value = value.select(value.klass.arel_table[value.klass.primary_key])
+ end
+
+ attribute.in(value.arel.ast)
+ end
+ end
+ end
+end
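
A sketch of the IN-subquery produced by RelationHandler; User and Post are hypothetical:

    User.where(id: Post.select(:user_id)).to_sql
    # => SELECT "users".* FROM "users"
    #    WHERE "users"."id" IN (SELECT "posts"."user_id" FROM "posts")

    # With no explicit select on the inner relation, its primary key is selected:
    User.where(id: Post.all).to_sql
    # => SELECT "users".* FROM "users"
    #    WHERE "users"."id" IN (SELECT "posts"."id" FROM "posts")
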
diff --git a/activerecord/lib/active_record/relation/query_methods.rb b/activerecord/lib/active_record/relation/query_methods.rb
new file mode 100644
index 0000000000..1262b2c291
--- /dev/null
+++ b/activerecord/lib/active_record/relation/query_methods.rb
@@ -0,0 +1,1141 @@
+require 'active_support/core_ext/array/wrap'
+
+module ActiveRecord
+ module QueryMethods
+ extend ActiveSupport::Concern
+
+    # WhereChain objects act as placeholders for queries in which #where does not have any parameter.
+ # In this case, #where must be chained with #not to return a new relation.
+ class WhereChain
+ def initialize(scope)
+ @scope = scope
+ end
+
+ # Returns a new relation expressing WHERE + NOT condition according to
+ # the conditions in the arguments.
+ #
+ # +not+ accepts conditions as a string, array, or hash. See #where for
+ # more details on each format.
+ #
+ # User.where.not("name = 'Jon'")
+ # # SELECT * FROM users WHERE NOT (name = 'Jon')
+ #
+ # User.where.not(["name = ?", "Jon"])
+ # # SELECT * FROM users WHERE NOT (name = 'Jon')
+ #
+ # User.where.not(name: "Jon")
+ # # SELECT * FROM users WHERE name != 'Jon'
+ #
+ # User.where.not(name: nil)
+ # # SELECT * FROM users WHERE name IS NOT NULL
+ #
+ # User.where.not(name: %w(Ko1 Nobu))
+ # # SELECT * FROM users WHERE name NOT IN ('Ko1', 'Nobu')
+ #
+ # User.where.not(name: "Jon", role: "admin")
+ # # SELECT * FROM users WHERE name != 'Jon' AND role != 'admin'
+ def not(opts, *rest)
+ where_value = @scope.send(:build_where, opts, rest).map do |rel|
+ case rel
+ when NilClass
+ raise ArgumentError, 'Invalid argument for .where.not(), got nil.'
+ when Arel::Nodes::In
+ Arel::Nodes::NotIn.new(rel.left, rel.right)
+ when Arel::Nodes::Equality
+ Arel::Nodes::NotEqual.new(rel.left, rel.right)
+ when String
+ Arel::Nodes::Not.new(Arel::Nodes::SqlLiteral.new(rel))
+ else
+ Arel::Nodes::Not.new(rel)
+ end
+ end
+
+ @scope.references!(PredicateBuilder.references(opts)) if Hash === opts
+ @scope.where_values += where_value
+ @scope
+ end
+ end
+
+ Relation::MULTI_VALUE_METHODS.each do |name|
+ class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name}_values # def select_values
+ @values[:#{name}] || [] # @values[:select] || []
+ end # end
+ #
+ def #{name}_values=(values) # def select_values=(values)
+ raise ImmutableRelation if @loaded # raise ImmutableRelation if @loaded
+ check_cached_relation
+ @values[:#{name}] = values # @values[:select] = values
+ end # end
+ CODE
+ end
+
+ (Relation::SINGLE_VALUE_METHODS - [:create_with]).each do |name|
+ class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name}_value # def readonly_value
+ @values[:#{name}] # @values[:readonly]
+ end # end
+ CODE
+ end
+
+ Relation::SINGLE_VALUE_METHODS.each do |name|
+ class_eval <<-CODE, __FILE__, __LINE__ + 1
+ def #{name}_value=(value) # def readonly_value=(value)
+ raise ImmutableRelation if @loaded # raise ImmutableRelation if @loaded
+ check_cached_relation
+ @values[:#{name}] = value # @values[:readonly] = value
+ end # end
+ CODE
+ end
+
+ def check_cached_relation # :nodoc:
+ if defined?(@arel) && @arel
+ @arel = nil
+ ActiveSupport::Deprecation.warn <<-WARNING
+Modifying already cached Relation. The cache will be reset.
+Use a cloned Relation to prevent this warning.
+WARNING
+ end
+ end
+
+ def create_with_value # :nodoc:
+ @values[:create_with] || {}
+ end
+
+ alias extensions extending_values
+
+ # Specify relationships to be included in the result set. For
+ # example:
+ #
+ # users = User.includes(:address)
+ # users.each do |user|
+ # user.address.city
+ # end
+ #
+ # allows you to access the +address+ attribute of the +User+ model without
+ # firing an additional query. This will often result in a
+ # performance improvement over a simple +join+.
+ #
+ # You can also specify multiple relationships, like this:
+ #
+ # users = User.includes(:address, :friends)
+ #
+ # Loading nested relationships is possible using a Hash:
+ #
+ # users = User.includes(:address, friends: [:address, :followers])
+ #
+ # === conditions
+ #
+ # If you want to add conditions to your included models you'll have
+ # to explicitly reference them. For example:
+ #
+ # User.includes(:posts).where('posts.name = ?', 'example')
+ #
+ # Will throw an error, but this will work:
+ #
+ # User.includes(:posts).where('posts.name = ?', 'example').references(:posts)
+ #
+ # Note that +includes+ works with association names while +references+ needs
+ # the actual table name.
+ def includes(*args)
+ check_if_method_has_arguments!(:includes, args)
+ spawn.includes!(*args)
+ end
+
+ def includes!(*args) # :nodoc:
+ args.reject!(&:blank?)
+ args.flatten!
+
+ self.includes_values |= args
+ self
+ end
+
+ # Forces eager loading by performing a LEFT OUTER JOIN on +args+:
+ #
+ # User.eager_load(:posts)
+ # => SELECT "users"."id" AS t0_r0, "users"."name" AS t0_r1, ...
+ # FROM "users" LEFT OUTER JOIN "posts" ON "posts"."user_id" =
+ # "users"."id"
+ def eager_load(*args)
+ check_if_method_has_arguments!(:eager_load, args)
+ spawn.eager_load!(*args)
+ end
+
+ def eager_load!(*args) # :nodoc:
+ self.eager_load_values += args
+ self
+ end
+
+ # Allows preloading of +args+, in the same way that +includes+ does:
+ #
+ # User.preload(:posts)
+ # => SELECT "posts".* FROM "posts" WHERE "posts"."user_id" IN (1, 2, 3)
+ def preload(*args)
+ check_if_method_has_arguments!(:preload, args)
+ spawn.preload!(*args)
+ end
+
+ def preload!(*args) # :nodoc:
+ self.preload_values += args
+ self
+ end
+
+ # Use to indicate that the given +table_names+ are referenced by an SQL string,
+ # and should therefore be JOINed in any query rather than loaded separately.
+ # This method only works in conjunction with +includes+.
+ # See #includes for more details.
+ #
+ # User.includes(:posts).where("posts.name = 'foo'")
+ # # => Doesn't JOIN the posts table, resulting in an error.
+ #
+ # User.includes(:posts).where("posts.name = 'foo'").references(:posts)
+ # # => Query now knows the string references posts, so adds a JOIN
+ def references(*table_names)
+ check_if_method_has_arguments!(:references, table_names)
+ spawn.references!(*table_names)
+ end
+
+ def references!(*table_names) # :nodoc:
+ table_names.flatten!
+ table_names.map!(&:to_s)
+
+ self.references_values |= table_names
+ self
+ end
+
+    # Works in two distinct ways.
+ #
+ # First: takes a block so it can be used just like Array#select.
+ #
+ # Model.all.select { |m| m.field == value }
+ #
+ # This will build an array of objects from the database for the scope,
+ # converting them into an array and iterating through them using Array#select.
+ #
+ # Second: Modifies the SELECT statement for the query so that only certain
+ # fields are retrieved:
+ #
+ # Model.select(:field)
+ # # => [#<Model id: nil, field: "value">]
+ #
+ # Although in the above example it looks as though this method returns an
+ # array, it actually returns a relation object and can have other query
+ # methods appended to it, such as the other methods in ActiveRecord::QueryMethods.
+ #
+ # The argument to the method can also be an array of fields.
+ #
+ # Model.select(:field, :other_field, :and_one_more)
+ # # => [#<Model id: nil, field: "value", other_field: "value", and_one_more: "value">]
+ #
+ # You can also use one or more strings, which will be used unchanged as SELECT fields.
+ #
+ # Model.select('field AS field_one', 'other_field AS field_two')
+ # # => [#<Model id: nil, field: "value", other_field: "value">]
+ #
+ # If an alias was specified, it will be accessible from the resulting objects:
+ #
+ # Model.select('field AS field_one').first.field_one
+ # # => "value"
+ #
+    # Accessing attributes of an object that were not retrieved by the select,
+    # other than +id+, will raise <tt>ActiveModel::MissingAttributeError</tt>:
+ #
+ # Model.select(:field).first.other_field
+ # # => ActiveModel::MissingAttributeError: missing attribute: other_field
+ def select(*fields)
+ if block_given?
+ to_a.select { |*block_args| yield(*block_args) }
+ else
+ raise ArgumentError, 'Call this with at least one field' if fields.empty?
+ spawn._select!(*fields)
+ end
+ end
+
+ def _select!(*fields) # :nodoc:
+ fields.flatten!
+ fields.map! do |field|
+ klass.attribute_alias?(field) ? klass.attribute_alias(field) : field
+ end
+ self.select_values += fields
+ self
+ end
+
+    # Allows you to specify a group attribute:
+ #
+ # User.group(:name)
+ # => SELECT "users".* FROM "users" GROUP BY name
+ #
+ # Returns an array with distinct records based on the +group+ attribute:
+ #
+ # User.select([:id, :name])
+    #   => [#<User id: 1, name: "Oscar">, #<User id: 2, name: "Oscar">, #<User id: 3, name: "Foo">]
+ #
+ # User.group(:name)
+ # => [#<User id: 3, name: "Foo", ...>, #<User id: 2, name: "Oscar", ...>]
+ #
+ # User.group('name AS grouped_name, age')
+ # => [#<User id: 3, name: "Foo", age: 21, ...>, #<User id: 2, name: "Oscar", age: 21, ...>, #<User id: 5, name: "Foo", age: 23, ...>]
+ #
+ # Passing in an array of attributes to group by is also supported.
+ # User.select([:id, :first_name]).group(:id, :first_name).first(3)
+ # => [#<User id: 1, first_name: "Bill">, #<User id: 2, first_name: "Earl">, #<User id: 3, first_name: "Beto">]
+ def group(*args)
+ check_if_method_has_arguments!(:group, args)
+ spawn.group!(*args)
+ end
+
+ def group!(*args) # :nodoc:
+ args.flatten!
+
+ self.group_values += args
+ self
+ end
+
+    # Allows you to specify an order attribute:
+ #
+ # User.order(:name)
+ # => SELECT "users".* FROM "users" ORDER BY "users"."name" ASC
+ #
+ # User.order(email: :desc)
+ # => SELECT "users".* FROM "users" ORDER BY "users"."email" DESC
+ #
+ # User.order(:name, email: :desc)
+ # => SELECT "users".* FROM "users" ORDER BY "users"."name" ASC, "users"."email" DESC
+ #
+ # User.order('name')
+ # => SELECT "users".* FROM "users" ORDER BY name
+ #
+ # User.order('name DESC')
+ # => SELECT "users".* FROM "users" ORDER BY name DESC
+ #
+ # User.order('name DESC, email')
+ # => SELECT "users".* FROM "users" ORDER BY name DESC, email
+ def order(*args)
+ check_if_method_has_arguments!(:order, args)
+ spawn.order!(*args)
+ end
+
+ def order!(*args) # :nodoc:
+ preprocess_order_args(args)
+
+ self.order_values += args
+ self
+ end
+
+ # Replaces any existing order defined on the relation with the specified order.
+ #
+ # User.order('email DESC').reorder('id ASC') # generated SQL has 'ORDER BY id ASC'
+ #
+ # Subsequent calls to order on the same relation will be appended. For example:
+ #
+ # User.order('email DESC').reorder('id ASC').order('name ASC')
+ #
+ # generates a query with 'ORDER BY id ASC, name ASC'.
+ def reorder(*args)
+ check_if_method_has_arguments!(:reorder, args)
+ spawn.reorder!(*args)
+ end
+
+ def reorder!(*args) # :nodoc:
+ preprocess_order_args(args)
+
+ self.reordering_value = true
+ self.order_values = args
+ self
+ end
+
+ VALID_UNSCOPING_VALUES = Set.new([:where, :select, :group, :order, :lock,
+ :limit, :offset, :joins, :includes, :from,
+ :readonly, :having])
+
+ # Removes an unwanted relation that is already defined on a chain of relations.
+    # This is useful when passing around chains of relations and you would like to
+ # modify the relations without reconstructing the entire chain.
+ #
+ # User.order('email DESC').unscope(:order) == User.all
+ #
+ # The method arguments are symbols which correspond to the names of the methods
+ # which should be unscoped. The valid arguments are given in VALID_UNSCOPING_VALUES.
+ # The method can also be called with multiple arguments. For example:
+ #
+ # User.order('email DESC').select('id').where(name: "John")
+ # .unscope(:order, :select, :where) == User.all
+ #
+ # One can additionally pass a hash as an argument to unscope specific :where values.
+ # This is done by passing a hash with a single key-value pair. The key should be
+ # :where and the value should be the where value to unscope. For example:
+ #
+ # User.where(name: "John", active: true).unscope(where: :name)
+ # == User.where(active: true)
+ #
+ # This method is similar to <tt>except</tt>, but unlike
+ # <tt>except</tt>, it persists across merges:
+ #
+ # User.order('email').merge(User.except(:order))
+ # == User.order('email')
+ #
+ # User.order('email').merge(User.unscope(:order))
+ # == User.all
+ #
+ # This means it can be used in association definitions:
+ #
+ # has_many :comments, -> { unscope where: :trashed }
+ #
+ def unscope(*args)
+ check_if_method_has_arguments!(:unscope, args)
+ spawn.unscope!(*args)
+ end
+
+ def unscope!(*args) # :nodoc:
+ args.flatten!
+ self.unscope_values += args
+
+ args.each do |scope|
+ case scope
+ when Symbol
+ symbol_unscoping(scope)
+ when Hash
+ scope.each do |key, target_value|
+ if key != :where
+ raise ArgumentError, "Hash arguments in .unscope(*args) must have :where as the key."
+ end
+
+ Array(target_value).each do |val|
+ where_unscoping(val)
+ end
+ end
+ else
+ raise ArgumentError, "Unrecognized scoping: #{args.inspect}. Use .unscope(where: :attribute_name) or .unscope(:order), for example."
+ end
+ end
+
+ self
+ end
+
+    # Performs a JOIN on +args+:
+ #
+ # User.joins(:posts)
+ # => SELECT "users".* FROM "users" INNER JOIN "posts" ON "posts"."user_id" = "users"."id"
+ #
+ # You can use strings in order to customize your joins:
+ #
+ # User.joins("LEFT JOIN bookmarks ON bookmarks.bookmarkable_type = 'Post' AND bookmarks.user_id = users.id")
+ # => SELECT "users".* FROM "users" LEFT JOIN bookmarks ON bookmarks.bookmarkable_type = 'Post' AND bookmarks.user_id = users.id
+ def joins(*args)
+ check_if_method_has_arguments!(:joins, args)
+
+ args.compact!
+ args.flatten!
+
+ spawn.joins!(*args)
+ end
+
+ def joins!(*args) # :nodoc:
+ self.joins_values += args
+ self
+ end
+
+ def bind(value)
+ spawn.bind!(value)
+ end
+
+ def bind!(value) # :nodoc:
+ self.bind_values += [value]
+ self
+ end
+
+ # Returns a new relation, which is the result of filtering the current relation
+ # according to the conditions in the arguments.
+ #
+ # #where accepts conditions in one of several formats. In the examples below, the resulting
+ # SQL is given as an illustration; the actual query generated may be different depending
+ # on the database adapter.
+ #
+ # === string
+ #
+ # A single string, without additional arguments, is passed to the query
+ # constructor as an SQL fragment, and used in the where clause of the query.
+ #
+ # Client.where("orders_count = '2'")
+ # # SELECT * from clients where orders_count = '2';
+ #
+ # Note that building your own string from user input may expose your application
+ # to injection attacks if not done properly. As an alternative, it is recommended
+ # to use one of the following methods.
+ #
+ # === array
+ #
+ # If an array is passed, then the first element of the array is treated as a template, and
+ # the remaining elements are inserted into the template to generate the condition.
+ # Active Record takes care of building the query to avoid injection attacks, and will
+ # convert from the ruby type to the database type where needed. Elements are inserted
+ # into the string in the order in which they appear.
+ #
+ # User.where(["name = ? and email = ?", "Joe", "joe@example.com"])
+ # # SELECT * FROM users WHERE name = 'Joe' AND email = 'joe@example.com';
+ #
+ # Alternatively, you can use named placeholders in the template, and pass a hash as the
+ # second element of the array. The names in the template are replaced with the corresponding
+ # values from the hash.
+ #
+ # User.where(["name = :name and email = :email", { name: "Joe", email: "joe@example.com" }])
+ # # SELECT * FROM users WHERE name = 'Joe' AND email = 'joe@example.com';
+ #
+ # This can make for more readable code in complex queries.
+ #
+ # Lastly, you can use sprintf-style % escapes in the template. This works slightly differently
+ # than the previous methods; you are responsible for ensuring that the values in the template
+ # are properly quoted. The values are passed to the connector for quoting, but the caller
+ # is responsible for ensuring they are enclosed in quotes in the resulting SQL. After quoting,
+ # the values are inserted using the same escapes as the Ruby core method <tt>Kernel::sprintf</tt>.
+ #
+ # User.where(["name = '%s' and email = '%s'", "Joe", "joe@example.com"])
+ # # SELECT * FROM users WHERE name = 'Joe' AND email = 'joe@example.com';
+ #
+ # If #where is called with multiple arguments, these are treated as if they were passed as
+ # the elements of a single array.
+ #
+ # User.where("name = :name and email = :email", { name: "Joe", email: "joe@example.com" })
+ # # SELECT * FROM users WHERE name = 'Joe' AND email = 'joe@example.com';
+ #
+ # When using strings to specify conditions, you can use any operator available from
+ # the database. While this provides the most flexibility, you can also unintentionally introduce
+ # dependencies on the underlying database. If your code is intended for general consumption,
+ # test with multiple database backends.
+ #
+ # === hash
+ #
+ # #where will also accept a hash condition, in which the keys are fields and the values
+ # are values to be searched for.
+ #
+ # Fields can be symbols or strings. Values can be single values, arrays, or ranges.
+ #
+ # User.where({ name: "Joe", email: "joe@example.com" })
+ # # SELECT * FROM users WHERE name = 'Joe' AND email = 'joe@example.com'
+ #
+ # User.where({ name: ["Alice", "Bob"]})
+ # # SELECT * FROM users WHERE name IN ('Alice', 'Bob')
+ #
+ # User.where({ created_at: (Time.now.midnight - 1.day)..Time.now.midnight })
+ # # SELECT * FROM users WHERE (created_at BETWEEN '2012-06-09 07:00:00.000000' AND '2012-06-10 07:00:00.000000')
+ #
+ # In the case of a belongs_to relationship, an association key can be used
+ # to specify the model if an ActiveRecord object is used as the value.
+ #
+ # author = Author.find(1)
+ #
+ # # The following queries will be equivalent:
+ # Post.where(author: author)
+ # Post.where(author_id: author)
+ #
+ # This also works with polymorphic belongs_to relationships:
+ #
+ # treasure = Treasure.create(name: 'gold coins')
+ # treasure.price_estimates << PriceEstimate.create(price: 125)
+ #
+ # # The following queries will be equivalent:
+ # PriceEstimate.where(estimate_of: treasure)
+ # PriceEstimate.where(estimate_of_type: 'Treasure', estimate_of_id: treasure)
+ #
+ # === Joins
+ #
+ # If the relation is the result of a join, you may create a condition which uses any of the
+ # tables in the join. For string and array conditions, use the table name in the condition.
+ #
+ # User.joins(:posts).where("posts.created_at < ?", Time.now)
+ #
+ # For hash conditions, you can either use the table name in the key, or use a sub-hash.
+ #
+ # User.joins(:posts).where({ "posts.published" => true })
+ # User.joins(:posts).where({ posts: { published: true } })
+ #
+ # === no argument
+ #
+ # If no argument is passed, #where returns a new instance of WhereChain, that
+ # can be chained with #not to return a new relation that negates the where clause.
+ #
+ # User.where.not(name: "Jon")
+ # # SELECT * FROM users WHERE name != 'Jon'
+ #
+ # See WhereChain for more details on #not.
+ #
+ # === blank condition
+ #
+ # If the condition is any blank-ish object, then #where is a no-op and returns
+ # the current relation.
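+ #
+ # For example:
+ #
+ # User.where(nil) # behaves like User.all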
+ def where(opts = :chain, *rest)
+ if opts == :chain
+ WhereChain.new(spawn)
+ elsif opts.blank?
+ self
+ else
+ spawn.where!(opts, *rest)
+ end
+ end
+
+ def where!(opts, *rest) # :nodoc:
+ references!(PredicateBuilder.references(opts)) if Hash === opts
+
+ self.where_values += build_where(opts, rest)
+ self
+ end
+
+ # Allows you to change a previously set where condition for a given attribute, instead of appending to that condition.
+ #
+ # Post.where(trashed: true).where(trashed: false) # => WHERE `trashed` = 1 AND `trashed` = 0
+ # Post.where(trashed: true).rewhere(trashed: false) # => WHERE `trashed` = 0
+ # Post.where(active: true).where(trashed: true).rewhere(trashed: false) # => WHERE `active` = 1 AND `trashed` = 0
+ #
+ # This is short-hand for unscope(where: conditions.keys).where(conditions). Note that unlike reorder, we're only unscoping
+ # the named conditions -- not the entire where statement.
+ def rewhere(conditions)
+ unscope(where: conditions.keys).where(conditions)
+ end
+
+ # Allows you to specify a HAVING clause. Note that you can't use HAVING
+ # without also specifying a GROUP clause.
+ #
+ # Order.having('SUM(price) > 30').group('user_id')
+ def having(opts, *rest)
+ opts.blank? ? self : spawn.having!(opts, *rest)
+ end
+
+ def having!(opts, *rest) # :nodoc:
+ references!(PredicateBuilder.references(opts)) if Hash === opts
+
+ self.having_values += build_where(opts, rest)
+ self
+ end
+
+ # Specifies a limit for the number of records to retrieve.
+ #
+ # User.limit(10) # generated SQL has 'LIMIT 10'
+ #
+ # User.limit(10).limit(20) # generated SQL has 'LIMIT 20'
+ def limit(value)
+ spawn.limit!(value)
+ end
+
+ def limit!(value) # :nodoc:
+ self.limit_value = value
+ self
+ end
+
+ # Specifies the number of rows to skip before returning rows.
+ #
+ # User.offset(10) # generated SQL has "OFFSET 10"
+ #
+ # Should be used with order.
+ #
+ # User.offset(10).order("name ASC")
+ def offset(value)
+ spawn.offset!(value)
+ end
+
+ def offset!(value) # :nodoc:
+ self.offset_value = value
+ self
+ end
+
+ # Specifies locking settings (defaults to +true+). For more information
+ # on locking, please see +ActiveRecord::Locking+.
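+ #
+ # For example (illustrative; the generated locking clause is adapter-specific):
+ #
+ # User.lock.find(1)
+ # # SELECT * FROM users WHERE id = 1 LIMIT 1 FOR UPDATE
+ #
+ # User.lock("LOCK IN SHARE MODE").find(1)
+ # # Uses the given SQL locking clause verbatim (MySQL syntax shown)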
+ def lock(locks = true)
+ spawn.lock!(locks)
+ end
+
+ def lock!(locks = true) # :nodoc:
+ case locks
+ when String, TrueClass, NilClass
+ self.lock_value = locks || true
+ else
+ self.lock_value = false
+ end
+
+ self
+ end
+
+ # Returns a chainable relation with zero records.
+ #
+ # The returned relation implements the Null Object pattern. It is an
+ # object with defined null behavior and always returns an empty array of
+ # records without querying the database.
+ #
+ # Any subsequent condition chained to the returned relation will continue
+ # generating an empty relation and will not fire any query to the database.
+ #
+ # Used in cases where a method or scope could return zero records but the
+ # result needs to be chainable.
+ #
+ # For example:
+ #
+ # @posts = current_user.visible_posts.where(name: params[:name])
+ # # => the visible_posts method is expected to return a chainable Relation
+ #
+ # def visible_posts
+ # case role
+ # when 'Country Manager'
+ # Post.where(country: country)
+ # when 'Reviewer'
+ # Post.published
+ # when 'Bad User'
+ # Post.none # It can't be chained if [] is returned.
+ # end
+ # end
+ #
+ def none
+ extending(NullRelation)
+ end
+
+ def none! # :nodoc:
+ extending!(NullRelation)
+ end
+
+ # Sets readonly attributes for the returned relation. If value is
+ # true (default), attempting to update a record will result in an error.
+ #
+ # users = User.readonly
+ # users.first.save
+ # => ActiveRecord::ReadOnlyRecord: ActiveRecord::ReadOnlyRecord
+ def readonly(value = true)
+ spawn.readonly!(value)
+ end
+
+ def readonly!(value = true) # :nodoc:
+ self.readonly_value = value
+ self
+ end
+
+ # Sets attributes to be used when creating new records from a
+ # relation object.
+ #
+ # users = User.where(name: 'Oscar')
+ # users.new.name # => 'Oscar'
+ #
+ # users = users.create_with(name: 'DHH')
+ # users.new.name # => 'DHH'
+ #
+ # You can pass +nil+ to +create_with+ to reset attributes:
+ #
+ # users = users.create_with(nil)
+ # users.new.name # => 'Oscar'
+ def create_with(value)
+ spawn.create_with!(value)
+ end
+
+ def create_with!(value) # :nodoc:
+ self.create_with_value = value ? create_with_value.merge(value) : {}
+ self
+ end
+
+ # Specifies table from which the records will be fetched. For example:
+ #
+ # Topic.select('title').from('posts')
+ # # => SELECT title FROM posts
+ #
+ # Can accept other relation objects. For example:
+ #
+ # Topic.select('title').from(Topic.approved)
+ # # => SELECT title FROM (SELECT * FROM topics WHERE approved = 't') subquery
+ #
+ # Topic.select('a.title').from(Topic.approved, :a)
+ # # => SELECT a.title FROM (SELECT * FROM topics WHERE approved = 't') a
+ #
+ def from(value, subquery_name = nil)
+ spawn.from!(value, subquery_name)
+ end
+
+ def from!(value, subquery_name = nil) # :nodoc:
+ self.from_value = [value, subquery_name]
+ self
+ end
+
+ # Specifies whether the records should be unique or not. For example:
+ #
+ # User.select(:name)
+ # # => Might return two records with the same name
+ #
+ # User.select(:name).distinct
+ # # => Returns 1 record per distinct name
+ #
+ # User.select(:name).distinct.distinct(false)
+ # # => You can also remove the uniqueness
+ def distinct(value = true)
+ spawn.distinct!(value)
+ end
+ alias uniq distinct
+
+ # Like #distinct, but modifies relation in place.
+ def distinct!(value = true) # :nodoc:
+ self.distinct_value = value
+ self
+ end
+ alias uniq! distinct!
+
+ # Used to extend a scope with additional methods, either through
+ # a module or through a block provided.
+ #
+ # The object returned is a relation, which can be further extended.
+ #
+ # === Using a module
+ #
+ # module Pagination
+ # def page(number)
+ # # pagination code goes here
+ # end
+ # end
+ #
+ # scope = Model.all.extending(Pagination)
+ # scope.page(params[:page])
+ #
+ # You can also pass a list of modules:
+ #
+ # scope = Model.all.extending(Pagination, SomethingElse)
+ #
+ # === Using a block
+ #
+ # scope = Model.all.extending do
+ # def page(number)
+ # # pagination code goes here
+ # end
+ # end
+ # scope.page(params[:page])
+ #
+ # You can also use a block and a module list:
+ #
+ # scope = Model.all.extending(Pagination) do
+ # def per_page(number)
+ # # pagination code goes here
+ # end
+ # end
+ def extending(*modules, &block)
+ if modules.any? || block
+ spawn.extending!(*modules, &block)
+ else
+ self
+ end
+ end
+
+ def extending!(*modules, &block) # :nodoc:
+ modules << Module.new(&block) if block
+ modules.flatten!
+
+ self.extending_values += modules
+ extend(*extending_values) if extending_values.any?
+
+ self
+ end
+
+ # Reverses the existing order clause on the relation.
+ #
+ # User.order('name ASC').reverse_order # generated SQL has 'ORDER BY name DESC'
+ def reverse_order
+ spawn.reverse_order!
+ end
+
+ def reverse_order! # :nodoc:
+ orders = order_values.uniq
+ orders.reject!(&:blank?)
+ self.order_values = reverse_sql_order(orders)
+ self
+ end
+
+ # Returns the Arel object associated with the relation.
+ def arel # :nodoc:
+ @arel ||= build_arel
+ end
+
+ private
+
+ def build_arel
+ arel = Arel::SelectManager.new(table.engine, table)
+
+ build_joins(arel, joins_values.flatten) unless joins_values.empty?
+
+ collapse_wheres(arel, (where_values - [''])) #TODO: Add uniq with real value comparison / ignore uniqs that have binds
+
+ arel.having(*having_values.uniq.reject(&:blank?)) unless having_values.empty?
+
+ arel.take(connection.sanitize_limit(limit_value)) if limit_value
+ arel.skip(offset_value.to_i) if offset_value
+
+ arel.group(*group_values.uniq.reject(&:blank?)) unless group_values.empty?
+
+ build_order(arel)
+
+ build_select(arel, select_values.uniq)
+
+ arel.distinct(distinct_value)
+ arel.from(build_from) if from_value
+ arel.lock(lock_value) if lock_value
+
+ # Reorder bind indexes if joins produced bind values
+ if arel.bind_values.any?
+ bvs = arel.bind_values + bind_values
+ arel.ast.grep(Arel::Nodes::BindParam).each_with_index do |bp, i|
+ column = bvs[i].first
+ bp.replace connection.substitute_at(column, i)
+ end
+ end
+
+ arel
+ end
+
+ def symbol_unscoping(scope)
+ if !VALID_UNSCOPING_VALUES.include?(scope)
+ raise ArgumentError, "Called unscope() with invalid unscoping argument ':#{scope}'. Valid arguments are :#{VALID_UNSCOPING_VALUES.to_a.join(", :")}."
+ end
+
+ single_val_method = Relation::SINGLE_VALUE_METHODS.include?(scope)
+ unscope_code = "#{scope}_value#{'s' unless single_val_method}="
+
+ case scope
+ when :order
+ result = []
+ when :where
+ self.bind_values = []
+ else
+ result = [] unless single_val_method
+ end
+
+ self.send(unscope_code, result)
+ end
+
+ def where_unscoping(target_value)
+ target_value = target_value.to_s
+
+ where_values.reject! do |rel|
+ case rel
+ when Arel::Nodes::In, Arel::Nodes::NotIn, Arel::Nodes::Equality, Arel::Nodes::NotEqual
+ subrelation = (rel.left.kind_of?(Arel::Attributes::Attribute) ? rel.left : rel.right)
+ subrelation.name == target_value
+ end
+ end
+
+ bind_values.reject! { |col,_| col.name == target_value }
+ end
+
+ def custom_join_ast(table, joins)
+ joins = joins.reject(&:blank?)
+
+ return [] if joins.empty?
+
+ joins.map! do |join|
+ case join
+ when Array
+ join = Arel.sql(join.join(' ')) if array_of_strings?(join)
+ when String
+ join = Arel.sql(join)
+ end
+ table.create_string_join(join)
+ end
+ end
+
+ def collapse_wheres(arel, wheres)
+ predicates = wheres.map do |where|
+ next where if ::Arel::Nodes::Equality === where
+ where = Arel.sql(where) if String === where
+ Arel::Nodes::Grouping.new(where)
+ end
+
+ arel.where(Arel::Nodes::And.new(predicates)) if predicates.present?
+ end
+
+ def build_where(opts, other = [])
+ case opts
+ when String, Array
+ [@klass.send(:sanitize_sql, other.empty? ? opts : ([opts] + other))]
+ when Hash
+ opts = PredicateBuilder.resolve_column_aliases(klass, opts)
+
+ bv_len = bind_values.length
+ tmp_opts, bind_values = create_binds(opts, bv_len)
+ self.bind_values += bind_values
+
+ attributes = @klass.send(:expand_hash_conditions_for_aggregates, tmp_opts)
+ attributes.values.grep(ActiveRecord::Relation) do |rel|
+ self.bind_values += rel.bind_values
+ end
+
+ PredicateBuilder.build_from_hash(klass, attributes, table)
+ else
+ [opts]
+ end
+ end
+
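+ # Splits hash conditions into entries that can become bind parameters
+ # (simple String/Integer values on actual columns) and everything else.
+ # Returns [new_opts, binds], where the bindable entries in new_opts are
+ # replaced by adapter placeholders numbered starting at +idx+.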
+ def create_binds(opts, idx)
+ bindable, non_binds = opts.partition do |column, value|
+ case value
+ when String, Integer, ActiveRecord::StatementCache::Substitute
+ @klass.columns_hash.include? column.to_s
+ else
+ false
+ end
+ end
+
+ new_opts = {}
+ binds = []
+
+ bindable.each_with_index do |(column,value), index|
+ binds.push [@klass.columns_hash[column.to_s], value]
+ new_opts[column] = connection.substitute_at(column, index + idx)
+ end
+
+ non_binds.each { |column,value| new_opts[column] = value }
+
+ [new_opts, binds]
+ end
+
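+ # Converts from_value into something Arel can consume: a Relation becomes
+ # an aliased subquery (default alias "subquery") and its bind values are
+ # prepended to this relation's; anything else is passed through untouched.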
+ def build_from
+ opts, name = from_value
+ case opts
+ when Relation
+ name ||= 'subquery'
+ self.bind_values = opts.bind_values + self.bind_values
+ opts.arel.as(name.to_s)
+ else
+ opts
+ end
+ end
+
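+ # Groups the accumulated join values by kind (SQL string joins, association
+ # joins, stashed JoinDependency objects and raw Arel join nodes), builds a
+ # JoinDependency for the association joins, and adds the resulting join
+ # constraints and bind values to the Arel select manager.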
+ def build_joins(manager, joins)
+ buckets = joins.group_by do |join|
+ case join
+ when String
+ :string_join
+ when Hash, Symbol, Array
+ :association_join
+ when ActiveRecord::Associations::JoinDependency
+ :stashed_join
+ when Arel::Nodes::Join
+ :join_node
+ else
+ raise 'unknown class: %s' % join.class.name
+ end
+ end
+
+ association_joins = buckets[:association_join] || []
+ stashed_association_joins = buckets[:stashed_join] || []
+ join_nodes = (buckets[:join_node] || []).uniq
+ string_joins = (buckets[:string_join] || []).map(&:strip).uniq
+
+ join_list = join_nodes + custom_join_ast(manager, string_joins)
+
+ join_dependency = ActiveRecord::Associations::JoinDependency.new(
+ @klass,
+ association_joins,
+ join_list
+ )
+
+ join_infos = join_dependency.join_constraints stashed_association_joins
+
+ join_infos.each do |info|
+ info.joins.each { |join| manager.from(join) }
+ manager.bind_values.concat info.binds
+ end
+
+ manager.join_sources.concat(join_list)
+
+ manager
+ end
+
+ def build_select(arel, selects)
+ if !selects.empty?
+ expanded_select = selects.map do |field|
+ columns_hash.key?(field.to_s) ? arel_table[field] : field
+ end
+ arel.project(*expanded_select)
+ else
+ arel.project(@klass.arel_table[Arel.star])
+ end
+ end
+
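+ # Reverses each order clause: Arel ordering nodes are reversed directly,
+ # SQL strings have their ASC/DESC flipped (DESC is appended when no
+ # direction is given), and the primary key is used as a fallback when the
+ # relation has no explicit order.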
+ def reverse_sql_order(order_query)
+ order_query = ["#{quoted_table_name}.#{quoted_primary_key} ASC"] if order_query.empty?
+
+ order_query.flat_map do |o|
+ case o
+ when Arel::Nodes::Ordering
+ o.reverse
+ when String
+ o.to_s.split(',').map! do |s|
+ s.strip!
+ s.gsub!(/\sasc\Z/i, ' DESC') || s.gsub!(/\sdesc\Z/i, ' ASC') || s.concat(' DESC')
+ end
+ else
+ o
+ end
+ end
+ end
+
+ def array_of_strings?(o)
+ o.is_a?(Array) && o.all? { |obj| obj.is_a?(String) }
+ end
+
+ def build_order(arel)
+ orders = order_values.uniq
+ orders.reject!(&:blank?)
+
+ arel.order(*orders) unless orders.empty?
+ end
+
+ VALID_DIRECTIONS = [:asc, :desc, :ASC, :DESC,
+ 'asc', 'desc', 'ASC', 'DESC'] # :nodoc:
+
+ def validate_order_args(args)
+ args.each do |arg|
+ next unless arg.is_a?(Hash)
+ arg.each do |_key, value|
+ raise ArgumentError, "Direction \"#{value}\" is invalid. Valid " \
+ "directions are: #{VALID_DIRECTIONS.inspect}" unless VALID_DIRECTIONS.include?(value)
+ end
+ end
+ end
+
+ def preprocess_order_args(order_args)
+ order_args.flatten!
+ validate_order_args(order_args)
+
+ references = order_args.grep(String)
+ references.map! { |arg| arg =~ /^([a-zA-Z]\w*)\.(\w+)/ && $1 }.compact!
+ references!(references) if references.any?
+
+ # if a symbol is given we prepend the quoted table name
+ order_args.map! do |arg|
+ case arg
+ when Symbol
+ arg = klass.attribute_alias(arg) if klass.attribute_alias?(arg)
+ table[arg].asc
+ when Hash
+ arg.map { |field, dir|
+ field = klass.attribute_alias(field) if klass.attribute_alias?(field)
+ table[field].send(dir.downcase)
+ }
+ else
+ arg
+ end
+ end.flatten!
+ end
+
+ # Checks to make sure that the arguments are not blank. Note that if some
+ # blank-like object were initially passed into the query method, then this
+ # method will not raise an error.
+ #
+ # Example:
+ #
+ # Post.references() # => raises an error
+ # Post.references([]) # => does not raise an error
+ #
+ # This particular method should be called with a method_name and the args
+ # passed into that method as an input. For example:
+ #
+ # def references(*args)
+ # check_if_method_has_arguments!("references", args)
+ # ...
+ # end
+ def check_if_method_has_arguments!(method_name, args)
+ if args.blank?
+ raise ArgumentError, "The method .#{method_name}() must contain arguments."
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/relation/spawn_methods.rb b/activerecord/lib/active_record/relation/spawn_methods.rb
new file mode 100644
index 0000000000..57d66bce4b
--- /dev/null
+++ b/activerecord/lib/active_record/relation/spawn_methods.rb
@@ -0,0 +1,75 @@
+require 'active_support/core_ext/hash/except'
+require 'active_support/core_ext/hash/slice'
+require 'active_record/relation/merger'
+
+module ActiveRecord
+ module SpawnMethods
+
+ # This is overridden by Associations::CollectionProxy
+ def spawn #:nodoc:
+ clone
+ end
+
+ # Merges in the conditions from <tt>other</tt>, if <tt>other</tt> is an <tt>ActiveRecord::Relation</tt>.
+ # Returns an array representing the intersection of the resulting records with <tt>other</tt>, if <tt>other</tt> is an array.
+ # Post.where(published: true).joins(:comments).merge( Comment.where(spam: false) )
+ # # Performs a single join query with both where conditions.
+ #
+ # recent_posts = Post.order('created_at DESC').first(5)
+ # Post.where(published: true).merge(recent_posts)
+ # # Returns the intersection of all published posts with the 5 most recently created posts.
+ # # (This is just an example. You'd probably want to do this with a single query!)
+ #
+ # Procs will be evaluated by merge:
+ #
+ # Post.where(published: true).merge(-> { joins(:comments) })
+ # # => Post.where(published: true).joins(:comments)
+ #
+ # This is mainly intended for sharing common conditions between multiple associations.
+ def merge(other)
+ if other.is_a?(Array)
+ to_a & other
+ elsif other
+ spawn.merge!(other)
+ else
+ self
+ end
+ end
+
+ def merge!(other) # :nodoc:
+ if !other.is_a?(Relation) && other.respond_to?(:to_proc)
+ instance_exec(&other)
+ else
+ klass = other.is_a?(Hash) ? Relation::HashMerger : Relation::Merger
+ klass.new(self, other).merge
+ end
+ end
+
+ # Removes from the query the condition(s) specified in +skips+.
+ #
+ # Post.order('id asc').except(:order) # discards the order condition
+ # Post.where('id > 10').order('id asc').except(:where) # discards the where condition but keeps the order
+ def except(*skips)
+ relation_with values.except(*skips)
+ end
+
+ # Removes any condition from the query other than the one(s) specified in +onlies+.
+ #
+ # Post.order('id asc').only(:where) # discards the order condition
+ # Post.order('id asc').only(:where, :order) # uses the specified order
+ def only(*onlies)
+ if onlies.any? { |o| o == :where }
+ onlies << :bind
+ end
+ relation_with values.slice(*onlies)
+ end
+
+ private
+
+ def relation_with(values) # :nodoc:
+ result = Relation.create(klass, table, values)
+ result.extend(*extending_values) if extending_values.any?
+ result
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/result.rb b/activerecord/lib/active_record/result.rb
new file mode 100644
index 0000000000..8405fdaeb9
--- /dev/null
+++ b/activerecord/lib/active_record/result.rb
@@ -0,0 +1,127 @@
+module ActiveRecord
+ ###
+ # This class encapsulates a Result returned from calling +exec_query+ on any
+ # database connection adapter. For example:
+ #
+ # result = ActiveRecord::Base.connection.exec_query('SELECT id, title, body FROM posts')
+ # result # => #<ActiveRecord::Result:0xdeadbeef>
+ #
+ # # Get the column names of the result:
+ # result.columns
+ # # => ["id", "title", "body"]
+ #
+ # # Get the record values of the result:
+ # result.rows
+ # # => [[1, "title_1", "body_1"],
+ # [2, "title_2", "body_2"],
+ # ...
+ # ]
+ #
+ # # Get an array of hashes representing the result (column => value):
+ # result.to_hash
+ # # => [{"id" => 1, "title" => "title_1", "body" => "body_1"},
+ # {"id" => 2, "title" => "title_2", "body" => "body_2"},
+ # ...
+ # ]
+ #
+ # # ActiveRecord::Result also includes Enumerable.
+ # result.each do |row|
+ # puts row['title'] + " " + row['body']
+ # end
+ class Result
+ include Enumerable
+
+ IDENTITY_TYPE = Type::Value.new # :nodoc:
+
+ attr_reader :columns, :rows, :column_types
+
+ def initialize(columns, rows, column_types = {})
+ @columns = columns
+ @rows = rows
+ @hash_rows = nil
+ @column_types = column_types
+ end
+
+ def each
+ if block_given?
+ hash_rows.each { |row| yield row }
+ else
+ hash_rows.to_enum { @rows.size }
+ end
+ end
+
+ def to_hash
+ hash_rows
+ end
+
+ alias :map! :map
+ alias :collect! :map
+
+ # Returns true if there are no records.
+ def empty?
+ rows.empty?
+ end
+
+ def to_ary
+ hash_rows
+ end
+
+ def [](idx)
+ hash_rows[idx]
+ end
+
+ def last
+ hash_rows.last
+ end
+
+ def cast_values(type_overrides = {}) # :nodoc:
+ types = columns.map { |name| column_type(name, type_overrides) }
+ result = rows.map do |values|
+ types.zip(values).map { |type, value| type.type_cast_from_database(value) }
+ end
+
+ columns.one? ? result.map!(&:first) : result
+ end
+
+ def initialize_copy(other)
+ @columns = columns.dup
+ @rows = rows.dup
+ @column_types = column_types.dup
+ @hash_rows = nil
+ end
+
+ private
+
+ def column_type(name, type_overrides = {})
+ type_overrides.fetch(name) do
+ column_types.fetch(name, IDENTITY_TYPE)
+ end
+ end
+
+ def hash_rows
+ @hash_rows ||=
+ begin
+ # We freeze the strings to prevent them getting duped when
+ # used as keys in ActiveRecord::Base's @attributes hash
+ columns = @columns.map { |c| c.dup.freeze }
+ @rows.map { |row|
+ # In the past we used Hash[columns.zip(row)];
+ # though elegant, the verbose way is much more efficient,
+ # both time- and memory-wise, because it avoids a big array allocation.
+ # This method is called a lot and needs to be micro optimised.
+ hash = {}
+
+ index = 0
+ length = columns.length
+
+ while index < length
+ hash[columns[index]] = row[index]
+ index += 1
+ end
+
+ hash
+ }
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/runtime_registry.rb b/activerecord/lib/active_record/runtime_registry.rb
new file mode 100644
index 0000000000..9d605b826a
--- /dev/null
+++ b/activerecord/lib/active_record/runtime_registry.rb
@@ -0,0 +1,22 @@
+require 'active_support/per_thread_registry'
+
+module ActiveRecord
+ # This is a thread locals registry for Active Record. For example:
+ #
+ # ActiveRecord::RuntimeRegistry.connection_handler
+ #
+ # returns the connection handler local to the current thread.
+ #
+ # See the documentation of <tt>ActiveSupport::PerThreadRegistry</tt>
+ # for further details.
+ class RuntimeRegistry # :nodoc:
+ extend ActiveSupport::PerThreadRegistry
+
+ attr_accessor :connection_handler, :sql_runtime, :connection_id
+
+ [:connection_handler, :sql_runtime, :connection_id].each do |val|
+ class_eval %{ def self.#{val}; instance.#{val}; end }, __FILE__, __LINE__
+ class_eval %{ def self.#{val}=(x); instance.#{val}=x; end }, __FILE__, __LINE__
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/sanitization.rb b/activerecord/lib/active_record/sanitization.rb
new file mode 100644
index 0000000000..ff70cbed0f
--- /dev/null
+++ b/activerecord/lib/active_record/sanitization.rb
@@ -0,0 +1,188 @@
+module ActiveRecord
+ module Sanitization
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ def quote_value(value, column) #:nodoc:
+ connection.quote(value, column)
+ end
+
+ # Used to sanitize objects before they're used in an SQL SELECT statement. Delegates to <tt>connection.quote</tt>.
+ def sanitize(object) #:nodoc:
+ connection.quote(object)
+ end
+
+ protected
+
+ # Accepts an array, hash, or string of SQL conditions and sanitizes
+ # them into a valid SQL fragment for a WHERE clause.
+ # ["name='%s' and group_id='%s'", "foo'bar", 4] returns "name='foo''bar' and group_id='4'"
+ # { name: "foo'bar", group_id: 4 } returns "name='foo''bar' and group_id='4'"
+ # "name='foo''bar' and group_id='4'" returns "name='foo''bar' and group_id='4'"
+ def sanitize_sql_for_conditions(condition, table_name = self.table_name)
+ return nil if condition.blank?
+
+ case condition
+ when Array; sanitize_sql_array(condition)
+ when Hash; sanitize_sql_hash_for_conditions(condition, table_name)
+ else condition
+ end
+ end
+ alias_method :sanitize_sql, :sanitize_sql_for_conditions
+ alias_method :sanitize_conditions, :sanitize_sql
+
+ # Accepts an array, hash, or string of SQL conditions and sanitizes
+ # them into a valid SQL fragment for a SET clause.
+ # { name: nil, group_id: 4 } returns "name = NULL , group_id='4'"
+ def sanitize_sql_for_assignment(assignments, default_table_name = self.table_name)
+ case assignments
+ when Array; sanitize_sql_array(assignments)
+ when Hash; sanitize_sql_hash_for_assignment(assignments, default_table_name)
+ else assignments
+ end
+ end
+
+ # Accepts a hash of SQL conditions and replaces those attributes
+ # that correspond to a +composed_of+ relationship with their expanded
+ # aggregate attribute values.
+ # Given:
+ # class Person < ActiveRecord::Base
+ # composed_of :address, class_name: "Address",
+ # mapping: [%w(address_street street), %w(address_city city)]
+ # end
+ # Then:
+ # { address: Address.new("813 abc st.", "chicago") }
+ # # => { address_street: "813 abc st.", address_city: "chicago" }
+ def expand_hash_conditions_for_aggregates(attrs)
+ expanded_attrs = {}
+ attrs.each do |attr, value|
+ if aggregation = reflect_on_aggregation(attr.to_sym)
+ mapping = aggregation.mapping
+ mapping.each do |field_attr, aggregate_attr|
+ if mapping.size == 1 && !value.respond_to?(aggregate_attr)
+ expanded_attrs[field_attr] = value
+ else
+ expanded_attrs[field_attr] = value.send(aggregate_attr)
+ end
+ end
+ else
+ expanded_attrs[attr] = value
+ end
+ end
+ expanded_attrs
+ end
+
+ # Sanitizes a hash of attribute/value pairs into SQL conditions for a WHERE clause.
+ # { name: "foo'bar", group_id: 4 }
+ # # => "name='foo''bar' and group_id= 4"
+ # { status: nil, group_id: [1,2,3] }
+ # # => "status IS NULL and group_id IN (1,2,3)"
+ # { age: 13..18 }
+ # # => "age BETWEEN 13 AND 18"
+ # { 'other_records.id' => 7 }
+ # # => "`other_records`.`id` = 7"
+ # { other_records: { id: 7 } }
+ # # => "`other_records`.`id` = 7"
+ # And for value objects on a composed_of relationship:
+ # { address: Address.new("123 abc st.", "chicago") }
+ # # => "address_street='123 abc st.' and address_city='chicago'"
+ def sanitize_sql_hash_for_conditions(attrs, default_table_name = self.table_name)
+ attrs = PredicateBuilder.resolve_column_aliases self, attrs
+ attrs = expand_hash_conditions_for_aggregates(attrs)
+
+ table = Arel::Table.new(table_name, arel_engine).alias(default_table_name)
+ PredicateBuilder.build_from_hash(self, attrs, table).map { |b|
+ connection.visitor.compile b
+ }.join(' AND ')
+ end
+ alias_method :sanitize_sql_hash, :sanitize_sql_hash_for_conditions
+
+ # Sanitizes a hash of attribute/value pairs into SQL conditions for a SET clause.
+ # { status: nil, group_id: 1 }
+ # # => "status = NULL , group_id = 1"
+ def sanitize_sql_hash_for_assignment(attrs, table)
+ c = connection
+ attrs.map do |attr, value|
+ "#{c.quote_table_name_for_assignment(table, attr)} = #{quote_bound_value(value, c, columns_hash[attr.to_s])}"
+ end.join(', ')
+ end
+
+ # Sanitizes a +string+ so that it is safe to use within an SQL
+ # LIKE statement. This method uses +escape_character+ to escape all occurrences of "\", "_" and "%"
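+ #
+ # Illustrative calls, with the default and then a custom escape character:
+ #
+ # sanitize_sql_like("100%") # => "100\\%"
+ # sanitize_sql_like("snake_cased_string", "!") # => "snake!_cased!_string"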
+ def sanitize_sql_like(string, escape_character = "\\")
+ pattern = Regexp.union(escape_character, "%", "_")
+ string.gsub(pattern) { |x| [escape_character, x].join }
+ end
+
+ # Accepts an array of conditions. The array has each value
+ # sanitized and interpolated into the SQL statement.
+ # ["name='%s' and group_id='%s'", "foo'bar", 4] returns "name='foo''bar' and group_id='4'"
+ def sanitize_sql_array(ary)
+ statement, *values = ary
+ if values.first.is_a?(Hash) && statement =~ /:\w+/
+ replace_named_bind_variables(statement, values.first)
+ elsif statement.include?('?')
+ replace_bind_variables(statement, values)
+ elsif statement.blank?
+ statement
+ else
+ statement % values.collect { |value| connection.quote_string(value.to_s) }
+ end
+ end
+
+ def replace_bind_variables(statement, values) #:nodoc:
+ raise_if_bind_arity_mismatch(statement, statement.count('?'), values.size)
+ bound = values.dup
+ c = connection
+ statement.gsub('?') do
+ replace_bind_variable(bound.shift, c)
+ end
+ end
+
+ def replace_bind_variable(value, c = connection) #:nodoc:
+ if ActiveRecord::Relation === value
+ value.to_sql
+ else
+ quote_bound_value(value, c)
+ end
+ end
+
+ def replace_named_bind_variables(statement, bind_vars) #:nodoc:
+ statement.gsub(/(:?):([a-zA-Z]\w*)/) do
+ if $1 == ':' # skip postgresql casts
+ $& # return the whole match
+ elsif bind_vars.include?(match = $2.to_sym)
+ replace_bind_variable(bind_vars[match])
+ else
+ raise PreparedStatementInvalid, "missing value for :#{match} in #{statement}"
+ end
+ end
+ end
+
+ def quote_bound_value(value, c = connection, column = nil) #:nodoc:
+ if column
+ c.quote(value, column)
+ elsif value.respond_to?(:map) && !value.acts_like?(:string)
+ if value.respond_to?(:empty?) && value.empty?
+ c.quote(nil)
+ else
+ value.map { |v| c.quote(v) }.join(',')
+ end
+ else
+ c.quote(value)
+ end
+ end
+
+ def raise_if_bind_arity_mismatch(statement, expected, provided) #:nodoc:
+ unless expected == provided
+ raise PreparedStatementInvalid, "wrong number of bind variables (#{provided} for #{expected}) in: #{statement}"
+ end
+ end
+ end
+
+ # TODO: Deprecate this
+ def quoted_id
+ self.class.quote_value(id, column_for_attribute(self.class.primary_key))
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/schema.rb b/activerecord/lib/active_record/schema.rb
new file mode 100644
index 0000000000..0a5546a760
--- /dev/null
+++ b/activerecord/lib/active_record/schema.rb
@@ -0,0 +1,64 @@
+module ActiveRecord
+ # = Active Record Schema
+ #
+ # Allows programmers to programmatically define a schema in a portable
+ # DSL. This means you can define tables, indexes, etc. without using SQL
+ # directly, so your applications can more easily support multiple
+ # databases.
+ #
+ # Usage:
+ #
+ # ActiveRecord::Schema.define do
+ # create_table :authors do |t|
+ # t.string :name, null: false
+ # end
+ #
+ # add_index :authors, :name, unique: true
+ #
+ # create_table :posts do |t|
+ # t.integer :author_id, null: false
+ # t.string :subject
+ # t.text :body
+ # t.boolean :private, default: false
+ # end
+ #
+ # add_index :posts, :author_id
+ # end
+ #
+ # ActiveRecord::Schema is only supported by database adapters that also
+ # support migrations, the two features being very similar.
+ class Schema < Migration
+
+ # Returns the migrations paths.
+ #
+ # ActiveRecord::Schema.new.migrations_paths
+ # # => ["db/migrate"] # Rails migration path by default.
+ def migrations_paths
+ ActiveRecord::Migrator.migrations_paths
+ end
+
+ def define(info, &block) # :nodoc:
+ instance_eval(&block)
+
+ unless info[:version].blank?
+ initialize_schema_migrations_table
+ connection.assume_migrated_upto_version(info[:version], migrations_paths)
+ end
+ end
+
+ # Eval the given block. All methods available to the current connection
+ # adapter are available within the block, so you can easily use the
+ # database definition DSL to build up your schema (+create_table+,
+ # +add_index+, etc.).
+ #
+ # The +info+ hash is optional, and if given is used to define metadata
+ # about the current schema (currently, only the schema's version):
+ #
+ # ActiveRecord::Schema.define(version: 20380119000001) do
+ # ...
+ # end
+ def self.define(info={}, &block)
+ new.define(info, &block)
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/schema_dumper.rb b/activerecord/lib/active_record/schema_dumper.rb
new file mode 100644
index 0000000000..fae6427ea1
--- /dev/null
+++ b/activerecord/lib/active_record/schema_dumper.rb
@@ -0,0 +1,262 @@
+require 'stringio'
+require 'active_support/core_ext/big_decimal'
+
+module ActiveRecord
+ # = Active Record Schema Dumper
+ #
+ # This class is used to dump the database schema for some connection to some
+ # output format (i.e., ActiveRecord::Schema).
+ class SchemaDumper #:nodoc:
+ private_class_method :new
+
+ ##
+ # :singleton-method:
+ # A list of tables which should not be dumped to the schema.
+ # Acceptable values are strings as well as regexp.
+ # This setting is only used if ActiveRecord::Base.schema_format == :ruby
+ cattr_accessor :ignore_tables
+ @@ignore_tables = []
+
+ class << self
+ def dump(connection=ActiveRecord::Base.connection, stream=STDOUT, config = ActiveRecord::Base)
+ new(connection, generate_options(config)).dump(stream)
+ stream
+ end
+
+ private
+ def generate_options(config)
+ {
+ table_name_prefix: config.table_name_prefix,
+ table_name_suffix: config.table_name_suffix
+ }
+ end
+ end
+
+ def dump(stream)
+ header(stream)
+ extensions(stream)
+ tables(stream)
+ trailer(stream)
+ stream
+ end
+
+ private
+
+ def initialize(connection, options = {})
+ @connection = connection
+ @types = @connection.native_database_types
+ @version = Migrator::current_version rescue nil
+ @options = options
+ end
+
+ def header(stream)
+ define_params = @version ? "version: #{@version}" : ""
+
+ if stream.respond_to?(:external_encoding) && stream.external_encoding
+ stream.puts "# encoding: #{stream.external_encoding.name}"
+ end
+
+ stream.puts <<HEADER
+# This file is auto-generated from the current state of the database. Instead
+# of editing this file, please use the migrations feature of Active Record to
+# incrementally modify your database, and then regenerate this schema definition.
+#
+# Note that this schema.rb definition is the authoritative source for your
+# database schema. If you need to create the application database on another
+# system, you should be using db:schema:load, not running all the migrations
+# from scratch. The latter is a flawed and unsustainable approach (the more migrations
+# you amass, the slower it'll run and the greater the likelihood of issues).
+#
+# It's strongly recommended that you check this file into your version control system.
+
+ActiveRecord::Schema.define(#{define_params}) do
+
+HEADER
+ end
+
+ def trailer(stream)
+ stream.puts "end"
+ end
+
+ def extensions(stream)
+ return unless @connection.supports_extensions?
+ extensions = @connection.extensions
+ if extensions.any?
+ stream.puts " # These are extensions that must be enabled in order to support this database"
+ extensions.each do |extension|
+ stream.puts " enable_extension #{extension.inspect}"
+ end
+ stream.puts
+ end
+ end
+
+ def tables(stream)
+ sorted_tables = @connection.tables.sort
+
+ sorted_tables.each do |table_name|
+ table(table_name, stream) unless ignored?(table_name)
+ end
+
+ # dump foreign keys at the end to make sure all dependent tables exist.
+ if @connection.supports_foreign_keys?
+ sorted_tables.each do |tbl|
+ foreign_keys(tbl, stream)
+ end
+ end
+ end
+
+ def table(table, stream)
+ columns = @connection.columns(table)
+ begin
+ tbl = StringIO.new
+
+ # first dump primary key column
+ if @connection.respond_to?(:pk_and_sequence_for)
+ pk, _ = @connection.pk_and_sequence_for(table)
+ end
+ if !pk && @connection.respond_to?(:primary_key)
+ pk = @connection.primary_key(table)
+ end
+
+ tbl.print " create_table #{remove_prefix_and_suffix(table).inspect}"
+ pkcol = columns.detect { |c| c.name == pk }
+ if pkcol
+ if pk != 'id'
+ tbl.print %Q(, primary_key: "#{pk}")
+ elsif pkcol.sql_type == 'uuid'
+ tbl.print ", id: :uuid"
+ tbl.print %Q(, default: "#{pkcol.default_function}") if pkcol.default_function
+ end
+ else
+ tbl.print ", id: false"
+ end
+ tbl.print ", force: true"
+ tbl.puts " do |t|"
+
+ # then dump all non-primary key columns
+ column_specs = columns.map do |column|
+ raise StandardError, "Unknown type '#{column.sql_type}' for column '#{column.name}'" unless @connection.valid_type?(column.type)
+ next if column.name == pk
+ @connection.column_spec(column, @types)
+ end.compact
+
+ # find all migration keys used in this table
+ keys = @connection.migration_keys
+
+ # figure out the lengths for each column based on above keys
+ lengths = keys.map { |key|
+ column_specs.map { |spec|
+ spec[key] ? spec[key].length + 2 : 0
+ }.max
+ }
+
+ # the string we're going to sprintf our values against, with standardized column widths
+ format_string = lengths.map{ |len| "%-#{len}s" }
+
+ # find the max length for the 'type' column, which is special
+ type_length = column_specs.map{ |column| column[:type].length }.max
+
+ # add column type definition to our format string
+ format_string.unshift " t.%-#{type_length}s "
+
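+ # Array#* with a String argument behaves like #join, collapsing the
+ # per-column fragments into a single format string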
+ format_string *= ''
+
+ column_specs.each do |colspec|
+ values = keys.zip(lengths).map{ |key, len| colspec.key?(key) ? colspec[key] + ", " : " " * len }
+ values.unshift colspec[:type]
+ tbl.print((format_string % values).gsub(/,\s*$/, ''))
+ tbl.puts
+ end
+
+ tbl.puts " end"
+ tbl.puts
+
+ indexes(table, tbl)
+
+ tbl.rewind
+ stream.print tbl.read
+ rescue => e
+ stream.puts "# Could not dump table #{table.inspect} because of following #{e.class}"
+ stream.puts "# #{e.message}"
+ stream.puts
+ end
+
+ stream
+ end
+
+ def indexes(table, stream)
+ if (indexes = @connection.indexes(table)).any?
+ add_index_statements = indexes.map do |index|
+ statement_parts = [
+ ('add_index ' + remove_prefix_and_suffix(index.table).inspect),
+ index.columns.inspect,
+ ('name: ' + index.name.inspect),
+ ]
+ statement_parts << 'unique: true' if index.unique
+
+ index_lengths = (index.lengths || []).compact
+ statement_parts << ('length: ' + Hash[index.columns.zip(index.lengths)].inspect) unless index_lengths.empty?
+
+ index_orders = (index.orders || {})
+ statement_parts << ('order: ' + index.orders.inspect) unless index_orders.empty?
+
+ statement_parts << ('where: ' + index.where.inspect) if index.where
+
+ statement_parts << ('using: ' + index.using.inspect) if index.using
+
+ statement_parts << ('type: ' + index.type.inspect) if index.type
+
+ ' ' + statement_parts.join(', ')
+ end
+
+ stream.puts add_index_statements.sort.join("\n")
+ stream.puts
+ end
+ end
+
+ def foreign_keys(table, stream)
+ if (foreign_keys = @connection.foreign_keys(table)).any?
+ add_foreign_key_statements = foreign_keys.map do |foreign_key|
+ parts = [
+ 'add_foreign_key ' + remove_prefix_and_suffix(foreign_key.from_table).inspect,
+ remove_prefix_and_suffix(foreign_key.to_table).inspect,
+ ]
+
+ if foreign_key.column != @connection.foreign_key_column_for(foreign_key.to_table)
+ parts << ('column: ' + foreign_key.column.inspect)
+ end
+
+ if foreign_key.custom_primary_key?
+ parts << ('primary_key: ' + foreign_key.primary_key.inspect)
+ end
+
+ if foreign_key.name !~ /^fk_rails_[0-9a-f]{10}$/
+ parts << ('name: ' + foreign_key.name.inspect)
+ end
+
+ parts << ('on_update: ' + foreign_key.on_update.inspect) if foreign_key.on_update
+ parts << ('on_delete: ' + foreign_key.on_delete.inspect) if foreign_key.on_delete
+
+ ' ' + parts.join(', ')
+ end
+
+ stream.puts add_foreign_key_statements.sort.join("\n")
+ end
+ end
+
+ def remove_prefix_and_suffix(table)
+ table.gsub(/^(#{@options[:table_name_prefix]})(.+)(#{@options[:table_name_suffix]})$/, "\\2")
+ end
+
+ def ignored?(table_name)
+ ['schema_migrations', ignore_tables].flatten.any? do |ignored|
+ case ignored
+ when String; remove_prefix_and_suffix(table_name) == ignored
+ when Regexp; remove_prefix_and_suffix(table_name) =~ ignored
+ else
+ raise StandardError, 'ActiveRecord::SchemaDumper.ignore_tables accepts an array of String and / or Regexp values.'
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/schema_migration.rb b/activerecord/lib/active_record/schema_migration.rb
new file mode 100644
index 0000000000..b5038104ac
--- /dev/null
+++ b/activerecord/lib/active_record/schema_migration.rb
@@ -0,0 +1,56 @@
+require 'active_record/scoping/default'
+require 'active_record/scoping/named'
+require 'active_record/base'
+
+module ActiveRecord
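+ # This model is used internally by migrations to keep track of which
+ # migration versions have already been run; each applied version is stored
+ # as a row in the schema_migrations table.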
+ class SchemaMigration < ActiveRecord::Base
+ class << self
+ def primary_key
+ nil
+ end
+
+ def table_name
+ "#{table_name_prefix}#{ActiveRecord::Base.schema_migrations_table_name}#{table_name_suffix}"
+ end
+
+ def index_name
+ "#{table_name_prefix}unique_#{ActiveRecord::Base.schema_migrations_table_name}#{table_name_suffix}"
+ end
+
+ def table_exists?
+ connection.table_exists?(table_name)
+ end
+
+ def create_table(limit=nil)
+ unless table_exists?
+ version_options = {null: false}
+ version_options[:limit] = limit if limit
+
+ connection.create_table(table_name, id: false) do |t|
+ t.column :version, :string, version_options
+ end
+ connection.add_index table_name, :version, unique: true, name: index_name
+ end
+ end
+
+ def drop_table
+ if table_exists?
+ connection.remove_index table_name, name: index_name
+ connection.drop_table(table_name)
+ end
+ end
+
+ def normalize_migration_number(number)
+ "%.3d" % number.to_i
+ end
+
+ def normalized_versions
+ pluck(:version).map { |v| normalize_migration_number v }
+ end
+ end
+
+ def version
+ super.to_i
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/scoping.rb b/activerecord/lib/active_record/scoping.rb
new file mode 100644
index 0000000000..3e43591672
--- /dev/null
+++ b/activerecord/lib/active_record/scoping.rb
@@ -0,0 +1,87 @@
+require 'active_support/per_thread_registry'
+
+module ActiveRecord
+ module Scoping
+ extend ActiveSupport::Concern
+
+ included do
+ include Default
+ include Named
+ end
+
+ module ClassMethods
+ def current_scope #:nodoc:
+ ScopeRegistry.value_for(:current_scope, base_class.to_s)
+ end
+
+ def current_scope=(scope) #:nodoc:
+ ScopeRegistry.set_value_for(:current_scope, base_class.to_s, scope)
+ end
+ end
+
+ def populate_with_current_scope_attributes
+ return unless self.class.scope_attributes?
+
+ self.class.scope_attributes.each do |att,value|
+ send("#{att}=", value) if respond_to?("#{att}=")
+ end
+ end
+
+ def initialize_internals_callback
+ super
+ populate_with_current_scope_attributes
+ end
+
+ # This class stores the +:current_scope+ and +:ignore_default_scope+ values
+ # for different classes. The registry is stored as a thread local, which is
+ # accessed through +ScopeRegistry.current+.
+ #
+ # This class allows you to store and get the scope values on different
+ # classes and different types of scopes. For example, if you are attempting
+ # to get the current_scope for the +Board+ model, then you would use the
+ # following code:
+ #
+ # registry = ActiveRecord::Scoping::ScopeRegistry
+ # registry.set_value_for(:current_scope, "Board", some_new_scope)
+ #
+ # Now when you run:
+ #
+ # registry.value_for(:current_scope, "Board")
+ #
+ # You will obtain whatever was defined in +some_new_scope+. The +value_for+
+ # and +set_value_for+ methods are delegated to the current +ScopeRegistry+
+ # object, so the above example code can also be called as:
+ #
+ # ActiveRecord::Scoping::ScopeRegistry.set_value_for(:current_scope,
+ # "Board", some_new_scope)
+ class ScopeRegistry # :nodoc:
+ extend ActiveSupport::PerThreadRegistry
+
+ VALID_SCOPE_TYPES = [:current_scope, :ignore_default_scope]
+
+ def initialize
+ @registry = Hash.new { |hash, key| hash[key] = {} }
+ end
+
+ # Obtains the value for a given +scope_name+ and +variable_name+.
+ def value_for(scope_type, variable_name)
+ raise_invalid_scope_type!(scope_type)
+ @registry[scope_type][variable_name]
+ end
+
+ # Sets the +value+ for a given +scope_type+ and +variable_name+.
+ def set_value_for(scope_type, variable_name, value)
+ raise_invalid_scope_type!(scope_type)
+ @registry[scope_type][variable_name] = value
+ end
+
+ private
+
+ def raise_invalid_scope_type!(scope_type)
+ if !VALID_SCOPE_TYPES.include?(scope_type)
+ raise ArgumentError, "Invalid scope type '#{scope_type}' sent to the registry. Scope types must be included in VALID_SCOPE_TYPES"
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/scoping/default.rb b/activerecord/lib/active_record/scoping/default.rb
new file mode 100644
index 0000000000..18190cb535
--- /dev/null
+++ b/activerecord/lib/active_record/scoping/default.rb
@@ -0,0 +1,134 @@
+module ActiveRecord
+ module Scoping
+ module Default
+ extend ActiveSupport::Concern
+
+ included do
+ # Stores the default scope for the class.
+ class_attribute :default_scopes, instance_writer: false, instance_predicate: false
+
+ self.default_scopes = []
+ end
+
+ module ClassMethods
+ # Returns a scope for the model without the previously set scopes.
+ #
+ # class Post < ActiveRecord::Base
+ # def self.default_scope
+ # where published: true
+ # end
+ # end
+ #
+ # Post.all # Fires "SELECT * FROM posts WHERE published = true"
+ # Post.unscoped.all # Fires "SELECT * FROM posts"
+ # Post.where(published: false).unscoped.all # Fires "SELECT * FROM posts"
+ #
+ # This method also accepts a block. All queries inside the block will
+ # not use the previously set scopes.
+ #
+ # Post.unscoped {
+ # Post.limit(10) # Fires "SELECT * FROM posts LIMIT 10"
+ # }
+ def unscoped
+ block_given? ? relation.scoping { yield } : relation
+ end
+
+ def before_remove_const #:nodoc:
+ self.current_scope = nil
+ end
+
+ protected
+
+ # Use this macro in your model to set a default scope for all operations on
+ # the model.
+ #
+ # class Article < ActiveRecord::Base
+ # default_scope { where(published: true) }
+ # end
+ #
+ # Article.all # => SELECT * FROM articles WHERE published = true
+ #
+ # The +default_scope+ is also applied while creating/building a record.
+ # It is not applied while updating a record.
+ #
+ # Article.new.published # => true
+ # Article.create.published # => true
+ #
+ # (You can also pass any object which responds to +call+ to the
+ # +default_scope+ macro, and it will be called when building the
+ # default scope.)
+ #
+ # If you use multiple +default_scope+ declarations in your model then
+ # they will be merged together:
+ #
+ # class Article < ActiveRecord::Base
+ # default_scope { where(published: true) }
+ # default_scope { where(rating: 'G') }
+ # end
+ #
+ # Article.all # => SELECT * FROM articles WHERE published = true AND rating = 'G'
+ #
+ # This is also the case with inheritance and module includes where the
+ # parent or module defines a +default_scope+ and the child or including
+ # class defines a second one.
+ #
+ # If you need to do more complex things with a default scope, you can
+ # alternatively define it as a class method:
+ #
+ # class Article < ActiveRecord::Base
+ # def self.default_scope
+ # # Should return a scope, you can call 'super' here etc.
+ # end
+ # end
+ def default_scope(scope = nil)
+ scope = Proc.new if block_given?
+
+ if scope.is_a?(Relation) || !scope.respond_to?(:call)
+ raise ArgumentError,
+ "Support for calling #default_scope without a block is removed. For example instead " \
+ "of `default_scope where(color: 'red')`, please use " \
+ "`default_scope { where(color: 'red') }`. (Alternatively you can just redefine " \
+ "self.default_scope.)"
+ end
+
+ self.default_scopes += [scope]
+ end
+
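+ # Builds the default scope for the class: a user-defined +default_scope+
+ # class method takes precedence; otherwise all registered default_scope
+ # blocks are evaluated and merged onto +base_rel+ in declaration order.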
+ def build_default_scope(base_rel = relation) # :nodoc:
+ if !Base.is_a?(method(:default_scope).owner)
+ # The user has defined their own default scope method, so call that
+ evaluate_default_scope { default_scope }
+ elsif default_scopes.any?
+ evaluate_default_scope do
+ default_scopes.inject(base_rel) do |default_scope, scope|
+ default_scope.merge(base_rel.scoping { scope.call })
+ end
+ end
+ end
+ end
+
+ def ignore_default_scope? # :nodoc:
+ ScopeRegistry.value_for(:ignore_default_scope, self)
+ end
+
+ def ignore_default_scope=(ignore) # :nodoc:
+ ScopeRegistry.set_value_for(:ignore_default_scope, self, ignore)
+ end
+
+ # The ignore_default_scope flag is used to prevent an infinite recursion
+ # situation where a default scope references a scope which has a default
+ # scope which references a scope...
+ def evaluate_default_scope # :nodoc:
+ return if ignore_default_scope?
+
+ begin
+ self.ignore_default_scope = true
+ yield
+ ensure
+ self.ignore_default_scope = false
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/scoping/named.rb b/activerecord/lib/active_record/scoping/named.rb
new file mode 100644
index 0000000000..49cadb66d0
--- /dev/null
+++ b/activerecord/lib/active_record/scoping/named.rb
@@ -0,0 +1,160 @@
+require 'active_support/core_ext/array'
+require 'active_support/core_ext/hash/except'
+require 'active_support/core_ext/kernel/singleton_class'
+
+module ActiveRecord
+ # = Active Record \Named \Scopes
+ module Scoping
+ module Named
+ extend ActiveSupport::Concern
+
+ module ClassMethods
+ # Returns an <tt>ActiveRecord::Relation</tt> scope object.
+ #
+ # posts = Post.all
+ # posts.size # Fires "select count(*) from posts" and returns the count
+ # posts.each {|p| puts p.name } # Fires "select * from posts" and loads post objects
+ #
+ # fruits = Fruit.all
+ # fruits = fruits.where(color: 'red') if options[:red_only]
+ # fruits = fruits.limit(10) if limited?
+ #
+ # You can define a scope that applies to all finders using
+ # <tt>ActiveRecord::Base.default_scope</tt>.
+ def all
+ if current_scope
+ current_scope.clone
+ else
+ default_scoped
+ end
+ end
+
+ def default_scoped # :nodoc:
+ relation.merge(build_default_scope)
+ end
+
+ # Collects attributes from scopes that should be applied when creating
+ # an AR instance for the particular class this is called on.
+ def scope_attributes # :nodoc:
+ all.scope_for_create
+ end
+
+ # Are there default attributes associated with this scope?
+ def scope_attributes? # :nodoc:
+ current_scope || default_scopes.any?
+ end
+
+ # Adds a class method for retrieving and querying objects. A \scope
+ # represents a narrowing of a database query, such as
+ # <tt>where(color: :red).select('shirts.*').includes(:washing_instructions)</tt>.
+ #
+ # class Shirt < ActiveRecord::Base
+ # scope :red, -> { where(color: 'red') }
+ # scope :dry_clean_only, -> { joins(:washing_instructions).where('washing_instructions.dry_clean_only = ?', true) }
+ # end
+ #
+ # The above calls to +scope+ define class methods <tt>Shirt.red</tt> and
+ # <tt>Shirt.dry_clean_only</tt>. <tt>Shirt.red</tt>, in effect,
+ # represents the query <tt>Shirt.where(color: 'red')</tt>.
+ #
+ # You should always pass a callable object to the scopes defined
+ # with +scope+. This ensures that the scope is re-evaluated each
+ # time it is called.
+ #
+ # Note that this is simply 'syntactic sugar' for defining an actual
+ # class method:
+ #
+ # class Shirt < ActiveRecord::Base
+ # def self.red
+ # where(color: 'red')
+ # end
+ # end
+ #
+ # Unlike <tt>Shirt.find(...)</tt>, however, the object returned by
+ # <tt>Shirt.red</tt> is not an Array; it resembles the association object
+ # constructed by a +has_many+ declaration. For instance, you can invoke
+ # <tt>Shirt.red.first</tt>, <tt>Shirt.red.count</tt>,
+ # <tt>Shirt.red.where(size: 'small')</tt>. Also, just as with the
+ # association objects, named \scopes act like an Array, implementing
+ # Enumerable; <tt>Shirt.red.each(&block)</tt>, <tt>Shirt.red.first</tt>,
+ # and <tt>Shirt.red.inject(memo, &block)</tt> all behave as if
+ # <tt>Shirt.red</tt> really was an Array.
+ #
+ # These named \scopes are composable. For instance,
+ # <tt>Shirt.red.dry_clean_only</tt> will produce all shirts that are
+ # both red and dry clean only. Nested finds and calculations also work
+ # with these compositions: <tt>Shirt.red.dry_clean_only.count</tt>
+ # returns the number of garments for which these criteria obtain.
+ # Similarly with <tt>Shirt.red.dry_clean_only.average(:thread_count)</tt>.
+ #
+ # All scopes are available as class methods on the ActiveRecord::Base
+ # descendant upon which the \scopes were defined. But they are also
+ # available to +has_many+ associations. If,
+ #
+ # class Person < ActiveRecord::Base
+ # has_many :shirts
+ # end
+ #
+ # then <tt>elton.shirts.red.dry_clean_only</tt> will return all of
+ # Elton's red, dry clean only shirts.
+ #
+ # \Named scopes can also have extensions, just as with +has_many+
+ # declarations:
+ #
+ # class Shirt < ActiveRecord::Base
+ # scope :red, -> { where(color: 'red') } do
+ # def dom_id
+ # 'red_shirts'
+ # end
+ # end
+ # end
+ #
+ # Scopes can also be used while creating/building a record.
+ #
+ # class Article < ActiveRecord::Base
+ # scope :published, -> { where(published: true) }
+ # end
+ #
+ # Article.published.new.published # => true
+ # Article.published.create.published # => true
+ #
+ # \Class methods on your model are automatically available
+ # on scopes. Assuming the following setup:
+ #
+ # class Article < ActiveRecord::Base
+ # scope :published, -> { where(published: true) }
+ # scope :featured, -> { where(featured: true) }
+ #
+ # def self.latest_article
+ # order('published_at desc').first
+ # end
+ #
+ # def self.titles
+ # pluck(:title)
+ # end
+ # end
+ #
+ # We are able to call the methods like this:
+ #
+ # Article.published.featured.latest_article
+ # Article.featured.titles
+ def scope(name, body, &block)
+ if dangerous_class_method?(name)
+ raise ArgumentError, "You tried to define a scope named \"#{name}\" " \
+ "on the model \"#{self.name}\", but Active Record already defined " \
+ "a class method with the same name."
+ end
+
+ extension = Module.new(&block) if block
+
+ singleton_class.send(:define_method, name) do |*args|
+ scope = all.scoping { body.call(*args) }
+ scope = scope.extending(extension) if extension
+
+ scope || all
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/serialization.rb b/activerecord/lib/active_record/serialization.rb
new file mode 100644
index 0000000000..bd9079b596
--- /dev/null
+++ b/activerecord/lib/active_record/serialization.rb
@@ -0,0 +1,22 @@
+module ActiveRecord #:nodoc:
+ # = Active Record Serialization
+ module Serialization
+ extend ActiveSupport::Concern
+ include ActiveModel::Serializers::JSON
+
+ included do
+ self.include_root_in_json = false
+ end
+
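+ # A hypothetical example, assuming a +User+ model with +id+ and +name+
+ # columns (the inheritance column is always excluded):
+ #
+ # user.serializable_hash # => { "id" => 1, "name" => "Jon" }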
+ def serializable_hash(options = nil)
+ options = options.try(:clone) || {}
+
+ options[:except] = Array(options[:except]).map { |n| n.to_s }
+ options[:except] |= Array(self.class.inheritance_column)
+
+ super(options)
+ end
+ end
+end
+
+require 'active_record/serializers/xml_serializer'
diff --git a/activerecord/lib/active_record/serializers/xml_serializer.rb b/activerecord/lib/active_record/serializers/xml_serializer.rb
new file mode 100644
index 0000000000..c2484d02ed
--- /dev/null
+++ b/activerecord/lib/active_record/serializers/xml_serializer.rb
@@ -0,0 +1,193 @@
+require 'active_support/core_ext/hash/conversions'
+
+module ActiveRecord #:nodoc:
+ module Serialization
+ include ActiveModel::Serializers::Xml
+
+ # Builds an XML document to represent the model. Some configuration is
+ # available through +options+. However more complicated cases should
+ # override ActiveRecord::Base#to_xml.
+ #
+ # By default the generated XML document will include the processing
+ # instruction and all the object's attributes. For example:
+ #
+ # <?xml version="1.0" encoding="UTF-8"?>
+ # <topic>
+ # <title>The First Topic</title>
+ # <author-name>David</author-name>
+ # <id type="integer">1</id>
+ # <approved type="boolean">false</approved>
+ # <replies-count type="integer">0</replies-count>
+ # <bonus-time type="dateTime">2000-01-01T08:28:00+12:00</bonus-time>
+ # <written-on type="dateTime">2003-07-16T09:28:00+1200</written-on>
+ # <content>Have a nice day</content>
+ # <author-email-address>david@loudthinking.com</author-email-address>
+ # <parent-id></parent-id>
+ # <last-read type="date">2004-04-15</last-read>
+ # </topic>
+ #
+ # This behavior can be controlled with <tt>:only</tt>, <tt>:except</tt>,
+ # <tt>:skip_instruct</tt>, <tt>:skip_types</tt>, <tt>:dasherize</tt> and <tt>:camelize</tt>.
+ # The <tt>:only</tt> and <tt>:except</tt> options are the same as for the
+ # +attributes+ method. The default is to dasherize all column names, but you
+ # can disable this by setting <tt>:dasherize</tt> to +false+. Setting <tt>:camelize</tt>
+ # to +true+ will camelize all column names - this also overrides <tt>:dasherize</tt>.
+ # To leave the column type out of the XML output, set <tt>:skip_types</tt> to +true+.
+ #
+ # For instance:
+ #
+ # topic.to_xml(skip_instruct: true, except: [ :id, :bonus_time, :written_on, :replies_count ])
+ #
+ # <topic>
+ # <title>The First Topic</title>
+ # <author-name>David</author-name>
+ # <approved type="boolean">false</approved>
+ # <content>Have a nice day</content>
+ # <author-email-address>david@loudthinking.com</author-email-address>
+ # <parent-id></parent-id>
+ # <last-read type="date">2004-04-15</last-read>
+ # </topic>
+ #
+ # To include first level associations use <tt>:include</tt>:
+ #
+ # firm.to_xml include: [ :account, :clients ]
+ #
+ # <?xml version="1.0" encoding="UTF-8"?>
+ # <firm>
+ # <id type="integer">1</id>
+ # <rating type="integer">1</rating>
+ # <name>37signals</name>
+ # <clients type="array">
+ # <client>
+ # <rating type="integer">1</rating>
+ # <name>Summit</name>
+ # </client>
+ # <client>
+ # <rating type="integer">1</rating>
+ # <name>Microsoft</name>
+ # </client>
+ # </clients>
+ # <account>
+ # <id type="integer">1</id>
+ # <credit-limit type="integer">50</credit-limit>
+ # </account>
+ # </firm>
+ #
+ # Additionally, the record being serialized will be passed as the second argument
+ # to any Proc given via <tt>:procs</tt>. This allows for ad hoc additions to the
+ # resulting document that incorporate the context of the record being serialized.
+ # And by leveraging the closure created by a Proc, +to_xml+ can be used to add
+ # elements that normally fall outside of the scope of the model -- for example,
+ # generating and appending URLs associated with models.
+ #
+ # proc = Proc.new { |options, record| options[:builder].tag!('name-reverse', record.name.reverse) }
+ # firm.to_xml procs: [ proc ]
+ #
+ # <firm>
+ # # ... normal attributes as shown above ...
+ # <name-reverse>slangis73</name-reverse>
+ # </firm>
+ #
+ # To include deeper levels of associations, pass a hash like this:
+ #
+ # firm.to_xml include: {account: {}, clients: {include: :address}}
+ #
+ # <?xml version="1.0" encoding="UTF-8"?>
+ # <firm>
+ # <id type="integer">1</id>
+ # <rating type="integer">1</rating>
+ # <name>37signals</name>
+ # <clients type="array">
+ # <client>
+ # <rating type="integer">1</rating>
+ # <name>Summit</name>
+ # <address>
+ # ...
+ # </address>
+ # </client>
+ # <client>
+ # <rating type="integer">1</rating>
+ # <name>Microsoft</name>
+ # <address>
+ # ...
+ # </address>
+ # </client>
+ # </clients>
+ # <account>
+ # <id type="integer">1</id>
+ # <credit-limit type="integer">50</credit-limit>
+ # </account>
+ # </firm>
+ #
+ # To include the result of any methods on the model, use <tt>:methods</tt>:
+ #
+ # firm.to_xml methods: [ :calculated_earnings, :real_earnings ]
+ #
+ # <firm>
+ # # ... normal attributes as shown above ...
+ # <calculated-earnings>100000000000000000</calculated-earnings>
+ # <real-earnings>5</real-earnings>
+ # </firm>
+ #
+ # To call any additional Procs, use <tt>:procs</tt>. The Procs are passed a
+ # modified version of the options hash that was given to +to_xml+:
+ #
+ # proc = Proc.new { |options| options[:builder].tag!('abc', 'def') }
+ # firm.to_xml procs: [ proc ]
+ #
+ # <firm>
+ # # ... normal attributes as shown above ...
+ # <abc>def</abc>
+ # </firm>
+ #
+ # Alternatively, you can yield the builder object as part of the +to_xml+ call:
+ #
+ # firm.to_xml do |xml|
+ # xml.creator do
+ # xml.first_name "David"
+ # xml.last_name "Heinemeier Hansson"
+ # end
+ # end
+ #
+ # <firm>
+ # # ... normal attributes as shown above ...
+ # <creator>
+ # <first_name>David</first_name>
+ # <last_name>Heinemeier Hansson</last_name>
+ # </creator>
+ # </firm>
+ #
+ # As noted above, you may override +to_xml+ in your ActiveRecord::Base
+ # subclasses to have complete control over what's generated. The general
+ # form of doing this is:
+ #
+ # class IHaveMyOwnXML < ActiveRecord::Base
+ # def to_xml(options = {})
+ # require 'builder'
+ # options[:indent] ||= 2
+ # xml = options[:builder] ||= ::Builder::XmlMarkup.new(indent: options[:indent])
+ # xml.instruct! unless options[:skip_instruct]
+ # xml.level_one do
+ # xml.tag!(:second_level, 'content')
+ # end
+ # end
+ # end
+ def to_xml(options = {}, &block)
+ XmlSerializer.new(self, options).serialize(&block)
+ end
+ end
+
+ class XmlSerializer < ActiveModel::Serializers::Xml::Serializer #:nodoc:
+ class Attribute < ActiveModel::Serializers::Xml::Serializer::Attribute #:nodoc:
+ def compute_type
+ klass = @serializable.class
+ column = klass.columns_hash[name] || Type::Value.new
+
+ type = ActiveSupport::XmlMini::TYPE_NAMES[value.class.name] || column.type
+
+ { text: :string,
+ time: :datetime }[type] || type
+ end
+ protected :compute_type
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/statement_cache.rb b/activerecord/lib/active_record/statement_cache.rb
new file mode 100644
index 0000000000..aece446384
--- /dev/null
+++ b/activerecord/lib/active_record/statement_cache.rb
@@ -0,0 +1,100 @@
+module ActiveRecord
+
+ # Statement cache is used to cache a single statement in order to avoid creating the AST again.
+ # A cache is built by passing the statement in a block to +create+, together with a connection:
+ #
+ # cache = ActiveRecord::StatementCache.create(Book.connection) do
+ # Book.where(name: "my book").limit(100)
+ # end
+ #
+ # The cached statement is executed with the +execute+ method, which takes the bind values,
+ # the model class and a connection:
+ #
+ # cache.execute([], Book, Book.connection)
+ #
+ # The SQL for the relation returned by the block is built only once; each +execute+ call
+ # reuses it and queries the database with the supplied binds.
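+ #
+ # To cache the statement without fixed values, the block is given a params object
+ # whose +bind+ method marks a placeholder (a sketch based on the Params and BindMap
+ # classes below):
+ #
+ # cache = ActiveRecord::StatementCache.create(Book.connection) do |params|
+ # Book.where(name: params.bind)
+ # end
+ #
+ # cache.execute(["my book"], Book, Book.connection)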
+ class StatementCache
+ class Substitute; end
+
+ class Query
+ def initialize(sql)
+ @sql = sql
+ end
+
+ def sql_for(binds, connection)
+ @sql
+ end
+ end
+
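+ # PartialQuery keeps the collected SQL fragments and remembers which positions
+ # hold bind parameters, so the final SQL can be assembled with quoted values at
+ # execution time.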
+ class PartialQuery < Query
+ def initialize(values)
+ @values = values
+ @indexes = values.each_with_index.find_all { |thing,i|
+ Arel::Nodes::BindParam === thing
+ }.map(&:last)
+ end
+
+ def sql_for(binds, connection)
+ val = @values.dup
+ binds = binds.dup
+ @indexes.each { |i| val[i] = connection.quote(*binds.shift.reverse) }
+ val.join
+ end
+ end
+
+ def self.query(visitor, ast)
+ Query.new visitor.accept(ast, Arel::Collectors::SQLString.new).value
+ end
+
+ def self.partial_query(visitor, ast, collector)
+ collected = visitor.accept(ast, collector).value
+ PartialQuery.new collected
+ end
+
+ class Params
+ def bind; Substitute.new; end
+ end
+
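+ # BindMap records which of the relation's bind values are Substitute placeholders
+ # so they can be swapped for the runtime values handed to +execute+.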
+ class BindMap
+ def initialize(bind_values)
+ @indexes = []
+ @bind_values = bind_values
+
+ bind_values.each_with_index do |(_, value), i|
+ if Substitute === value
+ @indexes << i
+ end
+ end
+ end
+
+ def bind(values)
+ bvs = @bind_values.map { |pair| pair.dup }
+ @indexes.each_with_index { |offset,i| bvs[offset][1] = values[i] }
+ bvs
+ end
+ end
+
+ attr_reader :bind_map, :query_builder
+
+ def self.create(connection, block = Proc.new)
+ relation = block.call Params.new
+ bind_map = BindMap.new relation.bind_values
+ query_builder = connection.cacheable_query relation.arel
+ new query_builder, bind_map
+ end
+
+ def initialize(query_builder, bind_map)
+ @query_builder = query_builder
+ @bind_map = bind_map
+ end
+
+ def execute(params, klass, connection)
+ bind_values = bind_map.bind params
+
+ sql = query_builder.sql_for bind_values, connection
+
+ klass.find_by_sql sql, bind_values
+ end
+ alias :call :execute
+ end
+end
diff --git a/activerecord/lib/active_record/store.rb b/activerecord/lib/active_record/store.rb
new file mode 100644
index 0000000000..3c291f28e3
--- /dev/null
+++ b/activerecord/lib/active_record/store.rb
@@ -0,0 +1,205 @@
+require 'active_support/core_ext/hash/indifferent_access'
+
+module ActiveRecord
+ # Store gives you a thin wrapper around serialize for the purpose of storing hashes in a single column.
+ # It's like a simple key/value store baked into your record when you don't care about being able to
+ # query that store outside the context of a single record.
+ #
+ # You can then declare accessors to this store that are accessible just like any other attribute
+ # of the model. This is very helpful for easily exposing store keys to a form or elsewhere that's
+ # already built around just accessing attributes on the model.
+ #
+ # Make sure that you declare the database column used for the serialized store as a text
+ # column, so there's plenty of room.
+ #
+ # You can set a custom coder to encode/decode your serialized attributes to/from different formats.
+ # JSON, YAML, Marshal are supported out of the box. Generally it can be any wrapper that provides +load+ and +dump+.
+ #
+ # NOTE: If you are using PostgreSQL-specific columns like +hstore+ or +json+ there is no need for
+ # the serialization provided by +store+. Simply use +store_accessor+ instead to generate
+ # the accessor methods. Be aware that these columns use a string-keyed hash and do not allow access
+ # using a symbol.
+ #
+ # Examples:
+ #
+ # class User < ActiveRecord::Base
+ # store :settings, accessors: [ :color, :homepage ], coder: JSON
+ # end
+ #
+ # u = User.new(color: 'black', homepage: '37signals.com')
+ # u.color # Accessor stored attribute
+ # u.settings[:country] = 'Denmark' # Any attribute, even if not specified with an accessor
+ #
+ # # There is no difference between strings and symbols for accessing custom attributes
+ # u.settings[:country] # => 'Denmark'
+ # u.settings['country'] # => 'Denmark'
+ #
+ # # Add additional accessors to an existing store through store_accessor
+ # class SuperUser < User
+ # store_accessor :settings, :privileges, :servants
+ # end
+ #
+ # The stored attribute names can be retrieved using +stored_attributes+.
+ #
+ # User.stored_attributes[:settings] # [:color, :homepage]
+ #
+ # == Overwriting default accessors
+ #
+ # All stored values are automatically available through accessors on the Active Record
+ # object, but sometimes you want to specialize this behavior. This can be done by overwriting
+ # the default accessors (using the same name as the attribute) and calling <tt>super</tt>
+ # to actually change things.
+ #
+ # class Song < ActiveRecord::Base
+ # # Uses a stored integer to hold the volume adjustment of the song
+ # store :settings, accessors: [:volume_adjustment]
+ #
+ # def volume_adjustment=(decibels)
+ # super(decibels.to_i)
+ # end
+ #
+ # def volume_adjustment
+ # super.to_i
+ # end
+ # end
+ module Store
+ extend ActiveSupport::Concern
+
+ included do
+ class << self
+ attr_accessor :local_stored_attributes
+ end
+ end
+
+ module ClassMethods
+ def store(store_attribute, options = {})
+ serialize store_attribute, IndifferentCoder.new(options[:coder])
+ store_accessor(store_attribute, options[:accessors]) if options.has_key? :accessors
+ end
+
+ def store_accessor(store_attribute, *keys)
+ keys = keys.flatten
+
+ _store_accessors_module.module_eval do
+ keys.each do |key|
+ define_method("#{key}=") do |value|
+ write_store_attribute(store_attribute, key, value)
+ end
+
+ define_method(key) do
+ read_store_attribute(store_attribute, key)
+ end
+ end
+ end
+
+ # assign new store attribute and create new hash to ensure that each class in the hierarchy
+ # has its own hash of stored attributes.
+ self.local_stored_attributes ||= {}
+ self.local_stored_attributes[store_attribute] ||= []
+ self.local_stored_attributes[store_attribute] |= keys
+ end
+
+ def _store_accessors_module # :nodoc:
+ @_store_accessors_module ||= begin
+ mod = Module.new
+ include mod
+ mod
+ end
+ end
+
+ def stored_attributes
+ parent = superclass.respond_to?(:stored_attributes) ? superclass.stored_attributes : {}
+ if self.local_stored_attributes
+ parent.merge!(self.local_stored_attributes) { |k, a, b| a | b }
+ end
+ parent
+ end
+ end
+
+ protected
+ def read_store_attribute(store_attribute, key)
+ accessor = store_accessor_for(store_attribute)
+ accessor.read(self, store_attribute, key)
+ end
+
+ def write_store_attribute(store_attribute, key, value)
+ accessor = store_accessor_for(store_attribute)
+ accessor.write(self, store_attribute, key, value)
+ end
+
+ private
+ def store_accessor_for(store_attribute)
+ type_for_attribute(store_attribute.to_s).accessor
+ end
+
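+ # Reads and writes keys of a plain Hash store, marking the store attribute as
+ # dirty only when a value actually changes.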
+ class HashAccessor # :nodoc:
+ def self.read(object, attribute, key)
+ prepare(object, attribute)
+ object.public_send(attribute)[key]
+ end
+
+ def self.write(object, attribute, key, value)
+ prepare(object, attribute)
+ if value != read(object, attribute, key)
+ object.public_send :"#{attribute}_will_change!"
+ object.public_send(attribute)[key] = value
+ end
+ end
+
+ def self.prepare(object, attribute)
+ object.public_send :"#{attribute}=", {} unless object.send(attribute)
+ end
+ end
+
+ class StringKeyedHashAccessor < HashAccessor # :nodoc:
+ def self.read(object, attribute, key)
+ super object, attribute, key.to_s
+ end
+
+ def self.write(object, attribute, key, value)
+ super object, attribute, key.to_s, value
+ end
+ end
+
+ class IndifferentHashAccessor < ActiveRecord::Store::HashAccessor # :nodoc:
+ def self.prepare(object, store_attribute)
+ attribute = object.send(store_attribute)
+ unless attribute.is_a?(ActiveSupport::HashWithIndifferentAccess)
+ attribute = IndifferentCoder.as_indifferent_hash(attribute)
+ object.send :"#{store_attribute}=", attribute
+ end
+ attribute
+ end
+ end
+
+ class IndifferentCoder # :nodoc:
+ def initialize(coder_or_class_name)
+ @coder =
+ if coder_or_class_name.respond_to?(:load) && coder_or_class_name.respond_to?(:dump)
+ coder_or_class_name
+ else
+ ActiveRecord::Coders::YAMLColumn.new(coder_or_class_name || Object)
+ end
+ end
+
+ def dump(obj)
+ @coder.dump self.class.as_indifferent_hash(obj)
+ end
+
+ def load(yaml)
+ self.class.as_indifferent_hash(@coder.load(yaml || ''))
+ end
+
+ def self.as_indifferent_hash(obj)
+ case obj
+ when ActiveSupport::HashWithIndifferentAccess
+ obj
+ when Hash
+ obj.with_indifferent_access
+ else
+ ActiveSupport::HashWithIndifferentAccess.new
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/tasks/database_tasks.rb b/activerecord/lib/active_record/tasks/database_tasks.rb
new file mode 100644
index 0000000000..892c78e479
--- /dev/null
+++ b/activerecord/lib/active_record/tasks/database_tasks.rb
@@ -0,0 +1,276 @@
+module ActiveRecord
+ module Tasks # :nodoc:
+ class DatabaseAlreadyExists < StandardError; end # :nodoc:
+ class DatabaseNotSupported < StandardError; end # :nodoc:
+
+ # <tt>ActiveRecord::Tasks::DatabaseTasks</tt> is a utility class that encapsulates
+ # the logic behind common tasks used to manage the database and migrations.
+ #
+ # The tasks defined here are used with Rake tasks provided by Active Record.
+ #
+ # In order to use DatabaseTasks, a few config values need to be set. All the needed
+ # config values are set by Rails already, so you only need to set them if you
+ # want to change the defaults or use Active Record outside of Rails
+ # (in that case, after configuring the database tasks, you can also use the rake tasks
+ # defined in Active Record).
+ #
+ # The possible config values are:
+ #
+ # * +env+: current environment (like Rails.env).
+ # * +database_configuration+: configuration of your databases (as in +config/database.yml+).
+ # * +db_dir+: your +db+ directory.
+ # * +fixtures_path+: a path to fixtures directory.
+ # * +migrations_paths+: a list of paths to directories with migrations.
+ # * +seed_loader+: an object which will load seeds; it needs to respond to the +load_seed+ method.
+ # * +root+: a path to the root of the application.
+ #
+ # Example usage of +DatabaseTasks+ outside Rails could look like this:
+ #
+ # include ActiveRecord::Tasks
+ # DatabaseTasks.database_configuration = YAML.load_file('my_database_config.yml')
+ # DatabaseTasks.db_dir = 'db'
+ # # other settings...
+ #
+ # DatabaseTasks.create_current('production')
+ module DatabaseTasks
+ extend self
+
+ attr_writer :current_config, :db_dir, :migrations_paths, :fixtures_path, :root, :env, :seed_loader
+ attr_accessor :database_configuration
+
+ LOCAL_HOSTS = ['127.0.0.1', 'localhost']
+
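+ # Maps an adapter name pattern to the task class that handles it; +create+, +drop+,
+ # +purge+ and friends are dispatched through this mapping. A third-party adapter
+ # could hook in with something like <tt>register_task(/foodb/, FooDBTasks)</tt>
+ # (the names here are hypothetical).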
+ def register_task(pattern, task)
+ @tasks ||= {}
+ @tasks[pattern] = task
+ end
+
+ register_task(/mysql/, ActiveRecord::Tasks::MySQLDatabaseTasks)
+ register_task(/postgresql/, ActiveRecord::Tasks::PostgreSQLDatabaseTasks)
+ register_task(/sqlite/, ActiveRecord::Tasks::SQLiteDatabaseTasks)
+
+ def db_dir
+ @db_dir ||= Rails.application.config.paths["db"].first
+ end
+
+ def migrations_paths
+ @migrations_paths ||= Rails.application.paths['db/migrate'].to_a
+ end
+
+ def fixtures_path
+ @fixtures_path ||= if ENV['FIXTURES_PATH']
+ File.join(root, ENV['FIXTURES_PATH'])
+ else
+ File.join(root, 'test', 'fixtures')
+ end
+ end
+
+ def root
+ @root ||= Rails.root
+ end
+
+ def env
+ @env ||= Rails.env
+ end
+
+ def seed_loader
+ @seed_loader ||= Rails.application
+ end
+
+ def current_config(options = {})
+ options.reverse_merge! :env => env
+ if options.has_key?(:config)
+ @current_config = options[:config]
+ else
+ @current_config ||= ActiveRecord::Base.configurations[options[:env]]
+ end
+ end
+
+ def create(*arguments)
+ configuration = arguments.first
+ class_for_adapter(configuration['adapter']).new(*arguments).create
+ rescue DatabaseAlreadyExists
+ $stderr.puts "#{configuration['database']} already exists"
+ rescue Exception => error
+ $stderr.puts error, *(error.backtrace)
+ $stderr.puts "Couldn't create database for #{configuration.inspect}"
+ end
+
+ def create_all
+ each_local_configuration { |configuration| create configuration }
+ end
+
+ def create_current(environment = env)
+ each_current_configuration(environment) { |configuration|
+ create configuration
+ }
+ ActiveRecord::Base.establish_connection(environment.to_sym)
+ end
+
+ def drop(*arguments)
+ configuration = arguments.first
+ class_for_adapter(configuration['adapter']).new(*arguments).drop
+ rescue ActiveRecord::NoDatabaseError
+ $stderr.puts "Database '#{configuration['database']}' does not exist"
+ rescue Exception => error
+ $stderr.puts error, *(error.backtrace)
+ $stderr.puts "Couldn't drop #{configuration['database']}"
+ end
+
+ def drop_all
+ each_local_configuration { |configuration| drop configuration }
+ end
+
+ def drop_current(environment = env)
+ each_current_configuration(environment) { |configuration|
+ drop configuration
+ }
+ end
+
+ def migrate
+ verbose = ENV["VERBOSE"] ? ENV["VERBOSE"] == "true" : true
+ version = ENV["VERSION"] ? ENV["VERSION"].to_i : nil
+ scope = ENV['SCOPE']
+ Migration.verbose = verbose
+ Migrator.migrate(Migrator.migrations_paths, version) do |migration|
+ scope.blank? || scope == migration.scope
+ end
+ end
+
+ def charset_current(environment = env)
+ charset ActiveRecord::Base.configurations[environment]
+ end
+
+ def charset(*arguments)
+ configuration = arguments.first
+ class_for_adapter(configuration['adapter']).new(*arguments).charset
+ end
+
+ def collation_current(environment = env)
+ collation ActiveRecord::Base.configurations[environment]
+ end
+
+ def collation(*arguments)
+ configuration = arguments.first
+ class_for_adapter(configuration['adapter']).new(*arguments).collation
+ end
+
+ def purge(configuration)
+ class_for_adapter(configuration['adapter']).new(configuration).purge
+ end
+
+ def purge_all
+ each_local_configuration { |configuration|
+ purge configuration
+ }
+ end
+
+ def purge_current(environment = env)
+ each_current_configuration(environment) { |configuration|
+ purge configuration
+ }
+ end
+
+ def structure_dump(*arguments)
+ configuration = arguments.first
+ filename = arguments.delete_at 1
+ class_for_adapter(configuration['adapter']).new(*arguments).structure_dump(filename)
+ end
+
+ def structure_load(*arguments)
+ configuration = arguments.first
+ filename = arguments.delete_at 1
+ class_for_adapter(configuration['adapter']).new(*arguments).structure_load(filename)
+ end
+
+ def load_schema(format = ActiveRecord::Base.schema_format, file = nil)
+ ActiveSupport::Deprecation.warn(<<-MESSAGE.strip_heredoc)
+ This method will act on a specific connection in the future.
+ To act on the current connection, use `load_schema_current` instead.
+ MESSAGE
+ load_schema_current(format, file)
+ end
+
+ # This method is the successor of +load_schema+. We should rename it
+ # after +load_schema+ has gone through a deprecation cycle. (Rails > 4.2)
+ def load_schema_for(configuration, format = ActiveRecord::Base.schema_format, file = nil) # :nodoc:
+ case format
+ when :ruby
+ file ||= File.join(db_dir, "schema.rb")
+ check_schema_file(file)
+ purge(configuration)
+ ActiveRecord::Base.establish_connection(configuration)
+ load(file)
+ when :sql
+ file ||= File.join(db_dir, "structure.sql")
+ check_schema_file(file)
+ purge(configuration)
+ structure_load(configuration, file)
+ else
+ raise ArgumentError, "unknown format #{format.inspect}"
+ end
+ end
+
+ def load_schema_current(format = ActiveRecord::Base.schema_format, file = nil, environment = env)
+ each_current_configuration(environment) { |configuration|
+ load_schema_for configuration, format, file
+ }
+ end
+
+ def check_schema_file(filename)
+ unless File.exist?(filename)
+ message = %{#{filename} doesn't exist yet. Run `rake db:migrate` to create it, then try again.}
+ message << %{ If you do not intend to use a database, you should instead alter #{Rails.root}/config/application.rb to limit the frameworks that will be loaded.} if defined?(::Rails)
+ Kernel.abort message
+ end
+ end
+
+ def load_seed
+ if seed_loader
+ seed_loader.load_seed
+ else
+ raise "You tried to load seed data, but no seed loader is specified. Please specify seed " +
+ "loader with ActiveRecord::Tasks::DatabaseTasks.seed_loader = your_seed_loader\n" +
+ "Seed loader should respond to load_seed method"
+ end
+ end
+
+ private
+
+ def class_for_adapter(adapter)
+ key = @tasks.keys.detect { |pattern| adapter[pattern] }
+ unless key
+ raise DatabaseNotSupported, "Rake tasks not supported by '#{adapter}' adapter"
+ end
+ @tasks[key]
+ end
+
+ def each_current_configuration(environment)
+ environments = [environment]
+ # add test environment only if no RAILS_ENV was specified.
+ environments << 'test' if environment == 'development' && ENV['RAILS_ENV'].nil?
+
+ configurations = ActiveRecord::Base.configurations.values_at(*environments)
+ configurations.compact.each do |configuration|
+ yield configuration unless configuration['database'].blank?
+ end
+ end
+
+ def each_local_configuration
+ ActiveRecord::Base.configurations.each_value do |configuration|
+ next unless configuration['database']
+
+ if local_database?(configuration)
+ yield configuration
+ else
+ $stderr.puts "This task only modifies local databases. #{configuration['database']} is on a remote host."
+ end
+ end
+ end
+
+ def local_database?(configuration)
+ configuration['host'].blank? || LOCAL_HOSTS.include?(configuration['host'])
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/tasks/mysql_database_tasks.rb b/activerecord/lib/active_record/tasks/mysql_database_tasks.rb
new file mode 100644
index 0000000000..d890196f47
--- /dev/null
+++ b/activerecord/lib/active_record/tasks/mysql_database_tasks.rb
@@ -0,0 +1,144 @@
+module ActiveRecord
+ module Tasks # :nodoc:
+ class MySQLDatabaseTasks # :nodoc:
+ DEFAULT_CHARSET = ENV['CHARSET'] || 'utf8'
+ DEFAULT_COLLATION = ENV['COLLATION'] || 'utf8_unicode_ci'
+ ACCESS_DENIED_ERROR = 1045
+
+ delegate :connection, :establish_connection, to: ActiveRecord::Base
+
+ def initialize(configuration)
+ @configuration = configuration
+ end
+
+ def create
+ establish_connection configuration_without_database
+ connection.create_database configuration['database'], creation_options
+ establish_connection configuration
+ rescue ActiveRecord::StatementInvalid => error
+ if /database exists/ === error.message
+ raise DatabaseAlreadyExists
+ else
+ raise
+ end
+ rescue error_class => error
+ if error.respond_to?(:errno) && error.errno == ACCESS_DENIED_ERROR
+ $stdout.print error.error
+ establish_connection root_configuration_without_database
+ connection.create_database configuration['database'], creation_options
+ if configuration['username'] != 'root'
+ connection.execute grant_statement.gsub(/\s+/, ' ').strip
+ end
+ establish_connection configuration
+ else
+ $stderr.puts "Couldn't create database for #{configuration.inspect}, #{creation_options.inspect}"
+ $stderr.puts "(If you set the charset manually, make sure you have a matching collation)" if configuration['encoding']
+ end
+ end
+
+ def drop
+ establish_connection configuration
+ connection.drop_database configuration['database']
+ end
+
+ def purge
+ establish_connection configuration
+ connection.recreate_database configuration['database'], creation_options
+ end
+
+ def charset
+ connection.charset
+ end
+
+ def collation
+ connection.collation
+ end
+
+ def structure_dump(filename)
+ args = prepare_command_options('mysqldump')
+ args.concat(["--result-file", "#{filename}"])
+ args.concat(["--no-data"])
+ args.concat(["#{configuration['database']}"])
+ unless Kernel.system(*args)
+ $stderr.puts "Could not dump the database structure. "\
+ "Make sure `mysqldump` is in your PATH and check the command output for warnings."
+ end
+ end
+
+ def structure_load(filename)
+ args = prepare_command_options('mysql')
+ args.concat(['--execute', %{SET FOREIGN_KEY_CHECKS = 0; SOURCE #{filename}; SET FOREIGN_KEY_CHECKS = 1}])
+ args.concat(["--database", "#{configuration['database']}"])
+ Kernel.system(*args)
+ end
+
+ private
+
+ def configuration
+ @configuration
+ end
+
+ def configuration_without_database
+ configuration.merge('database' => nil)
+ end
+
+ def creation_options
+ Hash.new.tap do |options|
+ options[:charset] = configuration['encoding'] if configuration.include? 'encoding'
+ options[:collation] = configuration['collation'] if configuration.include? 'collation'
+
+ # Set default charset only when collation isn't set.
+ options[:charset] ||= DEFAULT_CHARSET unless options[:collation]
+
+ # Set default collation only when charset is also default.
+ options[:collation] ||= DEFAULT_COLLATION if options[:charset] == DEFAULT_CHARSET
+ end
+ end
+
+ def error_class
+ if configuration['adapter'] =~ /jdbc/
+ require 'active_record/railties/jdbcmysql_error'
+ ArJdbcMySQL::Error
+ elsif defined?(Mysql2)
+ Mysql2::Error
+ elsif defined?(Mysql)
+ Mysql::Error
+ else
+ StandardError
+ end
+ end
+
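+ # SQL used when creating the database as the configured user fails with an access
+ # denied error: after reconnecting as root and creating the database, grant the
+ # configured user full privileges on it.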
+ def grant_statement
+ <<-SQL
+GRANT ALL PRIVILEGES ON #{configuration['database']}.*
+ TO '#{configuration['username']}'@'localhost'
+IDENTIFIED BY '#{configuration['password']}' WITH GRANT OPTION;
+ SQL
+ end
+
+ def root_configuration_without_database
+ configuration_without_database.merge(
+ 'username' => 'root',
+ 'password' => root_password
+ )
+ end
+
+ def root_password
+ $stdout.print "Please provide the root password for your MySQL installation\n>"
+ $stdin.gets.strip
+ end
+
+ def prepare_command_options(command)
+ args = [command]
+ args.concat(['--user', configuration['username']]) if configuration['username']
+ args << "--password=#{configuration['password']}" if configuration['password']
+ args.concat(['--default-character-set', configuration['encoding']]) if configuration['encoding']
+ configuration.slice('host', 'port', 'socket').each do |k, v|
+ args.concat([ "--#{k}", v.to_s ]) if v
+ end
+
+ args
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb b/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb
new file mode 100644
index 0000000000..ce1de4b76e
--- /dev/null
+++ b/activerecord/lib/active_record/tasks/postgresql_database_tasks.rb
@@ -0,0 +1,90 @@
+require 'shellwords'
+
+module ActiveRecord
+ module Tasks # :nodoc:
+ class PostgreSQLDatabaseTasks # :nodoc:
+ DEFAULT_ENCODING = ENV['CHARSET'] || 'utf8'
+
+ delegate :connection, :establish_connection, :clear_active_connections!,
+ to: ActiveRecord::Base
+
+ def initialize(configuration)
+ @configuration = configuration
+ end
+
+ def create(master_established = false)
+ establish_master_connection unless master_established
+ connection.create_database configuration['database'],
+ configuration.merge('encoding' => encoding)
+ establish_connection configuration
+ rescue ActiveRecord::StatementInvalid => error
+ if /database .* already exists/ === error.message
+ raise DatabaseAlreadyExists
+ else
+ raise
+ end
+ end
+
+ def drop
+ establish_master_connection
+ connection.drop_database configuration['database']
+ end
+
+ def charset
+ connection.encoding
+ end
+
+ def collation
+ connection.collation
+ end
+
+ def purge
+ clear_active_connections!
+ drop
+ create true
+ end
+
+ def structure_dump(filename)
+ set_psql_env
+ search_path = configuration['schema_search_path']
+ unless search_path.blank?
+ search_path = search_path.split(",").map{|search_path_part| "--schema=#{Shellwords.escape(search_path_part.strip)}" }.join(" ")
+ end
+
+ command = "pg_dump -i -s -x -O -f #{Shellwords.escape(filename)} #{search_path} #{Shellwords.escape(configuration['database'])}"
+ raise 'Error dumping database' unless Kernel.system(command)
+
+ File.open(filename, "a") { |f| f << "SET search_path TO #{connection.schema_search_path};\n\n" }
+ end
+
+ def structure_load(filename)
+ set_psql_env
+ Kernel.system("psql -q -f #{Shellwords.escape(filename)} #{configuration['database']}")
+ end
+
+ private
+
+ def configuration
+ @configuration
+ end
+
+ def encoding
+ configuration['encoding'] || DEFAULT_ENCODING
+ end
+
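+ # Connect to the maintenance 'postgres' database so the target database can be
+ # created or dropped without holding a connection to it.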
+ def establish_master_connection
+ establish_connection configuration.merge(
+ 'database' => 'postgres',
+ 'schema_search_path' => 'public'
+ )
+ end
+
+ def set_psql_env
+ ENV['PGHOST'] = configuration['host'] if configuration['host']
+ ENV['PGPORT'] = configuration['port'].to_s if configuration['port']
+ ENV['PGPASSWORD'] = configuration['password'].to_s if configuration['password']
+ ENV['PGUSER'] = configuration['username'].to_s if configuration['username']
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/tasks/sqlite_database_tasks.rb b/activerecord/lib/active_record/tasks/sqlite_database_tasks.rb
new file mode 100644
index 0000000000..9ab64d0325
--- /dev/null
+++ b/activerecord/lib/active_record/tasks/sqlite_database_tasks.rb
@@ -0,0 +1,55 @@
+module ActiveRecord
+ module Tasks # :nodoc:
+ class SQLiteDatabaseTasks # :nodoc:
+ delegate :connection, :establish_connection, to: ActiveRecord::Base
+
+ def initialize(configuration, root = ActiveRecord::Tasks::DatabaseTasks.root)
+ @configuration, @root = configuration, root
+ end
+
+ def create
+ raise DatabaseAlreadyExists if File.exist?(configuration['database'])
+
+ establish_connection configuration
+ connection
+ end
+
+ def drop
+ require 'pathname'
+ path = Pathname.new configuration['database']
+ file = path.absolute? ? path.to_s : File.join(root, path)
+
+ FileUtils.rm(file) if File.exist?(file)
+ end
+
+ def purge
+ drop
+ create
+ end
+
+ def charset
+ connection.encoding
+ end
+
+ def structure_dump(filename)
+ dbfile = configuration['database']
+ `sqlite3 #{dbfile} .schema > #{filename}`
+ end
+
+ def structure_load(filename)
+ dbfile = configuration['database']
+ `sqlite3 #{dbfile} < "#{filename}"`
+ end
+
+ private
+
+ def configuration
+ @configuration
+ end
+
+ def root
+ @root
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/timestamp.rb b/activerecord/lib/active_record/timestamp.rb
new file mode 100644
index 0000000000..ddf3e1804c
--- /dev/null
+++ b/activerecord/lib/active_record/timestamp.rb
@@ -0,0 +1,120 @@
+module ActiveRecord
+ # = Active Record Timestamp
+ #
+ # Active Record automatically timestamps create and update operations if the
+ # table has fields named <tt>created_at/created_on</tt> or
+ # <tt>updated_at/updated_on</tt>.
+ #
+ # Timestamping can be turned off by setting:
+ #
+ # config.active_record.record_timestamps = false
+ #
+ # Timestamps are in UTC by default but you can use the local timezone by setting:
+ #
+ # config.active_record.default_timezone = :local
+ #
+ # == Time Zone aware attributes
+ #
+ # By default, ActiveRecord::Base keeps all the datetime columns time zone aware by executing the following code.
+ #
+ # config.active_record.time_zone_aware_attributes = true
+ #
+ # This feature can easily be turned off by setting this value to +false+.
+ #
+ # If your attributes are time zone aware and you want to skip time zone conversion to the current Time.zone
+ # when reading certain attributes, you can do the following:
+ #
+ # class Topic < ActiveRecord::Base
+ # self.skip_time_zone_conversion_for_attributes = [:written_on]
+ # end
+ module Timestamp
+ extend ActiveSupport::Concern
+
+ included do
+ class_attribute :record_timestamps
+ self.record_timestamps = true
+ end
+
+ def initialize_dup(other) # :nodoc:
+ super
+ clear_timestamp_attributes
+ end
+
+ private
+
+ def _create_record
+ if self.record_timestamps
+ current_time = current_time_from_proper_timezone
+
+ all_timestamp_attributes.each do |column|
+ if respond_to?(column) && respond_to?("#{column}=") && self.send(column).nil?
+ write_attribute(column.to_s, current_time)
+ end
+ end
+ end
+
+ super
+ end
+
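+ # Sets the update timestamps on save, but leaves any timestamp column that was
+ # already changed by hand untouched.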
+ def _update_record(*args)
+ if should_record_timestamps?
+ current_time = current_time_from_proper_timezone
+
+ timestamp_attributes_for_update_in_model.each do |column|
+ column = column.to_s
+ next if attribute_changed?(column)
+ write_attribute(column, current_time)
+ end
+ end
+ super
+ end
+
+ def should_record_timestamps?
+ self.record_timestamps && (!partial_writes? || changed?)
+ end
+
+ def timestamp_attributes_for_create_in_model
+ timestamp_attributes_for_create.select { |c| self.class.column_names.include?(c.to_s) }
+ end
+
+ def timestamp_attributes_for_update_in_model
+ timestamp_attributes_for_update.select { |c| self.class.column_names.include?(c.to_s) }
+ end
+
+ def all_timestamp_attributes_in_model
+ timestamp_attributes_for_create_in_model + timestamp_attributes_for_update_in_model
+ end
+
+ def timestamp_attributes_for_update
+ [:updated_at, :updated_on]
+ end
+
+ def timestamp_attributes_for_create
+ [:created_at, :created_on]
+ end
+
+ def all_timestamp_attributes
+ timestamp_attributes_for_create + timestamp_attributes_for_update
+ end
+
+ def max_updated_column_timestamp(timestamp_names = timestamp_attributes_for_update)
+ timestamp_names
+ .map { |attr| self[attr] }
+ .compact
+ .map(&:to_time)
+ .max
+ end
+
+ def current_time_from_proper_timezone
+ self.class.default_timezone == :utc ? Time.now.utc : Time.now
+ end
+
+ # Clear the timestamp attributes and remove them from changed_attributes
+ def clear_timestamp_attributes
+ all_timestamp_attributes_in_model.each do |attribute_name|
+ self[attribute_name] = nil
+ changed_attributes.delete(attribute_name)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/transactions.rb b/activerecord/lib/active_record/transactions.rb
new file mode 100644
index 0000000000..7e4dc4c895
--- /dev/null
+++ b/activerecord/lib/active_record/transactions.rb
@@ -0,0 +1,397 @@
+module ActiveRecord
+ # See ActiveRecord::Transactions::ClassMethods for documentation.
+ module Transactions
+ extend ActiveSupport::Concern
+ ACTIONS = [:create, :destroy, :update]
+
+ included do
+ define_callbacks :commit, :rollback,
+ terminator: ->(_, result) { result == false },
+ scope: [:kind, :name]
+ end
+
+ # = Active Record Transactions
+ #
+ # Transactions are protective blocks where SQL statements are only permanent
+ # if they can all succeed as one atomic action. The classic example is a
+ # transfer between two accounts where you can only have a deposit if the
+ # withdrawal succeeded and vice versa. Transactions enforce the integrity of
+ # the database and guard the data against program errors or database
+ # breakdowns. So basically you should use transaction blocks whenever you
+ # have a number of statements that must be executed together or not at all.
+ #
+ # For example:
+ #
+ # ActiveRecord::Base.transaction do
+ # david.withdrawal(100)
+ # mary.deposit(100)
+ # end
+ #
+ # This example will only take money from David and give it to Mary if neither
+ # +withdrawal+ nor +deposit+ raise an exception. Exceptions will force a
+ # ROLLBACK that returns the database to the state before the transaction
+ # began. Be aware, though, that the objects will _not_ have their instance
+ # data returned to their pre-transactional state.
+ #
+ # == Different Active Record classes in a single transaction
+ #
+ # Though the transaction class method is called on some Active Record class,
+ # the objects within the transaction block need not all be instances of
+ # that class. This is because transactions are per-database connection, not
+ # per-model.
+ #
+ # In this example a +balance+ record is transactionally saved even
+ # though +transaction+ is called on the +Account+ class:
+ #
+ # Account.transaction do
+ # balance.save!
+ # account.save!
+ # end
+ #
+ # The +transaction+ method is also available as a model instance method.
+ # For example, you can also do this:
+ #
+ # balance.transaction do
+ # balance.save!
+ # account.save!
+ # end
+ #
+ # == Transactions are not distributed across database connections
+ #
+ # A transaction acts on a single database connection. If you have
+ # multiple class-specific databases, the transaction will not protect
+ # interaction among them. One workaround is to begin a transaction
+ # on each class whose models you alter:
+ #
+ # Student.transaction do
+ # Course.transaction do
+ # course.enroll(student)
+ # student.units += course.units
+ # end
+ # end
+ #
+ # This is a poor solution, but fully distributed transactions are beyond
+ # the scope of Active Record.
+ #
+ # == +save+ and +destroy+ are automatically wrapped in a transaction
+ #
+ # Both +save+ and +destroy+ come wrapped in a transaction that ensures
+ # that whatever you do in validations or callbacks will happen under its
+ # protected cover. So you can use validations to check for values that
+ # the transaction depends on, or you can raise exceptions in the callbacks
+ # to roll back, including <tt>after_*</tt> callbacks.
+ #
+ # As a consequence changes to the database are not seen outside your connection
+ # until the operation is complete. For example, if you try to update the index
+ # of a search engine in +after_save+ the indexer won't see the updated record.
+ # The +after_commit+ callback is the only one that is triggered once the update
+ # is committed. See below.
+ #
+ # == Exception handling and rolling back
+ #
+ # Also keep in mind that exceptions raised within a transaction block will
+ # be propagated (after triggering the ROLLBACK), so you should be ready to
+ # catch them in your application code.
+ #
+ # One exception is the <tt>ActiveRecord::Rollback</tt> exception, which will trigger
+ # a ROLLBACK when raised, but not be re-raised by the transaction block.
+ #
+ # *Warning*: one should not catch <tt>ActiveRecord::StatementInvalid</tt> exceptions
+ # inside a transaction block. <tt>ActiveRecord::StatementInvalid</tt> exceptions indicate that an
+ # error occurred at the database level, for example when a unique constraint
+ # is violated. On some database systems, such as PostgreSQL, database errors
+ # inside a transaction cause the entire transaction to become unusable
+ # until it's restarted from the beginning. Here is an example which
+ # demonstrates the problem:
+ #
+ # # Suppose that we have a Number model with a unique column called 'i'.
+ # Number.transaction do
+ # Number.create(i: 0)
+ # begin
+ # # This will raise a unique constraint error...
+ # Number.create(i: 0)
+ # rescue ActiveRecord::StatementInvalid
+ # # ...which we ignore.
+ # end
+ #
+ # # On PostgreSQL, the transaction is now unusable. The following
+ # # statement will cause a PostgreSQL error, even though the unique
+ # # constraint is no longer violated:
+ # Number.create(i: 1)
+ # # => "PGError: ERROR: current transaction is aborted, commands
+ # # ignored until end of transaction block"
+ # end
+ #
+ # One should restart the entire transaction if an
+ # <tt>ActiveRecord::StatementInvalid</tt> occurred.
+ #
+ # == Nested transactions
+ #
+ # +transaction+ calls can be nested. By default, this makes all database
+ # statements in the nested transaction block become part of the parent
+ # transaction. For example, the following behavior may be surprising:
+ #
+ # User.transaction do
+ # User.create(username: 'Kotori')
+ # User.transaction do
+ # User.create(username: 'Nemu')
+ # raise ActiveRecord::Rollback
+ # end
+ # end
+ #
+ # creates both "Kotori" and "Nemu". Reason is the <tt>ActiveRecord::Rollback</tt>
+ # exception in the nested block does not issue a ROLLBACK. Since these exceptions
+ # are captured in transaction blocks, the parent block does not see it and the
+ # real transaction is committed.
+ #
+ # In order to get a ROLLBACK for the nested transaction you may ask for a real
+ # sub-transaction by passing <tt>requires_new: true</tt>. If anything goes wrong,
+ # the database rolls back to the beginning of the sub-transaction without rolling
+ # back the parent transaction. If we add it to the previous example:
+ #
+ # User.transaction do
+ # User.create(username: 'Kotori')
+ # User.transaction(requires_new: true) do
+ # User.create(username: 'Nemu')
+ # raise ActiveRecord::Rollback
+ # end
+ # end
+ #
+ # only "Kotori" is created. This works on MySQL and PostgreSQL. SQLite3 version >= '3.6.8' also supports it.
+ #
+ # Most databases don't support true nested transactions. At the time of
+ # writing, the only database that we're aware of that supports true nested
+ # transactions is MS-SQL. Because of this, Active Record emulates nested
+ # transactions by using savepoints on MySQL and PostgreSQL. See
+ # http://dev.mysql.com/doc/refman/5.6/en/savepoint.html
+ # for more information about savepoints.
+ #
+ # === Callbacks
+ #
+ # There are two types of callbacks associated with committing and rolling back transactions:
+ # +after_commit+ and +after_rollback+.
+ #
+ # +after_commit+ callbacks are called on every record saved or destroyed within a
+ # transaction immediately after the transaction is committed. +after_rollback+ callbacks
+ # are called on every record saved or destroyed within a transaction immediately after the
+ # transaction or savepoint is rolled back.
+ #
+ # These callbacks are useful for interacting with other systems since you will be guaranteed
+ # that the callback is only executed when the database is in a permanent state. For example,
+ # +after_commit+ is a good spot to put in a hook to clear a cache, since clearing it from
+ # within a transaction could trigger the cache to be regenerated before the database is updated.
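+ #
+ # For example, a cache-expiry hook could look something like this (the model,
+ # callback name and cache key here are only illustrative):
+ #
+ # class Product < ActiveRecord::Base
+ # after_commit :expire_cache, on: :update
+ #
+ # def expire_cache
+ # Rails.cache.delete("products/#{id}")
+ # end
+ # end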
+ #
+ # === Caveats
+ #
+ # If you're on MySQL, then do not use DDL operations in nested transaction
+ # blocks that are emulated with savepoints. That is, do not execute statements
+ # like 'CREATE TABLE' inside such blocks. This is because MySQL automatically
+ # releases all savepoints upon executing a DDL operation. When +transaction+
+ # is finished and tries to release the savepoint it created earlier, a
+ # database error will occur because the savepoint has already been
+ # automatically released. The following example demonstrates the problem:
+ #
+ # Model.connection.transaction do # BEGIN
+ # Model.connection.transaction(requires_new: true) do # CREATE SAVEPOINT active_record_1
+ # Model.connection.create_table(...) # active_record_1 now automatically released
+ # end # RELEASE savepoint active_record_1
+ # # ^^^^ BOOM! database error!
+ # end
+ #
+ # Note that "TRUNCATE" is also a MySQL DDL statement!
+ module ClassMethods
+ # See ActiveRecord::Transactions::ClassMethods for detailed documentation.
+ def transaction(options = {}, &block)
+ # See the ConnectionAdapters::DatabaseStatements#transaction API docs.
+ connection.transaction(options, &block)
+ end
+
+ # This callback is called after a record has been created, updated, or destroyed.
+ #
+ # You can specify that the callback should only be fired by a certain action with
+ # the +:on+ option:
+ #
+ # after_commit :do_foo, on: :create
+ # after_commit :do_bar, on: :update
+ # after_commit :do_baz, on: :destroy
+ #
+ # after_commit :do_foo_bar, on: [:create, :update]
+ # after_commit :do_bar_baz, on: [:update, :destroy]
+ #
+ # Note that transactional fixtures do not play well with this feature. Please
+ # use the +test_after_commit+ gem to have these hooks fired in tests.
+ def after_commit(*args, &block)
+ set_options_for_callbacks!(args)
+ set_callback(:commit, :after, *args, &block)
+ end
+
+ # This callback is called after a create, update, or destroy are rolled back.
+ #
+ # Please check the documentation of +after_commit+ for options.
+ def after_rollback(*args, &block)
+ set_options_for_callbacks!(args)
+ set_callback(:rollback, :after, *args, &block)
+ end
+
+ private
+
+ def set_options_for_callbacks!(args)
+ options = args.last
+ if options.is_a?(Hash) && options[:on]
+ fire_on = Array(options[:on])
+ assert_valid_transaction_action(fire_on)
+ options[:if] = Array(options[:if])
+ options[:if] << "transaction_include_any_action?(#{fire_on})"
+ end
+ end
+
+ def assert_valid_transaction_action(actions)
+ if (actions - ACTIONS).any?
+ raise ArgumentError, ":on conditions for after_commit and after_rollback callbacks have to be one of #{ACTIONS.join(",")}"
+ end
+ end
+ end
+
+ # See ActiveRecord::Transactions::ClassMethods for detailed documentation.
+ def transaction(options = {}, &block)
+ self.class.transaction(options, &block)
+ end
+
+ def destroy #:nodoc:
+ with_transaction_returning_status { super }
+ end
+
+ def save(*) #:nodoc:
+ rollback_active_record_state! do
+ with_transaction_returning_status { super }
+ end
+ end
+
+ def save!(*) #:nodoc:
+ with_transaction_returning_status { super }
+ end
+
+ def touch(*) #:nodoc:
+ with_transaction_returning_status { super }
+ end
+
+ # Reset id and @new_record if the transaction rolls back.
+ def rollback_active_record_state!
+ remember_transaction_record_state
+ yield
+ rescue Exception
+ restore_transaction_record_state
+ raise
+ ensure
+ clear_transaction_record_state
+ end
+
+ # Call the +after_commit+ callbacks.
+ #
+ # Ensure that it is not called if the object was never persisted (failed create),
+ # but call it after the commit of a destroyed object.
+ def committed! #:nodoc:
+ run_callbacks :commit if destroyed? || persisted?
+ ensure
+ force_clear_transaction_record_state
+ end
+
+ # Call the +after_rollback+ callbacks. The +force_restore_state+ argument indicates if the record
+ # state should be rolled back to the beginning or just to the last savepoint.
+ def rolledback!(force_restore_state = false) #:nodoc:
+ run_callbacks :rollback
+ ensure
+ restore_transaction_record_state(force_restore_state)
+ clear_transaction_record_state
+ end
+
+ # Add the record to the current transaction so that the +after_rollback+ and +after_commit+ callbacks
+ # can be called.
+ def add_to_transaction
+ if self.class.connection.add_transaction_record(self)
+ remember_transaction_record_state
+ end
+ end
+
+ # Executes +method+ within a transaction and captures its return value as a
+ # status flag. If the status is true the transaction is committed, otherwise
+ # a ROLLBACK is issued. In any case the status flag is returned.
+ #
+ # This method is available within the context of an ActiveRecord::Base
+ # instance.
+ def with_transaction_returning_status
+ status = nil
+ self.class.transaction do
+ add_to_transaction
+ begin
+ status = yield
+ rescue ActiveRecord::Rollback
+ clear_transaction_record_state
+ status = nil
+ end
+
+ raise ActiveRecord::Rollback unless status
+ end
+ status
+ end
+
+ protected
+
+ # Save the new record state and id of a record so it can be restored later if a transaction fails.
+ def remember_transaction_record_state #:nodoc:
+ @_start_transaction_state[:id] = id
+ unless @_start_transaction_state.include?(:new_record)
+ @_start_transaction_state[:new_record] = @new_record
+ end
+ unless @_start_transaction_state.include?(:destroyed)
+ @_start_transaction_state[:destroyed] = @destroyed
+ end
+ @_start_transaction_state[:level] = (@_start_transaction_state[:level] || 0) + 1
+ @_start_transaction_state[:frozen?] = frozen?
+ end
+
+ # Clear the new record state and id of a record.
+ def clear_transaction_record_state #:nodoc:
+ @_start_transaction_state[:level] = (@_start_transaction_state[:level] || 0) - 1
+ force_clear_transaction_record_state if @_start_transaction_state[:level] < 1
+ end
+
+ # Force to clear the transaction record state.
+ def force_clear_transaction_record_state #:nodoc:
+ @_start_transaction_state.clear
+ end
+
+ # Restore the new record state and id of a record that was previously saved by a call to +remember_transaction_record_state+.
+ def restore_transaction_record_state(force = false) #:nodoc:
+ unless @_start_transaction_state.empty?
+ transaction_level = (@_start_transaction_state[:level] || 0) - 1
+ if transaction_level < 1 || force
+ restore_state = @_start_transaction_state
+ thaw unless restore_state[:frozen?]
+ @new_record = restore_state[:new_record]
+ @destroyed = restore_state[:destroyed]
+ write_attribute(self.class.primary_key, restore_state[:id])
+ end
+ end
+ end
+
+ # Determine if a record was created or destroyed in a transaction. State should be one of :new_record or :destroyed.
+ def transaction_record_state(state) #:nodoc:
+ @_start_transaction_state[state]
+ end
+
+ # Determine if a transaction included an action for :create, :update, or :destroy. Used in filtering callbacks.
+ def transaction_include_any_action?(actions) #:nodoc:
+ actions.any? do |action|
+ case action
+ when :create
+ transaction_record_state(:new_record)
+ when :destroy
+ destroyed?
+ when :update
+ !(transaction_record_state(:new_record) || destroyed?)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/translation.rb b/activerecord/lib/active_record/translation.rb
new file mode 100644
index 0000000000..ddcb5f2a7a
--- /dev/null
+++ b/activerecord/lib/active_record/translation.rb
@@ -0,0 +1,22 @@
+module ActiveRecord
+ module Translation
+ include ActiveModel::Translation
+
+ # Set the lookup ancestors for ActiveModel.
+ def lookup_ancestors #:nodoc:
+ klass = self
+ classes = [klass]
+ return classes if klass == ActiveRecord::Base
+
+ while klass != klass.base_class
+ classes << klass = klass.superclass
+ end
+ classes
+ end
+
+ # Set the i18n scope to overwrite ActiveModel.
+ def i18n_scope #:nodoc:
+ :activerecord
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type.rb b/activerecord/lib/active_record/type.rb
new file mode 100644
index 0000000000..f1384e0bb2
--- /dev/null
+++ b/activerecord/lib/active_record/type.rb
@@ -0,0 +1,20 @@
+require 'active_record/type/mutable'
+require 'active_record/type/numeric'
+require 'active_record/type/time_value'
+require 'active_record/type/value'
+
+require 'active_record/type/binary'
+require 'active_record/type/boolean'
+require 'active_record/type/date'
+require 'active_record/type/date_time'
+require 'active_record/type/decimal'
+require 'active_record/type/decimal_without_scale'
+require 'active_record/type/float'
+require 'active_record/type/integer'
+require 'active_record/type/serialized'
+require 'active_record/type/string'
+require 'active_record/type/text'
+require 'active_record/type/time'
+
+require 'active_record/type/type_map'
+require 'active_record/type/hash_lookup_type_map'
diff --git a/activerecord/lib/active_record/type/binary.rb b/activerecord/lib/active_record/type/binary.rb
new file mode 100644
index 0000000000..d29ff4e494
--- /dev/null
+++ b/activerecord/lib/active_record/type/binary.rb
@@ -0,0 +1,40 @@
+module ActiveRecord
+ module Type
+ class Binary < Value # :nodoc:
+ def type
+ :binary
+ end
+
+ def binary?
+ true
+ end
+
+ def type_cast(value)
+ if value.is_a?(Data)
+ value.to_s
+ else
+ super
+ end
+ end
+
+ def type_cast_for_database(value)
+ return if value.nil?
+ Data.new(super)
+ end
+
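+ # Wraps a binary value on its way to the database so adapters can recognize it
+ # and apply binary-specific quoting.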
+ class Data # :nodoc:
+ def initialize(value)
+ @value = value.to_s
+ end
+
+ def to_s
+ @value
+ end
+
+ def hex
+ @value.unpack('H*')[0]
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/boolean.rb b/activerecord/lib/active_record/type/boolean.rb
new file mode 100644
index 0000000000..06dd17ed28
--- /dev/null
+++ b/activerecord/lib/active_record/type/boolean.rb
@@ -0,0 +1,19 @@
+module ActiveRecord
+ module Type
+ class Boolean < Value # :nodoc:
+ def type
+ :boolean
+ end
+
+ private
+
+ def cast_value(value)
+ if value == ''
+ nil
+ else
+ ConnectionAdapters::Column::TRUE_VALUES.include?(value)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/date.rb b/activerecord/lib/active_record/type/date.rb
new file mode 100644
index 0000000000..d90a6069b7
--- /dev/null
+++ b/activerecord/lib/active_record/type/date.rb
@@ -0,0 +1,46 @@
+module ActiveRecord
+ module Type
+ class Date < Value # :nodoc:
+ def type
+ :date
+ end
+
+ def klass
+ ::Date
+ end
+
+ def type_cast_for_schema(value)
+ "'#{value.to_s(:db)}'"
+ end
+
+ private
+
+ def cast_value(value)
+ if value.is_a?(::String)
+ return if value.empty?
+ fast_string_to_date(value) || fallback_string_to_date(value)
+ elsif value.respond_to?(:to_date)
+ value.to_date
+ else
+ value
+ end
+ end
+
+ def fast_string_to_date(string)
+ if string =~ ConnectionAdapters::Column::Format::ISO_DATE
+ new_date $1.to_i, $2.to_i, $3.to_i
+ end
+ end
+
+ def fallback_string_to_date(string)
+ new_date(*::Date._parse(string, false).values_at(:year, :mon, :mday))
+ end
+
+ def new_date(year, mon, mday)
+ if year && year != 0
+ ::Date.new(year, mon, mday) rescue nil
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/date_time.rb b/activerecord/lib/active_record/type/date_time.rb
new file mode 100644
index 0000000000..5f19608a33
--- /dev/null
+++ b/activerecord/lib/active_record/type/date_time.rb
@@ -0,0 +1,43 @@
+module ActiveRecord
+ module Type
+ class DateTime < Value # :nodoc:
+ include TimeValue
+
+ def type
+ :datetime
+ end
+
+ def type_cast_for_database(value)
+ zone_conversion_method = ActiveRecord::Base.default_timezone == :utc ? :getutc : :getlocal
+
+ if value.acts_like?(:time)
+ value.send(zone_conversion_method)
+ else
+ super
+ end
+ end
+
+ private
+
+ def cast_value(string)
+ return string unless string.is_a?(::String)
+ return if string.empty?
+
+ fast_string_to_time(string) || fallback_string_to_time(string)
+ end
+
+ # '0.123456' -> 123456
+ # '1.123456' -> 123456
+ def microseconds(time)
+ time[:sec_fraction] ? (time[:sec_fraction] * 1_000_000).to_i : 0
+ end
+
+ def fallback_string_to_time(string)
+ time_hash = ::Date._parse(string)
+ time_hash[:sec_fraction] = microseconds(time_hash)
+
+ new_time(*time_hash.values_at(:year, :mon, :mday, :hour, :min, :sec, :sec_fraction, :offset))
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/decimal.rb b/activerecord/lib/active_record/type/decimal.rb
new file mode 100644
index 0000000000..d10778eeb6
--- /dev/null
+++ b/activerecord/lib/active_record/type/decimal.rb
@@ -0,0 +1,40 @@
+module ActiveRecord
+ module Type
+ class Decimal < Value # :nodoc:
+ include Numeric
+
+ def type
+ :decimal
+ end
+
+ def type_cast_for_schema(value)
+ value.to_s
+ end
+
+ private
+
+ def cast_value(value)
+ case value
+ when ::Float
+ BigDecimal(value, float_precision)
+ when ::Numeric, ::String
+ BigDecimal(value, precision.to_i)
+ else
+ if value.respond_to?(:to_d)
+ value.to_d
+ else
+ cast_value(value.to_s)
+ end
+ end
+ end
+
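+ # BigDecimal() requires an explicit number of significant digits when given a
+ # Float; cap it at Float::DIG + 1 so we don't ask for more precision than a
+ # Float can carry.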
+ def float_precision
+ if precision.to_i > ::Float::DIG + 1
+ ::Float::DIG + 1
+ else
+ precision.to_i
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/decimal_without_scale.rb b/activerecord/lib/active_record/type/decimal_without_scale.rb
new file mode 100644
index 0000000000..cabdcecdd7
--- /dev/null
+++ b/activerecord/lib/active_record/type/decimal_without_scale.rb
@@ -0,0 +1,11 @@
+require 'active_record/type/integer'
+
+module ActiveRecord
+ module Type
+ class DecimalWithoutScale < Integer # :nodoc:
+ def type
+ :decimal
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/float.rb b/activerecord/lib/active_record/type/float.rb
new file mode 100644
index 0000000000..42eb44b9a9
--- /dev/null
+++ b/activerecord/lib/active_record/type/float.rb
@@ -0,0 +1,19 @@
+module ActiveRecord
+ module Type
+ class Float < Value # :nodoc:
+ include Numeric
+
+ def type
+ :float
+ end
+
+ alias type_cast_for_database type_cast
+
+ private
+
+ def cast_value(value)
+ value.to_f
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/hash_lookup_type_map.rb b/activerecord/lib/active_record/type/hash_lookup_type_map.rb
new file mode 100644
index 0000000000..bf92680268
--- /dev/null
+++ b/activerecord/lib/active_record/type/hash_lookup_type_map.rb
@@ -0,0 +1,19 @@
+module ActiveRecord
+ module Type
+ class HashLookupTypeMap < TypeMap # :nodoc:
+ delegate :key?, to: :@mapping
+
+ def lookup(type, *args)
+ @mapping.fetch(type, proc { default_value }).call(type, *args)
+ end
+
+ def fetch(type, *args, &block)
+ @mapping.fetch(type, block).call(type, *args)
+ end
+
+ def alias_type(type, alias_type)
+ register_type(type) { |_, *args| lookup(alias_type, *args) }
+ end
+ end
+ end
+end
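
A hypothetical registration sketch. The adapters populate these maps internally; the point here is that lookups are exact hash lookups (unlike TypeMap, defined later in this diff, which matches keys with ===), and extra arguments are forwarded to the registered block:

    map = ActiveRecord::Type::HashLookupTypeMap.new
    map.register_type(:string)  { |_type|        ActiveRecord::Type::String.new }
    map.register_type(:integer) { |_type, limit| ActiveRecord::Type::Integer.new(limit: limit) }
    map.alias_type(:varchar, :string)

    map.lookup(:varchar)                # => a Type::String, resolved through the alias
    map.lookup(:integer, 8)             # the 8 is passed along as the block's limit argument
    map.lookup(:unknown)                # => the default Type::Value
    map.fetch(:unknown) { "fallback" }  # => "fallback"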
diff --git a/activerecord/lib/active_record/type/integer.rb b/activerecord/lib/active_record/type/integer.rb
new file mode 100644
index 0000000000..08477d1303
--- /dev/null
+++ b/activerecord/lib/active_record/type/integer.rb
@@ -0,0 +1,23 @@
+module ActiveRecord
+ module Type
+ class Integer < Value # :nodoc:
+ include Numeric
+
+ def type
+ :integer
+ end
+
+ alias type_cast_for_database type_cast
+
+ private
+
+ def cast_value(value)
+ case value
+ when true then 1
+ when false then 0
+ else value.to_i rescue nil
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/mutable.rb b/activerecord/lib/active_record/type/mutable.rb
new file mode 100644
index 0000000000..066617ea59
--- /dev/null
+++ b/activerecord/lib/active_record/type/mutable.rb
@@ -0,0 +1,16 @@
+module ActiveRecord
+ module Type
+ module Mutable # :nodoc:
+ def type_cast_from_user(value)
+ type_cast_from_database(type_cast_for_database(value))
+ end
+
+      # +raw_old_value+ will be the +_before_type_cast+ version of the
+      # value (likely a string). +new_value+ will be the current,
+      # type-cast value.
+ def changed_in_place?(raw_old_value, new_value)
+ raw_old_value != type_cast_for_database(new_value)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/numeric.rb b/activerecord/lib/active_record/type/numeric.rb
new file mode 100644
index 0000000000..fa43266504
--- /dev/null
+++ b/activerecord/lib/active_record/type/numeric.rb
@@ -0,0 +1,36 @@
+module ActiveRecord
+ module Type
+ module Numeric # :nodoc:
+ def number?
+ true
+ end
+
+ def type_cast(value)
+ value = case value
+ when true then 1
+ when false then 0
+ when ::String then value.presence
+ else value
+ end
+ super(value)
+ end
+
+ def changed?(old_value, _new_value, new_value_before_type_cast) # :nodoc:
+ super || number_to_non_number?(old_value, new_value_before_type_cast)
+ end
+
+ private
+
+ def number_to_non_number?(old_value, new_value_before_type_cast)
+ old_value != nil && non_numeric_string?(new_value_before_type_cast)
+ end
+
+ def non_numeric_string?(value)
+        # 'wibble'.to_i gives zero, so we check the raw string here; the
+        # regex makes sure a change from the integer zero to the string
+        # '0' is not marked as changed.
+ value.to_s !~ /\A\d+\.?\d*\z/
+ end
+ end
+ end
+end
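
A sketch of the dirty-tracking nuance handled above, using Integer (which includes Numeric):

    type = ActiveRecord::Type::Integer.new

    type.type_cast_from_user(true)  # => 1
    type.type_cast_from_user("")    # => nil (blank strings are not cast to 0)
    type.changed?(0, 0, "0")        # => false ("0" is numeric, so 0 -> "0" is not a change)
    type.changed?(0, 0, "wibble")   # => true  ("wibble" casts to 0, but must still be flagged)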
diff --git a/activerecord/lib/active_record/type/serialized.rb b/activerecord/lib/active_record/type/serialized.rb
new file mode 100644
index 0000000000..abeea769c4
--- /dev/null
+++ b/activerecord/lib/active_record/type/serialized.rb
@@ -0,0 +1,51 @@
+module ActiveRecord
+ module Type
+ class Serialized < SimpleDelegator # :nodoc:
+ include Mutable
+
+ attr_reader :subtype, :coder
+
+ def initialize(subtype, coder)
+ @subtype = subtype
+ @coder = coder
+ super(subtype)
+ end
+
+ def type_cast_from_database(value)
+ if default_value?(value)
+ value
+ else
+ coder.load(super)
+ end
+ end
+
+ def type_cast_for_database(value)
+ return if value.nil?
+ unless default_value?(value)
+ super coder.dump(value)
+ end
+ end
+
+ def accessor
+ ActiveRecord::Store::IndifferentHashAccessor
+ end
+
+ def init_with(coder)
+ @subtype = coder['subtype']
+ @coder = coder['coder']
+ __setobj__(@subtype)
+ end
+
+ def encode_with(coder)
+ coder['subtype'] = @subtype
+ coder['coder'] = @coder
+ end
+
+ private
+
+ def default_value?(value)
+ value == coder.load(nil)
+ end
+ end
+ end
+end
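
A hypothetical wiring sketch; in practice ActiveRecord::Base.serialize builds this for you. The YAML coder shown is the one bundled with Active Record:

    subtype = ActiveRecord::Type::Text.new
    coder   = ActiveRecord::Coders::YAMLColumn.new(Array)
    type    = ActiveRecord::Type::Serialized.new(subtype, coder)

    type.type_cast_from_database("---\n- 1\n- 2\n")  # => [1, 2]
    type.type_cast_for_database([1, 2])              # => "---\n- 1\n- 2\n"
    type.type_cast_for_database([])                  # => nil (the coder's default value is not dumped)
    type.type_cast_for_database(nil)                 # => nil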
diff --git a/activerecord/lib/active_record/type/string.rb b/activerecord/lib/active_record/type/string.rb
new file mode 100644
index 0000000000..150defb106
--- /dev/null
+++ b/activerecord/lib/active_record/type/string.rb
@@ -0,0 +1,36 @@
+module ActiveRecord
+ module Type
+ class String < Value # :nodoc:
+ def type
+ :string
+ end
+
+ def changed_in_place?(raw_old_value, new_value)
+ if new_value.is_a?(::String)
+ raw_old_value != new_value
+ end
+ end
+
+ def type_cast_for_database(value)
+ case value
+ when ::Numeric, ActiveSupport::Duration then value.to_s
+ when ::String then ::String.new(value)
+ when true then "1"
+ when false then "0"
+ else super
+ end
+ end
+
+ private
+
+ def cast_value(value)
+ case value
+ when true then "1"
+ when false then "0"
+ # String.new is slightly faster than dup
+ else ::String.new(value.to_s)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/text.rb b/activerecord/lib/active_record/type/text.rb
new file mode 100644
index 0000000000..26f980f060
--- /dev/null
+++ b/activerecord/lib/active_record/type/text.rb
@@ -0,0 +1,11 @@
+require 'active_record/type/string'
+
+module ActiveRecord
+ module Type
+ class Text < String # :nodoc:
+ def type
+ :text
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/time.rb b/activerecord/lib/active_record/type/time.rb
new file mode 100644
index 0000000000..41f7d97f0c
--- /dev/null
+++ b/activerecord/lib/active_record/type/time.rb
@@ -0,0 +1,26 @@
+module ActiveRecord
+ module Type
+ class Time < Value # :nodoc:
+ include TimeValue
+
+ def type
+ :time
+ end
+
+ private
+
+ def cast_value(value)
+ return value unless value.is_a?(::String)
+ return if value.empty?
+
+ dummy_time_value = "2000-01-01 #{value}"
+
+ fast_string_to_time(dummy_time_value) || begin
+ time_hash = ::Date._parse(dummy_time_value)
+ return if time_hash[:hour].nil?
+ new_time(*time_hash.values_at(:year, :mon, :mday, :hour, :min, :sec, :sec_fraction))
+ end
+ end
+ end
+ end
+end
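
A sketch of the dummy-date trick above (internal API):

    type = ActiveRecord::Type::Time.new

    type.type_cast_from_user("18:30:45")
    # parsed as "2000-01-01 18:30:45" => a Time on 2000-01-01 in the default_timezone
    type.type_cast_from_user("")           # => nil
    type.type_cast_from_user("not a time") # => nil (no :hour in the parsed hash)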
diff --git a/activerecord/lib/active_record/type/time_value.rb b/activerecord/lib/active_record/type/time_value.rb
new file mode 100644
index 0000000000..d611d72dd4
--- /dev/null
+++ b/activerecord/lib/active_record/type/time_value.rb
@@ -0,0 +1,38 @@
+module ActiveRecord
+ module Type
+ module TimeValue # :nodoc:
+ def klass
+ ::Time
+ end
+
+ def type_cast_for_schema(value)
+ "'#{value.to_s(:db)}'"
+ end
+
+ private
+
+ def new_time(year, mon, mday, hour, min, sec, microsec, offset = nil)
+ # Treat 0000-00-00 00:00:00 as nil.
+ return if year.nil? || (year == 0 && mon == 0 && mday == 0)
+
+ if offset
+ time = ::Time.utc(year, mon, mday, hour, min, sec, microsec) rescue nil
+ return unless time
+
+ time -= offset
+ Base.default_timezone == :utc ? time : time.getlocal
+ else
+ ::Time.public_send(Base.default_timezone, year, mon, mday, hour, min, sec, microsec) rescue nil
+ end
+ end
+
+ # Doesn't handle time zones.
+ def fast_string_to_time(string)
+ if string =~ ConnectionAdapters::Column::Format::ISO_DATETIME
+ microsec = ($7.to_r * 1_000_000).to_i
+ new_time $1.to_i, $2.to_i, $3.to_i, $4.to_i, $5.to_i, $6.to_i, microsec
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/type/type_map.rb b/activerecord/lib/active_record/type/type_map.rb
new file mode 100644
index 0000000000..88c5f9c497
--- /dev/null
+++ b/activerecord/lib/active_record/type/type_map.rb
@@ -0,0 +1,48 @@
+module ActiveRecord
+ module Type
+ class TypeMap # :nodoc:
+ def initialize
+ @mapping = {}
+ end
+
+ def lookup(lookup_key, *args)
+ matching_pair = @mapping.reverse_each.detect do |key, _|
+ key === lookup_key
+ end
+
+ if matching_pair
+ matching_pair.last.call(lookup_key, *args)
+ else
+ default_value
+ end
+ end
+
+ def register_type(key, value = nil, &block)
+ raise ::ArgumentError unless value || block
+
+ if block
+ @mapping[key] = block
+ else
+ @mapping[key] = proc { value }
+ end
+ end
+
+ def alias_type(key, target_key)
+ register_type(key) do |sql_type, *args|
+ metadata = sql_type[/\(.*\)/, 0]
+ lookup("#{target_key}#{metadata}", *args)
+ end
+ end
+
+ def clear
+ @mapping.clear
+ end
+
+ private
+
+ def default_value
+ @default_value ||= Value.new
+ end
+ end
+ end
+end
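
A hypothetical sketch of how the connection adapters use this map. Keys are matched against the SQL type with ===, so Regexp keys work, and reverse_each means later registrations take precedence:

    map = ActiveRecord::Type::TypeMap.new
    map.register_type(/^varchar/i, ActiveRecord::Type::String.new)
    map.register_type(/^int/i) { |_sql_type| ActiveRecord::Type::Integer.new }
    map.alias_type(/^character varying/i, 'varchar')

    map.lookup("varchar(255)")            # => the registered String type
    map.lookup("integer")                 # => a freshly built Integer type
    map.lookup("character varying(100)")  # re-dispatched to lookup("varchar(100)")
    map.lookup("something else")          # => the default Type::Value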
diff --git a/activerecord/lib/active_record/type/value.rb b/activerecord/lib/active_record/type/value.rb
new file mode 100644
index 0000000000..e0a783fb45
--- /dev/null
+++ b/activerecord/lib/active_record/type/value.rb
@@ -0,0 +1,94 @@
+module ActiveRecord
+ module Type
+ class Value # :nodoc:
+ attr_reader :precision, :scale, :limit
+
+      # Valid options are +precision+, +scale+, and +limit+. They are only
+      # used when dumping the schema.
+ def initialize(options = {})
+ options.assert_valid_keys(:precision, :scale, :limit)
+ @precision = options[:precision]
+ @scale = options[:scale]
+ @limit = options[:limit]
+ end
+
+      # The simplified type that this object represents. Returns a symbol such
+      # as +:string+ or +:integer+.
+ def type; end
+
+      # Type casts a value from the database into the appropriate Ruby type.
+      # Classes which do not need separate type casting behavior for database
+      # and user provided values should override +cast_value+ instead.
+ def type_cast_from_database(value)
+ type_cast(value)
+ end
+
+ # Type casts a value from user input (e.g. from a setter). This value may
+ # be a string from the form builder, or an already type cast value
+ # provided manually to a setter.
+ #
+ # Classes which do not need separate type casting behavior for database
+ # and user provided values should override +type_cast+ or +cast_value+
+ # instead.
+ def type_cast_from_user(value)
+ type_cast(value)
+ end
+
+      # Casts a value from the Ruby type to a type that the database knows how
+      # to understand. The returned value from this method should be a
+      # +String+, +Numeric+, +Date+, +Time+, +Symbol+, +true+, +false+, or
+      # +nil+.
+ def type_cast_for_database(value)
+ value
+ end
+
+      # Type casts a value for schema dumping. This method is not part of the
+      # public API, as we are hoping to remove it entirely.
+ def type_cast_for_schema(value) # :nodoc:
+ value.inspect
+ end
+
+ # These predicates are not documented, as I need to look further into
+ # their use, and see if they can be removed entirely.
+ def number? # :nodoc:
+ false
+ end
+
+ def binary? # :nodoc:
+ false
+ end
+
+ def klass # :nodoc:
+ end
+
+ # Determines whether a value has changed for dirty checking. +old_value+
+ # and +new_value+ will always be type-cast. Types should not need to
+ # override this method.
+ def changed?(old_value, new_value, _new_value_before_type_cast)
+ old_value != new_value
+ end
+
+ # Determines whether the mutable value has been modified since it was
+ # read. Returns +false+ by default. This method should not need to be
+      # overridden directly. Types which return a mutable value should include
+ # +Type::Mutable+, which will define this method.
+ def changed_in_place?(*)
+ false
+ end
+
+ private
+
+ def type_cast(value)
+ cast_value(value) unless value.nil?
+ end
+
+ # Convenience method for types which do not need separate type casting
+ # behavior for user and database inputs. Called by
+      # +type_cast_from_database+ and +type_cast_from_user+ for all values
+      # except +nil+.
+ def cast_value(value) # :doc:
+ value
+ end
+ end
+ end
+end
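
A minimal custom-type sketch built on the hooks above: when user and database casting behave the same way, only cast_value needs overriding. MoneyType is hypothetical and not part of Rails; String#to_d comes from bigdecimal/util, which Active Support loads:

    class MoneyType < ActiveRecord::Type::Value
      def type
        :money
      end

      private

      # Strip currency formatting before converting to BigDecimal.
      def cast_value(value)
        value.to_s.gsub(/[$,]/, '').to_d
      end
    end

    type = MoneyType.new
    type.type_cast_from_user("$1,200.50")   # => BigDecimal, both entry points funnel into cast_value
    type.type_cast_from_database("1200.50") # => BigDecimal
    type.type_cast_from_user(nil)           # => nil (nil never reaches cast_value)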
diff --git a/activerecord/lib/active_record/validations.rb b/activerecord/lib/active_record/validations.rb
new file mode 100644
index 0000000000..7f7d49cdb4
--- /dev/null
+++ b/activerecord/lib/active_record/validations.rb
@@ -0,0 +1,89 @@
+module ActiveRecord
+ # = Active Record RecordInvalid
+ #
+ # Raised by <tt>save!</tt> and <tt>create!</tt> when the record is invalid. Use the
+ # +record+ method to retrieve the record which did not validate.
+ #
+ # begin
+ # complex_operation_that_calls_save!_internally
+ # rescue ActiveRecord::RecordInvalid => invalid
+ # puts invalid.record.errors
+ # end
+ class RecordInvalid < ActiveRecordError
+ attr_reader :record # :nodoc:
+ def initialize(record) # :nodoc:
+ @record = record
+ errors = @record.errors.full_messages.join(", ")
+ super(I18n.t(:"#{@record.class.i18n_scope}.errors.messages.record_invalid", :errors => errors, :default => :"errors.messages.record_invalid"))
+ end
+ end
+
+ # = Active Record Validations
+ #
+  # Active Record includes the majority of its validations from <tt>ActiveModel::Validations</tt>,
+  # all of which accept the <tt>:on</tt> argument to define the context in which the
+  # validations are active. Active Record will always supply either the
+  # <tt>:create</tt> or <tt>:update</tt> context, depending on whether the model is a
+  # <tt>new_record?</tt>.
+ module Validations
+ extend ActiveSupport::Concern
+ include ActiveModel::Validations
+
+ # The validation process on save can be skipped by passing <tt>validate: false</tt>.
+ # The regular Base#save method is replaced with this when the validations
+ # module is mixed in, which it is by default.
+ def save(options={})
+ perform_validations(options) ? super : false
+ end
+
+ # Attempts to save the record just like Base#save but will raise a +RecordInvalid+
+ # exception instead of returning +false+ if the record is not valid.
+ def save!(options={})
+ perform_validations(options) ? super : raise_record_invalid
+ end
+
+ # Runs all the validations within the specified context. Returns +true+ if
+ # no errors are found, +false+ otherwise.
+ #
+ # Aliased as validate.
+ #
+    # If the argument is +nil+ (the default) or +false+, the context is set to <tt>:create</tt> if
+    # <tt>new_record?</tt> is +true+, and to <tt>:update</tt> if it is not.
+ #
+ # Validations with no <tt>:on</tt> option will run no matter the context. Validations with
+ # some <tt>:on</tt> option will only run in the specified context.
+ def valid?(context = nil)
+ context ||= (new_record? ? :create : :update)
+ output = super(context)
+ errors.empty? && output
+ end
+
+ alias_method :validate, :valid?
+
+ # Runs all the validations within the specified context. Returns +true+ if
+ # no errors are found, raises +RecordInvalid+ otherwise.
+ #
+    # If the argument is +nil+ (the default) or +false+, the context is set to <tt>:create</tt> if
+    # <tt>new_record?</tt> is +true+, and to <tt>:update</tt> if it is not.
+ #
+ # Validations with no <tt>:on</tt> option will run no matter the context. Validations with
+ # some <tt>:on</tt> option will only run in the specified context.
+ def validate!(context = nil)
+ valid?(context) || raise_record_invalid
+ end
+
+ protected
+
+ def raise_record_invalid
+ raise(RecordInvalid.new(self))
+ end
+
+ def perform_validations(options={}) # :nodoc:
+ options[:validate] == false || valid?(options[:context])
+ end
+ end
+end
+
+require "active_record/validations/associated"
+require "active_record/validations/uniqueness"
+require "active_record/validations/presence"
diff --git a/activerecord/lib/active_record/validations/associated.rb b/activerecord/lib/active_record/validations/associated.rb
new file mode 100644
index 0000000000..b4785d3ba4
--- /dev/null
+++ b/activerecord/lib/active_record/validations/associated.rb
@@ -0,0 +1,49 @@
+module ActiveRecord
+ module Validations
+ class AssociatedValidator < ActiveModel::EachValidator #:nodoc:
+ def validate_each(record, attribute, value)
+ if Array.wrap(value).reject {|r| r.marked_for_destruction? || r.valid?}.any?
+ record.errors.add(attribute, :invalid, options.merge(:value => value))
+ end
+ end
+ end
+
+ module ClassMethods
+ # Validates whether the associated object or objects are all valid.
+ # Works with any kind of association.
+ #
+ # class Book < ActiveRecord::Base
+ # has_many :pages
+ # belongs_to :library
+ #
+ # validates_associated :pages, :library
+ # end
+ #
+ # WARNING: This validation must not be used on both ends of an association.
+ # Doing so will lead to a circular dependency and cause infinite recursion.
+ #
+ # NOTE: This validation will not fail if the association hasn't been
+ # assigned. If you want to ensure that the association is both present and
+ # guaranteed to be valid, you also need to use +validates_presence_of+.
+ #
+ # Configuration options:
+ #
+ # * <tt>:message</tt> - A custom error message (default is: "is invalid").
+ # * <tt>:on</tt> - Specifies when this validation is active. Runs in all
+ # validation contexts by default (+nil+), other options are <tt>:create</tt>
+ # and <tt>:update</tt>.
+ # * <tt>:if</tt> - Specifies a method, proc or string to call to determine
+ # if the validation should occur (e.g. <tt>if: :allow_validation</tt>,
+ # or <tt>if: Proc.new { |user| user.signup_step > 2 }</tt>). The method,
+ # proc or string should return or evaluate to a +true+ or +false+ value.
+ # * <tt>:unless</tt> - Specifies a method, proc or string to call to
+ # determine if the validation should not occur (e.g. <tt>unless: :skip_validation</tt>,
+ # or <tt>unless: Proc.new { |user| user.signup_step <= 2 }</tt>). The
+ # method, proc or string should return or evaluate to a +true+ or +false+
+ # value.
+ def validates_associated(*attr_names)
+ validates_with AssociatedValidator, _merge_attributes(attr_names)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/validations/presence.rb b/activerecord/lib/active_record/validations/presence.rb
new file mode 100644
index 0000000000..e586744818
--- /dev/null
+++ b/activerecord/lib/active_record/validations/presence.rb
@@ -0,0 +1,65 @@
+module ActiveRecord
+ module Validations
+ class PresenceValidator < ActiveModel::Validations::PresenceValidator # :nodoc:
+ def validate(record)
+ super
+ attributes.each do |attribute|
+ next unless record.class._reflect_on_association(attribute)
+ associated_records = Array.wrap(record.send(attribute))
+
+ # Superclass validates presence. Ensure present records aren't about to be destroyed.
+ if associated_records.present? && associated_records.all? { |r| r.marked_for_destruction? }
+ record.errors.add(attribute, :blank, options)
+ end
+ end
+ end
+ end
+
+ module ClassMethods
+ # Validates that the specified attributes are not blank (as defined by
+ # Object#blank?), and, if the attribute is an association, that the
+ # associated object is not marked for destruction. Happens by default
+ # on save.
+ #
+ # class Person < ActiveRecord::Base
+ # has_one :face
+ # validates_presence_of :face
+ # end
+ #
+      # The face attribute must be present in the object, and it cannot be
+      # blank or marked for destruction.
+ #
+ # If you want to validate the presence of a boolean field (where the real values
+ # are true and false), you will want to use
+ # <tt>validates_inclusion_of :field_name, in: [true, false]</tt>.
+ #
+ # This is due to the way Object#blank? handles boolean values:
+ # <tt>false.blank? # => true</tt>.
+ #
+ # This validator defers to the ActiveModel validation for presence, adding the
+ # check to see that an associated object is not marked for destruction. This
+ # prevents the parent object from validating successfully and saving, which then
+ # deletes the associated object, thus putting the parent object into an invalid
+ # state.
+ #
+ # Configuration options:
+ # * <tt>:message</tt> - A custom error message (default is: "can't be blank").
+ # * <tt>:on</tt> - Specifies when this validation is active. Runs in all
+ # validation contexts by default (+nil+), other options are <tt>:create</tt>
+ # and <tt>:update</tt>.
+ # * <tt>:if</tt> - Specifies a method, proc or string to call to determine if
+ # the validation should occur (e.g. <tt>if: :allow_validation</tt>, or
+ # <tt>if: Proc.new { |user| user.signup_step > 2 }</tt>). The method, proc
+ # or string should return or evaluate to a +true+ or +false+ value.
+ # * <tt>:unless</tt> - Specifies a method, proc or string to call to determine
+ # if the validation should not occur (e.g. <tt>unless: :skip_validation</tt>,
+ # or <tt>unless: Proc.new { |user| user.signup_step <= 2 }</tt>). The method,
+ # proc or string should return or evaluate to a +true+ or +false+ value.
+ # * <tt>:strict</tt> - Specifies whether validation should be strict.
+ # See <tt>ActiveModel::Validation#validates!</tt> for more information.
+ def validates_presence_of(*attr_names)
+ validates_with PresenceValidator, _merge_attributes(attr_names)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/active_record/validations/uniqueness.rb b/activerecord/lib/active_record/validations/uniqueness.rb
new file mode 100644
index 0000000000..2dba4c7b94
--- /dev/null
+++ b/activerecord/lib/active_record/validations/uniqueness.rb
@@ -0,0 +1,229 @@
+module ActiveRecord
+ module Validations
+ class UniquenessValidator < ActiveModel::EachValidator # :nodoc:
+ def initialize(options)
+ if options[:conditions] && !options[:conditions].respond_to?(:call)
+ raise ArgumentError, "#{options[:conditions]} was passed as :conditions but is not callable. " \
+ "Pass a callable instead: `conditions: -> { where(approved: true) }`"
+ end
+ super({ case_sensitive: true }.merge!(options))
+ @klass = options[:class]
+ end
+
+ def validate_each(record, attribute, value)
+ finder_class = find_finder_class_for(record)
+ table = finder_class.arel_table
+ value = map_enum_attribute(finder_class, attribute, value)
+
+ relation = build_relation(finder_class, table, attribute, value)
+ relation = relation.and(table[finder_class.primary_key.to_sym].not_eq(record.id)) if record.persisted?
+ relation = scope_relation(record, table, relation)
+ relation = finder_class.unscoped.where(relation)
+ relation = relation.merge(options[:conditions]) if options[:conditions]
+
+ if relation.exists?
+ error_options = options.except(:case_sensitive, :scope, :conditions)
+ error_options[:value] = value
+
+ record.errors.add(attribute, :taken, error_options)
+ end
+ end
+
+ protected
+        # The check for an existing value should be run from a class that
+        # isn't abstract. This means working down from the current class
+        # (self) to the first non-abstract class. Since classes don't know
+        # their subclasses, we have to build the hierarchy between self and
+        # the record's class.
+ def find_finder_class_for(record) #:nodoc:
+ class_hierarchy = [record.class]
+
+ while class_hierarchy.first != @klass
+ class_hierarchy.unshift(class_hierarchy.first.superclass)
+ end
+
+ class_hierarchy.detect { |klass| !klass.abstract_class? }
+ end
+
+ def build_relation(klass, table, attribute, value) #:nodoc:
+ if reflection = klass._reflect_on_association(attribute)
+ attribute = reflection.foreign_key
+ value = value.attributes[reflection.klass.primary_key] unless value.nil?
+ end
+
+ attribute_name = attribute.to_s
+
+ # the attribute may be an aliased attribute
+ if klass.attribute_aliases[attribute_name]
+ attribute = klass.attribute_aliases[attribute_name]
+ attribute_name = attribute.to_s
+ end
+
+ column = klass.columns_hash[attribute_name]
+ value = klass.connection.type_cast(value, column)
+ if value.is_a?(String) && column.limit
+ value = value.to_s[0, column.limit]
+ end
+
+ if !options[:case_sensitive] && value.is_a?(String)
+          # uses the SQL LOWER function before comparison, unless a
+          # case-insensitive collation is detected
+ klass.connection.case_insensitive_comparison(table, attribute, column, value)
+ else
+ klass.connection.case_sensitive_comparison(table, attribute, column, value)
+ end
+ end
+
+ def scope_relation(record, table, relation)
+ Array(options[:scope]).each do |scope_item|
+ if reflection = record.class._reflect_on_association(scope_item)
+ scope_value = record.send(reflection.foreign_key)
+ scope_item = reflection.foreign_key
+ else
+ scope_value = record.read_attribute(scope_item)
+ end
+ relation = relation.and(table[scope_item].eq(scope_value))
+ end
+
+ relation
+ end
+
+ def map_enum_attribute(klass, attribute, value)
+ mapping = klass.defined_enums[attribute.to_s]
+ value = mapping[value] if value && mapping
+ value
+ end
+ end
+
+ module ClassMethods
+      # Validates whether the values of the specified attributes are unique
+      # across the system. Useful for making sure that only one user
+      # can be named "davidhh".
+ #
+ # class Person < ActiveRecord::Base
+ # validates_uniqueness_of :user_name
+ # end
+ #
+      # It can also validate whether the values of the specified attributes are
+      # unique based on a <tt>:scope</tt> parameter:
+ #
+ # class Person < ActiveRecord::Base
+ # validates_uniqueness_of :user_name, scope: :account_id
+ # end
+ #
+ # Or even multiple scope parameters. For example, making sure that a
+ # teacher can only be on the schedule once per semester for a particular
+ # class.
+ #
+ # class TeacherSchedule < ActiveRecord::Base
+ # validates_uniqueness_of :teacher_id, scope: [:semester_id, :class_id]
+ # end
+ #
+ # It is also possible to limit the uniqueness constraint to a set of
+ # records matching certain conditions. In this example archived articles
+ # are not being taken into consideration when validating uniqueness
+ # of the title attribute:
+ #
+ # class Article < ActiveRecord::Base
+ # validates_uniqueness_of :title, conditions: -> { where.not(status: 'archived') }
+ # end
+ #
+ # When the record is created, a check is performed to make sure that no
+ # record exists in the database with the given value for the specified
+ # attribute (that maps to a column). When the record is updated,
+ # the same check is made but disregarding the record itself.
+ #
+ # Configuration options:
+ #
+ # * <tt>:message</tt> - Specifies a custom error message (default is:
+ # "has already been taken").
+ # * <tt>:scope</tt> - One or more columns by which to limit the scope of
+ # the uniqueness constraint.
+ # * <tt>:conditions</tt> - Specify the conditions to be included as a
+ # <tt>WHERE</tt> SQL fragment to limit the uniqueness constraint lookup
+ # (e.g. <tt>conditions: -> { where(status: 'active') }</tt>).
+ # * <tt>:case_sensitive</tt> - Looks for an exact match. Ignored by
+ # non-text columns (+true+ by default).
+ # * <tt>:allow_nil</tt> - If set to +true+, skips this validation if the
+ # attribute is +nil+ (default is +false+).
+ # * <tt>:allow_blank</tt> - If set to +true+, skips this validation if the
+ # attribute is blank (default is +false+).
+ # * <tt>:if</tt> - Specifies a method, proc or string to call to determine
+ # if the validation should occur (e.g. <tt>if: :allow_validation</tt>,
+ # or <tt>if: Proc.new { |user| user.signup_step > 2 }</tt>). The method,
+ # proc or string should return or evaluate to a +true+ or +false+ value.
+ # * <tt>:unless</tt> - Specifies a method, proc or string to call to
+ # determine if the validation should not occur (e.g. <tt>unless: :skip_validation</tt>,
+ # or <tt>unless: Proc.new { |user| user.signup_step <= 2 }</tt>). The
+ # method, proc or string should return or evaluate to a +true+ or +false+
+ # value.
+ #
+ # === Concurrency and integrity
+ #
+ # Using this validation method in conjunction with ActiveRecord::Base#save
+ # does not guarantee the absence of duplicate record insertions, because
+ # uniqueness checks on the application level are inherently prone to race
+ # conditions. For example, suppose that two users try to post a Comment at
+      # the same time, and a Comment's title must be unique. At the database level,
+ # the actions performed by these users could be interleaved in the following manner:
+ #
+ # User 1 | User 2
+ # ------------------------------------+--------------------------------------
+ # # User 1 checks whether there's |
+ # # already a comment with the title |
+ # # 'My Post'. This is not the case. |
+ # SELECT * FROM comments |
+ # WHERE title = 'My Post' |
+ # |
+ # | # User 2 does the same thing and also
+ # | # infers that their title is unique.
+ # | SELECT * FROM comments
+ # | WHERE title = 'My Post'
+ # |
+ # # User 1 inserts their comment. |
+ # INSERT INTO comments |
+ # (title, content) VALUES |
+ # ('My Post', 'hi!') |
+ # |
+ # | # User 2 does the same thing.
+ # | INSERT INTO comments
+ # | (title, content) VALUES
+ # | ('My Post', 'hello!')
+ # |
+ # | # ^^^^^^
+ # | # Boom! We now have a duplicate
+ # | # title!
+ #
+ # This could even happen if you use transactions with the 'serializable'
+ # isolation level. The best way to work around this problem is to add a unique
+ # index to the database table using
+ # ActiveRecord::ConnectionAdapters::SchemaStatements#add_index. In the
+ # rare case that a race condition occurs, the database will guarantee
+ # the field's uniqueness.
+ #
+ # When the database catches such a duplicate insertion,
+ # ActiveRecord::Base#save will raise an ActiveRecord::StatementInvalid
+ # exception. You can either choose to let this error propagate (which
+ # will result in the default Rails exception page being shown), or you
+ # can catch it and restart the transaction (e.g. by telling the user
+ # that the title already exists, and asking them to re-enter the title).
+ # This technique is also known as
+ # {optimistic concurrency control}[http://en.wikipedia.org/wiki/Optimistic_concurrency_control].
+ #
+ # The bundled ActiveRecord::ConnectionAdapters distinguish unique index
+ # constraint errors from other types of database errors by throwing an
+ # ActiveRecord::RecordNotUnique exception. For other adapters you will
+ # have to parse the (database-specific) exception message to detect such
+ # a case.
+ #
+ # The following bundled adapters throw the ActiveRecord::RecordNotUnique exception:
+ #
+ # * ActiveRecord::ConnectionAdapters::MysqlAdapter.
+ # * ActiveRecord::ConnectionAdapters::Mysql2Adapter.
+ # * ActiveRecord::ConnectionAdapters::SQLite3Adapter.
+ # * ActiveRecord::ConnectionAdapters::PostgreSQLAdapter.
+ def validates_uniqueness_of(*attr_names)
+ validates_with UniquenessValidator, _merge_attributes(attr_names)
+ end
+ end
+ end
+end
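
A sketch of the recovery pattern recommended above, with a unique index backing the validation (model, column, and variable names are hypothetical):

    class Comment < ActiveRecord::Base
      validates_uniqueness_of :title
    end

    # in a migration: add_index :comments, :title, unique: true

    begin
      Comment.create!(title: title, content: body)
    rescue ActiveRecord::RecordNotUnique
      # The unique index caught a race that the application-level check missed;
      # report the conflict to the user or retry with a different title.
    end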
diff --git a/activerecord/lib/active_record/version.rb b/activerecord/lib/active_record/version.rb
new file mode 100644
index 0000000000..cf76a13b44
--- /dev/null
+++ b/activerecord/lib/active_record/version.rb
@@ -0,0 +1,8 @@
+require_relative 'gem_version'
+
+module ActiveRecord
+ # Returns the version of the currently loaded ActiveRecord as a <tt>Gem::Version</tt>
+ def self.version
+ gem_version
+ end
+end
diff --git a/activerecord/lib/rails/generators/active_record.rb b/activerecord/lib/rails/generators/active_record.rb
new file mode 100644
index 0000000000..dc29213235
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record.rb
@@ -0,0 +1,17 @@
+require 'rails/generators/named_base'
+require 'rails/generators/active_model'
+require 'rails/generators/active_record/migration'
+require 'active_record'
+
+module ActiveRecord
+ module Generators # :nodoc:
+ class Base < Rails::Generators::NamedBase # :nodoc:
+ include ActiveRecord::Generators::Migration
+
+ # Set the current directory as base for the inherited generators.
+ def self.base_root
+ File.dirname(__FILE__)
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/rails/generators/active_record/migration.rb b/activerecord/lib/rails/generators/active_record/migration.rb
new file mode 100644
index 0000000000..b7418cf42f
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/migration.rb
@@ -0,0 +1,18 @@
+require 'rails/generators/migration'
+
+module ActiveRecord
+ module Generators # :nodoc:
+ module Migration
+ extend ActiveSupport::Concern
+ include Rails::Generators::Migration
+
+ module ClassMethods
+ # Implement the required interface for Rails::Generators::Migration.
+ def next_migration_number(dirname)
+ next_migration_number = current_migration_number(dirname) + 1
+ ActiveRecord::Migration.next_migration_number(next_migration_number)
+ end
+ end
+ end
+ end
+end
diff --git a/activerecord/lib/rails/generators/active_record/migration/migration_generator.rb b/activerecord/lib/rails/generators/active_record/migration/migration_generator.rb
new file mode 100644
index 0000000000..d3c853cfea
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/migration/migration_generator.rb
@@ -0,0 +1,70 @@
+require 'rails/generators/active_record'
+
+module ActiveRecord
+ module Generators # :nodoc:
+ class MigrationGenerator < Base # :nodoc:
+ argument :attributes, :type => :array, :default => [], :banner => "field[:type][:index] field[:type][:index]"
+
+ def create_migration_file
+ set_local_assigns!
+ validate_file_name!
+ migration_template @migration_template, "db/migrate/#{file_name}.rb"
+ end
+
+ protected
+ attr_reader :migration_action, :join_tables
+
+      # Sets the default migration template used to generate the migration.
+      # Depending on the arguments passed on the command line, the migration
+      # template and the table name instance variables are set up.
+
+ def set_local_assigns!
+ @migration_template = "migration.rb"
+ case file_name
+ when /^(add|remove)_.*_(?:to|from)_(.*)/
+ @migration_action = $1
+ @table_name = normalize_table_name($2)
+ when /join_table/
+ if attributes.length == 2
+ @migration_action = 'join'
+ @join_tables = pluralize_table_names? ? attributes.map(&:plural_name) : attributes.map(&:singular_name)
+
+ set_index_names
+ end
+ when /^create_(.+)/
+ @table_name = normalize_table_name($1)
+ @migration_template = "create_table_migration.rb"
+ end
+ end
+
+ def set_index_names
+ attributes.each_with_index do |attr, i|
+ attr.index_name = [attr, attributes[i - 1]].map{ |a| index_name_for(a) }
+ end
+ end
+
+ def index_name_for(attribute)
+ if attribute.foreign_key?
+ attribute.name
+ else
+ attribute.name.singularize.foreign_key
+ end.to_sym
+ end
+
+ private
+ def attributes_with_index
+ attributes.select { |a| !a.reference? && a.has_index? }
+ end
+
+ def validate_file_name!
+ unless file_name =~ /^[_a-z0-9]+$/
+ raise IllegalMigrationNameError.new(file_name)
+ end
+ end
+
+ def normalize_table_name(_table_name)
+ pluralize_table_names? ? _table_name.pluralize : _table_name.singularize
+ end
+ end
+ end
+end
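
A rough illustration of the file_name patterns matched in set_local_assigns! above (the generator names are invented for the example):

    "add_part_number_to_products" =~ /^(add|remove)_.*_(?:to|from)_(.*)/
    [$1, $2]  # => ["add", "products"]     -- stays on migration.rb, emits add_column calls
    "create_products" =~ /^create_(.+)/
    $1        # => "products"              -- switches to create_table_migration.rb
    "create_join_table_customers_products" # matches /join_table/; with exactly two
                                            # attributes the 'join' branch builds a
                                            # create_join_table migration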
diff --git a/activerecord/lib/rails/generators/active_record/migration/templates/create_table_migration.rb b/activerecord/lib/rails/generators/active_record/migration/templates/create_table_migration.rb
new file mode 100644
index 0000000000..fd94a2d038
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/migration/templates/create_table_migration.rb
@@ -0,0 +1,19 @@
+class <%= migration_class_name %> < ActiveRecord::Migration
+ def change
+ create_table :<%= table_name %> do |t|
+<% attributes.each do |attribute| -%>
+<% if attribute.password_digest? -%>
+ t.string :password_digest<%= attribute.inject_options %>
+<% else -%>
+ t.<%= attribute.type %> :<%= attribute.name %><%= attribute.inject_options %>
+<% end -%>
+<% end -%>
+<% if options[:timestamps] %>
+ t.timestamps
+<% end -%>
+ end
+<% attributes_with_index.each do |attribute| -%>
+ add_index :<%= table_name %>, :<%= attribute.index_name %><%= attribute.inject_index_options %>
+<% end -%>
+ end
+end
diff --git a/activerecord/lib/rails/generators/active_record/migration/templates/migration.rb b/activerecord/lib/rails/generators/active_record/migration/templates/migration.rb
new file mode 100644
index 0000000000..ae9c74fd05
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/migration/templates/migration.rb
@@ -0,0 +1,39 @@
+class <%= migration_class_name %> < ActiveRecord::Migration
+<%- if migration_action == 'add' -%>
+ def change
+<% attributes.each do |attribute| -%>
+ <%- if attribute.reference? -%>
+ add_reference :<%= table_name %>, :<%= attribute.name %><%= attribute.inject_options %>
+ <%- else -%>
+ add_column :<%= table_name %>, :<%= attribute.name %>, :<%= attribute.type %><%= attribute.inject_options %>
+ <%- if attribute.has_index? -%>
+ add_index :<%= table_name %>, :<%= attribute.index_name %><%= attribute.inject_index_options %>
+ <%- end -%>
+ <%- end -%>
+<%- end -%>
+ end
+<%- elsif migration_action == 'join' -%>
+ def change
+ create_join_table :<%= join_tables.first %>, :<%= join_tables.second %> do |t|
+ <%- attributes.each do |attribute| -%>
+ <%= '# ' unless attribute.has_index? -%>t.index <%= attribute.index_name %><%= attribute.inject_index_options %>
+ <%- end -%>
+ end
+ end
+<%- else -%>
+ def change
+<% attributes.each do |attribute| -%>
+<%- if migration_action -%>
+ <%- if attribute.reference? -%>
+ remove_reference :<%= table_name %>, :<%= attribute.name %><%= attribute.inject_options %>
+ <%- else -%>
+ <%- if attribute.has_index? -%>
+ remove_index :<%= table_name %>, :<%= attribute.index_name %><%= attribute.inject_index_options %>
+ <%- end -%>
+ remove_column :<%= table_name %>, :<%= attribute.name %>, :<%= attribute.type %><%= attribute.inject_options %>
+ <%- end -%>
+<%- end -%>
+<%- end -%>
+ end
+<%- end -%>
+end
diff --git a/activerecord/lib/rails/generators/active_record/model/model_generator.rb b/activerecord/lib/rails/generators/active_record/model/model_generator.rb
new file mode 100644
index 0000000000..7e8d68ce69
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/model/model_generator.rb
@@ -0,0 +1,52 @@
+require 'rails/generators/active_record'
+
+module ActiveRecord
+ module Generators # :nodoc:
+ class ModelGenerator < Base # :nodoc:
+ argument :attributes, :type => :array, :default => [], :banner => "field[:type][:index] field[:type][:index]"
+
+ check_class_collision
+
+ class_option :migration, :type => :boolean
+ class_option :timestamps, :type => :boolean
+ class_option :parent, :type => :string, :desc => "The parent class for the generated model"
+ class_option :indexes, :type => :boolean, :default => true, :desc => "Add indexes for references and belongs_to columns"
+
+
+      # Creates the migration file for the model.
+
+ def create_migration_file
+ return unless options[:migration] && options[:parent].nil?
+ attributes.each { |a| a.attr_options.delete(:index) if a.reference? && !a.has_index? } if options[:indexes] == false
+ migration_template "../../migration/templates/create_table_migration.rb", "db/migrate/create_#{table_name}.rb"
+ end
+
+ def create_model_file
+ template 'model.rb', File.join('app/models', class_path, "#{file_name}.rb")
+ end
+
+ def create_module_file
+ return if regular_class_path.empty?
+ template 'module.rb', File.join('app/models', "#{class_path.join('/')}.rb") if behavior == :invoke
+ end
+
+ def attributes_with_index
+ attributes.select { |a| !a.reference? && a.has_index? }
+ end
+
+ def accessible_attributes
+ attributes.reject(&:reference?)
+ end
+
+ hook_for :test_framework
+
+ protected
+
+      # Used by the model template to determine the parent class of the model
+ def parent_class_name
+ options[:parent] || "ActiveRecord::Base"
+ end
+
+ end
+ end
+end
diff --git a/activerecord/lib/rails/generators/active_record/model/templates/model.rb b/activerecord/lib/rails/generators/active_record/model/templates/model.rb
new file mode 100644
index 0000000000..808598699b
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/model/templates/model.rb
@@ -0,0 +1,10 @@
+<% module_namespacing do -%>
+class <%= class_name %> < <%= parent_class_name.classify %>
+<% attributes.select(&:reference?).each do |attribute| -%>
+ belongs_to :<%= attribute.name %><%= ', polymorphic: true' if attribute.polymorphic? %>
+<% end -%>
+<% if attributes.any?(&:password_digest?) -%>
+ has_secure_password
+<% end -%>
+end
+<% end -%>
diff --git a/activerecord/lib/rails/generators/active_record/model/templates/module.rb b/activerecord/lib/rails/generators/active_record/model/templates/module.rb
new file mode 100644
index 0000000000..a3bf1c37b6
--- /dev/null
+++ b/activerecord/lib/rails/generators/active_record/model/templates/module.rb
@@ -0,0 +1,7 @@
+<% module_namespacing do -%>
+module <%= class_path.map(&:camelize).join('::') %>
+ def self.table_name_prefix
+ '<%= namespaced? ? namespaced_class_path.join('_') : class_path.join('_') %>_'
+ end
+end
+<% end -%>